# File: nimbus_splash/cli.py (from Nimbus_Splash-1.1.0)
import argparse
from . import job
import subprocess
import os
import re
import xyz_py as xyzp
from . import utils as ut
def gen_job_func(uargs):
'''
Wrapper for CLI gen_job call
Parameters
----------
uargs : argparser object
User arguments
Returns
-------
None
'''
# Currently available nodes
supported_nodes = [
'spot-fsv2-2',
'spot-fsv2-4',
'spot-fsv2-8',
'spot-fsv2-16',
'spot-fsv2-32',
'paygo-fsv2-2',
'paygo-fsv2-4',
'paygo-fsv2-8',
'paygo-fsv2-16',
'paygo-fsv2-32',
'paygo-hb-60',
'paygo-hbv2-120',
'paygo-hbv3-120',
'paygo-hc-44',
# 'paygo-ncv3-12',
# 'paygo-ncv3-24',
# 'paygo-ncv3-6',
# 'paygo-ncv3r-24',
# 'paygo-ndv2-40',
'spot-hb-60',
'spot-hbv2-120',
'spot-hbv3-120',
'spot-hc-44',
# 'spot-ncv3-12',
# 'spot-ncv3-24',
# 'spot-ncv3-6',
# 'spot-ncv3r-24',
# 'spot-ndv2-40',
# 'vis-ncv3-12',
# 'vis-ncv3-24',
# 'vis-ncv3-6',
# 'vis-ndv2-40'
]
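# The last dash-separated field of each node name encodes its core count,
# e.g. 'spot-fsv2-16' -> 16; the comprehension below extracts it.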
cores_per_node = {
node: int(node.split('-')[-1])
for node in supported_nodes
}
total_node_memory = {
'spot-fsv2-2': 3500 * 2,
'spot-fsv2-4': 3500 * 4,
'spot-fsv2-8': 3500 * 8,
'spot-fsv2-16': 3500 * 16,
'spot-fsv2-32': 3500 * 32,
'paygo-fsv2-2': 3500 * 2,
'paygo-fsv2-4': 3500 * 4,
'paygo-fsv2-8': 3500 * 8,
'paygo-fsv2-16': 3500 * 16,
'paygo-fsv2-32': 3500 * 32,
'paygo-hbv2-120': 456000,
'paygo-hbv3-120': 448000,
'paygo-hc-44': 352000,
'paygo-hb-60': 228000,
# 'paygo-ncv3-6': 112000,
# 'paygo-ncv3-12': 224000,
# 'paygo-ncv3-24': 448000,
# 'paygo-ncv3r-24': 18500,
# 'paygo-ndv2-40': 672000,
'spot-hbv2-120': 456000,
'spot-hbv3-120': 448000,
'spot-hb-60': 228000,
'spot-hc-44': 352000,
# 'spot-ncv3-6': 112000,
# 'spot-ncv3-12': 224000,
# 'spot-ncv3-24': 448000,
# 'spot-ncv3r-24': 18500,
# 'spot-ndv2-40': 672000,
# 'vis-ncv3-6': 112000,
# 'vis-ncv3-12': 224000,
# 'vis-ncv3-24': 448000,
# 'vis-ndv2-40': 672000
}
if uargs.node_type in supported_nodes:
node = uargs.node_type
else:
ut.red_exit("Node type unsupported")
# Write job file
for file in uargs.input_files:
if not os.path.isabs(file):
file = os.path.join(os.getcwd(), file)
# Check input exists
if not os.path.exists(file):
ut.red_exit("Cannot locate {}".format(file))
# Check contents of input file and find any file dependencies
dependencies = job.parse_input_contents(
file,
total_node_memory[node],
cores_per_node[node]
)
if len(uargs.extra_dependencies):
dependencies['extra'] = uargs.extra_dependencies
# Look for old gbw in results directory, if it exists
results_name = ut.gen_results_name(file)
if 'gbw' not in dependencies and os.path.exists(results_name) and not uargs.no_guess: # noqa
old_gbw = '{}.gbw'.format(
os.path.split(os.path.splitext(file)[0])[1]
)
# If a file is located, send it to compute node for orca
# to use as guess MO
if os.path.exists(os.path.join(results_name, old_gbw)):
dependencies['gbw'] = old_gbw
ut.cprint(
f'Using {old_gbw} in {results_name} as input MO in {file}',
'black_yellowbg'
)
# Check dependencies exist
dependency_paths = job.locate_dependencies(dependencies, file)
if uargs.verbose:
print(dependencies)
print(dependency_paths)
job_file = job.write_file(
file, node, uargs.time, verbose=True,
dependency_paths=ut.flatten_recursive(
list(dependency_paths.values())
)
)
# Submit to queue
if not uargs.no_start:
subprocess.call("sbatch {}".format(job_file), shell=True)
return
def rst_opt_func(uargs, job_args):
'''
Wrapper for command line call to rst_opt
Restarts optimisation calculation when only the output file is available
'''
path, raw_file = os.path.split(uargs.output_file)
head = os.path.splitext(raw_file)[0]
# Extract coordinates from output file
labels, coords, opt_yn = ut.get_opt_coords(uargs.output_file)
# Extract input information from output file
input_info = ut.get_input_section(uargs.output_file)
# Create rst folder
new_folder = os.path.join(path, 'rst')
os.mkdir(new_folder)
# Create -rst xyz file
new_xyz = os.path.join(new_folder, "{}-rst.xyz".format(head))
xyzp.save_xyz(new_xyz, labels, coords, verbose=False)
# Edit xyz file name in input_info
input_info = re.sub(
r"[\-a-z0-9A-Z_]+\.xyz",
"{}-rst.xyz".format(head),
input_info
)
# If optimised, delete opt keyword from input
if opt_yn:
input_info = re.sub(
r"\bopt\b(?!-)(?!\.)",
"",
input_info
)
ut.cprint(
'Optimisation complete, restarting only for frequencies',
'blue'
)
# Create -rst input file
new_input = os.path.join(new_folder, "{}-rst.inp".format(head))
with open(new_input, 'w') as f:
f.write(input_info)
# Run gen_job on new calculation
read_args(
[
"gen_job",
new_input,
*job_args
]
)
return
def read_args(arg_list=None):
'''
Reader for command line arguments. Uses subReaders for individual programs
Parameters
----------
args : argparser object
command line arguments
Returns
-------
None
'''
description = '''
A package for working with Orca on Bath's Cloud HPC service
'''
epilog = '''
To display options for a specific program, use splash \
PROGRAMFILETYPE -h
'''
parser = argparse.ArgumentParser(
description=description,
epilog=epilog,
formatter_class=argparse.RawDescriptionHelpFormatter
)
subparsers = parser.add_subparsers(dest='prog')
gen_job = subparsers.add_parser(
'gen_job',
description='Generate Nimbus SLURM submission script'
)
gen_job.set_defaults(func=gen_job_func)
gen_job.add_argument(
'input_files',
nargs='+',
type=str,
help='Orca input file name(s)'
)
default_compute = ut.get_envvar('DEF_COMP_INST')
if not len(default_compute):
default_compute = 'spot-fsv2-16'
gen_job.add_argument(
'-nt',
'--node_type',
default=default_compute,
type=str,
help=f'Node to run on, default is {default_compute}'
)
gen_job.add_argument(
'-t',
'--time',
type=str,
default='24:00:00',
help='Time for job, formatted as HH:MM:SS, default 24:00:00'
)
gen_job.add_argument(
'-ns',
'--no_start',
action='store_true',
help='If specified, jobs are not submitted to nimbus queue'
)
gen_job.add_argument(
'-v',
'--verbose',
action='store_true',
help='If specified, debug information is printed to screen'
)
gen_job.add_argument(
'-ng',
'--no_guess',
action='store_true',
help=(
'If specified, gbw files found in results directory will not be '
'used automatically'
)
)
gen_job.add_argument(
'-ed',
'--extra_dependencies',
nargs='+',
default='',
type=str,
help=(
'Extra dependencies (files) which will be copied to the compute'
' node. Relative path to the file(s) must be given.'
)
)
rst_opt = subparsers.add_parser(
'rst_opt',
description=(
'Restart optimisation from output file alone.\n'
'Use only if you are missing a previous _results directory'
)
)
rst_opt.set_defaults(func=rst_opt_func)
rst_opt.add_argument(
'output_file',
type=str,
help='Orca output file name(s) (must contain coordinates from optimisation)' # noqa
)
# If argument list is none, then call function func
# which is assigned to help function
parser.set_defaults(func=lambda user_args: parser.print_help())
# read sub-parser
_args, _ = parser.parse_known_args(arg_list)
# select parsing option based on sub-parser
if _args.prog in ['rst_opt']:
args, job_args = parser.parse_known_args(arg_list)
args.func(args, job_args)
else:
args = parser.parse_args(arg_list)
args.func(args)
return args
def interface():
read_args()
return
# File: hybrid/style.py (from HybridUI-0.0.1)
import json
from dataclasses import dataclass
from typing import Dict, List, TypedDict
class NStyle(TypedDict):
name: str
value: str
styles: List[NStyle]
@dataclass
class Style:
alignContent: str = None
alignItems: str = None
alignSelf: str = None
all: str = None
animation: str = None
animationDelay: str = None
animationDirection: str = None
animationDuration: str = None
animationFillMode: str = None
animationIterationCount: str = None
animationName: str = None
animationPlayState: str = None
animationTimingFunction: str = None
backfaceVisibility: str = None
background: str = None
backgroundAttachment: str = None
backgroundBlendMode: str = None
backgroundClip: str = None
backgroundColor: str = None
backgroundImage: str = None
backgroundOrigin: str = None
backgroundPosition: str = None
backgroundRepeat: str = None
backgroundSize: str = None
blockSize: str = None
border: str = None
borderBlock: str = None
borderBlockColor: str = None
borderBlockEnd: str = None
borderBlockEndColor: str = None
borderBlockEndStyle: str = None
borderBlockEndWidth: str = None
borderBlockStart: str = None
borderBlockStartColor: str = None
borderBlockStartStyle: str = None
borderBlockStartWidth: str = None
borderBlockStyle: str = None
borderBlockWidth: str = None
borderBottom: str = None
borderBottomColor: str = None
borderBottomLeftRadius: str = None
borderBottomRightRadius: str = None
borderBottomStyle: str = None
borderBottomWidth: str = None
borderCollapse: str = None
borderColor: str = None
borderEndEndRadius: str = None
borderEndStartRadius: str = None
borderImage: str = None
borderImageOutset: str = None
borderImageRepeat: str = None
borderImageSlice: str = None
borderImageSource: str = None
borderImageWidth: str = None
borderInline: str = None
borderInlineColor: str = None
borderInlineEnd: str = None
borderInlineEndColor: str = None
borderInlineEndStyle: str = None
borderInlineEndWidth: str = None
borderInlineStart: str = None
borderInlineStartColor: str = None
borderInlineStartStyle: str = None
borderInlineStartWidth: str = None
borderInlineStyle: str = None
borderInlineWidth: str = None
borderLeft: str = None
borderLeftColor: str = None
borderLeftStyle: str = None
borderLeftWidth: str = None
borderRadius: str = None
borderRight: str = None
borderRightColor: str = None
borderRightStyle: str = None
borderRightWidth: str = None
borderSpacing: str = None
borderStartEndRadius: str = None
borderStartStartRadius: str = None
borderStyle: str = None
borderTop: str = None
borderTopColor: str = None
borderTopLeftRadius: str = None
borderTopRightRadius: str = None
borderTopStyle: str = None
borderTopWidth: str = None
borderWidth: str = None
bottom: str = None
boxDecorationBreak: str = None
boxShadow: str = None
boxSizing: str = None
breakAfter: str = None
breakBefore: str = None
breakInside: str = None
captionSide: str = None
caretColor: str = None
clear: str = None
clip: str = None
color: str = None
columnCount: str = None
columnFill: str = None
columnGap: str = None
columnRule: str = None
columnRuleColor: str = None
columnRuleStyle: str = None
columnRuleWidth: str = None
columnSpan: str = None
columnWidth: str = None
columns: str = None
content: str = None
counterIncrement: str = None
counterReset: str = None
cursor: str = None
direction: str = None
display: str = None
emptyCells: str = None
filter: str = None
flex: str = None
flexBasis: str = None
flexDirection: str = None
flexFlow: str = None
flexGrow: str = None
flexShrink: str = None
flexWrap: str = None
float: str = None
font: str = None
fontFamily: str = None
fontFeatureSettings: str = None
fontKerning: str = None
fontLanguageOverride: str = None
fontOpticalSizing: str = None
fontSize: str = None
fontSizeAdjust: str = None
fontStretch: str = None
fontStyle: str = None
fontSynthesis: str = None
fontVariant: str = None
fontVariantAlternates: str = None
fontVariantCaps: str = None
fontVariantEastAsian: str = None
fontVariantLigatures: str = None
fontVariantNumeric: str = None
fontVariantPosition: str = None
fontWeight: str = None
gap: str = None
grid: str = None
gridArea: str = None
gridAutoColumns: str = None
gridAutoFlow: str = None
gridAutoRows: str = None
gridColumn: str = None
gridColumnEnd: str = None
gridColumnStart: str = None
gridRow: str = None
gridRowEnd: str = None
gridRowStart: str = None
gridTemplate: str = None
gridTemplateAreas: str = None
gridTemplateColumns: str = None
gridTemplateRows: str = None
hangingPunctuation: str = None
height: str = None
hyphens: str = None
imageOrientation: str = None
imageRendering: str = None
imageResolution: str = None
imeMode: str = None
initialLetter: str = None
initialLetterAlign: str = None
inlineSize: str = None
inset: str = None
insetBlock: str = None
insetBlockEnd: str = None
insetBlockStart: str = None
insetInline: str = None
insetInlineEnd: str = None
insetInlineStart: str = None
isolation: str = None
justifyContent: str = None
justifyItems: str = None
justifySelf: str = None
left: str = None
letterSpacing: str = None
lineBreak: str = None
lineHeight: str = None
listStyle: str = None
listStyleImage: str = None
listStylePosition: str = None
listStyleType: str = None
margin: str = None
marginBlock: str = None
marginBlockEnd: str = None
marginBlockStart: str = None
marginBottom: str = None
marginInline: str = None
marginInlineEnd: str = None
marginInlineStart: str = None
marginLeft: str = None
marginRight: str = None
marginTop: str = None
mask: str = None
maskClip: str = None
maskComposite: str = None
maskImage: str = None
maskMode: str = None
maskOrigin: str = None
maskPosition: str = None
maskRepeat: str = None
maskSize: str = None
maskType: str = None
maxBlockSize: str = None
maxHeight: str = None
maxInlineSize: str = None
maxWidth: str = None
minBlockSize: str = None
minHeight: str = None
minInlineSize: str = None
minWidth: str = None
mixBlendMode: str = None
objectFit: str = None
objectPosition: str = None
offset: str = None
offsetAnchor: str = None
offsetBlock: str = None
offsetBlockEnd: str = None
offsetBlockStart: str = None
offsetDistance: str = None
offsetInline: str = None
offsetInlineEnd: str = None
offsetInlineStart: str = None
offsetPath: str = None
offsetPosition: str = None
offsetRotate: str = None
opacity: str = None
order: str = None
orphans: str = None
outline: str = None
outlineColor: str = None
outlineOffset: str = None
outlineStyle: str = None
outlineWidth: str = None
overflow: str = None
overflowAnchor: str = None
overflowBlock: str = None
overflowInline: str = None
overflowWrap: str = None
overflowX: str = None
overflowY: str = None
overscrollBehavior: str = None
overscrollBehaviorBlock: str = None
overscrollBehaviorInline: str = None
overscrollBehaviorX: str = None
overscrollBehaviorY: str = None
padding: str = None
paddingBlock: str = None
paddingBlockEnd: str = None
paddingBlockStart: str = None
paddingBottom: str = None
paddingInline: str = None
paddingInlineEnd: str = None
paddingInlineStart: str = None
paddingLeft: str = None
paddingRight: str = None
paddingTop: str = None
pageBreakAfter: str = None
pageBreakBefore: str = None
pageBreakInside: str = None
paintOrder: str = None
perspective: str = None
perspectiveOrigin: str = None
placeContent: str = None
placeItems: str = None
placeSelf: str = None
pointerEvents: str = None
position: str = None
quotes: str = None
resize: str = None
right: str = None
rotate: str = None
rowGap: str = None
rubyAlign: str = None
rubyMerge: str = None
rubyPosition: str = None
scale: str = None
scrollBehavior: str = None
scrollMargin: str = None
scrollMarginBlock: str = None
scrollMarginBlockEnd: str = None
scrollMarginBlockStart: str = None
scrollMarginBottom: str = None
scrollMarginInline: str = None
scrollMarginInlineEnd: str = None
scrollMarginInlineStart: str = None
scrollMarginLeft: str = None
scrollMarginRight: str = None
scrollMarginTop: str = None
scrollPadding: str = None
scrollPaddingBlock: str = None
scrollPaddingBlockEnd: str = None
scrollPaddingBlockStart: str = None
scrollPaddingBottom: str = None
scrollPaddingInline: str = None
scrollPaddingInlineEnd: str = None
scrollPaddingInlineStart: str = None
scrollPaddingLeft: str = None
scrollPaddingRight: str = None
scrollPaddingTop: str = None
scrollSnapAlign: str = None
scrollSnapStop: str = None
scrollSnapType: str = None
shapeImageThreshold: str = None
shapeMargin: str = None
shapeOutside: str = None
tabSize: str = None
tableLayout: str = None
textAlign: str = None
textAlignLast: str = None
textCombineUpright: str = None
textDecoration: str = None
textDecorationColor: str = None
textDecorationLine: str = None
textDecorationSkipInk: str = None
textDecorationStyle: str = None
textDecorationThickness: str = None
textEmphasis: str = None
textEmphasisColor: str = None
textEmphasisPosition: str = None
textEmphasisStyle: str = None
textIndent: str = None
textJustify: str = None
textOrientation: str = None
textOverflow: str = None
textRendering: str = None
textShadow: str = None
textSizeAdjust: str = None
textTransform: str = None
textUnderlineOffset: str = None
textUnderlinePosition: str = None
top: str = None
touchAction: str = None
transform: str = None
transformBox: str = None
transformOrigin: str = None
transformStyle: str = None
transition: str = None
transitionDelay: str = None
transitionDuration: str = None
transitionProperty: str = None
transitionTimingFunction: str = None
unicodeBidi: str = None
userSelect: str = None
verticalAlign: str = None
visibility: str = None
whiteSpace: str = None
widows: str = None
width: str = None
willChange: str = None
wordBreak: str = None
wordSpacing: str = None
writingMode: str = None
zIndex: str = None
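# Export helpers: ReactStyle() gathers the non-None fields into a single
# camelCase dict (the shape React expects for inline styles), while
# AntdStyle() emits them as a list of {'name': ..., 'value': ...} records
# matching the NStyle shape declared above.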
def ReactStyle(self) -> Dict[str, str]:
style_dict = {}
for field_name, field_value in self.__dict__.items():
if field_value is not None:
style_dict[field_name] = field_value
return style_dict
def AntdStyle(self) -> List[Dict[str, str]]:
style_list = []
for field_name, field_value in self.__dict__.items():
if field_value is not None:
style_list.append({'name': field_name, 'value': field_value})
return style_list
# File: megengine/core/tensor/utils.py (from MegEngine-1.13.1)
import collections
import itertools
from typing import Iterable, Union
import numpy as np
from .. import _config
from .._imperative_rt import make_const
from .._imperative_rt.core2 import (
Const,
Tensor,
_get_convert_inputs,
_set_convert_inputs,
apply,
astensor1d_cpp,
astype_cpp,
convert_inputs_cpp,
convert_single_value_cpp,
dtype_promotion,
get_device,
make_shape_tuple,
)
from .._imperative_rt.ops import SubgraphBuilder as _SubgraphBuilder
from .._imperative_rt.ops import jit_supported
from .._wrap import as_device
from ..autodiff.grad import Function
from ..ops import builtin
from .amp import _get_amp_high_prec_dtype, _get_amp_low_prec_dtype
from .dtype import is_dtype_equal, is_quantize
jit_supported = False
def get_convert_inputs():
r"""get the curerent state of `_enable_convert_inputs`"""
return _get_convert_inputs()
def set_convert_inputs(flag):
r"""This function is a temporary workaround for reducing the overhead of operator
invocations. The function `convert_inputs` is disabled if the global state
`_enable_convert_inputs` is set to `False`, otherwise enabled. This function is for
internal use only, and should be removed when the tensor-like system is refactored.
"""
return _set_convert_inputs(flag)
def convert_single_value(v, *, dtype=None, device=None):
return convert_single_value_cpp(v, dtype, device)
def convert_inputs(*args, device=None):
if not _get_convert_inputs():
return args
return convert_inputs_cpp(*args, device)
def cast_tensors(*args, promote=False):
if promote:
dtype = _get_amp_high_prec_dtype()
else:
dtype = _get_amp_low_prec_dtype()
return tuple(arg.astype(dtype) if arg is not None else None for arg in args)
def result_type(*args):
dtypes = []
for i in args:
if isinstance(i, Tensor):
dtypes.append(i.dtype)
continue
try:
dtypes.append(np.dtype(i))
except TypeError:
pass
return np.result_type(*dtypes)
def isscalar(x):
if isinstance(x, Tensor):
return x._isscalar()
return np.isscalar(x)
def astensor1d(x, *reference, dtype=None, device=None):
"""Convert something to 1D tensor. Support following types
* sequence of scalar literal / tensor
* numpy array
* tensor (returned as is, regardless of dtype and device)
"""
return astensor1d_cpp(x, dtype, device, reference)
def _normalize_axis(
ndim: int, axis: Union[int, Iterable], reverse=False
) -> Union[int, list]:
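# Map possibly-negative axes into [0, ndim) and, for iterables, return a
# sorted list (descending when reverse=True), rejecting duplicate entries.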
def convert(x):
x_org = x
if x < 0:
x = ndim + x
assert (
x >= 0 and x < ndim
), "axis {} is out of bounds for tensor of dimension {}".format(x_org, ndim)
return x
if isinstance(axis, int):
return convert(axis)
elif isinstance(axis, Iterable):
axis_org = axis
axis = list(sorted(map(convert, axis), reverse=reverse))
for i in range(len(axis) - 1):
assert axis[i] != axis[i + 1], "axis {} contains duplicated indices".format(
axis_org
)
return axis
raise TypeError("unsupported axis type: {}".format(axis))
_opr_map = {
("-", 1): builtin.Elemwise(mode="negate"),
("abs", 1): builtin.Elemwise(mode="abs"),
("exp", 1): builtin.Elemwise(mode="exp"),
("log1p", 1): builtin.Elemwise(mode="log1p"),
("relu", 1): builtin.Elemwise(mode="relu"),
("cond_leq_mov", 3): builtin.Elemwise(mode="cond_leq_mov"),
("fma3", 3): builtin.Elemwise(mode="FUSE_MUL_ADD3"),
("fma4", 4): builtin.Elemwise(mode="FUSE_MUL_ADD4"),
("[?:]", 2): builtin.Subtensor(items=[(0, True, False, False, False)]),
("[:?]", 2): builtin.Subtensor(items=[(0, False, True, False, False)]),
}
for name, mode in [
("+", "add"),
("-", "sub"),
("*", "mul"),
("/", "true_div"),
("//", "floor_div"),
("**", "pow"),
("max", "max"),
("min", "min"),
("additive", "add"),
("exp", "EXP"),
("switch_gt0", "switch_gt0"),
("abs_grad", "abs_grad"),
]:
_opr_map[(name, 2)] = builtin.Elemwise(mode=mode)
def subgraph(
name, dtype, device, nr_inputs, gopt_level=None, jit_fusion=False, custom_grad=False
):
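'''Build a single fused operator from elementwise subgraph primitives.
The decorated function receives (inputs, apply_expr, apply_const) and
returns its outputs; the builder compiles them into one op, using JIT
fusion on GPU when available and falling back to graph optimization
(gopt_level=2) otherwise. With custom_grad=True the function is a
generator that additionally yields the input gradients for the
backward pass.
'''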
if not device.physical_name.startswith("gpu"):
jit_fusion = False
if jit_fusion and not jit_supported:
jit_fusion = False # jit unusable, fallback to graph compile
gopt_level = 2
def as_op(op, nargs):
if isinstance(op, str):
assert (op, nargs) in _opr_map, "unknown operator"
op = _opr_map[(op, nargs)]
return op
def decorator(func):
builder = _SubgraphBuilder(name)
def apply_expr(op, *args, nr_out=None):
op = as_op(op, len(args))
results = builder.apply(op, args, 1 if nr_out is None else nr_out)
if nr_out is None:
assert len(results) == 1
return results[0]
else:
assert len(results) == nr_out
return results
def apply_const(value, dtype=dtype, device=device):
return builder.apply_const(value, dtype, device)
def build(builder, outputs, outputs_has_grad):
builder = type(builder)(builder)
builder.outputs(outputs)
builder.outputs_has_grad(outputs_has_grad)
if jit_fusion:
assert gopt_level is None
op = lambda: builder.jit_fuse()
elif gopt_level is None:
op = lambda: builder.get()
else:
op = lambda: builder.compile(gopt_level)
return op
inputs = [builder.input() for _ in range(nr_inputs)]
if not custom_grad:
outputs, outputs_has_grad = func(inputs, apply_expr, apply_const)
return build(builder, outputs, outputs_has_grad)
else:
gen = func(inputs, apply_expr, apply_const)
outputs = gen.send(None)
nr_outputs = len(outputs)
forward_fn = build(builder, outputs, [False] * nr_outputs)
output_grads = [builder.input() for _ in range(nr_outputs)]
input_grads = gen.send(output_grads)
assert len(input_grads) == nr_inputs
input_grads_mask = [input_grad is not None for input_grad in input_grads]
indices = [
i - 1 if mask else None
for i, mask in zip(
itertools.accumulate(input_grads_mask), input_grads_mask
)
]
encoded_input_grads = [grad for grad in input_grads if grad is not None]
backward_fn = build(
builder, encoded_input_grads, [True] * len(encoded_input_grads)
)
class SubgraphOp(Function):
def __init__(self):
self.inputs = None
self.output_shapes = None
def forward(self, *inputs):
self.inputs = inputs
outputs = apply(forward_fn(), *inputs)
if len(outputs) > 1:
self.output_shapes = [output.shape for output in outputs]
return outputs
def backward(self, *output_grads):
inputs = self.inputs
any_valid = False
all_valid = True
for output_grad in output_grads:
if output_grad is None:
all_valid = False
else:
any_valid = True
if not any_valid:
input_grads = [None] * len(indices)
else:
if not all_valid:
assert self.output_shapes is not None
from ...functional import zeros
output_grads = [
zeros(self.output_shapes[i]) if grad is None else grad
for i, grad in enumerate(output_grads)
]
self = None
encoded_input_grads = apply(
backward_fn(), *inputs, *output_grads
)
input_grads = [
encoded_input_grads[i] if i is not None else None
for i in indices
]
return input_grads
gen.close()
return SubgraphOp
return decorator
def interpret_subgraph(func, dtype, device):
def as_op(op, nargs):
if isinstance(op, str) and (op, nargs) in _opr_map:
op = _opr_map[(op, nargs)]
return op
def decorated_func(*args):
def apply_expr(op, *args, nr_out=None):
op = as_op(op, len(args))
results = apply(op, *args)
if nr_out is None:
assert len(results) == 1
return results[0]
else:
assert len(results) == nr_out
return results
def apply_const(value, dtype=dtype, device=device):
return Const(value, dtype, device)
outputs, outputs_has_grad = func(args, apply_expr, apply_const)
outputs = [
output if has_grad else output.detach()
for output, has_grad in zip(outputs, outputs_has_grad)
]
return outputs
return decorated_func
def subgraph_fn(
name,
dtype,
device,
nr_inputs,
gopt_level=None,
jit_fusion=False,
custom_grad=False,
*,
interpret=False
):
def decorator(func):
if not interpret:
op = subgraph(
name,
dtype,
device,
nr_inputs,
gopt_level=gopt_level,
jit_fusion=jit_fusion,
custom_grad=custom_grad,
)(func)
def wrapped_func(*args):
if custom_grad:
outputs = op()(*args)
else:
outputs = apply(op(), *args)
return outputs
return wrapped_func
else:
return interpret_subgraph(func, dtype, device)
return decorator
# File: lazysearch/lazysearch.py (from LazySearch-1.0rc3)
import os
import json  # needed by Index.index() below for non-string values
import uuid
import tantivy
import datetime
import diskcache
from collections.abc import MutableMapping
from . import simple_pattern
def flatten_dict(dictionary, parent_key=(), meta={}):
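# Recursively flatten nested dicts and lists into (key_path, value) pairs.
# Non-string keys are swapped for a '_key.<uuid>' placeholder recorded in
# `meta` so they can be restored at search time. Note the mutable default
# `meta={}` is shared across calls; Index always passes its own meta store.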
items = []
for key, value in dictionary.items():
if not isinstance(key, str):
if key not in meta:
uid = f"_key.{uuid.uuid4()}"
meta[uid] = key
key = uid
new_key = parent_key + (key,)
if isinstance(value, MutableMapping):
items.extend(flatten_dict(value, new_key, meta))
elif isinstance(value, list):
for k, v in enumerate(value):
items.extend(flatten_dict({k: v}, new_key, meta))
else:
items.append((list(new_key), value))
return items
def get_schema(tokenizer):
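# Tantivy schema: '_id' and 'key' use the 'raw' tokenizer (exact match),
# 'value' is tokenized for full-text search, 'patterns' stores raw word
# patterns for fuzzy lookup, and 'num' is a fast unsigned field holding
# numeric values and datetime timestamps.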
schema = tantivy.SchemaBuilder()
schema.add_text_field(name="_id", tokenizer_name="raw", stored=True)
schema.add_text_field(name="key", tokenizer_name="raw", stored=True)
schema.add_text_field(name="value", tokenizer_name=tokenizer, stored=False)
schema.add_text_field(name="patterns", tokenizer_name="raw", stored=False)
schema.add_unsigned_field(name="num", stored=False, fast="single")
schema = schema.build()
return schema
class Index:
def __init__(
self,
index_name,
index_path=None,
tokenizer="default",
tantivy_ram_size_in_gb=0.25,
tantivy_threads=0,
):
if not index_path:
index_path = os.path.join(
os.path.expanduser("~"), os.path.join("LazySearch", index_name)
)
schema = get_schema(tokenizer)
index_path = os.path.join(index_path, "index")
try:
if not os.path.exists(index_path):
os.makedirs(index_path)
except:
pass
self.INDEX = tantivy.Index(schema, index_path, reuse=True)
self.WRITER = self.INDEX.writer(
heap_size=int(tantivy_ram_size_in_gb * 1073741824),
num_threads=tantivy_threads,
)
self.SEARCHER = self.INDEX.searcher()
meta_index_path = os.path.join(index_path, "meta")
try:
if not os.path.exists(meta_index_path):
os.makedirs(meta_index_path)
except:
pass
self.META = diskcache.Index(meta_index_path)
def index(self, in_data):
if not isinstance(in_data, list):
in_data = [in_data]
uuids = []
for _d in in_data:
_id = _d.get("_id")
if _id:
del _d["_id"]
else:
_id = f"{uuid.uuid4()}"
self.META[f"id.{_id}"] = _d
_d = flatten_dict(_d, meta=self.META)
for key, value in _d:
if isinstance(value, str):
self.WRITER.add_document(
tantivy.Document.from_dict(
{
"_id": _id,
"key": key,
"value": value,
"patterns": simple_pattern.string_to_word_patterns(
value
),
"num": [],
}
)
)
elif isinstance(value, datetime.datetime):
self.WRITER.add_document(
tantivy.Document.from_dict(
{
"_id": _id,
"key": key,
"value": [],
"patterns": [],
"num": value.timestamp(),
}
)
)
elif isinstance(value, (int, float)):
self.WRITER.add_document(
tantivy.Document.from_dict(
{
"_id": _id,
"key": key,
"value": [],
"patterns": [],
"num": value,
}
)
)
else:
self.WRITER.add_document(
tantivy.Document.from_dict(
{
"_id": _id,
"key": key,
"value": json.dumps(value),
"patterns": simple_pattern.string_to_word_patterns(
value
),
"num": [],
}
)
)
uuids.append(_id)
return uuids
def commit(self, reload_only=False):
if not reload_only:
self.WRITER.commit()
self.INDEX.reload()
self.SEARCHER = self.INDEX.searcher()
def search(
self,
string_query=None,
int_query=None,
datetime_query=None,
pattern_query=None,
start=0,
n=20,
):
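# Build a tantivy query from the given criteria (matching everything when
# none is supplied), then lazily yield result dicts that pull the original
# document back out of the meta store, paging through any hits beyond the
# first batch.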
query = []
if (
string_query is None
and int_query is None
and datetime_query is None
and pattern_query is None
):
query.append("value:*")
else:
if string_query:
query.append(f"value:{string_query}")
if pattern_query:
query.append(
" OR ".join(
[
f'patterns:"{__}"'
for __ in simple_pattern.string_to_word_patterns(
pattern_query
)
]
)
)
query = [f"({__})" for __ in query]
query = " AND ".join(query)
parsed_query = self.INDEX.parse_query(query, ["value", "patterns"])
_ = self.SEARCHER.search(parsed_query, limit=n, offset=start, count=True)
current_results, total_count = _.hits, _.count
start = start + len(current_results)
for score, doc_address in current_results:
_ = self.SEARCHER.doc(doc_address)
_id = _["_id"][0]
_found_in_key = [
self.META[__] if __.startswith("_key.") else __ for __ in _["key"]
]
yield {
"_id": _id,
"source": self.META[f"id.{_id}"],
"score": score,
"total_count": total_count,
"found_in_key": _found_in_key,
}
while start + n < total_count:
current_results = self.SEARCHER.search(
parsed_query, limit=n, offset=start, count=False
).hits
for score, doc_address in current_results:
_id = self.SEARCHER.doc(doc_address)["_id"][0]
yield {
"_id": _id,
"source": self.META[f"id.{_id}"],
"score": score,
"total_count": total_count,
}
if __name__ == "__main__":
test_index = Index("test", index_path="./test_index")
print(
test_index.index(
[{"_id": "1", "d": ["a b"]}, {"c": "a", "d": [1, 2, {99: 100}]}]
)
)
print(
test_index.index(
{
"_id": "3",
"pawan": [{"name": "kalyan pawan"}],
"brother": "chiranjeevi",
"imp_days": [datetime.datetime.now(), [datetime.datetime.now()]],
}
)
)
test_index.commit()
print("-----------------")
for result in test_index.search(pattern_query="xawan"):
print(result)
# File: poco/utils/simplerpc/jsonrpc/six.py (from IsPycharmRun-1.0)
# Copyright (c) 2010-2013 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.4.1"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
class Module_six_moves_urllib_parse(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib.parse")
class Module_six_moves_urllib_error(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib_error")
sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
class Module_six_moves_urllib_request(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib_request")
sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
class Module_six_moves_urllib_response(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib_response")
sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
class Module_six_moves_urllib_robotparser(types.ModuleType):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
sys.modules[__name__ + ".moves.urllib_robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib_robotparser")
sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
parse = sys.modules[__name__ + ".moves.urllib_parse"]
error = sys.modules[__name__ + ".moves.urllib_error"]
request = sys.modules[__name__ + ".moves.urllib_request"]
response = sys.modules[__name__ + ".moves.urllib_response"]
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
def iterkeys(d, **kw):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)(**kw))
def itervalues(d, **kw):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)(**kw))
def iteritems(d, **kw):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)(**kw))
def iterlists(d, **kw):
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
return iter(getattr(d, _iterlists)(**kw))
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
return meta("NewBase", bases, {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
for slots_var in orig_vars.get('__slots__', ()):
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
# Running BerkeleyGW with BGWpy #
(From BGWpy-3.2.2: Documentation/Tutorial/Tutorial_Abinit.ipynb)
In this notebook, we assume that you are somewhat familiar with the BerkeleyGW software: what problem it solves, and what is the general workflow to run it. We also assume that you have a basic knowledge of Python and its terminology.
Before you begin, make sure that you have the following packages installed:
* Jupyter Notebook
* Abinit
* BerkeleyGW
* BGWpy
To run BGWpy, you'll also need the `bin` directories of BerkeleyGW and Abinit installations located in your `PATH` environment variable.
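If they are not, a line like the following in your shell profile will add them (the installation paths here are placeholders; adapt them to your system):
```
export PATH="$HOME/BerkeleyGW/bin:$HOME/abinit/bin:$PATH"
```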
## Checking your configuration ##
The following cell is used to generate information that we'll need, should we have to debug this notebook. You don't need to run it, but it may be useful to look at for educational purposes.
```
import sys
import os
import BGWpy.config as defaults
print("Python kernel:\n {} ".format(sys.executable))
print("Python version:\n {} ".format(sys.version))
print("Current working directory:\n {} ".format(os.getcwd()))
print("Configuration file:\n {} ".format(defaults.config_file))
print("Use HDF5?:\n {} ".format(defaults.flavors['use_hdf5']))
print("Use complex version of BerkeleyGW?:\n {}".format(defaults.flavors['flavor_complex']))
print("DFT Flavor:\n {} ".format(defaults.flavors['dft_flavor']))
print("Default MPI settings:\n {} ".format(defaults.default_mpi))
print("Default runscript settings:\n {} ".format(defaults.default_runscript))
print("Paths in $PATH:")
for i in os.environ['PATH'].split(":"):
print(" {}".format(i))
```
Pay attention to the `use_hdf5` flag. It should reflect whether you compiled BerkeleyGW with HDF5 support or not. If the information above is not consistent with what you have, then you should edit your `~/.BGWpyrc` file accordingly. This is important because the file names that BGW expects from a calculation depends on it. If you don't have HDF5, then you should remove all the '.h5' extensions from file names. It is highly recommended, however, that you build BGW with HDF5 support, as it could become mandatory in the future.
If you don't have a `~/.BGWpyrc` yet, you can copy it from the `BGWpy/config` directory, or simply run the script `BGWpy_make_config_file.py`.
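As a rough sketch, the flavor-related entries in `~/.BGWpyrc` might look like the following. The section and key names are assumptions inferred from the defaults printed above, so check the template shipped with BGWpy for the exact layout:
```
[flavors]
use_hdf5 = True
flavor_complex = True
dft_flavor = abinit
```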
# Load Libraries #
First, we load two external packages which BGWpy uses: `numpy` and `pymatgen`.
```
import pymatgen
import numpy as np
```
Next, we load the `Structure` class from the BGWpy package; this is really just the `Structure` class from the `pymatgen` package.
```
from BGWpy import Structure
```
Next, we load the classes which create and run Abinit calculations.
```
from BGWpy import AbinitScfTask, AbinitBgwFlow
```
Finally, we load the classes with create and run BerkeleyGW calculations.
```
from BGWpy import EpsilonTask, SigmaTask, KernelTask, AbsorptionTask
```
Make sure that both the BerkeleyGW and Abinit binary directories are included in your `PATH` environment variable.
# Create the Structure #
For this tutorial, we'll calculate the many-body properties of the GaAs primitive cell. All files that you will need have been provided for you in the `Data` subdirectory.
Geometries are specified in BGWpy using pymatgen's `Structure` class, which may be imported directly from BGWpy or through pymatgen.
There are a number of ways that we can import geometries into BGWpy using the `Structure` class. For example, we can load them from a pre-existing CIF file:
```
structure = Structure.from_file('../Data/Structures/GaAs.cif')
print(structure)
```
We can also load them from a previous pymatgen Structure which has been exported to a file in the JSON format:
```
structure = Structure.from_file('../Data/Structures/GaAs.json')
print(structure)
```
We can even use pymatgen to directly create the structure in a Python script:
```
acell_angstrom = 5.6535
rprim = np.array([[.0,.5,.5],[.5,.0,.5],[.5,.5,.0]]) * acell_angstrom
structure = pymatgen.Structure(
lattice = pymatgen.core.lattice.Lattice(rprim),
species= ['Ga', 'As'],
coords = [3*[.0], 3*[.25]],
)
print(structure)
```
For more information about pymatgen, please consult its official documentation.
# Generating the Ground State Density #
To begin, we will run a ground state DFT calculation to self-consistency to generate the ground state charge density, which will then be fed into all wavefunction calculations in the next step. We use Abinit in this notebook; however, BerkeleyGW and BGWpy support a number of other DFT packages.
First, we will create an object of the `AbinitScfTask` class to prepare the needed variables:
```
task = AbinitScfTask(
dirname = 'Runs/11-Density',
structure = Structure.from_file('../Data/Structures/GaAs.json'),
prefix = 'GaAs', # File names prefix. You don't really need to specify this with abinit.
pseudo_dir = '../Data/Pseudos',
pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
ngkpt = [2,2,2], # k-points grid
kshift = [.5,.5,.5], # k-points shift
ecut = 5.0, # Wavefunctions cutoff energy
# These are the default parameters for the MPI runner.
# You can specify them here, but it's better to store this info in
# the configuration file ~/.BGWpyrc
nproc=1,
nproc_per_node=1,
mpirun='mpirun',
nproc_flag='-n',
nproc_per_node_flag='--npernode',
)
```
As you can see, BGWpy has a number of parameters that you will need to set. However, many of these parameters are consistent from calculation to calculation, so we'll store them in dictionaries that we can reuse for future steps.
First, a dictionary to store all variables that will be used across all Abinit calculations:
```
structure_and_pseudos = dict(
structure = Structure.from_file('../Data/Structures/GaAs.json'),
pseudo_dir = '../Data/Pseudos',
pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
)
```
Next, a dictionary to store the variables which are used only for this particular SCF task:
```
scf_settings = dict(
ngkpt = [2,2,2], # k-points grid
kshift = [.5,.5,.5], # k-points shift
ecut = 5.0, # Wavefunctions cutoff energy
)
```
And finally, a dictionary to store the settings related to running calculations with MPI.
```
mpi_settings = dict( # Then again, you should store those settings in ~/.BGWpyrc
nproc=1,
nproc_per_node=1,
mpirun='mpirun',
nproc_flag='-n',
nproc_per_node_flag='--npernode',
)
```
Note that all these dictionaries correspond to arguments for the `AbinitScfTask`, stored as key/value pairs. This allows us to use dictionary unpacking to considerably tidy up our code:
```
scf_task = AbinitScfTask(
dirname='Runs/11-Density',
**scf_settings,
**structure_and_pseudos,
**mpi_settings,
)
```
Now that we've created the `AbinitScfTask` task, we can use the `write` method to write the needed input files to disk:
```
scf_task.write()
```
If you receive an error message stating that an executable could not be found, you likely do not have the needed BerkeleyGW and Abinit binary folders in your `PATH` environment variable.
Let's take a look at the folder that was created by this task using Jupyter's built-in `!ls` magic command:
```
!ls 'Runs/11-Density'
```
Inside this new folder, the task has created several files:
* `GaAs.files`, the list of files used by Abinit.
* `GaAs.in`, the Abinit input variables.
* `run.sh`, the execution script.
and three directories used by Abinit for the input data files, outputs, and temporary files:
* `input_data`
* `out_data`
* `tmp_data`
Now that we've created the needed input files, let's run the `run.sh` script using the `run` method. Note that this step will take a few seconds, as it will run Abinit in the background.
```
scf_task.run()
```
Finally, we can check the status of the calculation using the `report` method. You should see a message telling you that it's been completed.
```
scf_task.report()
```
It is possible to access the data files produced by this task with
```
charge_density_fname = scf_task.get_odat('DEN')
vxc_fname = scf_task.get_odat('VXC')
print("Charge density file name: {}".format(charge_density_fname))
print("Exchange-correlation potential file name: {}".format(vxc_fname))
```
This won't be necessary, however, when we get to use the `AbinitBgwFlow`.
# Generating the Wavefunctions #
Now that we've generated the ground-state density, we'll use it to generate the wavefunctions that we'll feed into BerkeleyGW. This is done with the `AbinitBgwFlow` class. As mentioned in the introduction, we'll need up to 6 different types of wavefunction files.
## WFN ##
`WFN` is the "standard" k-shifted wavefunction file which is read by the `Epsilon` calculation, and thus is needed for all BerkeleyGW calculations.
It (like all the other wavefunction files) is generated using the `AbinitBgwFlow` class. The only difference between the wavefunction types is the parameter values used:
```
task = AbinitBgwFlow(
    dirname = 'Runs/12-Wfn',
    structure = Structure.from_file('../Data/Structures/GaAs.json'),
    prefix = 'GaAs',
    pseudo_dir = '../Data/Pseudos',
    pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.5,.5,.5],    # k-points shift
    ecut = 5.0,             # Wavefunctions cutoff energy
    nband = 9,              # Number of bands
    input_variables = {'autoparal' : 1},    # Any extra input variables we want to specify
    charge_density_fname = '11-Density/out_data/odat_DEN',
    vxc_fname = '11-Density/out_data/odat_VXC',
    # These are the default parameters for the MPI runner.
    # Please adapt them to your needs.
    nproc = 1,
    nproc_per_node = 1,
    mpirun = 'mpirun',
    nproc_flag = '-n',
    nproc_per_node_flag = '--npernode',
)
```
As before, we will break up these arguments into sets of dictionaries: the settings common to all wavefunction calculations
```
wfn_common_settings = dict(
    ecut = 5.0,             # Wavefunctions cutoff energy
    input_variables = {'autoparal' : 1},    # Any extra input variables we want to specify
    charge_density_fname = charge_density_fname,
    vxc_fname = vxc_fname,
)
```
and the arguments specific to the current wavefunction calculation
```
wfn_settings = dict(
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.5,.5,.5],    # k-points shift
    nband = 9,              # Number of bands
    **wfn_common_settings)
```
Reusing the dictionaries of settings previously defined, we can now create an instance of the `AbinitBgwFlow` class:
```
wfn_flow = AbinitBgwFlow(
    dirname='Runs/12-Wfn',
    **wfn_settings,
    **structure_and_pseudos,
    **mpi_settings)
```
As before, we'll write the input files to disk, then run the calculation:
```
wfn_flow.write()
wfn_flow.run()
wfn_flow.report()
```
The output specifies that we've actually run two calculations here: a `WFN` calculation, where we compute the wavefunctions with Abinit, and an `Abi2BGW` step, where we convert the resulting Abinit-specific output files into a format readable by BerkeleyGW. Unlike in the density case, where we ran a single task, here we're running two tasks (`WFN` and `Abi2BGW`) in a workflow (hence the name `AbinitBgwFlow`).
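If you are curious, you can list the contents of the run directory, just as we did for the density calculation. The exact layout may vary between BGWpy versions, but you should find both the Abinit files and the converted BerkeleyGW wavefunction file:
```
!ls 'Runs/12-Wfn'
```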
## WFNq ##
Next, we'll create `WFNq`, the k-shifted and q-shifted wavefunction file that is also read by the `Epsilon` calculation, and thus is needed for all BerkeleyGW calculations.
The only new dictionary we need to create is the one with settings specific to the `WFNq` wavefunction:
```
wfnq_settings = dict(
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.5,.5,.5],    # k-points shift
    qshift = [.001,.0,.0],  # k-points q-shift
    **wfn_common_settings)
```
And then we can prepare the calculation:
```
wfnq_flow = AbinitBgwFlow(
    dirname='Runs/13-Wfnq',
    **wfnq_settings,
    **structure_and_pseudos,
    **mpi_settings)
```
Write the input files and run it:
```
wfnq_flow.write()
wfnq_flow.run()
wfnq_flow.report()
```
## WFN_co ##
Next, we'll create `WFN_co`, the wavefunction on a coarser (and unshifted) grid than `WFN`. This is used by `Sigma`, `Kernel`, and `Absorption`, and thus will be needed by most BerkeleyGW calculations. We will also use this calculation to generate the ground-state density and exchange-correlation potential that will be used by `Sigma`.
Once again, we set up the dictionary with our needed variables:
```
wfn_co_settings = dict(
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.0,.0,.0],    # k-points shift
    nband = 9,              # Number of bands
    rhog_flag = True,       # Also convert the charge density for BGW.
    vxcg_flag = True,       # Also convert vxc for BGW.
    **wfn_common_settings)
```
Note that there are two new flags: `rhog_flag` tells `AbinitBgwFlow` to generate additional density-related files, while `vxcg_flag` tells the `Abi2BGW` task to read and convert the `VXC` file.
Now we can prepare the calculation:
```
wfn_co_flow = AbinitBgwFlow(
    dirname = 'Runs/14-Wfn_co',
    **wfn_co_settings,
    **structure_and_pseudos,
    **mpi_settings)
```
And create and run it:
```
wfn_co_flow.write()
wfn_co_flow.run()
wfn_co_flow.report()
```
## WFN_fi ##
Next, we'll create `WFN_fi`, the k-shifted `WFN` on a finer grid than `WFN`. This is used during interpolation in the `Absorption` executable, and thus is only needed when solving the BSE equation. (Symmetry is also turned off for this calculation.)
```
wfn_fi_settings = dict(
    nband = 9,              # Number of bands
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.5,.5,.5],    # k-points shift
    symkpt = False,         # Do not reduce the k-point grid with symmetries.
    **wfn_common_settings)

wfn_fi_flow = AbinitBgwFlow(
    dirname = 'Runs/15-Wfn_fi',
    **wfn_fi_settings,
    **structure_and_pseudos,
    **mpi_settings)

wfn_fi_flow.write()
wfn_fi_flow.run()
wfn_fi_flow.report()
```
## WFNq_fi ##
Finally, we'll create `WFNq_fi`, the k-shifted and q-shifted `WFN` on a finer grid than `WFN`. Like `WFN_fi`, this is used during interpolation in the `Absorption` executable, and thus is only needed when solving the BSE equation. (And symmetry is turned off, as before.)
Let's go through the steps again:
```
wfnq_fi_settings = dict(
    nband = 9,              # Number of bands
    ngkpt = [2,2,2],        # k-points grid
    kshift = [.5,.5,.5],    # k-points shift
    qshift = [.001,.0,.0],  # k-points q-shift
    symkpt = False,         # Do not reduce the k-point grid with symmetries.
    **wfn_common_settings)

wfnq_fi_flow = AbinitBgwFlow(
    dirname = 'Runs/16-Wfnq_fi',
    **wfnq_fi_settings,
    **structure_and_pseudos,
    **mpi_settings)

wfnq_fi_flow.write()
wfnq_fi_flow.run()
wfnq_fi_flow.report()
```
# Running GW #
Now the moment you've been waiting for, when we actually run a GW calculation!
## Epsilon ##
Our first step is to run an `Epsilon` calculation, where we'll generate the dielectric matrix (to be precise, the inverse dielectric matrix).
Because BerkeleyGW uses a file-based communication system, we'll need to specify the location of the wavefunction files that we previously calculated:
```
epsilon_input_files = dict(
    wfn_fname='Runs/12-Wfn/wfn.cplx',
    wfnq_fname='Runs/13-Wfnq/wfn.cplx',
)
```
Alternatively, we can set the file names above using properties of the flow objects:
```
epsilon_input_files = dict(
    wfn_fname=wfn_flow.wfn_fname,
    wfnq_fname=wfnq_flow.wfn_fname,
)
```
As well as the settings for an `Epsilon` calculation:
```
epsilon_settings = dict(
    ngkpt = wfn_settings['ngkpt'],      # 'ngkpt': [2, 2, 2],
    qshift = wfnq_settings['qshift'],   # 'qshift': [.001, .0, .0],
    ecuteps = 10.0,
)
```
And then we can prepare the `Epsilon` calculation using an `EpsilonTask` object (reusing our `mpi_settings` dictionary from before):
```
structure = structure_and_pseudos['structure']  # the Structure object we loaded earlier

epsilon_task = EpsilonTask(
    dirname='Runs/21-Epsilon',
    structure=structure,
    **epsilon_input_files,
    **epsilon_settings,
    **mpi_settings)
```
Let's run the calculation:
```
epsilon_task.write()
epsilon_task.run()
epsilon_task.report()
```
## Sigma ##
Now that we've calculated the (inverse) dielectric matrix and needed wavefunctions, we have everything we need to calculate the GW self-energy. This is done with the `Sigma` executable, which takes as inputs the results from our `WFN_co` and `Epsilon` calculations:
```
sigma_input_files = dict(
    wfn_co_fname='Runs/14-Wfn_co/wfn.cplx',
    rho_fname='Runs/14-Wfn_co/rho.cplx',
    vxc_fname='Runs/14-Wfn_co/vxc.cplx',
    eps0mat_fname='Runs/21-Epsilon/eps0mat.h5',
    epsmat_fname='Runs/21-Epsilon/epsmat.h5',
)
```
Once again, making use of the object properties, we can get the above file names with:
```
sigma_input_files = dict(
    wfn_co_fname=wfn_co_flow.wfn_fname,
    rho_fname=wfn_co_flow.rho_fname,
    vxc_fname=wfn_co_flow.vxc_fname,
    eps0mat_fname=epsilon_task.eps0mat_fname,
    epsmat_fname=epsilon_task.epsmat_fname,
)
```
Specify the settings:
```
sigma_settings = dict(
    ngkpt = wfn_co_settings['ngkpt'],   # 'ngkpt': [2,2,2],
    ibnd_min = 1,                       # Minimum band for GW corrections
    ibnd_max = 8,                       # Maximum band for GW corrections
    extra_lines = ['dont_use_vxcdat'],
    #extra_lines = ['dont_use_vxcdat', 'dont_use_hdf5'],
)
```
Prepare the calculation:
```
sigma_task = SigmaTask(
    dirname='Runs/22-Sigma',
    structure=structure,
    **sigma_input_files,
    **sigma_settings,
    **mpi_settings)
```
And finally, run it:
```
# Execution
sigma_task.write()
sigma_task.run()
sigma_task.report()
```
If you see an `Unfinished` status, something went wrong, and you should inspect the contents of the run directory, in particular the main output file `Runs/22-Sigma/sigma.out`.
Also make sure you are using the latest version of BerkeleyGW.
If you see a `Completed` status, then congratulations! You have successfully run a BerkeleyGW calculation from start to finish.
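Before moving on, it can be instructive to peek at the quasiparticle energies that `Sigma` wrote to disk. Here is a minimal sketch that simply prints the first few lines of the file exposed by the task's `eqp1_fname` property; the exact format of this file is described in the BerkeleyGW documentation.
```
# Print the first few lines of the quasiparticle energy file.
with open(sigma_task.eqp1_fname) as f:
    for _ in range(10):
        line = f.readline()
        if not line:
            break
        print(line.rstrip())
```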
# Running BSE #
For those of you who want to go further, BerkeleyGW can calculate excitonic properties at the GW+BSE level of theory. This is done with the `KernelTask` and `AbsorptionTask` classes.
## Kernel ##
`Kernel` takes as inputs the results of `WFN_co` and `Epsilon`:
```
kernel_input_files = dict(
    wfn_co_fname=wfn_co_flow.wfn_fname,
    eps0mat_fname=epsilon_task.eps0mat_fname,
    epsmat_fname=epsilon_task.epsmat_fname,
)
```
We can specify its settings:
```
kernel_settings = dict(
    ngkpt = wfn_co_settings['ngkpt'],
    ecuteps = epsilon_settings['ecuteps'],
    nbnd_val = 4,
    nbnd_cond = 4,
    # These extra lines will be added verbatim to the input file.
    extra_lines = ['use_symmetries_coarse_grid', 'screening_semiconductor'],
)
```
Prepare the calculation:
```
kernel_task = KernelTask(
    dirname='Runs/23-Kernel',
    structure=structure,
    **kernel_input_files,
    **kernel_settings,
    **mpi_settings)
```
And finally run it:
```
kernel_task.write()
kernel_task.run()
kernel_task.report()
```
## Absorption ##
Finally, we solve the BSE equation via the `Absorption` executable. It takes as inputs the results of `WFN_co`, `WFN_fi`, and `WFNq_fi`, as well as the outputs of all previous BerkeleyGW executables: `Epsilon`, `Sigma`, and `Kernel`:
```
absorption_input_files = dict(
    wfn_co_fname = 'Runs/14-Wfn_co/wfn.cplx',
    wfn_fi_fname = 'Runs/15-Wfn_fi/wfn.cplx',
    wfnq_fi_fname = 'Runs/16-Wfnq_fi/wfn.cplx',
    eps0mat_fname = 'Runs/21-Epsilon/eps0mat.h5',
    epsmat_fname = 'Runs/21-Epsilon/epsmat.h5',
    eqp_fname = 'Runs/22-Sigma/eqp1.dat',
    bsemat_fname = 'Runs/23-Kernel/bsemat.h5',
    # If you don't use hdf5, the BSE matrix is written in two separate files.
    #bsexmat_fname = 'Runs/23-Kernel/bsexmat',
    #bsedmat_fname = 'Runs/23-Kernel/bsedmat',
)
```
Or, using the appropriate variables,
```
absorption_input_files = dict(
    wfn_co_fname = wfn_co_flow.wfn_fname,
    wfn_fi_fname = wfn_fi_flow.wfn_fname,
    wfnq_fi_fname = wfnq_fi_flow.wfn_fname,
    eps0mat_fname = epsilon_task.eps0mat_fname,
    epsmat_fname = epsilon_task.epsmat_fname,
    eqp_fname = sigma_task.eqp1_fname,
    bsemat_fname = kernel_task.bsemat_fname,
    # If you don't use hdf5, the BSE matrix is written in two separate files.
    #bsexmat_fname = kernel_task.bsexmat_fname,
    #bsedmat_fname = kernel_task.bsedmat_fname,
)
```
Next, we set the calculation settings. There are...a lot of those.
```
absorption_settings = dict(
    ngkpt = [2, 2, 2],      # k-points grid
    nbnd_val = 4,           # Number of valence bands
    nbnd_cond = 4,          # Number of conduction bands
    nbnd_val_co = 4,        # Number of valence bands on the coarse grid
    nbnd_cond_co = 4,       # Number of conduction bands on the coarse grid
    nbnd_val_fi = 4,        # Number of valence bands on the fine grid
    nbnd_cond_fi = 4,       # Number of conduction bands on the fine grid
    # These extra lines will be added verbatim to the input file.
    extra_lines = [
        'use_symmetries_coarse_grid',
        'no_symmetries_fine_grid',
        'no_symmetries_shifted_grid',
        'screening_semiconductor',
        'use_velocity',
        'gaussian_broadening',
        'eqp_co_corrections',
    ],
    # These extra variables will be added to the input file as '{variable} {value}'.
    extra_variables = {
        'energy_resolution': 0.15,
    },
)
```
But preparing the calculation is as simple as always:
```
absorption_task = AbsorptionTask(
    dirname='Runs/24-Absorption',
    structure=structure,
    **absorption_input_files,
    **absorption_settings,
    **mpi_settings)
```
And, at last, we can run it.
```
absorption_task.write()
absorption_task.run()
absorption_task.report()
```
Congratulations yet again! You've run a full GW+BSE calculation!
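As a quick sanity check, you can plot the absorption spectrum. The sketch below assumes the standard `absorption_eh.dat` output file, with the photon energy in the first column and the imaginary part of the dielectric function in the second; check the BerkeleyGW documentation for the exact column layout of your version.
```
import numpy as np
import matplotlib.pyplot as plt

# Assumed output file and column layout -- check your BerkeleyGW version.
data = np.loadtxt('Runs/24-Absorption/absorption_eh.dat', comments='#')
plt.plot(data[:, 0], data[:, 1])
plt.xlabel('Photon energy (eV)')
plt.ylabel(r'Im $\epsilon(\omega)$')
plt.title('GaAs absorption spectrum (GW+BSE)')
plt.show()
```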
# Using workflows #
Can we do all of these steps at once? Yes, we can!
```
from BGWpy import GWFlow, BSEFlow

flow = GWFlow(
    dirname='Runs/32-GW',
    dft_flavor='abinit',
    structure = Structure.from_file('../Data/Structures/GaAs.json'),
    prefix = 'GaAs',
    pseudo_dir = '../Data/Pseudos',
    pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
    ecut = 10.0,
    nbnd = 9,
    ngkpt = [2,2,2],
    kshift = [.5,.5,.5],
    qshift = [.001,.0,.0],
    ibnd_min = 1,
    ibnd_max = 8,
    ecuteps = 7.5,
    # Extra lines and extra variables
    epsilon_extra_lines = [],
    epsilon_extra_variables = {},
    sigma_extra_lines = ['screening_semiconductor'],
    sigma_extra_variables = {},
    **mpi_settings)
```
Let's execute the whole thing.
```
flow.write()
flow.run()
flow.report()
```
Likewise, for the BSE:
```
flow = BSEFlow(
    dirname='Runs/33-BSE',
    dft_flavor='abinit',
    structure = Structure.from_file('../Data/Structures/GaAs.json'),
    prefix = 'GaAs',
    pseudo_dir = '../Data/Pseudos',
    pseudos = ['31-Ga.pspnc', '33-As.pspnc'],
    ecut = 5.0,
    nbnd = 12,
    nbnd_fine = 9,
    ngkpt = [2,2,2],
    kshift = [.5,.5,.5],
    qshift = [.001,.0,.0],
    # Fine grids
    ngkpt_fine = [4,4,4],
    kshift_fine = [.0,.0,.0],
    ibnd_min = 1,
    ibnd_max = 8,
    ecuteps = 10.0,
    sigma_extra_lines = ['screening_semiconductor'],
    # Kernel variables
    nbnd_val = 4,
    nbnd_cond = 4,
    kernel_extra_lines = [
        'use_symmetries_coarse_grid',
        'screening_semiconductor',
    ],
    # Absorption variables
    nbnd_val_co=4,
    nbnd_cond_co=4,
    nbnd_val_fi=4,
    nbnd_cond_fi=4,
    absorption_extra_lines = [
        'use_symmetries_coarse_grid',
        'no_symmetries_fine_grid',
        'no_symmetries_shifted_grid',
        'screening_semiconductor',
        'use_velocity',
        'gaussian_broadening',
        'eqp_co_corrections',
    ],
    absorption_extra_variables = {
        'energy_resolution' : 0.15,
    },
    **mpi_settings)

flow.write()
flow.run()
flow.report()
```
## Custom workflows ##
For a realistic GW or BSE calculation, you generally don't run every step all at once like we did. Instead, you perform a **convergence study**, in which you gradually increase the parameters until the results are converged. For example, in a GW calculation, we have the following convergence studies to perform:
* Convergence of the k-point grid for `Epsilon`
* Convergence of the q-point grid for `Sigma`
* Convergence of the number of bands for `Epsilon`
* Convergence of the number of bands for `Sigma`
* Convergence of the size of the dielectric matrix
For these, you will need to construct your own workflow. Here is an example.
```
from os.path import join as pjoin
from BGWpy import Workflow

workflow = Workflow(dirname='Runs/50-Workflow')

epsilon_input_files = dict(
    wfn_fname=wfn_flow.wfn_fname,
    wfnq_fname=wfnq_flow.wfn_fname,
)

sigma_input_files = dict(
    wfn_co_fname=wfn_co_flow.wfn_fname,
    rho_fname=wfn_co_flow.rho_fname,
    vxc_fname=wfn_co_flow.vxc_fname,
)

ecuteps_l = [5.0, 7.5, 10.0]
for i, ecuteps in enumerate(ecuteps_l):

    epsilon_settings['ecuteps'] = ecuteps

    epsilon_task = EpsilonTask(
        dirname=pjoin(workflow.dirname, 'Epsilon{}'.format(i)),
        structure=structure,
        **epsilon_input_files,
        **epsilon_settings,
        **mpi_settings)

    sigma_task = SigmaTask(
        dirname=pjoin(workflow.dirname, 'Sigma{}'.format(i)),
        structure=structure,
        eps0mat_fname=epsilon_task.eps0mat_fname,
        epsmat_fname=epsilon_task.epsmat_fname,
        **sigma_input_files,
        **sigma_settings,
        **mpi_settings)

    workflow.add_tasks([epsilon_task, sigma_task])

workflow.write()
workflow.run()
workflow.report()
```
Note that you could also run and report each task sequentially with:
```
for task in workflow.tasks:
    task.run()
    task.report()
```
And of course, you should now check the results of the calculations in the different output files, and plot the convergence of the quasiparticle energies as a function of `ecuteps`.
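As an illustration, here is one possible sketch of such a convergence plot. It assumes the plain-text `eqp1.dat` format described in the BerkeleyGW documentation (one header line per k-point with its coordinates and the number of bands, followed by one line per band with the spin index, band index, mean-field energy, and quasiparticle energy), and it assumes that the direct gap at the first k-point sits between bands 4 and 5 in our 8-band GaAs calculation. Adapt both assumptions to your own setup:
```
import matplotlib.pyplot as plt

def read_eqp(fname):
    """Parse an eqp.dat-style file into {(kpt, band): E_qp} (assumed format)."""
    energies = {}
    with open(fname) as f:
        lines = [line.split() for line in f if line.strip()]
    i = 0
    while i < len(lines):
        kpt = tuple(float(x) for x in lines[i][:3])   # k-point header line
        nband = int(lines[i][3])
        for row in lines[i + 1:i + 1 + nband]:        # one line per band
            energies[(kpt, int(row[1]))] = float(row[3])
        i += nband + 1
    return energies

gaps = []
for i, ecuteps in enumerate(ecuteps_l):
    eqp = read_eqp(pjoin(workflow.dirname, 'Sigma{}'.format(i), 'eqp1.dat'))
    kpt = sorted({k for k, _ in eqp})[0]           # first k-point found in the file
    gaps.append(eqp[(kpt, 5)] - eqp[(kpt, 4)])     # assumed gap between bands 4 and 5

plt.plot(ecuteps_l, gaps, 'o-')
plt.xlabel('ecuteps (Ry)')
plt.ylabel('Quasiparticle gap (eV)')
plt.show()
```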
# Closing word #
`BGWpy` allows you to use pre-defined workflows and custom ones. However, it is your own responsibility to check every input parameter, and verify the convergence of the results. Happy computing!
| PypiClean |
/LabExT_pkg-2.2.0.tar.gz/LabExT_pkg-2.2.0/docs/first_simple_measurement.md | # Main Window and a First Measurement
This section serves as a small introduction to LabExT, its capabilities, and how to use it efficiently. To install
LabExT properly, please follow the installation instructions [here](./installation.md).
## Measurements, Instruments and the LabExT workflow
First, we would like to establish the basic terminology that is used throughout this guide.
| term | description | example |
|-------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------|
| Device | Devices are structures located on a chip. Also called the DUT (device under test). | A microring resonator coupled to a waveguide with grating couplers on either side, realized on a silicon-photonic chip. |
| Measurement | A measurement defines a single routine or algorithm that is run on a single device. | Recording of an insertion loss curve, i.e. a laser sweeps its wavelength and the optical power after the device is recorded with an optical power meter. |
| Instrument | Instruments are the equipment found in a laboratory, used to conduct scientific research. These instruments are connected to the computer through some VISA-compatible connection. | Lasers, power meters and oscilloscopes, among many others. |
| Experiment | Experiments are series of measurements performed after each other, with possible automatic movement of an attached motorized stage. | Executing the same insertion loss measurement across multiple devices on a single chip. |
### The LabExT workflow
LabExT follows a structured way to perform its automated experiments. In the flowchart below, you will find a small
overview of a possible flow of these experiments. In the following few sections, we will walk through this flow step
by step.

## The Main Window
Once you open LabExT, you are greeted with the main window. It consists of six main elements, which are shown in the
following graphic:

### Measuring a Single Device
To measure a single device, the following steps are needed:
1. Set the chip's name
2. Select the directory to save measurement data to
3. Use the Live Viewer to align fibers
4. Create a new, single measurement:
1. Specify device details
2. Select measurement
3. Select instruments address and channel
4. Specify measurement settings
5. Press run
6. Wait for the measurement to finish
7. Plot the finished measurement and select axes
8. Add flag or comments to the finished measurement
9. Re-load previously saved measurement to inspect
#### Set the chip's name
To set the chip's name, locate the 'chip name' field in the main window's control panel. Simply edit the line to change
the chip's name.

#### Select the directory to save measurement data to
This field lies underneath the 'chip name' field. Press the browse button to find the directory.

#### Use the Live Viewer to align fibers
To start the Live Viewer, locate the 'View' tab in LabExT's menu. Then, select 'Start Live Instrument View' to open the
Live Viewer. Use the Live Viewer to align the fibers correctly on the device. Consult the chapter 'Live Viewer' on how
to properly use this feature.
#### Create a new, single measurement
To create a new measurement, locate the 'New single measurement' button, or press 'Ctrl + N' on the keyboard.

Once the new window appears, fill in the information in the following order:
1) Specify device details
2) Select measurement
3) Select instrument address and channel
4) Specify measurement settings
The graphic below helps you locate the needed fields.

Finally, press the 'Save Measurement to Queue' button.
#### Press run
To run all experiments, press the run button in the control panel, or simply press 'F5'. Afterwards, wait for the
measurement to finish. If supported, you can watch the progress live in the 'Live Plot' section of the main window.

#### Select the finished measurement
To display a finished measurement, select it in the 'Select Measurements' part of the main window. In the control panel,
you can change which axes should be displayed.

## The Live Viewer
The Live Viewer can help you with setting up and debugging instruments. It is located in the dropdown menu 'View ->
Start Live Instrument View'. The Live Viewer itself is grouped into two sections: to the left there is a big plot, and
to the right lies the control panel. Below you will find an explanation of how to use the Live Viewer.

| PypiClean |
/Lespy-0.1.2-py3-none-any.whl/lespy/core/base.py | import typing as t
from lespy.http.request import Request
from lespy.http.response.base import ResponseBase
from lespy.http.response import Response, JSONResponse
from lespy.core.router import Route
from lespy.confs import MIDDLEWARES
from lespy.exceptions import RouteNotFound
class Base:

    def __init__(self):
        pass

    def __call__(self, environ, start_response) -> ResponseBase:
        request = Request(environ)
        request.url_for = self.url_for
        response = self._get_response(request)
        start_response(response.full_status, response.headers)
        return response

    def _get_response(self, request: Request) -> ResponseBase:
        try:
            route, params = self._find_rule(request.path, request.method)
            request.PARAMS = params
        except RouteNotFound:
            response = Response('Page not found.', status_code=404)
        except Exception:
            response = Response('Internal error.', status_code=500)
        else:
            if isinstance((response := route.callback(request)), (int, str)):
                response = Response(str(response), content_type='text/plain')
            elif isinstance(response, (dict, list)):
                response = JSONResponse(response)
        return self._resolve_middlewares('response', request, response)  # type: ignore

    def _resolve_middlewares(self, step: str, req: Request, res: t.Optional[ResponseBase] = None) -> t.Union[Request, ResponseBase]:
        _is_res = step == 'response'
        if (middlewares := MIDDLEWARES.get_middlewares(step)):
            for middleware in middlewares:
                if _is_res:
                    res = middleware(req, res)
                    continue
                req = middleware(req)
        return res if _is_res else req  # type: ignore

    def _find_rule(self, path: str, method: str) -> t.Tuple[Route, t.Dict[str, t.Any]]:
        raise NotImplementedError

    def url_for(self, name: str, **params) -> str:
        raise NotImplementedError
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/plugins/standard/KivyPlugin.py | from nuitka.Options import isStandaloneMode
from nuitka.plugins.PluginBase import NuitkaPluginBase
class NuitkaPluginKivy(NuitkaPluginBase):
    """This class represents the main logic of the plugin."""

    plugin_name = "kivy"
    plugin_desc = "Required by 'kivy' package."

    @staticmethod
    def isAlwaysEnabled():
        return True

    @classmethod
    def isRelevant(cls):
        """One time only check: may this plugin be required?

        Returns:
            True if this is a standalone compilation.
        """
        return isStandaloneMode()

    def _getKivyInformation(self):
        setup_codes = r"""
import kivy.core.image
import kivy.core.text
# Prevent Window from being created at compile time.
kivy.core.core_select_lib=(lambda *args, **kwargs: None)
import kivy.core.window

# Kivy has packages designed to provide these on Windows
try:
    from kivy_deps.sdl2 import dep_bins as sdl2_dep_bins
except ImportError:
    sdl2_dep_bins = []
try:
    from kivy_deps.glew import dep_bins as glew_dep_bins
except ImportError:
    glew_dep_bins = []
"""
        info = self.queryRuntimeInformationMultiple(
            info_name="kivy_info",
            setup_codes=setup_codes,
            values=(
                ("libs_loaded", "kivy.core.image.libs_loaded"),
                ("window_impl", "kivy.core.window.window_impl"),
                ("label_libs", "kivy.core.text.label_libs"),
                ("sdl2_dep_bins", "sdl2_dep_bins"),
                ("glew_dep_bins", "glew_dep_bins"),
            ),
        )

        if info is None:
            self.sysexit("Error, it seems Kivy is not installed.")

        return info

    def getImplicitImports(self, module):
        # Using branches to dispatch, pylint: disable=too-many-branches
        full_name = module.getFullName()

        if full_name == "kivy.core.image":
            for module_name in self._getKivyInformation().libs_loaded:
                yield full_name.getChildNamed(module_name)
        elif full_name == "kivy.core.window":
            # TODO: It seems only one is actually picked, so this could be made
            # to also reflect decision making.
            for _, module_name, _ in self._getKivyInformation().window_impl:
                yield full_name.getChildNamed(module_name)
        elif full_name == "kivy.core.text":
            for _, module_name, _ in self._getKivyInformation().label_libs:
                yield full_name.getChildNamed(module_name)
        elif full_name == "kivy.core.window.window_sdl2":
            yield "kivy.core.window._window_sdl2"
        elif full_name == "kivy.core.window._window_sdl2":
            yield "kivy.core.window.window_info"
        elif full_name == "kivy.core.window.window_x11":
            yield "kivy.core.window.window_info"
        elif full_name == "kivy.graphics.cgl":
            yield "kivy.graphics.cgl_backend"
        elif full_name == "kivy.graphics.cgl_backend":
            yield "kivy.graphics.cgl_backend.cgl_glew"
        elif full_name == "kivy.graphics.cgl_backend.cgl_glew":
            yield "kivy.graphics.cgl_backend.cgl_gl"
        elif full_name == "kivymd.app":
            yield from self.locateModules("kivymd.uix")
    def getExtraDlls(self, module):
        """Copy extra shared libraries or data for this installation.

        Args:
            module: module object
        Yields:
            DLL entry point objects
        """
        full_name = module.getFullName()

        if full_name == "kivy":
            kivy_info = self._getKivyInformation()

            kivy_dlls = []
            for dll_folder in kivy_info.sdl2_dep_bins + kivy_info.glew_dep_bins:
                kivy_dlls.extend(self.locateDLLsInDirectory(dll_folder))

            for full_path, target_filename, _dll_extension in kivy_dlls:
                yield self.makeDllEntryPoint(
                    source_path=full_path,
                    dest_path=target_filename,
                    package_name=full_name,
                    reason="needed by 'kivy'",
                )

            self.reportFileCount(full_name, len(kivy_dlls))
/MeshPy-2022.1.3.tar.gz/MeshPy-2022.1.3/examples/MESH_README.txt | -------------------------------------------------------------------------------
Mesh licensing
-------------------------------------------------------------------------------
This file summarizes the licenses surrounding data files in this directory.
When using them for research or demonstration purposes, please be mindful of
proper attribution.
-------------------------------------------------------------------------------
ka-6d.ply:
The mesh 'ka-6d.ply' is a derivative work of a mesh in FlightGear and is thus
licensed under the GNU GPL.
(C) Flightgear Developers & Contributors
(C) Andreas Kloeckner
-------------------------------------------------------------------------------
| PypiClean |
/Django_patch-2.2.19-py3-none-any.whl/django/db/backends/base/introspection.py | from collections import namedtuple
# Structure returned by DatabaseIntrospection.get_table_list()
TableInfo = namedtuple('TableInfo', ['name', 'type'])
# Structure returned by the DB-API cursor.description interface (PEP 249)
FieldInfo = namedtuple('FieldInfo', 'name type_code display_size internal_size precision scale null_ok default')
class BaseDatabaseIntrospection:
    """Encapsulate backend-specific introspection utilities."""
    data_types_reverse = {}

    def __init__(self, connection):
        self.connection = connection

    def get_field_type(self, data_type, description):
        """
        Hook for a database backend to use the cursor description to
        match a Django field type to a database column.

        For Oracle, the column data_type on its own is insufficient to
        distinguish between a FloatField and IntegerField, for example.
        """
        return self.data_types_reverse[data_type]

    def identifier_converter(self, name):
        """
        Apply a conversion to the identifier for the purposes of comparison.

        The default identifier converter is for case sensitive comparison.
        """
        return name

    def table_names(self, cursor=None, include_views=False):
        """
        Return a list of names of all tables that exist in the database.
        Sort the returned table list by Python's default sorting. Do NOT use
        the database's ORDER BY here to avoid subtle differences in sorting
        order between databases.
        """
        def get_names(cursor):
            return sorted(ti.name for ti in self.get_table_list(cursor)
                          if include_views or ti.type == 't')
        if cursor is None:
            with self.connection.cursor() as cursor:
                return get_names(cursor)
        return get_names(cursor)

    def get_table_list(self, cursor):
        """
        Return an unsorted list of TableInfo named tuples of all tables and
        views that exist in the database.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method')

    def django_table_names(self, only_existing=False, include_views=True):
        """
        Return a list of all table names that have associated Django models and
        are in INSTALLED_APPS.

        If only_existing is True, include only the tables in the database.
        """
        from django.apps import apps
        from django.db import router
        tables = set()
        for app_config in apps.get_app_configs():
            for model in router.get_migratable_models(app_config, self.connection.alias):
                if not model._meta.managed:
                    continue
                tables.add(model._meta.db_table)
                tables.update(
                    f.m2m_db_table() for f in model._meta.local_many_to_many
                    if f.remote_field.through._meta.managed
                )
        tables = list(tables)
        if only_existing:
            existing_tables = set(self.table_names(include_views=include_views))
            tables = [
                t
                for t in tables
                if self.identifier_converter(t) in existing_tables
            ]
        return tables

    def installed_models(self, tables):
        """
        Return a set of all models represented by the provided list of table
        names.
        """
        from django.apps import apps
        from django.db import router
        all_models = []
        for app_config in apps.get_app_configs():
            all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
        tables = set(map(self.identifier_converter, tables))
        return {
            m for m in all_models
            if self.identifier_converter(m._meta.db_table) in tables
        }

    def sequence_list(self):
        """
        Return a list of information about all DB sequences for all models in
        all apps.
        """
        from django.apps import apps
        from django.db import router
        sequence_list = []
        with self.connection.cursor() as cursor:
            for app_config in apps.get_app_configs():
                for model in router.get_migratable_models(app_config, self.connection.alias):
                    if not model._meta.managed:
                        continue
                    if model._meta.swapped:
                        continue
                    sequence_list.extend(self.get_sequences(cursor, model._meta.db_table, model._meta.local_fields))
                    for f in model._meta.local_many_to_many:
                        # If this is an m2m using an intermediate table,
                        # we don't need to reset the sequence.
                        if f.remote_field.through._meta.auto_created:
                            sequence = self.get_sequences(cursor, f.m2m_db_table())
                            sequence_list.extend(sequence or [{'table': f.m2m_db_table(), 'column': None}])
        return sequence_list

    def get_sequences(self, cursor, table_name, table_fields=()):
        """
        Return a list of introspected sequences for table_name. Each sequence
        is a dict: {'table': <table_name>, 'column': <column_name>}. An optional
        'name' key can be added if the backend supports named sequences.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_sequences() method')

    def get_key_columns(self, cursor, table_name):
        """
        Backends can override this to return a list of:
            (column_name, referenced_table_name, referenced_column_name)
        for all key columns in given table.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method')

    def get_primary_key_column(self, cursor, table_name):
        """
        Return the name of the primary key column for the given table.
        """
        for constraint in self.get_constraints(cursor, table_name).values():
            if constraint['primary_key']:
                return constraint['columns'][0]
        return None

    def get_constraints(self, cursor, table_name):
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index)
        across one or more columns.

        Return a dict mapping constraint names to their attributes,
        where attributes is a dict with keys:
         * columns: List of columns this covers
         * primary_key: True if primary key, False otherwise
         * unique: True if this is a unique constraint, False otherwise
         * foreign_key: (table, column) of target, or None
         * check: True if check constraint, False otherwise
         * index: True if index, False otherwise.
         * orders: The order (ASC/DESC) defined for the columns of indexes
         * type: The type of the index (btree, hash, etc.)

        Some backends may return special constraint names that don't exist
        if they don't name constraints of a certain type (e.g. SQLite)
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_constraints() method')
/Dero-0.15.0-py3-none-any.whl/dero/data/compareids/models/steps.py | from itertools import product
from .datasets import DataSubject, DataCombination
from .flowchart import Edge
from .interface import PipelineOptions
class Step:
def __init__(self, data_subject: DataSubject):
self.data_subject = data_subject
self.all_subjects = [data_subject]
self._subgraphs = None
def subgraphs(self, options: [PipelineOptions, None] = None):
if options is None:
options = PipelineOptions() #initialize with defaults
if self._subgraphs is None:
self._subgraphs = [subject.to_subgraph(options) for subject in self.all_subjects]
return self._subgraphs
def __repr__(self):
return f'<Step(data_subject={self.data_subject})>'
class MergeStep(Step):
def __init__(self, data_subject: DataSubject, merge_into_data_subject: DataSubject):
self.merge_into_subject = merge_into_data_subject
super().__init__(data_subject)
self.all_subjects = [data_subject, merge_into_data_subject]
self._combined_subject = None
def subgraphs(self, options: [PipelineOptions, None] = None):
if options is None:
options = PipelineOptions() #initialize with defaults
if self._subgraphs is None:
subgraphs = []
subgraphs.append(self._merge_subgraph(options))
subgraphs.extend(super().subgraphs(options))
self._subgraphs = subgraphs
return self._subgraphs
def _merge_subgraph(self, options: PipelineOptions):
data_sources = []
for orig_source, merge_source in product(self.data_subject.sources, self.merge_into_subject.sources):
name = orig_source.name + ' ' + self.data_subject.name + '/' + merge_source.name + ' ' + self.merge_into_subject.name
data_sources.append(DataCombination(orig_source, merge_source, name=name))
subject_name = self.data_subject.name + '/' + self.merge_into_subject.name
self._combined_subject = DataSubject(*data_sources, name=subject_name)
return self._combined_subject.to_subgraph(options)
@property
def combined_subject(self):
if self._combined_subject is None:
self._merge_subgraph(PipelineOptions())
return self._combined_subject
def __repr__(self):
return f'<MergeStep(data_subject={self.data_subject}, merge_subject={self.merge_into_subject})>'
class Process:
def __init__(self, *steps: [Step]):
self.steps = steps
def __getitem__(self, item):
return self.steps[item]
def to_subgraphs(self, options: [PipelineOptions, None] = None):
if options is None:
options = PipelineOptions() #initialize with defaults
subgraphs = []
for step in self.steps:
if isinstance(step, MergeStep):
subgraphs.extend(step.subgraphs(options)) #contains last subgraph, combined subgraph, and to merge subgraph
elif isinstance(step, Step):
continue #step subgraph will be handled within merge step subgraph
else:
raise ValueError(f'must pass Step or MergeStep, got type {type(step)}')
return subgraphs
def to_edges(self, options: [PipelineOptions, None] = None, **edge_kwargs):
if options is None:
options = PipelineOptions() #initialize with defaults
edges = []
for i, step in enumerate(self.steps):
if i == 0:
continue #don't need to create edges with beginning step
edges.extend(self._create_edges_for_step(step, options, **edge_kwargs))
return edges
def _create_edges_for_step(self, step: MergeStep, options: PipelineOptions, **edge_kwargs):
# For current, need to split combined nodes and merge subgraphs
combined_subgraph, last_subgraph, merge_subgraph = step.subgraphs(options)
edges = []
last_source_name, merge_source_name = _combined_source_name_to_individual_source_names(combined_subgraph.name)
for combined_node in combined_subgraph.nodes:
if options.data_sources: #extract node names by parsing combined name
last_node_name, merge_node_name = _combined_name_to_input_and_merge_name(combined_node.name, last_source_name, merge_source_name)
else: #now the source names are representative of the node names
last_node_name = last_source_name
merge_node_name = merge_source_name
last_node = last_subgraph.nodes[last_node_name]
merge_node = merge_subgraph.nodes[merge_node_name]
if options.data_sources:
edges.append(Edge(last_node, combined_node, **edge_kwargs))
edges.append(Edge(merge_node, combined_node, **edge_kwargs))
else:
edges.append(Edge(last_node, combined_node, for_subgraphs=(last_subgraph, combined_subgraph)))
edges.append(Edge(merge_node, combined_node, for_subgraphs=(merge_subgraph, combined_subgraph)))
return edges
def __repr__(self):
return f'<Process(steps={self.steps})>'
def _to_combined_name(input_name, merge_name):
return f'{input_name}/{merge_name}'
def _combined_name_to_input_and_merge_name(combined_name, last_source_name, merge_source_name):
combined_name = combined_name.strip('DUMMY').replace(last_source_name, '').replace(merge_source_name, '')
return _combined_source_name_to_individual_source_names(combined_name)
def _combined_source_name_to_individual_source_names(combined_name):
if '/' not in combined_name:
raise ValueError(f'passed regular name instead of combined name. combined name must have /. got {combined_name}')
parts = combined_name.split('/')
return '/'.join(parts[:-1]).strip(), parts[-1].strip() | PypiClean |
/GNN4LP-0.1.0-py3-none-any.whl/src/graph_att_gae/train.py | import scipy.sparse as sp
import numpy as np
import torch
import time
import os
from configparser import ConfigParser
import sys
sys.path.append('/home/shiyan/project/gnn4lp/')
from src.util.load_data import load_data_with_features, load_data_without_features, sparse_to_tuple, mask_test_edges, preprocess_graph
from src.util.loss import gae_loss_function, vgae_loss_function
from src.util.metrics import get_roc_score
from src.util import define_optimizer
from src.graph_att_gae.model import GATModelVAE
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class Train():
def __init__(self):
pass
def train_model(self, config_path):
if os.path.exists(config_path) and (os.path.split(config_path)[1].split('.')[0] == 'config') and (
os.path.splitext(config_path)[1].split('.')[1] == 'cfg'):
# load config file
config = ConfigParser()
config.read(config_path)
section = config.sections()[0]
# data catalog path
data_catalog = config.get(section, "data_catalog")
# node cites path
node_cites_path = config.get(section, "node_cites_path")
node_cites_path = os.path.join(data_catalog, node_cites_path)
# node features path
node_features_path = config.get(section, 'node_features_path')
node_features_path = os.path.join(data_catalog, node_features_path)
# model save/load path
model_path = config.get(section, "model_path")
# model param config
with_feats = config.getboolean(section, 'with_feats') # 是否带有节点特征
hidden_dim1 = config.getint(section, "hidden_dim1")
hidden_dim2 = config.getint(section, "hidden_dim2")
hidden_dim3 = config.getint(section, 'hidden_dim3')
dropout = config.getfloat(section, "dropout")
vae_bool = config.getboolean(section, 'vae_bool')
alpha = config.getfloat(section, 'alpha')
lr = config.getfloat(section, "lr")
lr_decay = config.getfloat(section, 'lr_decay')
weight_decay = config.getfloat(section, "weight_decay")
gamma = config.getfloat(section, "gamma")
momentum = config.getfloat(section, "momentum")
eps = config.getfloat(section, "eps")
clip = config.getfloat(section, "clip")
epochs = config.getint(section, "epochs")
optimizer_name = config.get(section, "optimizer")
if with_feats:
# 加载带节点特征的数据集
adj, features = load_data_with_features(node_cites_path, node_features_path)
else:
# 加载不带节点特征的数据集
adj = load_data_without_features(node_cites_path)
features = sp.identity(adj.shape[0])
num_nodes = adj.shape[0]
num_edges = adj.sum()
features = sparse_to_tuple(features)
num_features = features[2][1]
# 去除对角线元素
# 下边的右部分为:返回adj_orig的对角元素(一维),并增加一维,抽出adj_orig的对角元素并构建只有这些对角元素的对角矩阵
adj_orig = adj - sp.dia_matrix((adj.diagonal()[np.newaxis, :], [0]), shape=adj.shape)
adj_orig.eliminate_zeros()
adj_train, train_edges, val_edges, val_edges_false, test_edges, test_edges_false = mask_test_edges(adj_orig)
adj = adj_train
# 返回D^{-0.5}SD^{-0.5}的coords, data, shape,其中S=A+I
adj_norm = preprocess_graph(adj)
adj_label = adj_train + sp.eye(adj_train.shape[0])
# adj_label = sparse_to_tuple(adj_label)
adj_label = torch.FloatTensor(adj_label.toarray()).to(DEVICE)
'''
注意,adj的每个元素非1即0。pos_weight是用于训练的邻接矩阵中负样本边(既不存在的边)和正样本边的倍数(即比值),这个数值在二分类交叉熵损失函数中用到,
如果正样本边所占的比例和负样本边所占比例失衡,比如正样本边很多,负样本边很少,那么在求loss的时候可以提供weight参数,将正样本边的weight设置小一点,负样本边的weight设置大一点,
此时能够很好的平衡两类在loss中的占比,任务效果可以得到进一步提升。参考:https://www.zhihu.com/question/383567632
负样本边的weight都为1,正样本边的weight都为pos_weight
'''
pos_weight = float(adj.shape[0] * adj.shape[0] - num_edges) / num_edges
norm = adj.shape[0] * adj.shape[0] / float((adj.shape[0] * adj.shape[0] - adj.sum()) * 2)
# create model
print('create model ...')
model = GATModelVAE(num_features, hidden_dim1=hidden_dim1, hidden_dim2=hidden_dim2, hidden_dim3=hidden_dim3, dropout=dropout, alpha=alpha, vae_bool=vae_bool)
# define optimizer
if optimizer_name == 'adam':
optimizer = define_optimizer.define_optimizer_adam(model, lr=lr, weight_decay=weight_decay)
elif optimizer_name == 'adamw':
optimizer = define_optimizer.define_optimizer_adamw(model, lr=lr, weight_decay=weight_decay)
elif optimizer_name == 'sgd':
optimizer = define_optimizer.define_optimizer_sgd(model, lr=lr, momentum=momentum,
weight_decay=weight_decay)
elif optimizer_name == 'adagrad':
optimizer = define_optimizer.define_optimizer_adagrad(model, lr=lr, lr_decay=lr_decay,
weight_decay=weight_decay)
elif optimizer_name == 'rmsprop':
optimizer = define_optimizer.define_optimizer_rmsprop(model, lr=lr, weight_decay=weight_decay,
momentum=momentum)
elif optimizer_name == 'adadelta':
optimizer = define_optimizer.define_optimizer_adadelta(model, lr=lr, weight_decay=weight_decay)
else:
raise NameError('No define optimization function name!')
model = model.to(DEVICE)
# 稀疏张量被表示为一对致密张量:一维张量和二维张量的索引。可以通过提供这两个张量来构造稀疏张量
adj_norm = torch.sparse.FloatTensor(torch.LongTensor(adj_norm[0].T),
torch.FloatTensor(adj_norm[1]),
torch.Size(adj_norm[2]))
features = torch.sparse.FloatTensor(torch.LongTensor(features[0].T),
torch.FloatTensor(features[1]),
torch.Size(features[2])).to_dense()
adj_norm = adj_norm.to(DEVICE)
features = features.to(DEVICE)
norm = torch.FloatTensor(np.array(norm)).to(DEVICE)
pos_weight = torch.tensor(pos_weight).to(DEVICE)
num_nodes = torch.tensor(num_nodes).to(DEVICE)
print('start training...')
best_valid_roc_score = float('-inf')
hidden_emb = None
model.train()
for epoch in range(epochs):
t = time.time()
optimizer.zero_grad()
recovered, mu, logvar = model(features, adj_norm)
if vae_bool:
loss = vgae_loss_function(preds=recovered, labels=adj_label,
mu=mu, logvar=logvar, n_nodes=num_nodes,
norm=norm, pos_weight=pos_weight)
else:
loss = gae_loss_function(preds=recovered, labels=adj_label, norm=norm, pos_weight=pos_weight)
loss.backward()
torch.nn.utils.clip_grad_norm_(model.parameters(), clip)
cur_loss = loss.item()
optimizer.step()
hidden_emb = mu.data.cpu().numpy()
# 评估验证集,val set
roc_score, ap_score = get_roc_score(hidden_emb, adj_orig, val_edges, val_edges_false)
# 保存最好的roc score
if roc_score > best_valid_roc_score:
best_valid_roc_score = roc_score
# 不需要保存整个model,只需保存hidden_emb,因为后面的解码是用hidden_emb内积的形式作推断
np.save(model_path, hidden_emb)
print("Epoch:", '%04d' % (epoch + 1), "train_loss = ", "{:.5f}".format(cur_loss),
"val_roc_score = ", "{:.5f}".format(roc_score),
"average_precision_score = ", "{:.5f}".format(ap_score),
"time=", "{:.5f}".format(time.time() - t)
)
print("Optimization Finished!")
# 评估测试集,test set
roc_score, ap_score = get_roc_score(hidden_emb, adj_orig, test_edges, test_edges_false)
print('test roc score: {}'.format(roc_score))
print('test ap score: {}'.format(ap_score))
else:
raise FileNotFoundError('File config.cfg not found : ' + config_path)
if __name__ == '__main__':
config_path = os.path.join(os.getcwd(), 'config.cfg')
train = Train()
train.train_model(config_path) | PypiClean |
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/@types/node/ts4.8/buffer.d.ts | declare module 'buffer' {
import { BinaryLike } from 'node:crypto';
import { ReadableStream as WebReadableStream } from 'node:stream/web';
export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean;
export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean;
export const INSPECT_MAX_BYTES: number;
export const kMaxLength: number;
export const kStringMaxLength: number;
export const constants: {
MAX_LENGTH: number;
MAX_STRING_LENGTH: number;
};
export type TranscodeEncoding = 'ascii' | 'utf8' | 'utf16le' | 'ucs2' | 'latin1' | 'binary';
/**
* Re-encodes the given `Buffer` or `Uint8Array` instance from one character
* encoding to another. Returns a new `Buffer` instance.
*
* Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if
* conversion from `fromEnc` to `toEnc` is not permitted.
*
* Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`.
*
* The transcoding process will use substitution characters if a given byte
* sequence cannot be adequately represented in the target encoding. For instance:
*
* ```js
* import { Buffer, transcode } from 'buffer';
*
* const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii');
* console.log(newBuf.toString('ascii'));
* // Prints: '?'
* ```
*
* Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced
* with `?` in the transcoded `Buffer`.
* @since v7.1.0
* @param source A `Buffer` or `Uint8Array` instance.
* @param fromEnc The current encoding.
* @param toEnc To target encoding.
*/
export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer;
export const SlowBuffer: {
/** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */
new (size: number): Buffer;
prototype: Buffer;
};
/**
* Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using
* a prior call to `URL.createObjectURL()`.
* @since v16.7.0
* @experimental
* @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`.
*/
export function resolveObjectURL(id: string): Blob | undefined;
export { Buffer };
/**
* @experimental
*/
export interface BlobOptions {
/**
* @default 'utf8'
*/
encoding?: BufferEncoding | undefined;
/**
* The Blob content-type. The intent is for `type` to convey
* the MIME media type of the data, however no validation of the type format
* is performed.
*/
type?: string | undefined;
}
/**
* A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across
* multiple worker threads.
* @since v15.7.0, v14.18.0
*/
export class Blob {
/**
* The total size of the `Blob` in bytes.
* @since v15.7.0, v14.18.0
*/
readonly size: number;
/**
* The content-type of the `Blob`.
* @since v15.7.0, v14.18.0
*/
readonly type: string;
/**
* Creates a new `Blob` object containing a concatenation of the given sources.
*
* {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into
* the 'Blob' and can therefore be safely modified after the 'Blob' is created.
*
* String sources are also copied into the `Blob`.
*/
constructor(sources: Array<BinaryLike | Blob>, options?: BlobOptions);
/**
* Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of
* the `Blob` data.
* @since v15.7.0, v14.18.0
*/
arrayBuffer(): Promise<ArrayBuffer>;
/**
* Creates and returns a new `Blob` containing a subset of this `Blob` objects
* data. The original `Blob` is not altered.
* @since v15.7.0, v14.18.0
* @param start The starting index.
* @param end The ending index.
* @param type The content-type for the new `Blob`
*/
slice(start?: number, end?: number, type?: string): Blob;
/**
* Returns a promise that fulfills with the contents of the `Blob` decoded as a
* UTF-8 string.
* @since v15.7.0, v14.18.0
*/
text(): Promise<string>;
/**
* Returns a new (WHATWG) `ReadableStream` that allows the content of the `Blob` to be read.
* @since v16.7.0
*/
stream(): WebReadableStream;
}
export interface FileOptions {
/**
* One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be
* converted to the platform native line-ending as specified by `require('node:os').EOL`.
*/
endings?: 'native' | 'transparent';
/** The File content-type. */
type?: string;
/** The last modified date of the file. `Default`: Date.now(). */
lastModified?: number;
}
/**
* A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files.
* @experimental
* @since v18.13.0
*/
export class File extends Blob {
constructor(sources: Array<BinaryLike | Blob>, fileName: string, options?: FileOptions);
/**
* The name of the `File`.
* @since v18.13.0
*/
readonly name: string;
/**
* The last modified date of the `File`.
* @since v18.13.0
*/
readonly lastModified: number;
}
export import atob = globalThis.atob;
export import btoa = globalThis.btoa;
import { Blob as NodeBlob } from 'buffer';
// This conditional type will be the existing global Blob in a browser, or
// the copy below in a Node environment.
type __Blob = typeof globalThis extends { onmessage: any; Blob: infer T } ? T : NodeBlob;
global {
namespace NodeJS {
export { BufferEncoding };
}
// Buffer class
type BufferEncoding =
| 'ascii'
| 'utf8'
| 'utf-8'
| 'utf16le'
| 'ucs2'
| 'ucs-2'
| 'base64'
| 'base64url'
| 'latin1'
| 'binary'
| 'hex';
type WithImplicitCoercion<T> =
| T
| {
valueOf(): T;
};
/**
* Raw data is stored in instances of the Buffer class.
* A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized.
* Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex'
*/
interface BufferConstructor {
/**
* Allocates a new buffer containing the given {str}.
*
* @param str String to store in buffer.
* @param encoding encoding to use, optional. Default is 'utf8'
* @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead.
*/
new (str: string, encoding?: BufferEncoding): Buffer;
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
* @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`).
*/
new (size: number): Buffer;
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
* @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
*/
new (array: Uint8Array): Buffer;
/**
* Produces a Buffer backed by the same allocated memory as
* the given {ArrayBuffer}/{SharedArrayBuffer}.
*
*
* @param arrayBuffer The ArrayBuffer with which to share memory.
* @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
*/
new (arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer;
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
* @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
*/
new (array: ReadonlyArray<any>): Buffer;
/**
* Copies the passed {buffer} data onto a new {Buffer} instance.
*
* @param buffer The buffer to copy.
* @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
*/
new (buffer: Buffer): Buffer;
/**
* Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
* Array entries outside that range will be truncated to fit into it.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
* const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
* ```
*
* A `TypeError` will be thrown if `array` is not an `Array` or another type
* appropriate for `Buffer.from()` variants.
*
* `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does.
* @since v5.10.0
*/
from(
arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>,
byteOffset?: number,
length?: number,
): Buffer;
/**
* Creates a new Buffer using the passed {data}
* @param data data to create a new Buffer
*/
from(data: Uint8Array | ReadonlyArray<number>): Buffer;
from(data: WithImplicitCoercion<Uint8Array | ReadonlyArray<number> | string>): Buffer;
/**
* Creates a new Buffer containing the given JavaScript string {str}.
* If provided, the {encoding} parameter identifies the character encoding.
* If not provided, {encoding} defaults to 'utf8'.
*/
from(
str:
| WithImplicitCoercion<string>
| {
[Symbol.toPrimitive](hint: 'string'): string;
},
encoding?: BufferEncoding,
): Buffer;
/**
* Creates a new Buffer using the passed {data}
* @param values to create a new Buffer
*/
of(...items: number[]): Buffer;
/**
* Returns `true` if `obj` is a `Buffer`, `false` otherwise.
*
* ```js
* import { Buffer } from 'buffer';
*
* Buffer.isBuffer(Buffer.alloc(10)); // true
* Buffer.isBuffer(Buffer.from('foo')); // true
* Buffer.isBuffer('a string'); // false
* Buffer.isBuffer([]); // false
* Buffer.isBuffer(new Uint8Array(1024)); // false
* ```
* @since v0.1.101
*/
isBuffer(obj: any): obj is Buffer;
/**
* Returns `true` if `encoding` is the name of a supported character encoding,
* or `false` otherwise.
*
* ```js
* import { Buffer } from 'buffer';
*
* console.log(Buffer.isEncoding('utf8'));
* // Prints: true
*
* console.log(Buffer.isEncoding('hex'));
* // Prints: true
*
* console.log(Buffer.isEncoding('utf/8'));
* // Prints: false
*
* console.log(Buffer.isEncoding(''));
* // Prints: false
* ```
* @since v0.9.1
* @param encoding A character encoding name to check.
*/
isEncoding(encoding: string): encoding is BufferEncoding;
/**
* Returns the byte length of a string when encoded using `encoding`.
* This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account
* for the encoding that is used to convert the string into bytes.
*
* For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input.
* For strings that contain non-base64/hex-encoded data (e.g. whitespace), the
* return value might be greater than the length of a `Buffer` created from the
* string.
*
* ```js
* import { Buffer } from 'buffer';
*
* const str = '\u00bd + \u00bc = \u00be';
*
* console.log(`${str}: ${str.length} characters, ` +
* `${Buffer.byteLength(str, 'utf8')} bytes`);
* // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes
* ```
*
* When `string` is a
* `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/-
* Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop-
* er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned.
* @since v0.1.90
* @param string A value to calculate the length of.
* @param [encoding='utf8'] If `string` is a string, this is its encoding.
* @return The number of bytes contained within `string`.
*/
byteLength(
string: string | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
encoding?: BufferEncoding,
): number;
/**
* Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together.
*
* If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned.
*
* If `totalLength` is not provided, it is calculated from the `Buffer` instances
* in `list` by adding their lengths.
*
* If `totalLength` is provided, it is coerced to an unsigned integer. If the
* combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
* truncated to `totalLength`.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Create a single `Buffer` from a list of three `Buffer` instances.
*
* const buf1 = Buffer.alloc(10);
* const buf2 = Buffer.alloc(14);
* const buf3 = Buffer.alloc(18);
* const totalLength = buf1.length + buf2.length + buf3.length;
*
* console.log(totalLength);
* // Prints: 42
*
* const bufA = Buffer.concat([buf1, buf2, buf3], totalLength);
*
* console.log(bufA);
* // Prints: <Buffer 00 00 00 00 ...>
* console.log(bufA.length);
* // Prints: 42
* ```
*
* `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
* @since v0.7.11
* @param list List of `Buffer` or {@link Uint8Array} instances to concatenate.
* @param totalLength Total length of the `Buffer` instances in `list` when concatenated.
*/
concat(list: ReadonlyArray<Uint8Array>, totalLength?: number): Buffer;
/**
* Copies the underlying memory of `view` into a new `Buffer`.
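*
* An illustrative sketch (behavior as described above; the returned `Buffer`
* does not share memory with `view`):
*
* ```js
* import { Buffer } from 'buffer';
*
* const u16 = new Uint16Array([0, 0xffff]);
* const buf = Buffer.copyBytesFrom(u16, 1, 1);
* u16[1] = 0;
*
* console.log(buf.length);
* // Prints: 2
* console.log(buf);
* // Prints: <Buffer ff ff>
* ```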
* @since v18.16.0
* @param view The `TypedArray` to copy.
* @param offset The starting offset within `view`.
* @param length The number of elements from `view` to copy.
*/
copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer;
/**
* Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of `Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from('1234');
* const buf2 = Buffer.from('0123');
* const arr = [buf1, buf2];
*
* console.log(arr.sort(Buffer.compare));
* // Prints: [ <Buffer 30 31 32 33>, <Buffer 31 32 33 34> ]
* // (This result is equal to: [buf2, buf1].)
* ```
* @since v0.11.13
* @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details.
*/
compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1;
/**
* Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the `Buffer` will be zero-filled.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.alloc(5);
*
* console.log(buf);
* // Prints: <Buffer 00 00 00 00 00>
* ```
*
* If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown.
*
* If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.alloc(5, 'a');
*
* console.log(buf);
* // Prints: <Buffer 61 61 61 61 61>
* ```
*
* If both `fill` and `encoding` are specified, the allocated `Buffer` will be
* initialized by calling `buf.fill(fill, encoding)`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
*
* console.log(buf);
* // Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
* ```
*
* Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance
* contents will never contain sensitive data from previous allocations, including
* data that might not have been allocated for `Buffer`s.
*
* A `TypeError` will be thrown if `size` is not a number.
* @since v5.10.0
* @param size The desired length of the new `Buffer`.
* @param [fill=0] A value to pre-fill the new `Buffer` with.
* @param [encoding='utf8'] If `fill` is a string, this is its encoding.
*/
alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer;
/**
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown.
*
* The underlying memory for `Buffer` instances created in this way is _not_
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize `Buffer` instances with zeroes.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(10);
*
* console.log(buf);
* // Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
*
* buf.fill(0);
*
* console.log(buf);
* // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
* ```
*
* A `TypeError` will be thrown if `size` is not a number.
*
* The `Buffer` module pre-allocates an internal `Buffer` instance of
* size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, `Buffer.concat()`, and the
* deprecated `new Buffer(size)` constructor only when `size` is less than or equal
* to `Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two).
*
* Use of this pre-allocated internal memory pool is a key difference between
* calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
* Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer` pool, while `Buffer.allocUnsafe(size).fill(fill)` _will_ use the internal `Buffer` pool if `size` is less
* than or equal to half `Buffer.poolSize`. The
* difference is subtle but can be important when an application requires the
* additional performance that `Buffer.allocUnsafe()` provides.
* @since v5.10.0
* @param size The desired length of the new `Buffer`.
*/
allocUnsafe(size: number): Buffer;
/**
* Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. A zero-length `Buffer` is created
* if `size` is 0.
*
* The underlying memory for `Buffer` instances created in this way is _not_
* _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize
* such `Buffer` instances with zeroes.
*
* When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
* allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This
* allows applications to avoid the garbage collection overhead of creating many
* individually allocated `Buffer` instances. This approach improves both
* performance and memory usage by eliminating the need to track and clean up as
* many individual `ArrayBuffer` objects.
*
* However, in the case where a developer may need to retain a small chunk of
* memory from a pool for an indeterminate amount of time, it may be appropriate
* to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and
* then copying out the relevant bits.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Need to keep around a few small chunks of memory.
* const store = [];
*
* socket.on('readable', () => {
* let data;
* while (null !== (data = socket.read())) {
* // Allocate for retained data.
* const sb = Buffer.allocUnsafeSlow(10);
*
* // Copy the data into the new allocation.
* data.copy(sb, 0, 0, 10);
*
* store.push(sb);
* }
* });
* ```
*
* A `TypeError` will be thrown if `size` is not a number.
* @since v5.12.0
* @param size The desired length of the new `Buffer`.
*/
allocUnsafeSlow(size: number): Buffer;
/**
* This is the size (in bytes) of pre-allocated internal `Buffer` instances used
* for pooling. This value may be modified.
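*
* A small illustrative sketch (the printed default of 8192 bytes is an
* assumption of this example, not a guarantee):
*
* ```js
* import { Buffer } from 'buffer';
*
* console.log(Buffer.poolSize);
* // Prints: 8192 (the default)
*
* // Use a larger pool for subsequent small `Buffer.allocUnsafe()` allocations.
* Buffer.poolSize = 16 * 1024;
* ```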
* @since v0.11.3
*/
poolSize: number;
}
interface Buffer extends Uint8Array {
/**
* Writes `string` to `buf` at `offset` according to the character encoding in `encoding`. The `length` parameter is the number of bytes to write. If `buf` did
* not contain enough space to fit the entire string, only part of `string` will be
* written. However, partially encoded characters will not be written.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.alloc(256);
*
* const len = buf.write('\u00bd + \u00bc = \u00be', 0);
*
* console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`);
* // Prints: 12 bytes: ½ + ¼ = ¾
*
* const buffer = Buffer.alloc(10);
*
* const length = buffer.write('abcd', 8);
*
* console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`);
* // Prints: 2 bytes: ab
* ```
* @since v0.1.90
* @param string String to write to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write `string`.
* @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`).
* @param [encoding='utf8'] The character encoding of `string`.
* @return Number of bytes written.
*/
write(string: string, encoding?: BufferEncoding): number;
write(string: string, offset: number, encoding?: BufferEncoding): number;
write(string: string, offset: number, length: number, encoding?: BufferEncoding): number;
/**
* Decodes `buf` to a string according to the specified character encoding in `encoding`. `start` and `end` may be passed to decode only a subset of `buf`.
*
* If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8,
* then each invalid byte is replaced with the replacement character `U+FFFD`.
*
* The maximum length of a string instance (in UTF-16 code units) is available
* as {@link constants.MAX_STRING_LENGTH}.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.allocUnsafe(26);
*
* for (let i = 0; i < 26; i++) {
* // 97 is the decimal ASCII value for 'a'.
* buf1[i] = i + 97;
* }
*
* console.log(buf1.toString('utf8'));
* // Prints: abcdefghijklmnopqrstuvwxyz
* console.log(buf1.toString('utf8', 0, 5));
* // Prints: abcde
*
* const buf2 = Buffer.from('tést');
*
* console.log(buf2.toString('hex'));
* // Prints: 74c3a97374
* console.log(buf2.toString('utf8', 0, 3));
* // Prints: té
* console.log(buf2.toString(undefined, 0, 3));
* // Prints: té
* ```
* @since v0.1.90
* @param [encoding='utf8'] The character encoding to use.
* @param [start=0] The byte offset to start decoding at.
* @param [end=buf.length] The byte offset to stop decoding at (not inclusive).
*/
toString(encoding?: BufferEncoding, start?: number, end?: number): string;
/**
* Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls
* this function when stringifying a `Buffer` instance.
*
* `Buffer.from()` accepts objects in the format returned from this method.
* In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]);
* const json = JSON.stringify(buf);
*
* console.log(json);
* // Prints: {"type":"Buffer","data":[1,2,3,4,5]}
*
* const copy = JSON.parse(json, (key, value) => {
* return value && value.type === 'Buffer' ?
* Buffer.from(value) :
* value;
* });
*
* console.log(copy);
* // Prints: <Buffer 01 02 03 04 05>
* ```
* @since v0.9.2
*/
toJSON(): {
type: 'Buffer';
data: number[];
};
/**
* Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes, `false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from('ABC');
* const buf2 = Buffer.from('414243', 'hex');
* const buf3 = Buffer.from('ABCD');
*
* console.log(buf1.equals(buf2));
* // Prints: true
* console.log(buf1.equals(buf3));
* // Prints: false
* ```
* @since v0.11.13
* @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`.
*/
equals(otherBuffer: Uint8Array): boolean;
/**
* Compares `buf` with `target` and returns a number indicating whether `buf` comes before, after, or is the same as `target` in sort order.
* Comparison is based on the actual sequence of bytes in each `Buffer`.
*
* * `0` is returned if `target` is the same as `buf`.
* * `1` is returned if `target` should come _before_ `buf` when sorted.
* * `-1` is returned if `target` should come _after_ `buf` when sorted.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from('ABC');
* const buf2 = Buffer.from('BCD');
* const buf3 = Buffer.from('ABCD');
*
* console.log(buf1.compare(buf1));
* // Prints: 0
* console.log(buf1.compare(buf2));
* // Prints: -1
* console.log(buf1.compare(buf3));
* // Prints: -1
* console.log(buf2.compare(buf1));
* // Prints: 1
* console.log(buf2.compare(buf3));
* // Prints: 1
* console.log([buf1, buf2, buf3].sort(Buffer.compare));
* // Prints: [ <Buffer 41 42 43>, <Buffer 41 42 43 44>, <Buffer 42 43 44> ]
* // (This result is equal to: [buf1, buf3, buf2].)
* ```
*
* The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd` arguments can be used to limit the comparison to specific ranges within `target` and `buf` respectively.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]);
* const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]);
*
* console.log(buf1.compare(buf2, 5, 9, 0, 4));
* // Prints: 0
* console.log(buf1.compare(buf2, 0, 6, 4));
* // Prints: -1
* console.log(buf1.compare(buf2, 5, 6, 5));
* // Prints: 1
* ```
*
* `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`, `targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`.
* @since v0.11.13
* @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`.
* @param [targetStart=0] The offset within `target` at which to begin comparison.
* @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive).
* @param [sourceStart=0] The offset within `buf` at which to begin comparison.
* @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive).
*/
compare(
target: Uint8Array,
targetStart?: number,
targetEnd?: number,
sourceStart?: number,
sourceEnd?: number,
): -1 | 0 | 1;
/**
* Copies data from a region of `buf` to a region in `target`, even if the `target` memory region overlaps with `buf`.
*
* [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available
* for all TypedArrays, including Node.js `Buffer`s, although it takes
* different function arguments.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Create two `Buffer` instances.
* const buf1 = Buffer.allocUnsafe(26);
* const buf2 = Buffer.allocUnsafe(26).fill('!');
*
* for (let i = 0; i < 26; i++) {
* // 97 is the decimal ASCII value for 'a'.
* buf1[i] = i + 97;
* }
*
* // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`.
* buf1.copy(buf2, 8, 16, 20);
* // This is equivalent to:
* // buf2.set(buf1.subarray(16, 20), 8);
*
* console.log(buf2.toString('ascii', 0, 25));
* // Prints: !!!!!!!!qrst!!!!!!!!!!!!!
* ```
*
* ```js
* import { Buffer } from 'buffer';
*
* // Create a `Buffer` and copy data from one region to an overlapping region
* // within the same `Buffer`.
*
* const buf = Buffer.allocUnsafe(26);
*
* for (let i = 0; i < 26; i++) {
* // 97 is the decimal ASCII value for 'a'.
* buf[i] = i + 97;
* }
*
* buf.copy(buf, 0, 4, 10);
*
* console.log(buf.toString());
* // Prints: efghijghijklmnopqrstuvwxyz
* ```
* @since v0.1.90
* @param target A `Buffer` or {@link Uint8Array} to copy into.
* @param [targetStart=0] The offset within `target` at which to begin writing.
* @param [sourceStart=0] The offset within `buf` from which to begin copying.
* @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive).
* @return The number of bytes copied.
*/
copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
/**
* Returns a new `Buffer` that references the same memory as the original, but
* offset and cropped by the `start` and `end` indices.
*
* This method is not compatible with `Uint8Array.prototype.slice()`, which is
* defined on `Buffer`'s superclass `Uint8Array` and returns a copy rather than a view. To copy the slice, use `Uint8Array.prototype.slice()`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('buffer');
*
* const copiedBuf = Uint8Array.prototype.slice.call(buf);
* copiedBuf[0]++;
* console.log(copiedBuf.toString());
* // Prints: cuffer
*
* console.log(buf.toString());
* // Prints: buffer
*
* // With buf.slice(), modifying the slice also modifies the original buffer.
* const notReallyCopiedBuf = buf.slice();
* notReallyCopiedBuf[0]++;
* console.log(notReallyCopiedBuf.toString());
* // Prints: cuffer
* console.log(buf.toString());
* // Also prints: cuffer (!)
* ```
* @since v0.3.0
* @deprecated Use `subarray` instead.
* @param [start=0] Where the new `Buffer` will start.
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
*/
slice(start?: number, end?: number): Buffer;
/**
* Returns a new `Buffer` that references the same memory as the original, but
* offset and cropped by the `start` and `end` indices.
*
* Specifying `end` greater than `buf.length` will return the same result as
* that of `end` equal to `buf.length`.
*
* This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray).
*
* Modifying the new `Buffer` slice will modify the memory in the original `Buffer` because the allocated memory of the two objects overlaps.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte
* // from the original `Buffer`.
*
* const buf1 = Buffer.allocUnsafe(26);
*
* for (let i = 0; i < 26; i++) {
* // 97 is the decimal ASCII value for 'a'.
* buf1[i] = i + 97;
* }
*
* const buf2 = buf1.subarray(0, 3);
*
* console.log(buf2.toString('ascii', 0, buf2.length));
* // Prints: abc
*
* buf1[0] = 33;
*
* console.log(buf2.toString('ascii', 0, buf2.length));
* // Prints: !bc
* ```
*
* Specifying negative indexes causes the slice to be generated relative to the
* end of `buf` rather than the beginning.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('buffer');
*
* console.log(buf.subarray(-6, -1).toString());
* // Prints: buffe
* // (Equivalent to buf.subarray(0, 5).)
*
* console.log(buf.subarray(-6, -2).toString());
* // Prints: buff
* // (Equivalent to buf.subarray(0, 4).)
*
* console.log(buf.subarray(-5, -2).toString());
* // Prints: uff
* // (Equivalent to buf.subarray(1, 4).)
* ```
* @since v3.0.0
* @param [start=0] Where the new `Buffer` will start.
* @param [end=buf.length] Where the new `Buffer` will end (not inclusive).
*/
subarray(start?: number, end?: number): Buffer;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian.
*
* `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeBigInt64BE(0x0102030405060708n, 0);
*
* console.log(buf);
* // Prints: <Buffer 01 02 03 04 05 06 07 08>
* ```
* @since v12.0.0, v10.20.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeBigInt64BE(value: bigint, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian.
*
* `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeBigInt64LE(0x0102030405060708n, 0);
*
* console.log(buf);
* // Prints: <Buffer 08 07 06 05 04 03 02 01>
* ```
* @since v12.0.0, v10.20.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeBigInt64LE(value: bigint, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian.
*
* This function is also available under the `writeBigUint64BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeBigUInt64BE(0xdecafafecacefaden, 0);
*
* console.log(buf);
* // Prints: <Buffer de ca fa fe ca ce fa de>
* ```
* @since v12.0.0, v10.20.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeBigUInt64BE(value: bigint, offset?: number): number;
/**
* @alias Buffer.writeBigUInt64BE
* @since v14.10.0, v12.19.0
*/
writeBigUint64BE(value: bigint, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeBigUInt64LE(0xdecafafecacefaden, 0);
*
* console.log(buf);
* // Prints: <Buffer de fa ce ca fe fa ca de>
* ```
*
* This function is also available under the `writeBigUint64LE` alias.
* @since v12.0.0, v10.20.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeBigUInt64LE(value: bigint, offset?: number): number;
/**
* @alias Buffer.writeBigUInt64LE
* @since v14.10.0, v12.19.0
*/
writeBigUint64LE(value: bigint, offset?: number): number;
/**
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined
* when `value` is anything other than an unsigned integer.
*
* This function is also available under the `writeUintLE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(6);
*
* buf.writeUIntLE(0x1234567890ab, 0, 6);
*
* console.log(buf);
* // Prints: <Buffer ab 90 78 56 34 12>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
* @return `offset` plus the number of bytes written.
*/
writeUIntLE(value: number, offset: number, byteLength: number): number;
/**
* @alias Buffer.writeUIntLE
* @since v14.9.0, v12.19.0
*/
writeUintLE(value: number, offset: number, byteLength: number): number;
/**
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined
* when `value` is anything other than an unsigned integer.
*
* This function is also available under the `writeUintBE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(6);
*
* buf.writeUIntBE(0x1234567890ab, 0, 6);
*
* console.log(buf);
* // Prints: <Buffer 12 34 56 78 90 ab>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
* @return `offset` plus the number of bytes written.
*/
writeUIntBE(value: number, offset: number, byteLength: number): number;
/**
* @alias Buffer.writeUIntBE
* @since v14.9.0, v12.19.0
*/
writeUintBE(value: number, offset: number, byteLength: number): number;
/**
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined
* when `value` is anything other than a signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(6);
*
* buf.writeIntLE(0x1234567890ab, 0, 6);
*
* console.log(buf);
* // Prints: <Buffer ab 90 78 56 34 12>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
* @return `offset` plus the number of bytes written.
*/
writeIntLE(value: number, offset: number, byteLength: number): number;
/**
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when `value` is anything other than a
* signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(6);
*
* buf.writeIntBE(0x1234567890ab, 0, 6);
*
* console.log(buf);
* // Prints: <Buffer 12 34 56 78 90 ab>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`.
* @return `offset` plus the number of bytes written.
*/
writeIntBE(value: number, offset: number, byteLength: number): number;
/**
* Reads an unsigned, big-endian 64-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readBigUint64BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]);
*
* console.log(buf.readBigUInt64BE(0));
* // Prints: 4294967295n
* ```
* @since v12.0.0, v10.20.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
*/
readBigUInt64BE(offset?: number): bigint;
/**
* @alias Buffer.readBigUInt64BE
* @since v14.10.0, v12.19.0
*/
readBigUint64BE(offset?: number): bigint;
/**
* Reads an unsigned, little-endian 64-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readBigUint64LE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]);
*
* console.log(buf.readBigUInt64LE(0));
* // Prints: 18446744069414584320n
* ```
* @since v12.0.0, v10.20.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
*/
readBigUInt64LE(offset?: number): bigint;
/**
* @alias Buffer.readBigUInt64LE
* @since v14.10.0, v12.19.0
*/
readBigUint64LE(offset?: number): bigint;
/**
* Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed
* values.
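*
* A brief sketch (the printed value follows from two's complement and is
* assumed for illustration):
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]);
*
* console.log(buf.readBigInt64BE(0));
* // Prints: -1n
* ```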
* @since v12.0.0, v10.20.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
*/
readBigInt64BE(offset?: number): bigint;
/**
* Reads a signed, little-endian 64-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed
* values.
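*
* A brief sketch (the printed value follows from two's complement and is
* assumed for illustration):
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80]);
*
* console.log(buf.readBigInt64LE(0));
* // Prints: -9223372036854775808n
* ```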
* @since v12.0.0, v10.20.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`.
*/
readBigInt64LE(offset?: number): bigint;
/**
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an unsigned, little-endian integer supporting
* up to 48 bits of accuracy.
*
* This function is also available under the `readUintLE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
*
* console.log(buf.readUIntLE(0, 6).toString(16));
* // Prints: ab9078563412
* ```
* @since v0.11.15
* @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
*/
readUIntLE(offset: number, byteLength: number): number;
/**
* @alias Buffer.readUIntLE
* @since v14.9.0, v12.19.0
*/
readUintLE(offset: number, byteLength: number): number;
/**
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an unsigned big-endian integer supporting
* up to 48 bits of accuracy.
*
* This function is also available under the `readUintBE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
*
* console.log(buf.readUIntBE(0, 6).toString(16));
* // Prints: 1234567890ab
* console.log(buf.readUIntBE(1, 6).toString(16));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.11.15
* @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
*/
readUIntBE(offset: number, byteLength: number): number;
/**
* @alias Buffer.readUIntBE
* @since v14.9.0, v12.19.0
*/
readUintBE(offset: number, byteLength: number): number;
/**
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a little-endian, two's complement signed value
* supporting up to 48 bits of accuracy.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
*
* console.log(buf.readIntLE(0, 6).toString(16));
* // Prints: -546f87a9cbee
* ```
* @since v0.11.15
* @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
*/
readIntLE(offset: number, byteLength: number): number;
/**
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a big-endian, two's complement signed value
* supporting up to 48 bits of accuracy.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]);
*
* console.log(buf.readIntBE(0, 6).toString(16));
* // Prints: 1234567890ab
* console.log(buf.readIntBE(1, 6).toString(16));
* // Throws ERR_OUT_OF_RANGE.
* console.log(buf.readIntBE(1, 0).toString(16));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.11.15
* @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`.
* @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`.
*/
readIntBE(offset: number, byteLength: number): number;
/**
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readUint8` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([1, -2]);
*
* console.log(buf.readUInt8(0));
* // Prints: 1
* console.log(buf.readUInt8(1));
* // Prints: 254
* console.log(buf.readUInt8(2));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`.
*/
readUInt8(offset?: number): number;
/**
* @alias Buffer.readUInt8
* @since v14.9.0, v12.19.0
*/
readUint8(offset?: number): number;
/**
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readUint16LE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56]);
*
* console.log(buf.readUInt16LE(0).toString(16));
* // Prints: 3412
* console.log(buf.readUInt16LE(1).toString(16));
* // Prints: 5634
* console.log(buf.readUInt16LE(2).toString(16));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
*/
readUInt16LE(offset?: number): number;
/**
* @alias Buffer.readUInt16LE
* @since v14.9.0, v12.19.0
*/
readUint16LE(offset?: number): number;
/**
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readUint16BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56]);
*
* console.log(buf.readUInt16BE(0).toString(16));
* // Prints: 1234
* console.log(buf.readUInt16BE(1).toString(16));
* // Prints: 3456
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
*/
readUInt16BE(offset?: number): number;
/**
* @alias Buffer.readUInt16BE
* @since v14.9.0, v12.19.0
*/
readUint16BE(offset?: number): number;
/**
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readUint32LE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
*
* console.log(buf.readUInt32LE(0).toString(16));
* // Prints: 78563412
* console.log(buf.readUInt32LE(1).toString(16));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readUInt32LE(offset?: number): number;
/**
* @alias Buffer.readUInt32LE
* @since v14.9.0, v12.19.0
*/
readUint32LE(offset?: number): number;
/**
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
*
* This function is also available under the `readUint32BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
*
* console.log(buf.readUInt32BE(0).toString(16));
* // Prints: 12345678
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readUInt32BE(offset?: number): number;
/**
* @alias Buffer.readUInt32BE
* @since v14.9.0, v12.19.0
*/
readUint32BE(offset?: number): number;
/**
* Reads a signed 8-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed values.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([-1, 5]);
*
* console.log(buf.readInt8(0));
* // Prints: -1
* console.log(buf.readInt8(1));
* // Prints: 5
* console.log(buf.readInt8(2));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.0
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`.
*/
readInt8(offset?: number): number;
/**
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed values.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0, 5]);
*
* console.log(buf.readInt16LE(0));
* // Prints: 1280
* console.log(buf.readInt16LE(1));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
*/
readInt16LE(offset?: number): number;
/**
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed values.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0, 5]);
*
* console.log(buf.readInt16BE(0));
* // Prints: 5
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`.
*/
readInt16BE(offset?: number): number;
/**
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed values.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0, 0, 0, 5]);
*
* console.log(buf.readInt32LE(0));
* // Prints: 83886080
* console.log(buf.readInt32LE(1));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readInt32LE(offset?: number): number;
/**
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`.
*
* Integers read from a `Buffer` are interpreted as two's complement signed values.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([0, 0, 0, 5]);
*
* console.log(buf.readInt32BE(0));
* // Prints: 5
* ```
* @since v0.5.5
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readInt32BE(offset?: number): number;
/**
* Reads a 32-bit, little-endian float from `buf` at the specified `offset`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([1, 2, 3, 4]);
*
* console.log(buf.readFloatLE(0));
* // Prints: 1.539989614439558e-36
* console.log(buf.readFloatLE(1));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.11.15
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readFloatLE(offset?: number): number;
/**
* Reads a 32-bit, big-endian float from `buf` at the specified `offset`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([1, 2, 3, 4]);
*
* console.log(buf.readFloatBE(0));
* // Prints: 2.387939260590663e-38
* ```
* @since v0.11.15
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`.
*/
readFloatBE(offset?: number): number;
/**
* Reads a 64-bit, little-endian double from `buf` at the specified `offset`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
*
* console.log(buf.readDoubleLE(0));
* // Prints: 5.447603722011605e-270
* console.log(buf.readDoubleLE(1));
* // Throws ERR_OUT_OF_RANGE.
* ```
* @since v0.11.15
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`.
*/
readDoubleLE(offset?: number): number;
/**
* Reads a 64-bit, big-endian double from `buf` at the specified `offset`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
*
* console.log(buf.readDoubleBE(0));
* // Prints: 8.20788039913184e-304
* ```
* @since v0.11.15
* @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`.
*/
readDoubleBE(offset?: number): number;
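/**
* Inherited from [`TypedArray.prototype.reverse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/reverse).
* Reverses the bytes of `buf` _in-place_ and returns a reference to `buf`.
*/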
reverse(): this;
/**
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the
* byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
*
* console.log(buf1);
* // Prints: <Buffer 01 02 03 04 05 06 07 08>
*
* buf1.swap16();
*
* console.log(buf1);
* // Prints: <Buffer 02 01 04 03 06 05 08 07>
*
* const buf2 = Buffer.from([0x1, 0x2, 0x3]);
*
* buf2.swap16();
* // Throws ERR_INVALID_BUFFER_SIZE.
* ```
*
* One convenient use of `buf.swap16()` is to perform a fast in-place conversion
* between UTF-16 little-endian and UTF-16 big-endian:
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('This is little-endian UTF-16', 'utf16le');
* buf.swap16(); // Convert to big-endian UTF-16 text.
* ```
* @since v5.10.0
* @return A reference to `buf`.
*/
swap16(): Buffer;
/**
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the
* byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
*
* console.log(buf1);
* // Prints: <Buffer 01 02 03 04 05 06 07 08>
*
* buf1.swap32();
*
* console.log(buf1);
* // Prints: <Buffer 04 03 02 01 08 07 06 05>
*
* const buf2 = Buffer.from([0x1, 0x2, 0x3]);
*
* buf2.swap32();
* // Throws ERR_INVALID_BUFFER_SIZE.
* ```
* @since v5.10.0
* @return A reference to `buf`.
*/
swap32(): Buffer;
/**
* Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_.
* Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]);
*
* console.log(buf1);
* // Prints: <Buffer 01 02 03 04 05 06 07 08>
*
* buf1.swap64();
*
* console.log(buf1);
* // Prints: <Buffer 08 07 06 05 04 03 02 01>
*
* const buf2 = Buffer.from([0x1, 0x2, 0x3]);
*
* buf2.swap64();
* // Throws ERR_INVALID_BUFFER_SIZE.
* ```
* @since v6.3.0
* @return A reference to `buf`.
*/
swap64(): Buffer;
/**
* Writes `value` to `buf` at the specified `offset`. `value` must be a
* valid unsigned 8-bit integer. Behavior is undefined when `value` is anything
* other than an unsigned 8-bit integer.
*
* This function is also available under the `writeUint8` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeUInt8(0x3, 0);
* buf.writeUInt8(0x4, 1);
* buf.writeUInt8(0x23, 2);
* buf.writeUInt8(0x42, 3);
*
* console.log(buf);
* // Prints: <Buffer 03 04 23 42>
* ```
* @since v0.5.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`.
* @return `offset` plus the number of bytes written.
*/
writeUInt8(value: number, offset?: number): number;
/**
* @alias Buffer.writeUInt8
* @since v14.9.0, v12.19.0
*/
writeUint8(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is
* anything other than an unsigned 16-bit integer.
*
* This function is also available under the `writeUint16LE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeUInt16LE(0xdead, 0);
* buf.writeUInt16LE(0xbeef, 2);
*
* console.log(buf);
* // Prints: <Buffer ad de ef be>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
* @return `offset` plus the number of bytes written.
*/
writeUInt16LE(value: number, offset?: number): number;
/**
* @alias Buffer.writeUInt16LE
* @since v14.9.0, v12.19.0
*/
writeUint16LE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is anything other than an
* unsigned 16-bit integer.
*
* This function is also available under the `writeUint16BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeUInt16BE(0xdead, 0);
* buf.writeUInt16BE(0xbeef, 2);
*
* console.log(buf);
* // Prints: <Buffer de ad be ef>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
* @return `offset` plus the number of bytes written.
*/
writeUInt16BE(value: number, offset?: number): number;
/**
* @alias Buffer.writeUInt16BE
* @since v14.9.0, v12.19.0
*/
writeUint16BE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is
* anything other than an unsigned 32-bit integer.
*
* This function is also available under the `writeUint32LE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeUInt32LE(0xfeedface, 0);
*
* console.log(buf);
* // Prints: <Buffer ce fa ed fe>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeUInt32LE(value: number, offset?: number): number;
/**
* @alias Buffer.writeUInt32LE
* @since v14.9.0, v12.19.0
*/
writeUint32LE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is anything other than an
* unsigned 32-bit integer.
*
* This function is also available under the `writeUint32BE` alias.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeUInt32BE(0xfeedface, 0);
*
* console.log(buf);
* // Prints: <Buffer fe ed fa ce>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeUInt32BE(value: number, offset?: number): number;
/**
* @alias Buffer.writeUInt32BE
* @since v14.9.0, v12.19.0
*/
writeUint32BE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset`. `value` must be a valid
* signed 8-bit integer. Behavior is undefined when `value` is anything other than
* a signed 8-bit integer.
*
* `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(2);
*
* buf.writeInt8(2, 0);
* buf.writeInt8(-2, 1);
*
* console.log(buf);
* // Prints: <Buffer 02 fe>
* ```
* @since v0.5.0
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`.
* @return `offset` plus the number of bytes written.
*/
writeInt8(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit integer. Behavior is undefined when `value` is
* anything other than a signed 16-bit integer.
*
* The `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(2);
*
* buf.writeInt16LE(0x0304, 0);
*
* console.log(buf);
* // Prints: <Buffer 04 03>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
* @return `offset` plus the number of bytes written.
*/
writeInt16LE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit integer. Behavior is undefined when `value` is
* anything other than a signed 16-bit integer.
*
* The `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(2);
*
* buf.writeInt16BE(0x0102, 0);
*
* console.log(buf);
* // Prints: <Buffer 01 02>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
* @return `offset` plus the number of bytes written.
*/
writeInt16BE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit integer. Behavior is undefined when `value` is
* anything other than a signed 32-bit integer.
*
* The `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeInt32LE(0x05060708, 0);
*
* console.log(buf);
* // Prints: <Buffer 08 07 06 05>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeInt32LE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit integer. Behavior is undefined when `value` is
* anything other than a signed 32-bit integer.
*
* The `value` is interpreted and written as a two's complement signed integer.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeInt32BE(0x01020304, 0);
*
* console.log(buf);
* // Prints: <Buffer 01 02 03 04>
* ```
* @since v0.5.5
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeInt32BE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is
* undefined when `value` is anything other than a JavaScript number.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeFloatLE(0xcafebabe, 0);
*
* console.log(buf);
* // Prints: <Buffer bb fe 4a 4f>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeFloatLE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is
* undefined when `value` is anything other than a JavaScript number.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(4);
*
* buf.writeFloatBE(0xcafebabe, 0);
*
* console.log(buf);
* // Prints: <Buffer 4f 4a fe bb>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
* @return `offset` plus the number of bytes written.
*/
writeFloatBE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything
* other than a JavaScript number.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeDoubleLE(123.456, 0);
*
* console.log(buf);
* // Prints: <Buffer 77 be 9f 1a 2f dd 5e 40>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeDoubleLE(value: number, offset?: number): number;
/**
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything
* other than a JavaScript number.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(8);
*
* buf.writeDoubleBE(123.456, 0);
*
* console.log(buf);
* // Prints: <Buffer 40 5e dd 2f 1a 9f be 77>
* ```
* @since v0.11.15
* @param value Number to be written to `buf`.
* @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
* @return `offset` plus the number of bytes written.
*/
writeDoubleBE(value: number, offset?: number): number;
/**
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given,
* the entire `buf` will be filled:
*
* ```js
* import { Buffer } from 'buffer';
*
* // Fill a `Buffer` with the ASCII character 'h'.
*
* const b = Buffer.allocUnsafe(50).fill('h');
*
* console.log(b.toString());
* // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh
* ```
*
* `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or
* integer. If the resulting integer is greater than `255` (decimal), `buf` will be
* filled with `value & 255`.
*
* If the final write of a `fill()` operation falls on a multi-byte character,
* then only the bytes of that character that fit into `buf` are written:
*
* ```js
* import { Buffer } from 'buffer';
*
* // Fill a `Buffer` with character that takes up two bytes in UTF-8.
*
* console.log(Buffer.allocUnsafe(5).fill('\u0222'));
* // Prints: <Buffer c8 a2 c8 a2 c8>
* ```
*
* If `value` contains invalid characters, it is truncated; if no valid
* fill data remains, an exception is thrown:
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.allocUnsafe(5);
*
* console.log(buf.fill('a'));
* // Prints: <Buffer 61 61 61 61 61>
* console.log(buf.fill('aazz', 'hex'));
* // Prints: <Buffer aa aa aa aa aa>
* console.log(buf.fill('zz', 'hex'));
* // Throws an exception.
* ```
* @since v0.5.0
* @param value The value with which to fill `buf`.
* @param [offset=0] Number of bytes to skip before starting to fill `buf`.
* @param [end=buf.length] Where to stop filling `buf` (not inclusive).
* @param [encoding='utf8'] The encoding for `value` if `value` is a string.
* @return A reference to `buf`.
*/
fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this;
/**
* If `value` is:
*
* * a string, `value` is interpreted according to the character encoding in `encoding`.
* * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety.
* To compare a partial `Buffer`, use `buf.subarray`.
* * a number, `value` will be interpreted as an unsigned 8-bit integer
* value between `0` and `255`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('this is a buffer');
*
* console.log(buf.indexOf('this'));
* // Prints: 0
* console.log(buf.indexOf('is'));
* // Prints: 2
* console.log(buf.indexOf(Buffer.from('a buffer')));
* // Prints: 8
* console.log(buf.indexOf(97));
* // Prints: 8 (97 is the decimal ASCII value for 'a')
* console.log(buf.indexOf(Buffer.from('a buffer example')));
* // Prints: -1
* console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8)));
* // Prints: 8
*
* const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le');
*
* console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le'));
* // Prints: 4
* console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le'));
* // Prints: 6
* ```
*
* If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value,
* an integer between 0 and 255.
*
* If `byteOffset` is not a number, it will be coerced to a number. If the result
* of coercion is `NaN` or `0`, then the entire buffer will be searched. This
* behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf).
*
* ```js
* import { Buffer } from 'buffer';
*
* const b = Buffer.from('abcdef');
*
* // Passing a value that's a number, but not a valid byte.
* // Prints: 2, equivalent to searching for 99 or 'c'.
* console.log(b.indexOf(99.9));
* console.log(b.indexOf(256 + 99));
*
* // Passing a byteOffset that coerces to NaN or 0.
* // Prints: 1, searching the whole buffer.
* console.log(b.indexOf('b', undefined));
* console.log(b.indexOf('b', {}));
* console.log(b.indexOf('b', null));
* console.log(b.indexOf('b', []));
* ```
*
* If `value` is an empty string or empty `Buffer` and `byteOffset` is less
* than `buf.length`, `byteOffset` will be returned. If `value` is empty and `byteOffset` is at least `buf.length`, `buf.length` will be returned.
* @since v1.5.0
* @param value What to search for.
* @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
* @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`.
* @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`.
*/
indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
/**
* Identical to `buf.indexOf()`, except the last occurrence of `value` is found
* rather than the first occurrence.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('this buffer is a buffer');
*
* console.log(buf.lastIndexOf('this'));
* // Prints: 0
* console.log(buf.lastIndexOf('buffer'));
* // Prints: 17
* console.log(buf.lastIndexOf(Buffer.from('buffer')));
* // Prints: 17
* console.log(buf.lastIndexOf(97));
* // Prints: 15 (97 is the decimal ASCII value for 'a')
* console.log(buf.lastIndexOf(Buffer.from('yolo')));
* // Prints: -1
* console.log(buf.lastIndexOf('buffer', 5));
* // Prints: 5
* console.log(buf.lastIndexOf('buffer', 4));
* // Prints: -1
*
* const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le');
*
* console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le'));
* // Prints: 6
* console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le'));
* // Prints: 4
* ```
*
* If `value` is not a string, number, or `Buffer`, this method will throw a `TypeError`. If `value` is a number, it will be coerced to a valid byte value,
* an integer between 0 and 255.
*
* If `byteOffset` is not a number, it will be coerced to a number. Any arguments
* that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer.
* This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf).
*
* ```js
* import { Buffer } from 'buffer';
*
* const b = Buffer.from('abcdef');
*
* // Passing a value that's a number, but not a valid byte.
* // Prints: 2, equivalent to searching for 99 or 'c'.
* console.log(b.lastIndexOf(99.9));
* console.log(b.lastIndexOf(256 + 99));
*
* // Passing a byteOffset that coerces to NaN.
* // Prints: 1, searching the whole buffer.
* console.log(b.lastIndexOf('b', undefined));
* console.log(b.lastIndexOf('b', {}));
*
* // Passing a byteOffset that coerces to 0.
* // Prints: -1, equivalent to passing 0.
* console.log(b.lastIndexOf('b', null));
* console.log(b.lastIndexOf('b', []));
* ```
*
* If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned.
* @since v6.0.0
* @param value What to search for.
* @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
* @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`.
* @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`.
*/
lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number;
/**
* Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents
* of `buf`.
*
* ```js
* import { Buffer } from 'buffer';
*
* // Log the entire contents of a `Buffer`.
*
* const buf = Buffer.from('buffer');
*
* for (const pair of buf.entries()) {
* console.log(pair);
* }
* // Prints:
* // [0, 98]
* // [1, 117]
* // [2, 102]
* // [3, 102]
* // [4, 101]
* // [5, 114]
* ```
* @since v1.1.0
*/
entries(): IterableIterator<[number, number]>;
/**
* Equivalent to `buf.indexOf() !== -1`.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('this is a buffer');
*
* console.log(buf.includes('this'));
* // Prints: true
* console.log(buf.includes('is'));
* // Prints: true
* console.log(buf.includes(Buffer.from('a buffer')));
* // Prints: true
* console.log(buf.includes(97));
* // Prints: true (97 is the decimal ASCII value for 'a')
* console.log(buf.includes(Buffer.from('a buffer example')));
* // Prints: false
* console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8)));
* // Prints: true
* console.log(buf.includes('this', 4));
* // Prints: false
* ```
* @since v5.3.0
* @param value What to search for.
* @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
* @param [encoding='utf8'] If `value` is a string, this is its encoding.
* @return `true` if `value` was found in `buf`, `false` otherwise.
*/
includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean;
/**
* Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices).
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('buffer');
*
* for (const key of buf.keys()) {
* console.log(key);
* }
* // Prints:
* // 0
* // 1
* // 2
* // 3
* // 4
* // 5
* ```
* @since v1.1.0
*/
keys(): IterableIterator<number>;
/**
* Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is
* called automatically when a `Buffer` is used in a `for..of` statement.
*
* ```js
* import { Buffer } from 'buffer';
*
* const buf = Buffer.from('buffer');
*
* for (const value of buf.values()) {
* console.log(value);
* }
* // Prints:
* // 98
* // 117
* // 102
* // 102
* // 101
* // 114
*
* for (const value of buf) {
* console.log(value);
* }
* // Prints:
* // 98
* // 117
* // 102
* // 102
* // 101
* // 114
* ```
* @since v1.1.0
*/
values(): IterableIterator<number>;
}
var Buffer: BufferConstructor;
/**
* Decodes a string of Base64-encoded data into bytes, and encodes those bytes
* into a string using Latin-1 (ISO-8859-1).
*
 * The `data` may be any JavaScript value that can be coerced into a string.
*
* **This function is only provided for compatibility with legacy web platform APIs**
* **and should never be used in new code, because they use strings to represent**
* **binary data and predate the introduction of typed arrays in JavaScript.**
* **For code running using Node.js APIs, converting between base64-encoded strings**
 * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.**
* @since v15.13.0, v14.17.0
* @legacy Use `Buffer.from(data, 'base64')` instead.
* @param data The Base64-encoded input string.
*/
function atob(data: string): string;
/**
 * Decodes a string into bytes using Latin-1 (ISO-8859-1), and encodes those bytes
* into a string using Base64.
*
 * The `data` may be any JavaScript value that can be coerced into a string.
*
* **This function is only provided for compatibility with legacy web platform APIs**
* **and should never be used in new code, because they use strings to represent**
* **binary data and predate the introduction of typed arrays in JavaScript.**
* **For code running using Node.js APIs, converting between base64-encoded strings**
 * **and binary data should be performed using `Buffer.from(str, 'base64')` and `buf.toString('base64')`.**
* @since v15.13.0, v14.17.0
* @legacy Use `buf.toString('base64')` instead.
* @param data An ASCII (Latin1) string.
*/
function btoa(data: string): string;
interface Blob extends __Blob {}
/**
* `Blob` class is a global reference for `require('node:buffer').Blob`
* https://nodejs.org/api/buffer.html#class-blob
* @since v18.0.0
*/
var Blob: typeof globalThis extends {
onmessage: any;
Blob: infer T;
}
? T
: typeof NodeBlob;
}
}
declare module 'node:buffer' {
export * from 'buffer';
} | PypiClean |
/Netfoll_TL-2.0.1-py3-none-any.whl/netfoll_tl/tl/custom/sendergetter.py | import abc
from ... import utils
class SenderGetter(abc.ABC):
"""
Helper base class that introduces the `sender`, `input_sender`
and `sender_id` properties and `get_sender` and `get_input_sender`
methods.
"""
def __init__(self, sender_id=None, *, sender=None, input_sender=None):
self._sender_id = sender_id
self._sender = sender
self._input_sender = input_sender
self._client = None
@property
def sender(self):
"""
Returns the :tl:`User` or :tl:`Channel` that sent this object.
It may be `None` if Telegram didn't send the sender.
If you only need the ID, use `sender_id` instead.
If you need to call a method which needs
this chat, use `input_sender` instead.
If you're using `telethon.events`, use `get_sender()` instead.
"""
return self._sender
async def get_sender(self):
"""
Returns `sender`, but will make an API call to find the
sender unless it's already cached.
If you only need the ID, use `sender_id` instead.
If you need to call a method which needs
this sender, use `get_input_sender()` instead.
"""
# ``sender.min`` is present both in :tl:`User` and :tl:`Channel`.
# It's a flag that will be set if only minimal information is
# available (such as display name, but username may be missing),
# in which case we want to force fetch the entire thing because
# the user explicitly called a method. If the user is okay with
# cached information, they may use the property instead.
if (self._sender is None or getattr(self._sender, 'min', None)) \
and await self.get_input_sender():
# self.get_input_sender may refresh in which case the sender may no longer be min
# However it could still incur a cost so the cheap check is done twice instead.
if self._sender is None or getattr(self._sender, 'min', None):
try:
self._sender =\
await self._client.get_entity(self._input_sender)
except ValueError:
await self._refetch_sender()
return self._sender
@property
def input_sender(self):
"""
This :tl:`InputPeer` is the input version of the user/channel who
sent the message. Similarly to `input_chat
<telethon.tl.custom.chatgetter.ChatGetter.input_chat>`, this doesn't
have things like username or similar, but still useful in some cases.
Note that this might not be available if the library can't
        find the input chat, or if the message is a broadcast on a channel.
"""
if self._input_sender is None and self._sender_id and self._client:
try:
self._input_sender = self._client._mb_entity_cache.get(
utils.resolve_id(self._sender_id)[0])._as_input_peer()
except AttributeError:
pass
return self._input_sender
async def get_input_sender(self):
"""
Returns `input_sender`, but will make an API call to find the
input sender unless it's already cached.
"""
if self.input_sender is None and self._sender_id and self._client:
await self._refetch_sender()
return self._input_sender
@property
def sender_id(self):
"""
Returns the marked sender integer ID, if present.
If there is a sender in the object, `sender_id` will *always* be set,
which is why you should use it instead of `sender.id <sender>`.
"""
return self._sender_id
async def _refetch_sender(self):
"""
Re-fetches sender information through other means.
""" | PypiClean |
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/io/proxy/README | -------------------------------------------------------------------------------
dojox.io.proxy (XHR IFrame Proxy)
-------------------------------------------------------------------------------
Version 0.6
Release date: 01/31/2008
-------------------------------------------------------------------------------
Project state:
experimental
-------------------------------------------------------------------------------
Credits
James Burke ([email protected])
-------------------------------------------------------------------------------
Project description
The XHR IFrame Proxy (xip) allows you to do cross-domain XMLHttpRequests (XHRs).
It works by using two iframes, one your domain (xip_client.html), one on the
other domain (xip_server.html). They use fragment IDs in the iframe URLs to pass
messages to each other. The xip.js file defines dojox.io.proxy.xip. This module
intercepts XHR calls made by the Dojo XHR methods (dojo.xhr* methods). The module
returns a facade object that acts like an XHR object. Once send is called on the
facade, the facade's data is serialized, given to xip_client.html. xip_client.html
then passes the serialized data to xip_server.html by changing xip_server.html's
URL fragment ID (the #xxxx part of an URL). xip_server.html deserializes the
message fragments, and does an XHR call, gets the response, and serializes the
data. The serialized data is then passed back to xip_client.html by changing
xip_client.html's fragment ID. Then the response is deserialized and used as
the response inside the facade XHR object that was created by dojox.io.proxy.xip.
-------------------------------------------------------------------------------
Dependencies:
xip.js: Dojo Core, dojox.data.dom
xip_client.html: none
xip_server.html: none (but see Additional Notes section)
-------------------------------------------------------------------------------
Documentation
There is some documentation that applies to the Dojo 0.4.x version of these files:
http://dojotoolkit.org/book/dojo-book-0-4/part-5-connecting-pieces/i-o/cross-domain-xmlhttprequest-using-iframe-proxy
The general theory still applies to this code, but the specifics are different
for the Dojo 0.9+ codebase. Doc updates hopefully after the basic code is ported.
The current implementation destroys the iframes used for a request after the request
completes. This seems to cause a memory leak, particularly in IE. So, it is not
suited for doing polling cross-domain requests.
-------------------------------------------------------------------------------
Installation instructions
Grab the following from the Dojox SVN Repository:
http://svn.dojotoolkit.org/var/src/dojo/dojox/trunk/io/proxy/xip.js
http://svn.dojotoolkit.org/var/src/dojo/dojox/trunk/io/proxy/xip_client.html
Install into the following directory structure:
/dojox/io/proxy/
...which should be at the same level as your Dojo checkout.
Grab the following from the Dojox SVN Repository:
http://svn.dojotoolkit.org/var/src/dojo/dojox/trunk/io/proxy/xip_server.html
and install it on the domain that you want to allow receiving cross-domain
requests. Be sure to read the documentation, the Additional Notes below, and
the in-file comments.
-------------------------------------------------------------------------------
Additional Notes
xip_client.html and xip_server.html do not work right away. You need to uncomment
the script tags in the files. Additionally, xip_server.html requires a JS file,
isAllowed.js, to be defined. See the notes in xip_server.html for more information.
XDOMAIN BUILD INSTRUCTIONS:
The dojox.io.proxy module requires some setup to use with an xdomain build.
The xip_client.html file has to be served from the same domain as your web page.
It cannot be served from the domain that has the xdomain build. Download xip_client.html
and install it on your server. Then set djConfig.xipClientUrl to the local path
of xip_client.html (just use a path, not a whole URL, since it will be on the same
domain as the page). The path to the file should be the path relative to the web
page that is using dojox.io.proxy. | PypiClean |
/D47crunch-2.0.3.tar.gz/D47crunch-2.0.3/changelog.md | # Changelog
## v2.0.3
*Released on 2022-02-27*
### New feature
* `D4xdata.covar_table()` allows exporting the variance-covariance matrix or the correlation matrix for the Δ<sub>4x</sub> values of unknown samples.
### Changes
* New `hist` keyword to `D4xdata.plot_residuals()`, which adds a histogram of residuals to the side of the plot.
## v2.0.2
*Released on 2021-08-16*
### Internals
* Remove HTML tags in all docstrings
## v2.0.1
*Released on 2021-08-08*
### Bugfix
* Fix silly mistake in readme.
## v2.0.0
*Released on 2021-08-08*
### New feature
* Support for Δ<sub>48</sub> standardization (cf. the section *Process paired Δ<sub>47</sub> and Δ<sub>48</sub> values* in the documentation).
### Changes
* Extensive changes to the documentation, with new sections (*Tutorial* and *How-to*).
* Documentation is now built with `pdoc` instead of `pdoc3`.
* `D47data.simulate()` replaced by `simulate_single_analysis()` and `virtual_data()`, with additional functionality.
* New method: `D4xdata.report()`
* `D4xdata.table_of_analyses()`, `D4xdata.table_of_sessions()`, and `D4xdata.table_of_samples()` have a new argument `output` controlling what the method should return.
* `SAMPLE_CONSTRAINING_WG_COMPOSITION` is gone, replaced by `D4xdata.Nominal_d13C_VPDB` and `D4xdata.Nominal_d18O_VPDB`.
* `lambda_17` is now replaced by `LAMBDA_17` everywhere.
* Additional tests
### Bugfixes
* Correct (or at least improve) computations of analytical repeatabilities in `D47data.repeatabilities()` and `D47data.compute_r()`. | PypiClean |
/MindsDB-23.8.3.0.tar.gz/MindsDB-23.8.3.0/mindsdb/integrations/handlers/shopify_handler/shopify_handler.py | import shopify
from mindsdb.integrations.handlers.shopify_handler.shopify_tables import ProductsTable, CustomersTable, OrdersTable
from mindsdb.integrations.libs.api_handler import APIHandler
from mindsdb.integrations.libs.response import (
HandlerStatusResponse as StatusResponse,
)
from mindsdb.utilities import log
from mindsdb_sql import parse_sql
class ShopifyHandler(APIHandler):
"""
The Shopify handler implementation.
"""
name = 'shopify'
def __init__(self, name: str, **kwargs):
"""
Initialize the handler.
Args:
name (str): name of particular handler instance
**kwargs: arbitrary keyword arguments.
"""
super().__init__(name)
connection_data = kwargs.get("connection_data", {})
self.connection_data = connection_data
self.kwargs = kwargs
self.connection = None
self.is_connected = False
products_data = ProductsTable(self)
self._register_table("products", products_data)
customers_data = CustomersTable(self)
self._register_table("customers", customers_data)
orders_data = OrdersTable(self)
self._register_table("orders", orders_data)
def connect(self):
"""
Set up the connection required by the handler.
Returns
-------
        shopify.Session
connection object
"""
if self.is_connected is True:
return self.connection
api_session = shopify.Session(self.connection_data['shop_url'], '2021-10', self.connection_data['access_token'])
self.connection = api_session
self.is_connected = True
return self.connection
def check_connection(self) -> StatusResponse:
"""
Check connection to the handler.
Returns:
HandlerStatusResponse
"""
response = StatusResponse(False)
try:
api_session = self.connect()
shopify.ShopifyResource.activate_session(api_session)
shopify.Shop.current()
response.success = True
except Exception as e:
            log.logger.error(f'Error connecting to Shopify: {e}')
response.error_message = str(e)
self.is_connected = response.success
return response
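    # A minimal connection sketch (the shop URL and token below are
    # illustrative placeholders, not real credentials):
    #
    #     handler = ShopifyHandler(
    #         "shopify_datasource",
    #         connection_data={
    #             "shop_url": "example.myshopify.com",
    #             "access_token": "<access-token>",
    #         },
    #     )
    #     print(handler.check_connection().success)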
def native_query(self, query: str) -> StatusResponse:
"""Receive and process a raw query.
Parameters
----------
query : str
query in a native format
Returns
-------
StatusResponse
Request status
"""
ast = parse_sql(query, dialect="mindsdb")
return self.query(ast) | PypiClean |
/KPyGithub-1.32a1.tar.gz/KPyGithub-1.32a1/github/GitAuthor.py |
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. #
# http://pygithub.github.io/PyGithub/v1/index.html #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class GitAuthor(github.GithubObject.NonCompletableGithubObject):
"""
This class represents GitAuthors as returned for example by http://developer.github.com/v3/todo
"""
def __repr__(self):
return self.get__repr__({"name": self._name.value})
@property
def date(self):
"""
:type: datetime.datetime
"""
return self._date.value
@property
def email(self):
"""
:type: string
"""
return self._email.value
@property
def name(self):
"""
:type: string
"""
return self._name.value
def _initAttributes(self):
self._date = github.GithubObject.NotSet
self._email = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "date" in attributes: # pragma no branch
self._date = self._makeDatetimeAttribute(attributes["date"])
if "email" in attributes: # pragma no branch
self._email = self._makeStringAttribute(attributes["email"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"]) | PypiClean |
/fastapi_jsonapi-2.0.0.tar.gz/fastapi_jsonapi-2.0.0/fastapi_jsonapi/views/view_base.py | import logging
from contextvars import ContextVar
from typing import (
Any,
Dict,
Iterable,
List,
Optional,
Tuple,
Type,
Union,
)
from fastapi import Request
from pydantic.fields import ModelField
from fastapi_jsonapi import QueryStringManager, RoutersJSONAPI
from fastapi_jsonapi.data_layers.base import BaseDataLayer
from fastapi_jsonapi.data_typing import (
TypeModel,
TypeSchema,
)
from fastapi_jsonapi.schema import (
JSONAPIObjectSchema,
JSONAPIResultListMetaSchema,
get_related_schema,
)
from fastapi_jsonapi.schema_base import BaseModel, RelationshipInfo
from fastapi_jsonapi.schema_builder import JSONAPIObjectSchemas
from fastapi_jsonapi.splitter import SPLIT_REL
from fastapi_jsonapi.views.utils import (
HTTPMethod,
HTTPMethodConfig,
)
logger = logging.getLogger(__name__)
previous_resource_type_ctx_var: ContextVar[str] = ContextVar("previous_resource_type_ctx_var")
related_field_name_ctx_var: ContextVar[str] = ContextVar("related_field_name_ctx_var")
relationships_schema_ctx_var: ContextVar[Type[BaseModel]] = ContextVar("relationships_schema_ctx_var")
object_schema_ctx_var: ContextVar[Type[JSONAPIObjectSchema]] = ContextVar("object_schema_ctx_var")
included_object_schema_ctx_var: ContextVar[Type[TypeSchema]] = ContextVar("included_object_schema_ctx_var")
relationship_info_ctx_var: ContextVar[RelationshipInfo] = ContextVar("relationship_info_ctx_var")
class ViewBase:
"""
Views are inited for each request
"""
data_layer_cls = BaseDataLayer
method_dependencies: Dict[HTTPMethod, HTTPMethodConfig] = {}
def __init__(self, *, request: Request, jsonapi: RoutersJSONAPI, **options):
self.request: Request = request
self.jsonapi: RoutersJSONAPI = jsonapi
self.options: dict = options
self.query_params: QueryStringManager = QueryStringManager(request=request)
def _get_data_layer(self, schema: Type[BaseModel], **dl_kwargs):
return self.data_layer_cls(
schema=schema,
model=self.jsonapi.model,
**dl_kwargs,
)
def _get_data_layer_for_detail(self, **kwargs: Any) -> BaseDataLayer:
"""
:param kwargs: Any extra kwargs for the data layer
:return:
"""
return self._get_data_layer(
schema=self.jsonapi.schema_detail,
**kwargs,
)
def _get_data_layer_for_list(self, **kwargs: Any) -> BaseDataLayer:
"""
:param kwargs: Any extra kwargs for the data layer
:return:
"""
return self._get_data_layer(
schema=self.jsonapi.schema_list,
**kwargs,
)
def _build_response(self, items_from_db: List[TypeModel], item_schema: Type[BaseModel]):
return self.process_includes_for_db_items(
includes=self.query_params.include,
# as list to reuse helper
items_from_db=items_from_db,
item_schema=item_schema,
)
def _build_detail_response(self, db_item: TypeModel):
result_objects, object_schemas, extras = self._build_response([db_item], self.jsonapi.schema_detail)
# is it ok to do through list?
result_object = result_objects[0]
detail_jsonapi_schema = self.jsonapi.schema_builder.build_schema_for_detail_result(
name=f"Result{self.__class__.__name__}",
object_jsonapi_schema=object_schemas.object_jsonapi_schema,
includes_schemas=object_schemas.included_schemas_list,
)
return detail_jsonapi_schema(data=result_object, **extras)
def _build_list_response(self, items_from_db: List[TypeModel], count: int, total_pages: int):
result_objects, object_schemas, extras = self._build_response(items_from_db, self.jsonapi.schema_list)
# we need to build a new schema here
# because we'd like to exclude some fields (relationships, includes, etc)
list_jsonapi_schema = self.jsonapi.schema_builder.build_schema_for_list_result(
name=f"Result{self.__class__.__name__}",
object_jsonapi_schema=object_schemas.object_jsonapi_schema,
includes_schemas=object_schemas.included_schemas_list,
)
return list_jsonapi_schema(
meta=JSONAPIResultListMetaSchema(count=count, total_pages=total_pages),
data=result_objects,
**extras,
)
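    # The returned object serializes to the JSON:API list shape, roughly
    # (exact field names depend on the generated schemas; illustrative):
    #
    #     {"data": [...], "included": [...],
    #      "meta": {"count": <n>, "totalPages": <m>}}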
# data preparing below:
@classmethod
def get_db_item_id(cls, item_from_db: TypeModel):
"""
TODO: check if id is None? raise?
        TODO: any other conversion for id to string?
:param item_from_db:
:return:
"""
return str(item_from_db.id)
@classmethod
def prepare_related_object_data(
cls,
item_from_db: TypeModel,
) -> Tuple[Dict[str, Union[str, int]], Optional[TypeSchema]]:
included_object_schema: Type[TypeSchema] = included_object_schema_ctx_var.get()
relationship_info: RelationshipInfo = relationship_info_ctx_var.get()
item_id = cls.get_db_item_id(item_from_db)
data_for_relationship = {"id": item_id}
processed_object = included_object_schema(
id=item_id,
attributes=item_from_db,
type=relationship_info.resource_type,
)
return data_for_relationship, processed_object
@classmethod
def prepare_data_for_relationship(
cls,
related_db_item: Union[List[TypeModel], TypeModel],
) -> Tuple[Optional[Dict[str, Union[str, int]]], List[TypeSchema]]:
included_objects = []
if related_db_item is None:
return None, included_objects
data_for_relationship, processed_object = cls.prepare_related_object_data(
item_from_db=related_db_item,
)
if processed_object:
included_objects.append(processed_object)
return data_for_relationship, included_objects
@classmethod
def update_related_object(
cls,
relationship_data: Union[Dict[str, str], List[Dict[str, str]]],
included_objects: Dict[Tuple[str, str], TypeSchema],
cache_key: Tuple[str, str],
related_field_name: str,
):
relationships_schema: Type[BaseModel] = relationships_schema_ctx_var.get()
object_schema: Type[JSONAPIObjectSchema] = object_schema_ctx_var.get()
relationship_data_schema = get_related_schema(relationships_schema, related_field_name)
parent_included_object = included_objects.get(cache_key)
new_relationships = {}
if hasattr(parent_included_object, "relationships") and parent_included_object.relationships:
existing = parent_included_object.relationships or {}
if isinstance(existing, BaseModel):
existing = existing.dict()
new_relationships.update(existing)
new_relationships.update(
{
**{
related_field_name: relationship_data_schema(
data=relationship_data,
),
},
},
)
included_objects[cache_key] = object_schema.parse_obj(
parent_included_object,
).copy(
update={"relationships": new_relationships},
)
@classmethod
def update_known_included(
cls,
included_objects: Dict[Tuple[str, str], TypeSchema],
new_included: List[TypeSchema],
):
for included in new_included:
included_objects[(included.id, included.type)] = included
@classmethod
def process_single_db_item_and_prepare_includes(
cls,
parent_db_item: TypeModel,
included_objects: Dict[Tuple[str, str], TypeSchema],
):
previous_resource_type: str = previous_resource_type_ctx_var.get()
related_field_name: str = related_field_name_ctx_var.get()
next_current_db_item = []
cache_key = (cls.get_db_item_id(parent_db_item), previous_resource_type)
current_db_item = getattr(parent_db_item, related_field_name)
current_is_single = False
if not isinstance(current_db_item, Iterable):
# hack to do less if/else
current_db_item = [current_db_item]
current_is_single = True
relationship_data_items = []
for db_item in current_db_item:
next_current_db_item.append(db_item)
data_for_relationship, new_included = cls.prepare_data_for_relationship(
related_db_item=db_item,
)
cls.update_known_included(
included_objects=included_objects,
new_included=new_included,
)
relationship_data_items.append(data_for_relationship)
if current_is_single:
# if initially was single, get back one dict
# hack to do less if/else
relationship_data_items = relationship_data_items[0]
cls.update_related_object(
relationship_data=relationship_data_items,
included_objects=included_objects,
cache_key=cache_key,
related_field_name=related_field_name,
)
return next_current_db_item
@classmethod
def process_db_items_and_prepare_includes(
cls,
parent_db_items: List[TypeModel],
included_objects: Dict[Tuple[str, str], TypeSchema],
):
next_current_db_item = []
for parent_db_item in parent_db_items:
new_next_items = cls.process_single_db_item_and_prepare_includes(
parent_db_item=parent_db_item,
included_objects=included_objects,
)
next_current_db_item.extend(new_next_items)
return next_current_db_item
def process_include_with_nested(
self,
include: str,
current_db_item: Union[List[TypeModel], TypeModel],
item_as_schema: TypeSchema,
current_relation_schema: Type[TypeSchema],
) -> Tuple[Dict[str, TypeSchema], List[JSONAPIObjectSchema]]:
root_item_key = (item_as_schema.id, item_as_schema.type)
included_objects: Dict[Tuple[str, str], TypeSchema] = {
root_item_key: item_as_schema,
}
previous_resource_type = item_as_schema.type
for related_field_name in include.split(SPLIT_REL):
object_schemas = self.jsonapi.schema_builder.create_jsonapi_object_schemas(
schema=current_relation_schema,
includes=[related_field_name],
compute_included_schemas=bool([related_field_name]),
)
relationships_schema = object_schemas.relationships_schema
schemas_include = object_schemas.can_be_included_schemas
current_relation_field: ModelField = current_relation_schema.__fields__[related_field_name]
current_relation_schema: Type[TypeSchema] = current_relation_field.type_
relationship_info: RelationshipInfo = current_relation_field.field_info.extra["relationship"]
included_object_schema: Type[JSONAPIObjectSchema] = schemas_include[related_field_name]
if not isinstance(current_db_item, Iterable):
# xxx: less if/else
current_db_item = [current_db_item]
# ctx vars to skip multi-level args passing
relationships_schema_ctx_var.set(relationships_schema)
object_schema_ctx_var.set(object_schemas.object_jsonapi_schema)
previous_resource_type_ctx_var.set(previous_resource_type)
related_field_name_ctx_var.set(related_field_name)
relationship_info_ctx_var.set(relationship_info)
included_object_schema_ctx_var.set(included_object_schema)
current_db_item = self.process_db_items_and_prepare_includes(
parent_db_items=current_db_item,
included_objects=included_objects,
)
previous_resource_type = relationship_info.resource_type
return included_objects.pop(root_item_key), list(included_objects.values())
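    # Example walk (illustrative): for include="user.bio" this visits
    # item -> item.user -> user.bio, attaching a `relationships` entry at
    # each level and collecting every touched object for `included`.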
def process_db_object(
self,
includes: List[str],
item: TypeModel,
item_schema: Type[TypeSchema],
object_schemas: JSONAPIObjectSchemas,
):
included_objects = []
item_as_schema = object_schemas.object_jsonapi_schema(
id=self.get_db_item_id(item),
attributes=object_schemas.attributes_schema.from_orm(item),
)
for include in includes:
item_as_schema, new_included_objects = self.process_include_with_nested(
include=include,
current_db_item=item,
item_as_schema=item_as_schema,
current_relation_schema=item_schema,
)
included_objects.extend(new_included_objects)
return item_as_schema, included_objects
def process_includes_for_db_items(
self,
includes: List[str],
items_from_db: List[TypeModel],
item_schema: Type[TypeSchema],
):
object_schemas = self.jsonapi.schema_builder.create_jsonapi_object_schemas(
schema=item_schema,
includes=includes,
compute_included_schemas=bool(includes),
use_schema_cache=False,
)
result_objects = []
# form:
# `(type, id): serialized_object`
# helps to exclude duplicates
included_objects: Dict[Tuple[str, str], TypeSchema] = {}
for item in items_from_db:
jsonapi_object, new_included = self.process_db_object(
includes=includes,
item=item,
item_schema=item_schema,
object_schemas=object_schemas,
)
result_objects.append(jsonapi_object)
for included in new_included:
# update too?
included_objects[(included.type, included.id)] = included
extras = {}
if includes:
# if query has includes, add includes to response
# even if no related objects were found
extras.update(
included=[
# ignore key
value
# sort for prettiness
for key, value in sorted(included_objects.items())
],
)
return result_objects, object_schemas, extras | PypiClean |
/ESMValTool-2.9.0-py3-none-any.whl/esmvaltool/diag_scripts/zmnam/zmnam_calc.py | import netCDF4
import numpy as np
from scipy import signal
def butter_filter(data, freq, lowcut=None, order=2):
"""Function to perform time filtering."""
    if lowcut is None:
        # No cutoff requested; return the data unchanged instead of
        # failing on undefined filter parameters.
        return data
    # Sampling determines Nyquist frequency
    nyq = 0.5 * freq
    low = lowcut / nyq
    bpoly, apoly = signal.butter(order, low, btype='lowpass')
    ysig = signal.filtfilt(bpoly, apoly, data, axis=0)
    return ysig
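# A quick sanity sketch for the filter above (synthetic daily series,
# purely illustrative):
#
#     t = np.arange(1000)
#     noisy = np.sin(2. * np.pi * t / 365.) + 0.5 * np.random.randn(1000)
#     smooth = butter_filter(noisy, 1, lowcut=1. / 90, order=2)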
def zmnam_calc(da_fname, outdir, src_props):
"""Function to do EOF/PC decomposition of zg field."""
deg_to_r = np.pi / 180.
lat_weighting = True
outfiles = []
# Note: daily/monthly means have been
# already subtracted from daily/monthly files
# Open daily data
with netCDF4.Dataset(da_fname, "r") as in_file:
time_dim = in_file.variables['time'][:]
time_lnam = getattr(in_file.variables['time'], 'long_name', '')
time_snam = getattr(in_file.variables['time'], 'standard_name', '')
time_uni = in_file.variables['time'].units
time_cal = in_file.variables['time'].calendar
time = np.array(time_dim[:], dtype='d')
date = netCDF4.num2date(time, in_file.variables['time'].units,
in_file.variables['time'].calendar)
lev = np.array(in_file.variables['plev'][:], dtype='d')
lev_lnam = getattr(in_file.variables['plev'], 'long_name', '')
lev_snam = getattr(in_file.variables['plev'], 'standard_name', '')
lev_uni = in_file.variables['plev'].units
lev_pos = in_file.variables['plev'].positive
lev_axi = in_file.variables['plev'].axis
lat = np.array(in_file.variables['lat'][:], dtype='d')
# lat_nam = in_file.variables['lat'].long_name
lat_uni = in_file.variables['lat'].units
lat_axi = in_file.variables['lat'].axis
lon = np.array(in_file.variables['lon'][:], dtype='d')
# lon_nam = in_file.variables['lon'].long_name
lon_uni = in_file.variables['lon'].units
lon_axi = in_file.variables['lon'].axis
zg_da = np.squeeze(np.array(in_file.variables['zg'][:], dtype='d'))
n_tim = len(time_dim)
print('end infile close')
# Start zmNAM index calculation
# Lowpass filter
zg_da_lp = butter_filter(zg_da, 1, lowcut=1. / 90, order=2)
# Outputs: stored by level
# EOFs, eigenvalues, daily and monthly PCs
eofs = np.zeros((len(lev), len(lat)), dtype='d')
eigs = np.zeros(len(lev), dtype='d')
pcs_da = np.zeros((n_tim, len(lev)), dtype='d')
# Calendar-independent monthly mean
sta_mon = [] # first day of the month
mid_mon = [] # 15th of the month
end_mon = [] # last day of the month (add +1 when slicing)
mon = 999
idate = 0
while idate < len(date):
# Save first day of the month
if date[idate].month != mon:
mon = date[idate].month
sta_mon.append(idate)
# Save month mid-day
if date[idate].day == 15:
mid_mon.append(idate)
# Save last day of the month
if idate == len(date) - 1 or (date[idate].month == mon and
date[idate + 1].month != mon):
end_mon.append(idate)
idate += 1
pcs_mo = np.zeros((len(date[mid_mon]), len(lev)), dtype='d')
# Perform analysis by level
for i_lev in np.arange(len(lev)):
# Latitude weighting
if lat_weighting is True:
for j_lat in np.arange(len(lat)):
zg_da_lp[:, i_lev,
j_lat] *= np.sqrt(abs(np.cos(lat[j_lat] * deg_to_r)))
zg_da_lp_an = zg_da_lp[:, i_lev, :] - np.mean(zg_da_lp[:, i_lev, :],
axis=0)
cov = np.dot(zg_da_lp_an.T, zg_da_lp_an) / (n_tim - 1)
# Compute eigenvectors and eigenvalues
eigenval, eigenvec = np.linalg.eig(cov)
sum_eigenval = np.sum(eigenval)
eigenval_norm = eigenval[:] / sum_eigenval
# Largest eigenvalue
max_eigenval = eigenval_norm.argmax()
# PC calculation
pc = np.dot(zg_da_lp_an[:, :], eigenvec)
# Latitude de-weighting
if lat_weighting is True:
for i_lat in np.arange(len(lat)):
eigenvec[i_lat, :] /= np.sqrt(
abs(np.cos(lat[i_lat] * deg_to_r)))
# Retain leading standardized PC & EOF
lead_pc_mean = np.mean(pc[:, max_eigenval], axis=0)
lead_pc_std = np.std(pc[:, max_eigenval], ddof=1, axis=0)
lead_pc = (pc[:, max_eigenval] - lead_pc_mean) / lead_pc_std
lead_eof = eigenvec[:, max_eigenval]
        # Constrain meridional EOF structure
        max_lat = max(range(len(lat)), key=lambda x: lat[x])
        min_lat = min(range(len(lat)), key=lambda x: lat[x])
        # Name the index by hemisphere up front, so it is defined even
        # when the EOF sign does not need to be flipped below.
        index_name = 'NAM' if np.min(lat) > 0. else 'SAM'
        if np.min(lat) > 0. and (lead_eof[max_lat] > lead_eof[min_lat]):
            lead_pc *= -1
            lead_eof *= -1
        if np.min(lat) < 0. and (lead_eof[min_lat] > lead_eof[max_lat]):
            lead_pc *= -1
            lead_eof *= -1
lead_pc_mo = np.zeros(len(date[mid_mon]), dtype='d')
time_mo = np.zeros(len(date[mid_mon]), dtype='d')
# print(lead_pc)
for k_mo in range(len(date[mid_mon])):
lead_pc_mo[k_mo] = np.mean(lead_pc[sta_mon[k_mo]:end_mon[k_mo] +
1])
time_mo[k_mo] = time[mid_mon[k_mo]]
# Store PC/EOF for this level (no time dependent)
eigs[i_lev] = eigenval_norm[max_eigenval]
eofs[i_lev, :] = lead_eof
pcs_da[:, i_lev] = lead_pc
pcs_mo[:, i_lev] = lead_pc_mo
# Save output files
# (1) daily PCs
fname = outdir + '_'.join(src_props) + '_pc_da_' + index_name + '.nc'
outfiles.append(fname)
with netCDF4.Dataset(fname, mode='w') as file_out:
file_out.title = 'Zonal mean annular mode (1)'
file_out.contact = 'F. Serva ([email protected]); \
C. Cagnazzo ([email protected])'
file_out.createDimension('time', None)
file_out.createDimension('plev', np.size(lev))
file_out.createDimension('lat', np.size(lat))
file_out.createDimension('lon', np.size(lon))
time_var = file_out.createVariable('time', 'd', ('time', ))
if time_lnam:
time_var.setncattr('long_name', time_lnam)
if time_snam:
time_var.setncattr('standard_name', time_snam)
time_var.setncattr('units', time_uni)
time_var.setncattr('calendar', time_cal)
time_var[:] = time_dim[:]
lev_var = file_out.createVariable('plev', 'd', ('plev', ))
if lev_lnam:
lev_var.setncattr('long_name', lev_lnam)
if lev_snam:
lev_var.setncattr('standard_name', lev_snam)
lev_var.setncattr('units', lev_uni)
lev_var.setncattr('positive', lev_pos)
lev_var.setncattr('axis', lev_axi)
lev_var[:] = lev[:]
pcs_da_var = file_out.createVariable('PC_da', 'd', (
'time',
'plev',
))
pcs_da_var.setncattr('long_name', 'Daily zonal mean annular mode PC')
pcs_da_var.setncattr('index_type', index_name)
pcs_da_var.setncattr(
'comment',
'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
pcs_da_var[:] = pcs_da[:, :]
# (2) monthly PCs
fname = outdir + '_'.join(src_props) + '_pc_mo_' + index_name + '.nc'
outfiles.append(fname)
with netCDF4.Dataset(fname, mode='w') as file_out:
file_out.title = 'Zonal mean annular mode (2)'
file_out.contact = 'F. Serva ([email protected]); \
C. Cagnazzo ([email protected])'
file_out.createDimension('time', None)
file_out.createDimension('plev', np.size(lev))
time_var = file_out.createVariable('time', 'd', ('time', ))
if time_lnam:
time_var.setncattr('long_name', time_lnam)
if time_snam:
time_var.setncattr('standard_name', time_snam)
time_var.setncattr('units', time_uni)
time_var.setncattr('calendar', time_cal)
time_var[:] = time_mo
lev_var = file_out.createVariable('plev', 'd', ('plev', ))
if lev_lnam:
lev_var.setncattr('long_name', lev_lnam)
if lev_snam:
lev_var.setncattr('standard_name', lev_snam)
lev_var.setncattr('units', lev_uni)
lev_var.setncattr('positive', lev_pos)
lev_var.setncattr('axis', lev_axi)
lev_var[:] = lev[:]
pcs_mo_var = file_out.createVariable('PC_mo', 'd', (
'time',
'plev',
))
pcs_mo_var.setncattr('long_name', 'Monthly zonal mean annular mode PC')
pcs_mo_var.setncattr('index_type', index_name)
pcs_mo_var.setncattr(
'comment',
'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
pcs_mo_var[:] = pcs_mo[:, :]
# (3) EOFs and explained variances
fname = outdir + '_'.join(src_props) + '_eofs_' + index_name + '.nc'
outfiles.append(fname)
with netCDF4.Dataset(fname, mode='w') as file_out:
file_out.title = 'Zonal mean annular mode (3)'
file_out.contact = 'F. Serva ([email protected]); \
C. Cagnazzo ([email protected])'
file_out.createDimension('time', None)
file_out.createDimension('plev', np.size(lev))
file_out.createDimension('lat', np.size(lat))
file_out.createDimension('lon', np.size(lon))
time_var = file_out.createVariable('time', 'd', ('time', ))
if time_lnam:
time_var.setncattr('long_name', time_lnam)
if time_snam:
time_var.setncattr('standard_name', time_snam)
time_var.setncattr('units', time_uni)
time_var.setncattr('calendar', time_cal)
time_var[:] = 0
#
lev_var = file_out.createVariable('plev', 'd', ('plev', ))
if lev_lnam:
lev_var.setncattr('long_name', lev_lnam)
if lev_snam:
lev_var.setncattr('standard_name', lev_snam)
lev_var.setncattr('units', lev_uni)
lev_var.setncattr('positive', lev_pos)
lev_var.setncattr('axis', lev_axi)
lev_var[:] = lev[:]
#
lat_var = file_out.createVariable('lat', 'd', ('lat', ))
lat_var.setncattr('units', lat_uni)
lev_var.setncattr('axis', lat_axi)
lat_var[:] = lat[:]
#
lon_var = file_out.createVariable('lon', 'd', ('lon', ))
lon_var.setncattr('units', lon_uni)
lon_var.setncattr('axis', lon_axi)
lon_var[:] = lon[:]
#
eofs_var = file_out.createVariable('EOF', 'd', ('plev', 'lat'))
eofs_var.setncattr('long_name', 'Zonal mean annular mode EOF')
eofs_var.setncattr('index_type', index_name)
eofs_var.setncattr(
'comment',
'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
eofs_var[:] = eofs[:, :]
#
eigs_var = file_out.createVariable('eigenvalues', 'd', ('plev'))
eigs_var.setncattr('long_name',
'Zonal mean annular mode EOF explained variance')
eigs_var.setncattr('index_type', index_name)
eigs_var.setncattr(
'comment',
'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
eigs_var[:] = eigs[:]
return outfiles | PypiClean |
/IcoCube-0.0.1a6.tar.gz/IcoCube-0.0.1a6/ICO3Plugin/ConnectionManager/ICO3IPListenerConnectionManager.py | import socket
from threading import Thread
from ICO3Plugin.ConnectionManager.ICO3ConnectionManager import ICO3ConnectionManager
from ICO3Plugin.ConnectionManager.ICO3IPLink import ICO3IPLink
from ICO3Utilities.Debug.LogDebug import ICO3Log
from ICO3Utilities.Xml.XmlProcess import XmlProcessing
class ICO3IPListenerConnectionManager(ICO3ConnectionManager):
theTargetList = None
theSocketAddress = None
theAddressFamily = None
theListener = None
def installParameters(self, xPrm):
super().installParameters(xPrm)
self.theTargetList = []
self.theSocketAddress = XmlProcessing.getAttributeValue(xPrm, "ConnectionSocket")
self.theAddressFamily = XmlProcessing.getAttributeValue(xPrm, "AddressFamily")
if (self.theAddressFamily is None):
self.theAddressFamily = "InterNetwork"
TargetPrm = XmlProcessing.getTagElement(self.XmlParameters, "TargetList")
if TargetPrm is None:
return
xTargetDevicePrmList = XmlProcessing.getAllTagElementList(TargetPrm)
if xTargetDevicePrmList is None:
return
for xTD in xTargetDevicePrmList:
TGD = ICO3IPTargetID()
TGD.installParameters(xTD)
TGD.theConnectionManager = self
self.theTargetList.append(TGD)
return
def startConnection(self):
self.StarListener()
def stopConnection(self):
self.stopListener()
super().stopConnection()
pass
def stopListener(self):
self.theListener.stopSocketListenning()
pass
def StarListener(self):
xSTh = ICO3IPDeviceListener()
xSTh.theConnectionManager = self
xSTh.setBindAddress(ICO3IPTargetID.getIPAddress(self.theSocketAddress))
xSTh.run()
self.theListener = xSTh
return
def createLink(self, xSocket):
ICO3Log.print("Connection","Create Link Listener--->" + str(xSocket.getpeername()))
xLink = ICO3IPLink()
xLink.theMode = "NODE"
xLink.theSocket = xSocket
xLink.theConnectionManager = self
xLink.theIdentificationMode = "LISTENER"
self.installLink(xLink)
pass
class ICO3IPTargetID:
connectionSocket = None
typeConnection = None
nodeConnection = None
theConnectionManager = None
theIPSocket = None
theLinkSocket = None
SockAddConnect = None
def installParameters(self, xPrm):
self.connectionSocket = XmlProcessing.getAttributeValue(xPrm, "ConnectionSocket")
self.typeConnection = XmlProcessing.getAttributeValue(xPrm, "type")
self.nodeConnection = XmlProcessing.getAttributeValue(xPrm, "Node")
pass
@staticmethod
def getIPAddress(SAddress):
if not isinstance(SAddress, str):
return None
xSplit = SAddress.split(":")
return (xSplit[0], int(xSplit[1]))
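    # For example (illustrative): getIPAddress("127.0.0.1:8080")
    # returns ("127.0.0.1", 8080).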
class ICO3IPDeviceListener():
BindAddress = None
theConnectionManager = None
Running = False
theIPSocketListener = None
def setBindAddress(self, xBind):
self.BindAddress = xBind
def run(self):
self.Running = True
        LTread = Thread(target=self.SocketListener)
LTread.start()
def stopSocketListenning(self):
        print("Stop Socket Listener")
self.Running = False
if self.theIPSocketListener is not None:
self.theIPSocketListener.close()
def SocketListener(self):
try:
self.theIPSocketListener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.theIPSocketListener.bind(self.BindAddress)
ICO3Log.print("Connection","Start Listener @ " + str(self.BindAddress))
self.theIPSocketListener.listen(10)
except Exception as e:
ICO3Log.print("Connection","ICO3IPListenerConnectionManager Socket "+str(e))
if self.theIPSocketListener is not None:
self.theIPSocketListener.close()
self.theIPSocketListener = None
return
while self.Running:
xConn = self.SocketListenerAcceptloop()
ICO3Log.print("Connection","*************** Accept Loop Listener **********")
if xConn is not None:
if self.theConnectionManager is not None:
self.theConnectionManager.createLink(xConn)
ICO3Log.print("Connection","Stop Socket Listener ")
if self.theIPSocketListener is not None:
self.theIPSocketListener.close()
self.theIPSocketListener = None
pass
def SocketListenerAcceptloop(self):
try:
conn, addr = self.theIPSocketListener.accept() # Should be ready
ICO3Log.print("Connection",'New connection from ' + str(addr))
return conn
except Exception as e:
ICO3Log.print("Connection","ICO3IPListenerConnectionManager Accept "+str(e))
return None | PypiClean |
/DatabaseDataGenerator-1.7.0-py3-none-any.whl/DataGenerator/Generators.py | import random
import string
from faker import Faker
from faker_vehicle import VehicleProvider
class Generator:
class GeneratorOutOfItemsException(Exception):
pass
def __init__(self):
pass
def generate(self):
pass
def pickUnique(self, fakerFunction, pickTable):
picks = 0
while True:
name = fakerFunction()
if name not in pickTable:
pickTable.append(name)
break
picks += 1
if picks > 1000:
                raise self.GeneratorOutOfItemsException
return name
class RandomStringGenerator(Generator):
class EmptyStringException(Exception):
pass
def __init__(self, length=10,
hasLowercase=True,
hasUppercase=False,
hasDigits=False):
self.length = length
self.hasLowercase = hasLowercase
self.hasDigits = hasDigits
self.hasUppercase = hasUppercase
def generate(self):
self.__validateChoices()
choice = self.__getChoices()
ran = ''.join(random.choices(choice, k=self.length))
return ran
def __getChoices(self):
choice = ""
if self.hasLowercase:
choice += string.ascii_lowercase
if self.hasUppercase:
choice += string.ascii_uppercase
if self.hasDigits:
choice += string.digits
return choice
def __validateChoices(self):
if (
not self.hasLowercase and not self.hasUppercase and not self.hasDigits):
raise self.EmptyStringException(
"Random string can not be empty!")
class SequentialPatternGenerator(Generator):
def __init__(self, pattern, chList):
self.__pattern = pattern
self.__chList = chList
def generate(self):
try:
pick = self.__chList.pop(0)
output = self.__trySub(self.__pattern, "%s", pick)
return output
except IndexError:
raise self.GeneratorOutOfItemsException
def __trySub(self, istr, pattern, sub):
return istr.replace(pattern, str(sub))
class RandomIntegerGenerator(RandomStringGenerator):
def __init__(self, imin, imax):
super().__init__()
self.imin = int(imin)
self.imax = int(imax)
def generate(self):
ran = random.randint(self.imin, self.imax)
return int(ran)
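    # Example (illustrative): RandomIntegerGenerator(1, 6).generate()
    # yields an int in [1, 6], endpoints inclusive.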
class RandomFloatGenerator(RandomStringGenerator):
def __init__(self, fmin, fmax, decimals=2):
super().__init__()
self.__fmin = int(fmin)
self.__fmax = int(fmax)
self.__decimals = decimals
def generate(self):
ran = self.__fmin + (random.random() * (self.__fmax - self.__fmin))
return round(float(ran), self.__decimals)
class SerialGenerator(Generator):
def __init__(self, start=0, step=1):
self.start = start
self.step = step
self.current = start
def generate(self):
output = self.current
self.current += self.step
return output
class ConstantGenerator(Generator):
def __init__(self, value):
self.__value = value
def generate(self):
return self.__value
class SetGenerator(Generator):
def __init__(self, chSet, destructive=False):
self.chSet = list(chSet)
self.__destructive = destructive
def generate(self):
try:
pick = random.choice(self.chSet)
if self.__destructive:
self.chSet.remove(pick)
return pick
except IndexError:
raise self.GeneratorOutOfItemsException
class SequentialSetGenerator(Generator):
def __init__(self, chSet):
self.chSet = list(chSet)
def generate(self):
try:
pick = self.chSet.pop(0)
return pick
except IndexError:
raise self.GeneratorOutOfItemsException
class FakeFirstNameGenerator(Generator):
def __init__(self, unique=False):
self.__unique = unique
if unique:
self.__pickTable = []
self.__faker = Faker()
self.__fakerFunction = self.__faker.first_name
# todo this can be done better by making it more generic
def generate(self):
if self.__unique:
name = self.pickUnique(self.__fakerFunction, self.__pickTable)
else:
            name = self.__fakerFunction()
return name
class FakeLastNameGenerator(Generator):
def __init__(self, unique=False):
self.__unique = unique
if unique:
self.__pickTable = []
self.__faker = Faker()
self.__fakerFunction = self.__faker.last_name
def generate(self):
if self.__unique:
name = self.pickUnique(self.__fakerFunction, self.__pickTable)
else:
            name = self.__fakerFunction()
return name
class FakeNameGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = f"{self.__faker.first_name()} {self.__faker.last_name()}"
return name
class FakeCityGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.city()
name = name.replace('\'', "")
return name
class FakeCountryGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.country()
name = name.replace('\'', "")
return name
class FakeStreetGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.street_name()
name = name.replace('\'', "")
return name
class FakeEmailGenerator(Generator):
def __init__(self, unique=False):
self.__unique = unique
if unique:
self.__pickTable = []
self.__faker = Faker()
self.__fakerFunction = self.__faker.email
def generate(self):
if self.__unique:
name = self.pickUnique(self.__fakerFunction, self.__pickTable)
else:
            name = self.__fakerFunction()
name = name.replace('\'', "")
return name
class FakeIPv4Generator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.ipv4()
name = name.replace('\'', "")
return name
class FakeIPv6Generator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.ipv6()
name = name.replace('\'', "")
return name
class FakeMacGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.mac_address()
name = name.replace('\'', "")
return name
class FakeUriGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.uri()
name = name.replace('\'', "")
return name
class FakeUrlGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.url()
name = name.replace('\'', "")
return name
class FakeUsernameGenerator(Generator):
def __init__(self, unique=False):
self.__unique = unique
if unique:
self.__pickTable = []
self.__faker = Faker()
self.__fakerFunction = self.__faker.user_name
def generate(self):
if self.__unique:
name = self.pickUnique(self.__fakerFunction, self.__pickTable)
else:
            name = self.__fakerFunction()
name = name.replace('\'', "")
return name
class FakeCreditCardNumberGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
ccnumber = self.__faker.credit_card_number()
ccnumber = ccnumber.replace('\'', "")
return ccnumber
class FakeDateGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
date = self.__faker.date()
date = str(date)
date = date.replace('\'', "")
return date
class FakeCurrentDecadeDateGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
dateTime = self.__faker.date_this_decade()
return str(dateTime)
class FakeCurrentMonthDateGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
dateTime = self.__faker.date_this_month()
return str(dateTime)
class FakeCurrentYearDateGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
datetime = self.__faker.date_this_year()
return str(datetime)
class FakeDateTimeGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
datetime = self.__faker.date_time_ad()
return str(datetime)
class FakeCurrentDecadeDateTimeGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
datetime = self.__faker.date_time_this_decade()
return str(datetime)
class FakeCurrentMonthDateTimeGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
datetime = self.__faker.date_time_this_month()
return str(datetime)
class FakeCurrentYearDateTimeGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
datetime = self.__faker.date_time_this_year()
return str(datetime)
class FakeVehicleModelGenerator(Generator):
def __init__(self):
self.__faker = Faker()
self.__faker.add_provider(VehicleProvider)
def generate(self):
name = self.__faker.vehicle_model()
name = name.replace('\'', "")
return name
class FakeVehicleMakeGenerator(Generator):
def __init__(self):
self.__faker = Faker()
self.__faker.add_provider(VehicleProvider)
def generate(self):
name = self.__faker.vehicle_make()
name = name.replace('\'', "")
return name
class FakeLicensePlateGenerator(Generator):
def __init__(self):
self.__faker = Faker()
def generate(self):
name = self.__faker.license_plate()
name = name.replace('\'', "")
return name
class PrettyTimeGenerator(Generator):
def __init__(self, imin, imax):
self.__imin = imin
self.__imax = imax
self.secondsInMinute = 60
self.secondsInHour = 60 * 60
self.secondsInDay = 60 * 60 * 24
def generate(self):
time = random.randint(self.__imin, self.__imax)
minutes = 0
hours = 0
days = 0
while time >= self.secondsInDay:
days += 1
time -= self.secondsInDay
while time >= self.secondsInHour:
hours += 1
time -= self.secondsInHour
while time >= self.secondsInMinute:
minutes += 1
time -= self.secondsInMinute
seconds = time
timeStr = ""
timeStr = self.__addValIfNotNone(timeStr, days, 'd');
timeStr = self.__addValIfNotNone(timeStr, hours, 'h');
timeStr = self.__addValIfNotNone(timeStr, minutes, 'm');
timeStr = self.__addValIfNotNone(timeStr, seconds, 's');
timeStr = timeStr[:-1]
return timeStr
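    # Example (illustrative): with imin == imax == 90061, generate()
    # returns "1d 1h 1m 1s".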
def __addValIfNotNone(self, istr, val, affix):
if val > 0:
istr += str(val) + affix + " "
return istr | PypiClean |
/Nitrous-0.9.3-py3-none-any.whl/turbogears/startup.py |
from builtins import str
__all__ = ['call_on_startup', 'call_on_shutdown',
'reloader_thread', 'webpath',
'start_bonjour', 'stop_bonjour', 'start_server',
'start_turbogears', 'stop_turbogears']
import atexit
import logging
import os
import signal
import sys
from os.path import abspath, exists
import pkg_resources
import cherrypy
pkg_resources.require('Nitrous')
from turbogears import config, database, scheduler, view
from turbogears.visit.api import VisitTool
from turbogears.identity.exceptions import IdentityConfigurationException
from turbogears.identity.base import verify_identity_status
from turbogears.dispatchers import VirtualPathDispatcher
from turbogears.hooks import NestedVariablesHook
# module globals
log = logging.getLogger("turbogears.startup")
dns_sd_pid = None
call_on_startup = []
call_on_shutdown = []
webpath = ''
started = False
# module public functions
def start_bonjour(package=None):
"""Register the TurboGears server with Apple's Bonjour framework.
Currently only Unix-like systems are supported where either the 'avahi'
daemon (Linux etc.) is available or the 'dns-sd' program (Mac OS X).
"""
global dns_sd_pid
if dns_sd_pid:
return
if sys.platform in ('win32', 'os2'):
dns_sd_pid = -1 # service not available
return
if not package:
app = cherrypy.tree.apps.get('')
if not app:
return
package = app.root.__module__
package = package.split('.', 1)[0]
host = config.get('server.socket_host', '0.0.0.0')
port = str(config.get('server.socket_port'))
env = config.get('environment') or 'development'
name = '%s:%s' % (package, env)
typ = '_http._tcp'
cmds = [
('/usr/bin/avahi-publish-service', ['-H', host, name, typ, port]),
('/usr/bin/dns-sd', ['-R', name, typ, '.' + host, port, 'path=/'])]
for cmd, args in cmds:
# TODO: This check is flawed. If one has both services installed and
# avahi isn't the one running, then this won't work. We should either
# try registering with both or checking what service is running and use
# that. Program availability on the file system was never enough...
if exists(cmd):
dns_sd_pid = os.spawnv(os.P_NOWAIT, cmd, [cmd] + args)
atexit.register(stop_bonjour)
break
else:
dns_sd_pid = -1 # service not available
def stop_bonjour():
"""Stop the Bonjour publishing daemon if it is running."""
if not dns_sd_pid or dns_sd_pid < 0:
return
try:
os.kill(dns_sd_pid, signal.SIGTERM)
except OSError:
pass
def config_static():
"""Configure serving static content used by TurboGears."""
config.update({'/tg_static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': abspath(
pkg_resources.resource_filename(__name__, 'static'))}})
config.update({'/tg_js': {
'tools.staticdir.on': True,
'tools.staticdir.dir': abspath(
pkg_resources.resource_filename(__name__, 'static/js'))}})
def config_root():
"""Configure the encoding and virtual path for the root controller."""
global webpath
encoding = config.get('genshi.default_encoding',
config.get('kid.encoding', 'utf-8'))
config.update({'/': {
'tools.decode.on': True,
'tools.decode.encoding': encoding,
'tools.encode.on': False,
'tools.encode.encoding': encoding,
'tools.encode.text_only': False,
'tools.encode.add_charset': False}})
webpath = config.get('server.webpath') or ''
if webpath:
# sanitize server.webpath setting
webpath = webpath.strip('/')
if webpath:
webpath = '/' + webpath
config.update({'server.webpath': webpath})
# configure virtual path dispatcher for webpath
if webpath:
config.update({'/': {'request.dispatch': VirtualPathDispatcher(
config.get('request.dispatch'), webpath)}})
def start_turbogears():
"""Handles TurboGears tasks when the CherryPy server starts.
This performs the following initialization tasks (in given order):
* Loads the template engines and the base templates.
* Turns off CherryPy access and error logging to screen since
      it interferes with our own logging configuration. You can use
the qualnames cherrypy.access and cherrypy.error for these messages.
* Adds a static tool for TurboGears's static files (URL '/tg_static').
* Adds a static tool for TurboGears's JavaScript files (URL '/tg_js').
* Adds a tool for decoding request parameters to Unicode.
* Adds a virtual path dispatcher if enabled in the configuration.
* Adds CherryPy tools and hooks for visit tracking, identity,
database and decoding parameters into nested dictionaries.
* Registers the server with the Bonjour framework, if available.
* Calls 'turbogears.database.bind_metadata' when using SQLAlchemy.
* Loads all turbogears.extensions entry points and calls their
'start_extension' method.
* Calls the callables registered in 'turbogears.call_on_startup'.
* Starts the TurboGears scheduler if enabled in the configuration.
"""
global started
if started:
log.info("TurboGears has already been started.")
return
log.info("Starting TurboGears...")
# Initialize template engines and load base templates
log.info("Loading template engines...")
view.load_engines()
view.loadBaseTemplates()
# Add CherryPy request hooks
log.info("Adding CherryPy tools, hooks and dispatchers...")
config_static()
config_root()
hooks = cherrypy.request.hooks
cherrypy.request.original_hooks = hooks.copy()
hooks.attach('before_finalize', verify_identity_status)
hooks.attach('on_end_resource', database.EndTransactions)
# The NestedVariablesHook needs to happen after cherrypy.tools.decode
# so that request params are properly decoded before it runs
hooks.attach('before_handler', NestedVariablesHook, priority=64)
if config.get('visit.on', False):
# The VisitTool needs to happen after cherrypy.tools.decode
# so that request params are properly decoded before it runs,
# but it must run before the NestedVariablesHook to work properly
cherrypy.tools.visit = cherrypy.Tool(
'before_handler', VisitTool(), priority=62)
# Register server with Bonjour framework
bonjour = config.get('tg.bonjour', None)
env = config.get('environment') or 'development'
if bonjour or env == 'development':
log.info("Starting the Bonjour service...")
start_bonjour(bonjour)
# Bind metadata for SQLAlchemy
if config.get('sqlalchemy.dburi'):
log.info("Binding metadata for SQLAlchemy...")
database.bind_metadata()
# Start all TurboGears extensions
extensions = pkg_resources.iter_entry_points('turbogears.extensions')
for entrypoint in extensions:
# We try to load the extension and run its 'start_extension' method,
# if present. If either fails, we simply log the exception and
# continue, because a) when the autoreloader is active, unhandled
# exceptions in the startup phase will not stop the server and
# b) faulty extensions (which may be from a different package)
# should not crash the server.
log.info("Starting TurboGears extension %s..." % entrypoint)
try:
ext = entrypoint.load()
except Exception as e:
log.exception("Error loading TurboGears extension plugin %s: %s",
entrypoint, e)
continue
if hasattr(ext, 'start_extension'):
try:
ext.start_extension()
except Exception as e:
log.exception("Error starting TurboGears extension %s: %s",
entrypoint, e)
if isinstance(e, IdentityConfigurationException):
raise # don't swallow internal configuration error
# Call registered startup functions
if call_on_startup:
log.info("Running the registered startup functions...")
for startup_function in call_on_startup:
startup_function()
# Start the scheduler
if config.get('tg.scheduler', False):
log.info("Starting the scheduler...")
scheduler.start_scheduler()
started = True
log.info("TurboGears has been started.")
def stop_turbogears():
"""Handles TurboGears tasks when the CherryPy server stops.
Ends all open database transactions, shuts down all extensions,
calls user provided shutdown functions and stops the scheduler.
"""
global started
if not started:
log.info("TurboGears has already been stopped.")
return
log.info("Stopping TurboGears...")
if config.get('tg.scheduler', False):
log.info("Stopping the scheduler...")
scheduler.stop_scheduler()
# Call registered shutdown functions
if call_on_shutdown:
log.info("Running the registered shutdown functions...")
for shutdown_function in call_on_shutdown:
shutdown_function()
# Shut down all TurboGears extensions
extensions = pkg_resources.iter_entry_points('turbogears.extensions')
for entrypoint in extensions:
log.info("Stopping TurboGears extension %s" % entrypoint)
try:
ext = entrypoint.load()
except Exception as e:
log.exception("Error loading TurboGears extension plugin '%s': %s",
entrypoint, e)
continue
if hasattr(ext, 'shutdown_extension'):
try:
ext.shutdown_extension()
except Exception as e:
log.exception(
"Error shutting down TurboGears extension '%s': %s",
entrypoint, e)
log.info("Stopping the Bonjour service...")
stop_bonjour()
# Restore CherryPy request hooks
log.info("Removing additional CherryPy hooks...")
try:
cherrypy.request.hooks = cherrypy.request.original_hooks
except AttributeError:
log.debug("CherryPy hooks could not be restored.")
started = False
log.info("TurboGears has been stopped.")
def start_server(root):
"""Start the CherryPy Server."""
if started:
log.info("The server has already been started.")
return
in_production = config.get('environment') == 'production'
if not in_production:
try:
# Help for the configuration spelling:
# https://blog.joel.mx/posts/debugging-cherrypy-applications-with-werkzeug # noqa
import backlash
config.update({'/': {
'request.throw_errors': True,
'wsgi.pipeline': [('debugger', backlash.DebuggedApplication),],
'wsgi.debugger.evalex': True
}})
except ImportError:
log.info('TurboGears supports interactive debugging in '
'non-production environments via backlash '
'(pip install backlash to enable).')
app = cherrypy.tree.mount(root, config=config.app)
config.update({'log.screen': False})
embedded = config.get('environment') == 'embedded'
if config.get('engine.start', not embedded):
cherrypy.engine.start()
if config.get('engine.block', not embedded):
cherrypy.engine.block()
else:
start_turbogears()
atexit.register(stop_turbogears)
return app
# Subscribe to engine events at import time so that our callbacks get used
# regardless of how the server is started.
cherrypy.engine.subscribe('start', start_turbogears)
cherrypy.engine.subscribe('stop', stop_turbogears)
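# Editor's note: minimal usage sketch added for illustration; it is not part
# of the original module. It assumes the application configuration has
# already been loaded (start_server() reads config.app) and serves a trivial
# controller on the configured port.
if __name__ == "__main__":
    class _DemoRoot(object):
        """Trivial controller used only by this sketch."""
        @cherrypy.expose
        def index(self):
            return "Hello from TurboGears"

    start_server(_DemoRoot())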
/7Wonder-RL-Lib-0.1.1.tar.gz/7Wonder-RL-Lib-0.1.1/.github/ISSUE_TEMPLATE/feature_request.md
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
| PypiClean |
/NREL_sup3r-0.1.0-py3-none-any.whl/sup3r/qa/utilities.py
"""Utilities used for QA"""
import numpy as np
from scipy.interpolate import interp1d
import logging
logger = logging.getLogger(__name__)
def tke_frequency_spectrum(u, v, f_range=None):
"""Kinetic Energy Spectrum. Gives the portion of kinetic energy
associated with each frequency.
Parameters
----------
    u: ndarray
        (lat, lon, temporal)
        U component of wind
    v : ndarray
        (lat, lon, temporal)
        V component of wind
    f_range : list | None
        List with min and max frequency. When comparing spectra for different
        domains this needs to be tailored to the specific domain. e.g. f =
        [1/max_time, ..., 1/min_time]. If this is not specified, f will be
        set to [0, ..., len(y)] where y is the fft output.
    Returns
    -------
    ndarray
        Array of frequencies corresponding to energy amplitudes
    ndarray
        1D array of amplitudes corresponding to the portion of total energy
        with a given frequency
    """
v_f = np.fft.fftn(v.reshape((-1, v.shape[-1])))
u_f = np.fft.fftn(u.reshape((-1, u.shape[-1])))
E_f = np.abs(v_f)**2 + np.abs(u_f)**2
E_f = np.mean(E_f, axis=0)
if f_range is None:
f = np.arange(len(E_f))
else:
f = np.linspace(f_range[0], f_range[1], len(E_f))
E_f = f**2 * E_f
n_steps = E_f.shape[0] // 2
E_f_a = E_f[:n_steps]
E_f_b = E_f[-n_steps:][::-1]
E_f = E_f_a + E_f_b
return f[:n_steps], E_f
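# Editor's note: the usage sketch below is not part of the original module;
# the grid shape and the frequency range are illustrative assumptions
# (e.g. hourly data spanning 48 steps).
def _demo_tke_frequency_spectrum():
    """Compute the TKE frequency spectrum of a synthetic (lat, lon, time)
    wind field."""
    u = np.random.normal(size=(10, 10, 48))
    v = np.random.normal(size=(10, 10, 48))
    f, E_f = tke_frequency_spectrum(u, v, f_range=[1 / 48, 1.0])
    return f, E_f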
def frequency_spectrum(var, f_range=None):
"""Frequency Spectrum. Gives the portion of the variable
associated with each frequency.
Parameters
----------
var: ndarray
(lat, lon, temporal)
f_range : list | None
List with min and max frequency. When comparing spectra for different
domains this needs to be tailored to the specific domain. e.g. f =
        [1/max_time, ..., 1/min_time]. If this is not specified, f will be
set to [0, ..., len(y)] where y is the fft output.
Returns
-------
ndarray
Array of frequencies corresponding to energy amplitudes
ndarray
1D array of amplitudes corresponding to the portion of the variable
with a given frequency
"""
var_f = np.fft.fftn(var.reshape((-1, var.shape[-1])))
E_f = np.abs(var_f)**2
E_f = np.mean(E_f, axis=0)
if f_range is None:
f = np.arange(len(E_f))
else:
f = np.linspace(f_range[0], f_range[1], len(E_f))
E_f = f**2 * E_f
n_steps = E_f.shape[0] // 2
E_f_a = E_f[:n_steps]
E_f_b = E_f[-n_steps:][::-1]
E_f = E_f_a + E_f_b
return f[:n_steps], E_f
def tke_wavenumber_spectrum(u, v, x_range=None, axis=0):
"""Turbulent Kinetic Energy Spectrum. Gives the portion of kinetic energy
associated with each wavenumber.
Parameters
----------
u: ndarray
(lat, lon)
U component of wind
v : ndarray
(lat, lon)
V component of wind
x_range : list | None
List with min and max wavenumber. When comparing spectra for different
domains this needs to be tailored to the specific domain. e.g. k =
        [1/max_length, ..., 1/min_length]. If this is not specified, k will be
set to [0, ..., len(y)] where y is the fft output.
axis : int
Axis to average over to get a 1D wind field. If axis=0 this returns
the zonal energy spectrum
Returns
-------
ndarray
Array of wavenumbers corresponding to energy amplitudes
ndarray
1D array of amplitudes corresponding to the portion of total energy
with a given wavenumber
"""
u_k = np.fft.fftn(u)
v_k = np.fft.fftn(v)
E_k = np.mean(np.abs(v_k)**2 + np.abs(u_k)**2, axis=axis)
if x_range is None:
k = np.arange(len(E_k))
else:
k = np.linspace(x_range[0], x_range[1], len(E_k))
n_steps = len(k) // 2
E_k = k**2 * E_k
E_k_a = E_k[1:n_steps + 1]
E_k_b = E_k[-n_steps:][::-1]
E_k = E_k_a + E_k_b
return k[:n_steps], E_k
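# Editor's note: the usage sketch below is not part of the original module;
# the 64-cell grid and the 4 km spacing are illustrative assumptions.
def _demo_tke_wavenumber_spectrum():
    """Zonal kinetic energy spectrum of a synthetic 2D wind field."""
    u = np.random.normal(size=(64, 64))
    v = np.random.normal(size=(64, 64))
    domain_km = 64 * 4.0  # assumed domain length
    k, E_k = tke_wavenumber_spectrum(u, v, x_range=[1 / domain_km, 1 / 4.0],
                                     axis=0)
    return k, E_k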
def wavenumber_spectrum(var, x_range=None, axis=0):
"""Wavenumber Spectrum. Gives the portion of the given variable
associated with each wavenumber.
Parameters
----------
var: ndarray
(lat, lon)
x_range : list | None
List with min and max wavenumber. When comparing spectra for different
domains this needs to be tailored to the specific domain. e.g. k =
        [1/max_length, ..., 1/min_length]. If this is not specified, k will be
set to [0, ..., len(y)] where y is the fft output.
axis : int
Axis to average over to get a 1D field. If axis=0 this returns
the zonal spectrum
Returns
-------
ndarray
Array of wavenumbers corresponding to amplitudes
ndarray
1D array of amplitudes corresponding to the portion of the given
variable with a given wavenumber
"""
var_k = np.fft.fftn(var)
E_k = np.mean(np.abs(var_k)**2, axis=axis)
if x_range is None:
k = np.arange(len(E_k))
else:
k = np.linspace(x_range[0], x_range[1], len(E_k))
n_steps = len(k) // 2
E_k = k**2 * E_k
E_k_a = E_k[1:n_steps + 1]
E_k_b = E_k[-n_steps:][::-1]
E_k = E_k_a + E_k_b
return k[:n_steps], E_k
def direct_dist(var, bins=40, range=None, diff_max=None, scale=1,
percentile=99.9, interpolate=False, period=None):
"""Returns the direct distribution for the given variable.
Parameters
----------
var: ndarray
(lat, lon, temporal)
bins : int
Number of bins for the direct pdf.
range : tuple | None
Optional min/max range for the direct pdf.
diff_max : float
Max value to keep for given variable
scale : int
Factor to scale the distribution by. This is used so that distributions
from data with different resolutions can be compared. For instance, if
this is calculating a vorticity distribution from data with a spatial
resolution of 4km then the distribution needs to be scaled by 4km to
compare to another scaled vorticity distribution with a different
resolution.
percentile : float
Percentile to use to determine the maximum allowable value in the
distribution. e.g. percentile=99 eliminates values above the 99th
percentile from the histogram.
interpolate : bool
Whether to interpolate over histogram counts. e.g. if a bin has
count = 0 and surrounding bins have count > 0 the bin with count = 0
will have an interpolated value.
period : float | None
If variable is periodic this gives that period. e.g. If the variable
is winddirection the period is 360 degrees and we need to account for
0 and 360 being close.
Returns
-------
ndarray
var at bin centers
ndarray
var value counts
float
Normalization factor
"""
if period is not None:
diffs = (var + period) % period
diffs /= scale
else:
diffs = var / scale
diff_max = diff_max or np.percentile(np.abs(diffs), percentile)
diffs = diffs[(np.abs(diffs) < diff_max)]
norm = np.sqrt(np.mean(diffs**2))
counts, centers = continuous_dist(diffs, bins=bins, range=range,
interpolate=interpolate)
return centers, counts, norm
def gradient_dist(var, bins=40, range=None, diff_max=None, scale=1,
percentile=99.9, interpolate=False, period=None):
"""Returns the gradient distribution for the given variable.
Parameters
----------
var: ndarray
(lat, lon, temporal)
bins : int
Number of bins for the gradient pdf.
range : tuple | None
Optional min/max range for the gradient pdf.
diff_max : float
Max value to keep for gradient
scale : int
Factor to scale the distribution by. This is used so that distributions
from data with different resolutions can be compared. For instance, if
this is calculating a velocity gradient distribution from data with a
spatial resolution of 4km then the distribution needs to be scaled by
4km to compare to another scaled velocity gradient distribution with a
different resolution.
percentile : float
Percentile to use to determine the maximum allowable value in the
distribution. e.g. percentile=99 eliminates values above the 99th
percentile from the histogram.
interpolate : bool
Whether to interpolate over histogram counts. e.g. if a bin has
count = 0 and surrounding bins have count > 0 the bin with count = 0
will have an interpolated value.
period : float | None
If variable is periodic this gives that period. e.g. If the variable
is winddirection the period is 360 degrees and we need to account for
0 and 360 being close.
Returns
-------
ndarray
d(var) / dx at bin centers
ndarray
d(var) / dx value counts
float
Normalization factor
"""
diffs = np.diff(var, axis=1).flatten()
if period is not None:
diffs = (diffs + period / 2) % period - period / 2
diffs /= scale
diff_max = diff_max or np.percentile(np.abs(diffs), percentile)
diffs = diffs[(np.abs(diffs) < diff_max)]
norm = np.sqrt(np.mean(diffs**2))
counts, centers = continuous_dist(diffs, bins=bins, range=range,
interpolate=interpolate)
return centers, counts, norm
def time_derivative_dist(var, bins=40, range=None, diff_max=None, t_steps=1,
scale=1, percentile=99.9, interpolate=False,
period=None):
"""Returns the time derivative distribution for the given variable.
Parameters
----------
var: ndarray
(lat, lon, temporal)
bins : int
Number of bins for the time derivative pdf.
range : tuple | None
Optional min/max range for the time derivative pdf.
diff_max : float
Max value to keep for time derivative
t_steps : int
Number of time steps to use for differences. e.g. If t_steps=1 this
uses var[i + 1] - [i] to compute time derivatives.
scale : int
Factor to scale the distribution by. This is used so that distributions
from data with different resolutions can be compared. For instance, if
this is calculating a time derivative distribution from data with a
temporal resolution of 15min then the distribution needs to be scaled
by 15min to compare to another scaled time derivative distribution with
        a different resolution.
percentile : float
Percentile to use to determine the maximum allowable value in the
distribution. e.g. percentile=99 eliminates values above the 99th
percentile from the histogram.
interpolate : bool
Whether to interpolate over histogram counts. e.g. if a bin has
count = 0 and surrounding bins have count > 0 the bin with count = 0
will have an interpolated value.
period : float | None
If variable is periodic this gives that period. e.g. If the variable
is winddirection the period is 360 degrees and we need to account for
0 and 360 being close.
Returns
-------
ndarray
d(var) / dt values at bin centers
ndarray
d(var) / dt value counts
float
Normalization factor
"""
    msg = (f'Received t_steps={t_steps} for time derivative calculation but '
           f'data only has {var.shape[-1]} time steps')
assert t_steps < var.shape[-1], msg
diffs = (var[..., t_steps:] - var[..., :-t_steps]).flatten()
if period is not None:
diffs = (diffs + period / 2) % period - period / 2
diffs /= scale
diff_max = diff_max or np.percentile(np.abs(diffs), percentile)
diffs = diffs[(np.abs(diffs) < diff_max)]
norm = np.sqrt(np.mean(diffs**2))
counts, centers = continuous_dist(diffs, bins=bins, range=range,
interpolate=interpolate)
return centers, counts, norm
def continuous_dist(diffs, bins=None, range=None, interpolate=False):
"""Get interpolated distribution from histogram
Parameters
----------
diffs : ndarray
Array of values to use to construct distribution
bins : int
Number of bins for the distribution. If None then the number of bins
will be determined from the value range and the smallest difference
between values
range : tuple | None
Optional min/max range for the distribution.
interpolate : bool
Whether to interpolate over histogram counts. e.g. if a bin has
count = 0 and surrounding bins have count > 0 the bin with count = 0
will have an interpolated value.
Returns
-------
ndarray
distribution value counts
ndarray
distribution values at bin centers
"""
if bins is None:
dx = np.abs(np.diff(diffs))
dx = dx[dx > 0]
dx = np.mean(dx)
bins = int((np.max(diffs) - np.min(diffs)) / dx)
logger.debug(f'Using n_bins={bins} to compute distribution')
counts, edges = np.histogram(diffs, bins=bins, range=range)
centers = edges[:-1] + (np.diff(edges) / 2)
if interpolate:
indices = np.where(counts > 0)
y = counts[indices]
x = centers[indices]
if len(x) > 1:
interp = interp1d(x, y, bounds_error=False, fill_value=0)
counts = interp(centers)
counts = counts.astype(float) / counts.sum()
    return counts, centers
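# Editor's note: the usage sketch below is not part of the original module;
# the field shape and the 4 km scale factor are illustrative assumptions.
def _demo_gradient_dist():
    """Histogram of spatial gradients for a synthetic (lat, lon, time)
    field, scaled so it can be compared across grid resolutions."""
    var = np.random.normal(size=(32, 32, 24))
    centers, counts, norm = gradient_dist(var, bins=40, scale=4,
                                          percentile=99)
    return centers, counts, norm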
/Klampt-0.9.0-cp36-cp36m-win_amd64.whl/klampt/math/so3.py
import math
from . import vectorops
from typing import Tuple,Callable
from ..model.typing import Rotation,Matrix3,Vector3
def __str__(R : Rotation) -> str:
"""Converts a rotation to a string."""
return '\n'.join([' '.join([str(ri) for ri in r]) for r in matrix(R)])
def identity() -> Rotation:
"""Returns the identity rotation."""
return [1.,0.,0.,0.,1.,0.,0.,0.,1.]
def inv(R : Rotation) -> Rotation:
"""Inverts the rotation."""
Rinv = [R[0],R[3],R[6],R[1],R[4],R[7],R[2],R[5],R[8]]
return Rinv
def apply(R : Rotation, point : Vector3) -> Vector3:
"""Applies the rotation to a point."""
return (R[0]*point[0]+R[3]*point[1]+R[6]*point[2],
R[1]*point[0]+R[4]*point[1]+R[7]*point[2],
R[2]*point[0]+R[5]*point[1]+R[8]*point[2])
def matrix(R : Rotation) -> Matrix3:
"""Returns the 3x3 rotation matrix corresponding to R."""
return [[R[0],R[3],R[6]],
[R[1],R[4],R[7]],
[R[2],R[5],R[8]]]
def from_matrix(mat : Matrix3) -> Rotation:
"""Returns a rotation R corresponding to the 3x3 rotation matrix mat."""
R = [mat[0][0],mat[1][0],mat[2][0],mat[0][1],mat[1][1],mat[2][1],mat[0][2],mat[1][2],mat[2][2]]
return R
def ndarray(R : Rotation) -> "ndarray":
"""Returns the 3x3 numpy rotation matrix corresponding to R."""
import numpy
return numpy.array(matrix(R))
def from_ndarray(mat : "ndarray") -> Rotation:
"""Returns a rotation R corresponding to the 3x3 rotation matrix mat."""
return mat.T.flatten().tolist()
def mul(R1 : Rotation, R2 : Rotation) -> Rotation:
"""Multiplies two rotations."""
if len(R1) != 9: raise ValueError("R1 is not a rotation matrix")
if len(R2) != 9: raise ValueError("R2 is not a rotation matrix (did you mean to use apply())?")
m1=matrix(R1)
m2T=matrix(inv(R2))
mres = matrix(identity())
for i in range(3):
for j in range(3):
mres[i][j] = vectorops.dot(m1[i],m2T[j])
R = from_matrix(mres)
return R
def trace(R : Rotation) -> float:
"""Computes the trace of the rotation matrix."""
return R[0]+R[4]+R[8]
def angle(R : Rotation) -> float:
"""Returns absolute deviation of R from identity"""
ctheta = (trace(R) - 1.0)*0.5
return math.acos(max(min(ctheta,1.0),-1.0))
def rpy(R : Rotation) -> Vector3:
"""Converts a rotation matrix to a roll,pitch,yaw angle triple.
The result is given in radians."""
sign = lambda x: 1 if x > 0 else (-1 if x < 0 else 0)
m = matrix(R)
_sb = min(1.0, max(m[2][0],-1.0))
b = -math.asin(_sb) # m(2,0)=-sb
cb = math.cos(b)
if abs(cb) > 1e-7:
ca = m[0][0]/cb #m(0,0)=ca*cb
ca = min(1.0,max(ca,-1.0))
if sign(m[1][0]) == sign(cb): #m(1,0)=sa*cb
            a = math.acos(ca)
else:
a = 2*math.pi - math.acos(ca)
cc = m[2][2] / cb #m(2,2)=cb*cc
cc = min(1.0,max(cc,-1.0))
if sign(m[2][1]) == sign(cb): #m(2,1)=cb*sc
c = math.acos(cc)
else:
c = math.pi*2 - math.acos(cc)
else:
#b is close to 90 degrees, i.e. cb=0
#this reduces the degrees of freedom, so we can set c=0
c = 0
#m(0,1)=-sa
_sa = min(1.0, max(m[0][1],-1.0))
        a = -math.asin(_sa)
if sign(math.cos(a)) != sign(m[1][1]): #m(1,1)=ca
            a = math.pi - a
return c,b,a
def from_rpy(rollpitchyaw : Vector3) -> Rotation:
"""Converts from roll,pitch,yaw angle triple to a rotation
matrix. The triple is given in radians. The x axis is "roll",
y is "pitch", and z is "yaw".
"""
roll,pitch,yaw = rollpitchyaw
Rx,Ry,Rz = from_axis_angle(((1,0,0),roll)),from_axis_angle(((0,1,0),pitch)),from_axis_angle(((0,0,1),yaw))
return mul(Rz,mul(Ry,Rx))
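# Editor's note: round-trip sketch, not part of the original module. The
# recovered angles may come back wrapped into [0, 2*pi), so compare them
# modulo 2*pi.
def _demo_rpy_roundtrip():
    angles = (0.1, 0.2, 0.3)
    R = from_rpy(angles)
    return angles, rpy(R)   # both (roll, pitch, yaw), equal up to wrapping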
def rotation_vector(R : Rotation) -> Vector3:
"""Returns the rotation vector w (exponential map) representation of R such
that e^[w] = R. Equivalent to axis-angle representation with
w/||w||=axis, ||w||=angle."""
theta = angle(R)
if abs(theta-math.pi)<0.5:
#for values close to pi this alternate technique has better numerical
#performance
c = math.cos(theta)
x2=(R[0]-c)/(1.0 - c)
y2=(R[4]-c)/(1.0 - c)
z2=(R[8]-c)/(1.0 - c)
if x2 < 0:
assert(x2>-1e-5)
x2=0
if y2 < 0:
assert(y2>-1e-5)
y2=0
if z2 < 0:
assert(z2>-1e-5)
z2=0
x = theta*math.sqrt(x2)
y = theta*math.sqrt(y2)
z = theta*math.sqrt(z2)
if abs(theta-math.pi) < 1e-5:
#determined up to sign changes, we know r12=2xy,r13=2xz,r23=2yz
xy=R[3]
xz=R[6]
yz=R[7]
if(x > y):
if(x > z):
#x is largest
if(xy < 0): y=-y
if(xz < 0): z=-z
else:
#z is largest
if(yz < 0): y=-y
if(xz < 0): x=-x
else:
if(y > z):
#y is largest
if(xy < 0): x=-x
if(yz < 0): z=-z
else:
#z is largest
if(yz < 0): y=-y
if(xz < 0): x=-x
else:
#alternate technique: use sign of anti-cross product
eps = theta-math.pi
if eps*(R[3+2]-R[6+1]) > 0:
x = -x
if eps*(R[6+0]-R[0+2]) > 0:
y = -y
if eps*(R[0+1]-R[3+0]) > 0:
z = -z
return [x,y,z]
#normal
scale = 1
if abs(theta) > 1e-5:
scale = theta/math.sin(theta)
return vectorops.mul(deskew(R),scale)
def axis_angle(R : Rotation) -> Tuple:
"""Returns the (axis,angle) pair representing R"""
m = rotation_vector(R)
return (vectorops.unit(m),vectorops.norm(m))
def from_axis_angle(aa : Tuple) -> Rotation:
"""Converts an axis-angle representation (axis,angle) to a 3D rotation
matrix."""
return rotation(aa[0],aa[1])
def from_rotation_vector(w : Vector3) -> Rotation:
"""Converts a rotation vector representation w to a 3D rotation matrix."""
length = vectorops.norm(w)
if length < 1e-7: return identity()
return rotation(vectorops.mul(w,1.0/length),length)
#aliases for rotation_vector and from_rotation_vector
moment = rotation_vector
from_moment = from_rotation_vector
def from_quaternion(q : Tuple) -> Rotation:
"""Given a unit quaternion (w,x,y,z), produce the corresponding rotation
matrix."""
w,x,y,z = q
x2 = x + x; y2 = y + y; z2 = z + z;
xx = x * x2; xy = x * y2; xz = x * z2;
yy = y * y2; yz = y * z2; zz = z * z2;
wx = w * x2; wy = w * y2; wz = w * z2;
a11 = 1.0 - (yy + zz)
a12 = xy - wz
a13 = xz + wy
a21 = xy + wz
a22 = 1.0 - (xx + zz)
a23 = yz - wx
a31 = xz - wy
a32 = yz + wx
a33 = 1.0 - (xx + yy)
return [a11,a21,a31,a12,a22,a32,a13,a23,a33]
def quaternion(R : Rotation) -> Tuple:
"""Given a Klamp't rotation representation, produces the corresponding
unit quaternion (w,x,y,z)."""
tr = trace(R) + 1.0;
a11,a21,a31,a12,a22,a32,a13,a23,a33 = R
#If the trace is nonzero, it's a nondegenerate rotation
if tr > 1e-5:
s = math.sqrt(tr)
w = s * 0.5
s = 0.5 / s
x = (a32 - a23) * s
y = (a13 - a31) * s
z = (a21 - a12) * s
return vectorops.unit((w,x,y,z))
else:
#degenerate it's a rotation of 180 degrees
nxt = [1, 2, 0]
#check for largest diagonal entry
i = 0
if a22 > a11: i = 1
if a33 > max(a11,a22): i = 2
j = nxt[i]
k = nxt[j]
M = matrix(R)
q = [0.0]*4
        s = math.sqrt((M[i][i] - (M[j][j] + M[k][k])) + 1.0)
        q[i] = s * 0.5
        if abs(s) < 1e-7:
            raise ValueError("Could not solve for quaternion... Invalid rotation matrix?")
        else:
            s = 0.5 / s
            q[3] = (M[k][j] - M[j][k]) * s
            q[j] = (M[i][j] + M[j][i]) * s
            q[k] = (M[i][k] + M[k][i]) * s
w,x,y,z = q[3],q[0],q[1],q[2]
return vectorops.unit([w,x,y,z])
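# Editor's note: quaternion round-trip sketch, not part of the original
# module.
def _demo_quaternion_roundtrip():
    R = rotation([0.0, 0.0, 1.0], 0.5)       # rotate 0.5 rad about +z
    q = quaternion(R)                         # unit quaternion (w, x, y, z)
    return distance(R, from_quaternion(q))    # ~0 up to rounding error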
def distance(R1 : Rotation, R2 : Rotation) -> float:
"""Returns the absolute angle one would need to rotate in order to get
from R1 to R2"""
R = mul(R1,inv(R2))
return angle(R)
def error(R1 : Rotation, R2 : Rotation) -> float:
"""Returns a 3D "difference vector" that describes how far R1 is from R2.
More precisely, this is the (local) Lie derivative, which is the rotation
vector representation of R1*R2^T.
Fun fact: the error w=error(R1,R2) is related to the derivative of
interpolate(R2,R1,u) at u=0 by
d/du interpolate(R2,R1,0) = mul(cross_product(w),R2).
You can also recover R1 from w via R1 = mul(from_moment(w),R2).
"""
R = mul(R1,inv(R2))
return moment(R)
def cross_product(w : Vector3) -> Rotation:
"""Returns the cross product matrix associated with w.
The matrix [w]R is the derivative of the matrix R as it rotates about
the axis w/||w|| with angular velocity ||w||.
"""
return [0.,w[2],-w[1], -w[2],0.,w[0], w[1],-w[0],0.]
def diag(R : Rotation) -> Vector3:
"""Returns the diagonal of the 3x3 matrix reprsenting the so3 element R."""
return [R[0],R[4],R[8]]
def deskew(R : Rotation) -> Vector3:
"""If R is a (flattened) cross-product matrix of the 3-vector w, this will
return w. Otherwise, it will return a representation w of (R-R^T)/2 (off
diagonals of R) such that (R-R^T)/2 = cross_product(w). """
return [0.5*(R[5]-R[7]),0.5*(R[6]-R[2]),0.5*(R[1]-R[3])]
def rotation(axis : Vector3, angle: float) -> Rotation:
"""Given a unit axis and an angle in radians, returns the rotation
matrix."""
cm = math.cos(angle)
sm = math.sin(angle)
#m = s[r]-c[r][r]+rrt = s[r]-c(rrt-I)+rrt = cI + rrt(1-c) + s[r]
R = vectorops.mul(cross_product(axis),sm)
for i in range(3):
for j in range(3):
R[i*3+j] += axis[i]*axis[j]*(1.-cm)
R[0] += cm
R[4] += cm
R[8] += cm
return R
def canonical(v : Vector3) -> Rotation:
"""Given a unit vector v, finds R that defines a basis [x,y,z] such that
x = v and y and z are orthogonal"""
if abs(vectorops.normSquared(v) - 1.0) > 1e-4:
raise RuntimeError("Nonunit vector supplied to canonical()")
assert(len(v)==3)
if abs(v[0]-1.0) < 1e-5:
return identity()
elif abs(v[0]+1.0) < 1e-5:
#flip of basis
R = identity()
R[0] = -1.0
R[4] = -1.0
return R
R = list(v) + [0.]*6
x,y,z = v
    scale = 1.0/(1.0+x)
    R[3] = -y
    R[4] = x + scale*z*z
    R[5] = -scale*y*z
    R[6] = -z
    R[7] = -scale*y*z
    R[8] = x + scale*y*y
return R
def align(a : Vector3, b : Vector3) -> Rotation:
"""Returns a minimal-angle rotation that aligns the vector a to align with
the vector b. Both a and b must be nonzero."""
an = vectorops.norm(a)
bn = vectorops.norm(b)
if abs(an) < 1e-5 or abs(bn) < 1e-5:
return identity()
a = vectorops.mul(a,1.0/an)
b = vectorops.mul(b,1.0/bn)
v = vectorops.cross(a,b)
c = vectorops.dot(a,b)
if abs(c+1)<1e-5: #rotation of pi
v = vectorops.cross(a,[0,0,1])
vn = vectorops.norm(v)
if vn < 1e-5:
v = vectorops.cross(a,[0,1,0])
vn = vectorops.norm(v)
return rotation(vectorops.mul(v,1.0/vn),math.pi)
vhat = cross_product(v)
vhat2 = mul(vhat,vhat)
return vectorops.madd(vectorops.add(identity(),vhat),vhat2,1.0/(1.0+c))
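# Editor's note: usage sketch for align(), not part of the original module.
def _demo_align():
    a = [1.0, 2.0, 2.0]
    b = [0.0, 0.0, 5.0]
    R = align(a, b)
    # R rotates the direction of a onto the direction of b
    return apply(R, vectorops.unit(a))   # approximately (0.0, 0.0, 1.0)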
def interpolate(R1 : Rotation, R2 : Rotation, u : float) -> Rotation:
"""Interpolate linearly between the two rotations R1 and R2. """
R = mul(inv(R1),R2)
m = moment(R)
angle = vectorops.norm(m)
if angle==0: return R1
axis = vectorops.div(m,angle)
return mul(R1,rotation(axis,angle*u))
def interpolator(R1 : Rotation, R2 : Rotation) -> Callable:
"""Returns a function of one parameter u that interpolates linearly
between the two rotations R1 and R2. After f(u) is constructed, calling
f(u) is about 2x faster than calling interpolate(R1,R2,u)."""
R = mul(inv(R1),R2)
m = moment(R)
angle = vectorops.norm(m)
if angle==0:
axis = [1,0,0]
else:
axis = vectorops.div(m,angle)
def f(u,R1=R1,axis=axis,angle=angle):
return mul(R1,rotation(axis,angle*u))
return f
def det(R : Rotation) -> float:
"""Returns the determinant of the 3x3 matrix R"""
m = matrix(R)
return m[0][0]*m[1][1]*m[2][2]+m[0][1]*m[1][2]*m[2][0]+m[0][2]*m[1][0]*m[2][1]-m[0][0]*m[1][2]*m[2][1]-m[0][1]*m[1][0]*m[2][2]-m[0][2]*m[1][1]*m[2][0]
def is_rotation(R : Rotation, tol=1e-5) -> bool:
"""Returns true if R is a rotation matrix, i.e. is orthogonal to the given tolerance and has + determinant"""
RRt = mul(R,inv(R))
err = vectorops.sub(RRt,identity())
if any(abs(v) > tol for v in err):
return False
if det(R) < 0:
return False
return True
def sample() -> Rotation:
"""Returns a uniformly distributed rotation matrix."""
import random
q = [random.gauss(0,1),random.gauss(0,1),random.gauss(0,1),random.gauss(0,1)]
q = vectorops.unit(q)
theta = math.acos(q[3])*2.0
if abs(theta) < 1e-8:
m = [0,0,0]
else:
m = vectorops.mul(vectorops.unit(q[0:3]),theta)
    return from_moment(m)
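# Editor's note: minimal self-check added for illustration; it is not part
# of the original module. The tolerance is loose because the moment
# extraction loses precision for rotations near pi.
if __name__ == "__main__":
    R = sample()
    assert is_rotation(R)
    assert distance(R, from_moment(moment(R))) < 1e-4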
/DownloadFTP_Packages-1.0-py3-none-any.whl/DownloadFTP_Packages/FTP_Package.py
from ftplib import FTP
import tkinter as tk
import os
import sys
# Message-box submodule
import tkinter.messagebox as tk_box
from threading import Thread
# # Project
# project_model = "T8520"
#
# # Version to download
# download_edition = '1.0.0.6'
#
# # Local storage path
# local_path = 'D:' + os.sep + '1' + os.sep
# Login window
class LoginExecute():
    def __init__(self):
        self.win = tk.Tk()
        # Instantiate the FTP client
        self.ftp = FTP()
        # Disable the window's close (X) button
        # via the window-manager protocol hook
        self.win.protocol('WM_DELETE_WINDOW', self.callback)
        # Window title
        self.win.title('Login')
        # Window icon
        self.win.iconbitmap(self.standard_path("main.ico"))
        # Initial window size
        self.win.geometry('400x300')
        # Login form
        tk.Label(self.win, text='FTP:').place(x=100, y=60)
        tk.Label(self.win, text='Port:').place(x=100, y=100)
        tk.Label(self.win, text='Username:').place(x=100, y=140)
        tk.Label(self.win, text='Password:').place(x=100, y=180)
        # FTP host entry
        self.var_ftp_host = tk.StringVar()
        # Default value -- FTP host
        self.var_ftp_host.set('172.16.23.240')
        self.enter_ftp_host = tk.Entry(self.win, textvariable=self.var_ftp_host)
        self.enter_ftp_host.place(x=160, y=60)
        # Port entry
        self.var_ftp_port = tk.StringVar()
        # Default value -- port
        self.var_ftp_port.set(21)
        self.enter_ftp_port = tk.Entry(self.win, textvariable=self.var_ftp_port)
        self.enter_ftp_port.place(x=160, y=100)
        # Username entry
        self.var_usr_name = tk.StringVar()
        # Default value -- username
        self.var_usr_name.set('xdcftp')
        self.enter_usr_name = tk.Entry(self.win, textvariable=self.var_usr_name)
        self.enter_usr_name.place(x=160, y=140)
        # Password entry
        self.var_usr_pwd = tk.StringVar()
        # Default value -- password
        self.var_usr_pwd.set('1q2w3e4r5t~!@')
        self.enter_usr_pwd = tk.Entry(self.win, textvariable=self.var_usr_pwd, show='*')
        self.enter_usr_pwd.place(x=160, y=180)
        # Login button
        bt_login = tk.Button(self.win, text='Login', command=self.login_main)
        bt_login.place(x=120, y=230)
        # Exit button
        bt_logquit = tk.Button(self.win, text='Exit', command=self.close_win)
        bt_logquit.place(x=260, y=230)
self.win.mainloop()
def login_main(self):
self.host = self.enter_ftp_host.get()
self.port = self.enter_ftp_port.get()
self.user = self.enter_usr_name.get()
self.poss = self.enter_usr_pwd.get()
        if self.host != '' and self.port != '' and self.user != '' and self.poss != '':
            # Connect to the FTP server & log in
            self.request_ftp()
            # Close the login window
            self.win.destroy()
            # Instantiate the main FTP window
            w = FtpExecute()
            # Launch it
            w.launch_window(self.ftp)
            print('FTP: %s, port: %s' % (self.host, self.port))
            print('Username: %s, password: %s' % (self.user, self.poss))
        else:
            if self.host == '':
                self.showinfo_window('The FTP IP address cannot be empty')
            else:
                pass
            if self.port == '':
                self.showinfo_window('The FTP port cannot be empty')
            else:
                pass
            if self.user == '':
                self.showinfo_window('The username cannot be empty')
            else:
                pass
            if self.poss == '':
                self.showinfo_window('The password cannot be empty')
            else:
                pass
    # _______________________FTP______________________________
    # Connect to the configured FTP host and port & log in
    def request_ftp(self):
        # Candidate encodings
        encode = ['UTF-8', 'gbk', 'GB2312', 'GB18030', 'Big5', 'HZ']
        # Check that the FTP server is reachable
        try:
            # Connect to the FTP server
            self.ftp.connect(self.host, int(self.port))
        # If the connection fails, report it
        except:
            # Server unreachable
            self.showerror_window('Cannot connect to the FTP server, please check the network')
            # Abort instead of continuing
            sys.exit(0)
        else:
            # Use the gbk encoding for the FTP session
            self.ftp.encoding = encode[1]
            # Log in
            self.login()
    # Login helper -- username, password
    def login(self):
        # Log in to the FTP server
        self.ftp.login(self.user, self.poss)
        # Welcome banner
        # print(self.ftp.welcome)
    # Info message box
    def showinfo_window(self, messages):
        # Hide the root Tk window
        tk.Tk().withdraw()
        tk_box.showinfo(title="Info", message=messages)
    # Error message box
    def showerror_window(self, messages):
        # Hide the root Tk window
        tk.Tk().withdraw()
        # Show the error dialog
        tk_box.showerror(title="Error", message=messages)
    # Resolve resource paths for window icons -- works from source and from frozen builds
    def standard_path(self, retative_path):
        try:
            base_path = sys._MEIPASS
        except:
            # Fall back to the current directory
            base_path = os.path.abspath('')
            # base_path = self.gml + os.sep + 'logo' + os.sep
        # Join the base path and the file name
        return os.path.join(base_path, retative_path)
    # This callback does nothing, which effectively disables the close button
    def callback(self):
        pass
    # This does two things: close the window & terminate the program
    def close_win(self):
        # Close the window
        self.win.destroy()
        # Terminate the program -- worker threads die with it
        sys.exit(0)
# The main FTP window
class FtpExecute():
    # This callback does nothing, which effectively disables the close button
    def callback(self):
        pass
    # Resolve resource paths for window icons -- works from source and from frozen builds
    def standard_path(self, retative_path):
        try:
            base_path = sys._MEIPASS
        except:
            # Fall back to the current directory
            base_path = os.path.abspath('')
            # base_path = self.gml + os.sep + 'logo' + os.sep
        # Join the base path and the file name
        return os.path.join(base_path, retative_path)
    # Launch the window
    def launch_window(self, ftp):
        # Keep the logged-in FTP session (host, port and credentials already applied)
        self.ftp = ftp
        # Initialize the window toolkit
        self.root = tk.Tk()
        # Disable the window's close (X) button
        # via the window-manager protocol hook
        self.root.protocol('WM_DELETE_WINDOW', self.callback)
        # # Hide the window
        # self.root.withdraw()
        # Window title
        self.root.title('OTA One-Click Download Tool_v2.0')
        # Remove the title bar
        # root.overrideredirect(True)
        # Window icon
        self.root.iconbitmap(self.standard_path("main.ico"))
        # Initial window size
        self.root.geometry('570x220')
        # Maximum window size
        # self.root.maxsize(1080, 860)
        # Window background color
        self.root.configure(bg="pink")
        tk.Label(self.root, text='Project to download (e.g. T8213, T8520; no spaces): ', font=('微软雅黑', 12), bg="pink").grid(row=0, column=0)
        self.project_model = tk.Entry(self.root, text='', width=13, font=('微软雅黑', 9))
        self.project_model.grid(row=0, column=1)
        tk.Label(self.root, text='Version to download (e.g. 1.0.0.6, 0.0.9.7; no spaces): ', font=('微软雅黑', 12), bg="pink").grid(row=1, column=0)
        self.download_edition = tk.Entry(self.root, text='', width=13, font=('微软雅黑', 9))
        self.download_edition.grid(row=1, column=1)
        tk.Label(self.root, text=r'Local storage path (e.g. D:\test or C:\User; no spaces): ', font=('微软雅黑', 12), bg="pink").grid(row=2, column=0)
        self.local_path = tk.Entry(self.root, text='', width=13, font=('微软雅黑', 9))
        self.local_path.grid(row=2, column=1)
        self.but1 = tk.Button(self.root, text='Download', font=('微软雅黑', 12), width=8, height=1, command=lambda: self.download_info())
        self.but1.grid(row=3, column=1)
        # This exit button is the only way out of the program: it closes the
        # window and ends the main program, after which the threads die on their own
        self.but2 = tk.Button(self.root, text=" Exit ", font=('微软雅黑', 12), width=8, height=1, command=lambda: self.close_win())
        self.but2.grid(row=8, column=1)
        self.root.mainloop()
    # Show download progress information
    def download_info(self):
        # Globals for the project number, the version, the local path, and the
        # local path with a trailing separator appended
        global project_model, download_edition, local_path, new_local_path
        # Copy the entered project, version and local path into the globals
        project_model = self.project_model.get()
        download_edition = self.download_edition.get()
        local_path = self.local_path.get()
        # Proceed only if none of them is empty
        if project_model != '' and download_edition != '' and local_path != '':
            # Show the download status widgets
            self.download = tk.Label(self.root, text='Downloading: ',
                                     bg="pink", font=('微软雅黑', 12))
            self.download.grid(row=4, column=0)
            self.clock = tk.Entry(self.root, bg="pink", bd=0, width=50, font=('微软雅黑', 12))
            self.clock.grid(row=5, column=0)
            self.clock.insert(0, ' Loading......')
            '''
            Asynchronous steps:
            1. Connect to the FTP server
            2. Log in
            3. Derive the local and remote paths from the entered storage path
            4. Start the download with those paths
            5. Pop up a dialog once the download succeeds
            6. Quit the FTP session when done
            '''
            Thread(target=self.start_download).start()
        else:
            # Check that the project number is not empty
            if project_model == '':
                self.showinfo_window('The project number cannot be empty')
            else:
                pass
            # Check that the version to download is not empty
            if download_edition == '':
                self.showinfo_window('The version to download cannot be empty')
            else:
                pass
            # Check that the local path is not empty
            if local_path == '':
                self.showinfo_window('The local storage path cannot be empty')
            else:
                pass
    # This does two things: close the window & terminate the program
    def close_win(self):
        # Close the window
        self.root.destroy()
        # Terminate the program -- worker threads die with it
        sys.exit(0)
    '''
    Asynchronous steps:
    1. Connect to the FTP server
    2. Log in
    3. Derive the local and remote paths from the entered storage path
    4. Start the download with those paths
    5. Pop up a dialog once the download succeeds
    6. Quit the FTP session when done
    '''
    def start_download(self):
        # Close the window
        # self.root.destroy()
        # Instantiate the FTP download & log in -- FTP host, port, account, password
        # self.request_ftp(self.ftp_host, self.ftp_port, self.ftp_user, self.ftp_passwd)
        # Append a path separator to form the effective local path
        new_local_path = local_path + os.sep
        # Derive the local and remote paths from the entered storage path
        data = self.create_path(new_local_path)
        # Download using those paths
        self.download_catalogue(data[0], data[1])
        # Pop up a dialog on success
        self.showinfo_window("Download complete")
        # Quit the FTP session
        self.quit_()
    # Info message box
    def showinfo_window(self, messages):
        # Hide the root Tk window
        tk.Tk().withdraw()
        tk_box.showinfo(title="Info", message=messages)
    # Error message box
    def showerror_window(self, messages):
        # Hide the root Tk window
        tk.Tk().withdraw()
        # Show the error dialog
        tk_box.showerror(title="Error", message=messages)
# _______________________FTP______________________________
# 通过"项目型号“和”版本号“ 获取 ”需要下载的文件“ 和 ”ftp中项目存放路径“
def get_filename(self, projectModel, downloadEdition):
"""
projectModel: 项目编号
downloadEdition: 下载版本
"""
# ftp中项目存放路径
projectRoute = f"/TestVersion/{projectModel}/"
# 设置FTP当前操作的路径--进入该路径
self.ftp.cwd(projectRoute)
# 获取目录下的文件
list = self.ftp.nlst()
# print(f"版本数目:{len(list)}个。分别为:", list)
# 遍历目录
for file in list:
# 通过输入下载的版本号获取该版本号的文件夹
if file.endswith(downloadEdition):
# 将 "该文件夹名" 和 "ftp中项目存放路径" 返回备用
return [file, projectRoute]
# 创建路径 -- 本地存放路径
def create_path(self, local):
# 通过"项目型号“和”版本号“ 获取 ”需要下载的文件“ 和 ”ftp中项目存放路径“
# 所以downloadfile_info [需要下载的文件名, ftp中项目存放路径]
downloadfile_info = self.get_filename(project_model, download_edition)
# 创建路径 - 本地路径
localPath = local + downloadfile_info[0]
# 创建路径 - 远程路径
mstscPath = downloadfile_info[1] + downloadfile_info[0]
# print("本地地址:", local_path)
# print("远程地址:", mstsc_path)
# 返回 本地路径 和 远程路径
return [localPath, mstscPath]
    # Download a single file: write to local_file, fetch remote mstsc_file
    def download_file(self, local_file, mstsc_file):
        print('Starting %s' % mstsc_file)
        # Clear the status field before updating it
        self.clock.delete(0, tk.END)
        # Update the displayed value
        self.clock.insert(0, ' ' + mstsc_file)
        # start = time.time()
        # Open local_file in binary mode
        file = open(local_file, 'wb')
        # FTP command: RETR <filename> retrieves (copies) a file from the server
        # Receive the remote file and write it to the local file
        self.ftp.retrbinary(cmd='RETR ' + mstsc_file, callback=file.write)
        # Close the file once the transfer is done
        file.close()
        print('Finished %s' % mstsc_file)
        # print('%s downloaded in %.2f s' % (mstsc_file, time.time()-start))
    # Download a whole directory: local_dir (local), mstsc_dir (remote)
    '''
    For every directory entry:
    1. Update the local and remote paths
    2. Create the local path if it does not exist yet
    3. Change the remote working directory to the updated path
    Anything that is not a directory
    is downloaded directly.
    '''
    def download_catalogue(self, local_dir, mstsc_dir):
        # Create the local directory if it does not exist
        if not os.path.exists(local_dir):
            # Create it
            os.makedirs(local_dir)
        # Enter the remote directory - download preparation
        self.ftp.cwd(mstsc_dir)
        # List the remote files - ready to download
        remote_files = self.ftp.nlst()
        # print("Remote listing:", remote_files)
        # Walk the remote files
        for file in remote_files:
            # Join the remote path with the entry -- new remote path
            Mstsc = mstsc_dir + '/' + file
            # Mstsc_dir = mstsc_dir + "/MCU/T8520_APP_V1.0.1.0.bin" # test with a non-directory
            # # Join the local path with the entry -- new local path
            Local = os.path.join(local_dir, file)
            # print("Downloading", self.ftp.nlst(file))
            # Info_box(self.ftp.nlst(file))
            try:
                # If cwd succeeds, the entry is a directory
                self.ftp.cwd(Mstsc)
                # Step back out; the cwd above was only a directory test
                self.ftp.cwd("..")
                # Recurse, passing the new local and remote paths
                self.download_catalogue(Local, Mstsc)
            except:
                # cwd failed, so the entry is a file (firmware package) -- download it directly
                self.download_file(Local, file)
                # # Asynchronous handling
                # t = Thread(target=self.download_info, args=(Local, file, ))
                # t.start()
        # After finishing a directory, go back up one level; with the
        # recursion above this runs once per directory
        self.ftp.cwd('..')
    # Close the FTP connection
    def close_(self):
        self.ftp.close()
    # Quit the FTP session
    def quit_(self):
        self.ftp.quit()
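# Editor's note: usage sketch, not part of the original module. The tool is
# driven entirely by its Tk dialogs: constructing LoginExecute() opens the
# login window and, on success, hands the FTP session over to FtpExecute.
if __name__ == '__main__':
    LoginExecute()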
/Editra-0.7.20.tar.gz/Editra-0.7.20/scripts/i18n/gen_lang.sh
##############################################################################
# Variables
##############################################################################
ARG=$1
IMPORT_DIR=$2
##############################################################################
# Function: print_help
# Purpose: Print the scripts usage help to the console
##############################################################################
print_help () {
echo
echo "Usage: $0 [-h|-mo|-po|-all|-app]"
echo " -h Print this help message"
echo " -mo Generate mo files and install them in the locale directory"
echo " -po Generate new po files from the project source"
echo " -all Regenerate everything"
echo " -app Only regenerate the file list"
echo " -lp <path> Import translations from Launchpad export"
echo
}
##############################################################################
# Function: get_appfile
# Purpose: Generate the app file
##############################################################################
gen_appfile () {
OUTPUT="$(pwd)/app.fil"
BASE="../.."
PLUGINS="$BASE/plugins"
# Remove current file
rm app.fil
# Start searching for files
DIRS=("$BASE/" "$BASE/src/" "$BASE/src/eclib/" "$BASE/src/syntax/"
"$PLUGINS/" "$PLUGINS/codebrowser/codebrowser/"
"$PLUGINS/filebrowser/filebrowser/" "$PLUGINS/Launch/launch/"
"$PLUGINS/PyShell/PyShell/" )
    # Note: ${#DIRS} is the string length of DIRS[0]; ${#DIRS[@]} gives the
    # number of elements.
    DIRNUM=${#DIRS[@]}
    for ((i=0; i < DIRNUM; i++)); do
DIR=${DIRS[${i}]}
for FNAME in $(ls $DIR); do
if ! [ -z `echo $FNAME | grep "^.*\.py$"` ]; then
if [ -a "$DIR$FNAME" ]; then
echo "Found: $DIR$FNAME"
echo "$DIR$FNAME" >> $OUTPUT
fi
fi
done
done
}
##############################################################################
# Function: import_lp_files
# Purpose: Copy exported launchpad files to here and rename
##############################################################################
import_lp_files() {
python getlpfiles.py $IMPORT_DIR
}
##############################################################################
# Function: gen_flist
# Purpose: Generate the list of files to create the po files from
##############################################################################
gen_flist() {
python mkflist.py
}
##############################################################################
# Function: gen_po
# Purpose: Generate new po files from the source
##############################################################################
gen_po () {
python mki18n.py -pv --domain=Editra
# Copy all .new files to override the originals
for fname in $(ls); do
if ! [ -z $(echo $fname | grep '.*\.new') ]; then
name=$(echo $fname | sed 's/.new//')
mv $fname $name
fi
done
}
##############################################################################
# Function: make_mo
# Purpose: Make mo files and place them in the appropriate locale directory
##############################################################################
make_mo () {
python mki18n.py -mv --domain=Editra --moTarget=../../locale
}
##############################################################################
# Main
##############################################################################
if [ "$ARG" = "-po" ]
then
gen_appfile
gen_po
exit 0
elif [ "$ARG" = "-mo" ]
then
make_mo
exit 0
elif [ "$ARG" = "-all" ]
then
gen_appfile
gen_po
make_mo
exit 0
elif [ "$ARG" = "-app" ]
then
gen_appfile
exit 0
elif [ "$ARG" = "-lp" ]
then
import_lp_files
exit 0
else
print_help
fi
/FIXation-0.0.4.tar.gz/FIXation-0.0.4/fixation/models.py
class Entry:
def __init__(self):
self.attrib = {}
class Message(Entry):
def parse_value(self, tag, text):
if tag == 'NotReqXML':
text = bool(int(text))
elif tag == 'ComponentID':
text = int(text)
setattr(self, tag, text)
def pretty_name(self):
return self.Name
def pretty_type(self):
return self.MsgType
def __repr__(self):
return "<Message %s>" % self.pretty_name()
class MsgContent(Entry):
def parse_value(self, tag, text):
if tag == 'Reqd':
text = bool(int(text))
elif tag == 'ComponentID':
text = int(text)
setattr(self, tag, text)
def pretty_name(self):
return self.TagText
def pretty_type(self):
if self.TagText.isdigit():
return self.TagText
else:
return "Component"
def __repr__(self):
return '<MsgContent %s>' % self.pretty_name()
class Component(Entry):
def parse_value(self, tag, text):
if tag == 'NotReqXML':
text = bool(int(text))
elif tag == 'ComponentID':
text = int(text)
setattr(self, tag, text)
def pretty_name(self):
return self.Name
def pretty_type(self):
return "Component"
def __repr__(self):
return '<Component %s>' % self.pretty_name()
class Field(Entry):
def parse_value(self, tag, text):
if tag == 'NotReqXML':
text = bool(int(text))
setattr(self, tag, text)
@property
def FieldName(self):
return self.Name
@property
def TagText(self):
return self.Tag
def pretty_name(self):
return self.Name
def pretty_type(self):
return self.Tag
def __repr__(self):
return '<Field %s>' % self.Name
class Enum(Entry):
def parse_value(self, tag, text):
setattr(self, tag, text)
def pretty_name(self):
return self.SymbolicName
def __repr__(self):
return '<Enum %s>' % self.SymbolicName
def get_id(target):
if isinstance(target, Message):
return target.MsgType
elif isinstance(target, Field):
return target.Tag
elif isinstance(target, Component):
return target.Name
elif isinstance(target, MsgContent):
return target.TagText
    raise ValueError('No support for: ' + str(type(target)))
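# Editor's note: usage sketch, not part of the original module, showing how
# parse_value() and get_id() are typically driven while walking a FIX
# repository document.
if __name__ == '__main__':
    field = Field()
    field.parse_value('Tag', '35')
    field.parse_value('Name', 'MsgType')
    field.parse_value('NotReqXML', '1')
    assert get_id(field) == '35'
    assert field.pretty_name() == 'MsgType'
    assert field.NotReqXML is True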
/MezzanineFor1.7-3.1.10.tar.gz/MezzanineFor1.7-3.1.10/mezzanine/pages/migrations/south/0013_auto__add_field_page_in_sitemap.py
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.in_sitemap'
db.add_column('pages_page', 'in_sitemap',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Page.in_sitemap'
db.delete_column('pages_page', 'in_sitemap')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'generic.assignedkeyword': {
'Meta': {'ordering': "('_order',)", 'object_name': 'AssignedKeyword'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': "orm['generic.Keyword']"}),
'object_pk': ('django.db.models.fields.IntegerField', [], {})
},
'generic.keyword': {
'Meta': {'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'pages.link': {
'Meta': {'ordering': "('_order',)", 'object_name': 'Link', '_ormbases': ['pages.Page']},
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
'pages.page': {
'Meta': {'ordering': "('titles',)", 'object_name': 'Page'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '[1, 2, 3]', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
#'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': "orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'})
},
'pages.richtextpage': {
'Meta': {'ordering': "('_order',)", 'object_name': 'RichTextPage', '_ormbases': ['pages.Page']},
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
    complete_apps = ['pages']
/Electrum-VTC-2.9.3.3.tar.gz/Electrum-VTC-2.9.3.3/gui/vtc/main_window.py
import sys, time, threading
import os, json, traceback
import shutil
import socket
import weakref
import webbrowser
import csv
from decimal import Decimal
import base64
from functools import partial
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
import icons_rc
from electrum_vtc import keystore
from electrum_vtc.bitcoin import COIN, is_valid, TYPE_ADDRESS
from electrum_vtc.plugins import run_hook
from electrum_vtc.i18n import _
from electrum_vtc.util import (format_time, format_satoshis, PrintError,
format_satoshis_plain, NotEnoughFunds,
UserCancelled)
from electrum_vtc import Transaction, mnemonic
from electrum_vtc import util, bitcoin, commands, coinchooser
from electrum_vtc import SimpleConfig, paymentrequest
from electrum_vtc.wallet import Wallet, Multisig_Wallet
try:
from electrum_vtc.plot import plot_history
except:
plot_history = None
from amountedit import AmountEdit, BTCAmountEdit, MyLineEdit, BTCkBEdit
from qrcodewidget import QRCodeWidget, QRDialog
from qrtextedit import ShowQRTextEdit
from transaction_dialog import show_transaction
from fee_slider import FeeSlider
from vtctabwidget import VtcTabWidget
from electrum_vtc import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
def __init__(self, icon, tooltip, func):
QPushButton.__init__(self, icon, '')
self.setToolTip(tooltip)
self.setFlat(True)
self.setMaximumWidth(25)
self.clicked.connect(self.onPress)
self.func = func
self.setIconSize(QSize(25,25))
def onPress(self, checked=False):
'''Drops the unwanted PyQt4 "checked" argument'''
self.func()
def keyPressEvent(self, e):
if e.key() == QtCore.Qt.Key_Return:
self.func()
from electrum_vtc.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
def __init__(self, gui_object, wallet):
QMainWindow.__init__(self)
self.gui_object = gui_object
self.config = config = gui_object.config
self.network = gui_object.daemon.network
self.fx = gui_object.daemon.fx
self.invoices = wallet.invoices
self.contacts = wallet.contacts
self.tray = gui_object.tray
self.app = gui_object.app
self.cleaned_up = False
self.is_max = False
self.payment_request = None
self.checking_accounts = False
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
self.require_fee_update = False
self.tx_notifications = []
self.tl_windows = []
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 8)
self.num_zeros = int(config.get('num_zeros',0))
self.completions = QStringListModel()
self.tabs = tabs = VtcTabWidget(self)
self.send_tab = self.create_send_tab()
self.receive_tab = self.create_receive_tab()
self.addresses_tab = self.create_addresses_tab()
self.utxo_tab = self.create_utxo_tab()
self.console_tab = self.create_console_tab()
self.contacts_tab = self.create_contacts_tab()
tabs.addTab(self.create_history_tab(), QIcon(":icons/vertcoin.png"), _('History'))
tabs.addTab(self.send_tab, QIcon(":icons/send.png"), _('Send'))
tabs.addTab(self.receive_tab, QIcon(":icons/receive.png"), _('Receive'))
def add_optional_tab(tabs, tab, icon, description, name):
tab.tab_icon = icon
tab.tab_description = description
tab.tab_pos = len(tabs)
tab.tab_name = name
if self.config.get('show_{}_tab'.format(name), False):
tabs.addTab(tab, icon, description.replace("&", ""))
add_optional_tab(tabs, self.addresses_tab, QIcon(":icons/address-book.png"), _("&Addresses"), "addresses")
add_optional_tab(tabs, self.utxo_tab, QIcon(":icons/tx_output.png"), _("Co&ins"), "utxo")
add_optional_tab(tabs, self.contacts_tab, QIcon(":icons/address-book.png"), _("Con&tacts"), "contacts")
add_optional_tab(tabs, self.console_tab, QIcon(":icons/debugwindow.png"), _("Con&sole"), "console")
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setCentralWidget(tabs)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(QIcon(":icons/electrum-vtc.png"))
self.init_menubar()
wrtabs = weakref.proxy(tabs)
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() - 1)%wrtabs.count()))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() + 1)%wrtabs.count()))
for i in range(wrtabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: wrtabs.setCurrentIndex(i))
self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok)
self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error)
self.history_list.setFocus(True)
# network callbacks
if self.network:
self.connect(self, QtCore.SIGNAL('network'), self.on_network_qt)
interests = ['updated', 'new_transaction', 'status',
'banner', 'verified', 'fee']
# To avoid leaking references to "self" that prevent the
# window from being GC-ed when closed, callbacks should be
# methods of this class only, and specifically not be
# partials, lambdas or methods of subobjects. Hence...
self.network.register_callback(self.on_network, interests)
# set initial message
self.console.showMessage(self.network.banner)
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
self.connect(self, SIGNAL('new_fx_quotes'), self.on_fx_quotes)
self.connect(self, SIGNAL('new_fx_history'), self.on_fx_history)
# update fee slider in case we missed the callback
self.fee_slider.update()
self.load_wallet(wallet)
self.connect_slots(gui_object.timer)
self.fetch_alias()
def set_tab_property(self, w):
w.setProperty('tab', QVariant(True))
return w
def on_history(self, b):
self.emit(SIGNAL('new_fx_history'))
def on_fx_history(self):
self.history_list.refresh_headers()
self.history_list.update()
def on_quotes(self, b):
self.emit(SIGNAL('new_fx_quotes'))
def on_fx_quotes(self):
self.update_status()
# Refresh edits with the new rate
edit = self.fiat_send_e if self.fiat_send_e.is_last_edited else self.amount_e
edit.textEdited.emit(edit.text())
edit = self.fiat_receive_e if self.fiat_receive_e.is_last_edited else self.receive_amount_e
edit.textEdited.emit(edit.text())
# History tab needs updating if it used spot
if self.fx.history_used_spot:
self.history_list.update()
def toggle_tab(self, tab):
show = not self.config.get('show_{}_tab'.format(tab.tab_name), False)
self.config.set_key('show_{}_tab'.format(tab.tab_name), show)
item_text = (_("Hide") if show else _("Show")) + " " + tab.tab_description
tab.menu_action.setText(item_text)
if show:
# Find out where to place the tab
index = len(self.tabs)
for i in range(len(self.tabs)):
try:
if tab.tab_pos < self.tabs.widget(i).tab_pos:
index = i
break
except AttributeError:
pass
self.tabs.insertTab(index, tab, tab.tab_icon, tab.tab_description.replace("&", ""))
else:
i = self.tabs.indexOf(tab)
self.tabs.removeTab(i)
def push_top_level_window(self, window):
'''Used for e.g. tx dialog box to ensure new dialogs are appropriately
parented. This used to be done by explicitly providing the parent
window, but that isn't something hardware wallet prompts know.'''
self.tl_windows.append(window)
def pop_top_level_window(self, window):
self.tl_windows.remove(window)
def top_level_window(self):
'''Do the right thing in the presence of tx dialog windows'''
override = self.tl_windows[-1] if self.tl_windows else None
return self.top_level_window_recurse(override)
def diagnostic_name(self):
return "%s/%s" % (PrintError.diagnostic_name(self),
self.wallet.basename() if self.wallet else "None")
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def on_error(self, exc_info):
if not isinstance(exc_info[1], UserCancelled):
traceback.print_exception(*exc_info)
self.show_error(str(exc_info[1]))
def on_network(self, event, *args):
if event == 'updated':
self.need_update.set()
self.emit(QtCore.SIGNAL('updated'), event, *args)
elif event == 'new_transaction':
self.tx_notifications.append(args[0])
elif event in ['status', 'banner', 'verified', 'fee']:
# Handle in GUI thread
self.emit(QtCore.SIGNAL('network'), event, *args)
else:
self.print_error("unexpected network message:", event, args)
def on_network_qt(self, event, *args):
# Handle a network message in the GUI thread
if event == 'status':
self.update_status()
elif event == 'banner':
self.console.showMessage(args[0])
elif event == 'verified':
self.history_list.update_item(*args)
elif event == 'fee':
if self.config.is_dynfee():
self.fee_slider.update()
self.do_update_fee()
else:
self.print_error("unexpected network_qt signal:", event, args)
def fetch_alias(self):
self.alias_info = None
alias = self.config.get('alias')
if alias:
alias = str(alias)
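# OpenAlias resolution hits DNS, so run it on a daemon thread and signal
# the GUI when the result arrives rather than blocking the event loop.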
def f():
self.alias_info = self.contacts.resolve_openalias(alias)
self.emit(SIGNAL('alias_received'))
t = threading.Thread(target=f)
t.setDaemon(True)
t.start()
def close_wallet(self):
if self.wallet:
self.print_error('close_wallet', self.wallet.storage.path)
run_hook('close_wallet', self.wallet)
def load_wallet(self, wallet):
wallet.thread = TaskThread(self, self.on_error)
self.wallet = wallet
self.update_recently_visited(wallet.storage.path)
self.history_list.update()
self.address_list.update()
self.utxo_list.update()
self.need_update.set()
# The 'new_transaction' network callback can fire before the GUI is
# initialized, so check for pending notifications now that it is.
self.notify_transactions()
# update menus
self.seed_menu.setEnabled(self.wallet.has_seed())
self.mpk_menu.setEnabled(self.wallet.is_deterministic())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.request_list.update()
self.tabs.show()
self.init_geometry()
if self.config.get('hide_gui') and self.gui_object.tray.isVisible():
self.hide()
else:
self.show()
self.watching_only_changed()
run_hook('load_wallet', wallet, self)
def init_geometry(self):
winpos = self.wallet.storage.get("winpos-qt")
try:
screen = self.app.desktop().screenGeometry()
assert screen.contains(QRect(*winpos))
self.setGeometry(*winpos)
except:
self.print_error("using default geometry")
self.setGeometry(100, 100, 840, 640)
def watching_only_changed(self):
title = 'Electrum-VTC %s - %s' % (self.wallet.electrum_version,
self.wallet.basename().decode('utf8'))
extra = [self.wallet.storage.get('wallet_type', '?')]
if self.wallet.is_watching_only():
self.warn_if_watching_only()
extra.append(_('watching only'))
title += ' [%s]'% ', '.join(extra)
self.setWindowTitle(title)
self.password_menu.setEnabled(self.wallet.can_change_password())
self.import_privkey_menu.setVisible(self.wallet.can_import_privkey())
self.import_address_menu.setVisible(self.wallet.can_import_address())
self.export_menu.setEnabled(self.wallet.can_export())
def warn_if_watching_only(self):
if self.wallet.is_watching_only():
msg = ' '.join([
_("This wallet is watching-only."),
_("This means you will not be able to spend vertcoins with it."),
_("Make sure you own the seed phrase or the private keys, before you request vertcoins to be sent to this wallet.")
])
self.show_warning(msg, title=_('Information'))
def open_wallet(self):
wallet_folder = self.get_wallet_folder()
filename = unicode(QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder))
if not filename:
return
self.gui_object.new_window(filename)
def backup_wallet(self):
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
self.show_message(_("A copy of your wallet file was created in")+" '%s'" % str(new_path), title=_("Wallet backup created"))
except (IOError, os.error), reason:
self.show_critical(_("Electrum was unable to copy your wallet file to the specified location.") + "\n" + str(reason), title=_("Unable to create backup"))
def update_recently_visited(self, filename):
filename = filename.decode('utf8')
recent = self.config.get('recently_open', [])
try:
sorted(recent)
except:
recent = []
if filename in recent:
recent.remove(filename)
recent.insert(0, filename)
recent = recent[:5]
self.config.set_key('recently_open', recent)
self.recently_visited_menu.clear()
for i, k in enumerate(sorted(recent)):
b = os.path.basename(k)
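# loader() is a closure factory: binding k per iteration keeps each menu
# action pointing at its own path instead of the last one in the loop.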
def loader(k):
return lambda: self.gui_object.new_window(k.encode('utf8'))
self.recently_visited_menu.addAction(b, loader(k)).setShortcut(QKeySequence("Ctrl+%d"%(i+1)))
self.recently_visited_menu.setEnabled(len(recent))
def get_wallet_folder(self):
return os.path.dirname(os.path.abspath(self.config.get_wallet_path()))
def new_wallet(self):
wallet_folder = self.get_wallet_folder()
i = 1
while True:
filename = "wallet_%d" % i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
full_path = os.path.join(wallet_folder, filename)
self.gui_object.start_new_window(full_path, None)
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
self.recently_visited_menu = file_menu.addMenu(_("&Recently open"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addSeparator()
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_privkey_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
self.import_address_menu = wallet_menu.addAction(_("Import addresses"), self.import_addresses)
wallet_menu.addSeparator()
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
contacts_menu = wallet_menu.addMenu(_("Contacts"))
contacts_menu.addAction(_("&New"), self.new_contact_dialog)
contacts_menu.addAction(_("Import"), lambda: self.contact_list.import_contacts())
invoices_menu = wallet_menu.addMenu(_("Invoices"))
invoices_menu.addAction(_("Import"), lambda: self.invoice_list.import_invoices())
hist_menu = wallet_menu.addMenu(_("&History"))
hist_menu.addAction("Plot", self.plot_history_dialog).setEnabled(plot_history is not None)
hist_menu.addAction("Export", self.export_history_dialog)
wallet_menu.addSeparator()
wallet_menu.addAction(_("Find"), self.toggle_search).setShortcut(QKeySequence("Ctrl+F"))
def add_toggle_action(view_menu, tab):
is_shown = self.config.get('show_{}_tab'.format(tab.tab_name), False)
item_name = (_("Hide") if is_shown else _("Show")) + " " + tab.tab_description
tab.menu_action = view_menu.addAction(item_name, lambda: self.toggle_tab(tab))
view_menu = menubar.addMenu(_("&View"))
add_toggle_action(view_menu, self.addresses_tab)
add_toggle_action(view_menu, self.utxo_tab)
add_toggle_action(view_menu, self.contacts_tab)
add_toggle_action(view_menu, self.console_tab)
tools_menu = menubar.addMenu(_("&Tools"))
# Settings / Preferences are all reserved keywords in OSX using this as work around
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), lambda: self.gui_object.show_network_dialog(self))
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
run_hook('init_menubar_tools', self, tools_menu)
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"), lambda: webbrowser.open("https://vertcoin.org"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://docs.electrum.org/")).setShortcut(QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
help_menu.addSeparator()
help_menu.addAction(_("&Donate to server"), self.donate_to_server)
self.setMenuBar(menubar)
def donate_to_server(self):
d = self.network.get_donation_address()
if d:
host = self.network.get_parameters()[0]
self.pay_to_URI('vertcoin:%s?message=donation for %s'%(d, host))
else:
self.show_error(_('No donation address for this server'))
def show_about(self):
QMessageBox.about(self, "Electrum-VTC",
_("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" +
_("Electrum's focus is speed, with low resource usage and simplifying Vertcoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the Vertcoin system."))
def show_report_bug(self):
msg = ' '.join([
_("Please report any bugs as issues on github:<br/>"),
"<a href=\"https://github.com/vertcoin/electrum-vtc/issues\">https://github.com/vertcoin/electrum-vtc/issues</a><br/><br/>",
_("Before reporting a bug, upgrade to the most recent version of Electrum (latest release or git HEAD), and include the version number in your report."),
_("Try to explain not only what the bug is, but how it occurs.")
])
self.show_message(msg, title="Electrum-VTC - " + _("Reporting Bugs"))
def notify_transactions(self):
if not self.network or not self.network.is_connected():
return
self.print_error("Notifying GUI")
if len(self.tx_notifications) > 0:
# Combine the transactions into one notification if there are at least three
tx_amount = len(self.tx_notifications)
if(tx_amount >= 3):
total_amount = 0
for tx in self.tx_notifications:
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if(v > 0):
total_amount += v
self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s") \
% { 'txs' : tx_amount, 'amount' : self.format_amount_and_units(total_amount)})
self.tx_notifications = []
else:
# iterate over a copy: removing from a list while iterating it skips items
for tx in self.tx_notifications[:]:
if tx:
self.tx_notifications.remove(tx)
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if(v > 0):
self.notify(_("New transaction received. %(amount)s") % { 'amount' : self.format_amount_and_units(v)})
def notify(self, message):
if self.tray:
self.tray.showMessage("Electrum-VTC", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
path = os.path.join( directory, filename )
fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def connect_slots(self, sender):
self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
def timer_actions(self):
# Note this runs in the GUI thread
if self.need_update.is_set():
self.need_update.clear()
self.update_wallet()
# resolve aliases
self.payto_e.resolve()
# update fee
if self.require_fee_update:
self.do_update_fee()
self.require_fee_update = False
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)
def format_amount_and_units(self, amount):
text = self.format_amount(amount) + ' '+ self.base_unit()
x = self.fx.format_amount_and_units(amount)
if text and x:
text += ' (%s)'%x
return text
def get_decimal_point(self):
return self.decimal_point
def base_unit(self):
assert self.decimal_point in [2, 5, 8]
if self.decimal_point == 2:
return 'bits'
if self.decimal_point == 5:
return 'mVTC'
if self.decimal_point == 8:
return 'VTC'
raise Exception('Unknown base unit')
def connect_fields(self, window, btc_e, fiat_e, fee_e):
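# Keep the coin and fiat amount edits in sync. The 'follows' flags break
# the signal loop: the programmatic setText/setAmount calls below would
# otherwise re-trigger edit_changed on the other field indefinitely.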
def edit_changed(edit):
if edit.follows:
return
edit.setStyleSheet(BLACK_FG)
fiat_e.is_last_edited = (edit == fiat_e)
amount = edit.get_amount()
rate = self.fx.exchange_rate() if self.fx else None
if rate is None or amount is None:
if edit is fiat_e:
btc_e.setText("")
if fee_e:
fee_e.setText("")
else:
fiat_e.setText("")
else:
if edit is fiat_e:
btc_e.follows = True
btc_e.setAmount(int(amount / Decimal(rate) * COIN))
btc_e.setStyleSheet(BLUE_FG)
btc_e.follows = False
if fee_e:
window.update_fee()
else:
fiat_e.follows = True
fiat_e.setText(self.fx.ccy_amount_str(
amount * Decimal(rate) / COIN, False))
fiat_e.setStyleSheet(BLUE_FG)
fiat_e.follows = False
btc_e.follows = False
fiat_e.follows = False
fiat_e.textChanged.connect(partial(edit_changed, fiat_e))
btc_e.textChanged.connect(partial(edit_changed, btc_e))
fiat_e.is_last_edited = False
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = QIcon(":icons/status_disconnected.png")
elif self.network.is_connected():
server_height = self.network.get_server_height()
server_lag = self.network.get_local_height() - server_height
# Server height can be 0 after switching to a new server
# until we get a headers subscription request response.
# Display the synchronizing message in that case.
if not self.wallet.up_to_date or server_height == 0:
text = _("Synchronizing...")
icon = QIcon(":icons/status_waiting.png")
elif server_lag > 1:
text = _("Server is lagging (%d blocks)"%server_lag)
icon = QIcon(":icons/status_lagging.png")
else:
c, u, x = self.wallet.get_balance()
text = _("Balance" ) + ": %s "%(self.format_amount_and_units(c))
if u:
text += " [%s unconfirmed]"%(self.format_amount(u, True).strip())
if x:
text += " [%s unmatured]"%(self.format_amount(x, True).strip())
# append fiat balance and price
if self.fx.is_enabled():
text += self.fx.get_fiat_status_text(c + u + x,
self.base_unit(), self.get_decimal_point()) or ''
if not self.network.proxy:
icon = QIcon(":icons/status_connected.png")
else:
icon = QIcon(":icons/status_connected_proxy.png")
else:
text = _("Not connected")
icon = QIcon(":icons/status_disconnected.png")
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename().decode('utf8')))
self.balance_label.setText(text)
self.status_button.setIcon( icon )
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.history_list.update()
self.request_list.update()
self.address_list.update()
self.utxo_list.update()
self.contact_list.update()
self.invoice_list.update()
self.update_completions()
def create_history_tab(self):
from history_list import HistoryList
self.history_list = l = HistoryList(self)
l.searchable_list = l
return self.set_tab_property(l)
def show_address(self, addr):
import address_dialog
d = address_dialog.AddressDialog(self, addr)
d.exec_()
def show_transaction(self, tx, tx_desc = None):
'''tx_desc is set only for txs created in the Send tab'''
show_transaction(tx, self, tx_desc)
def create_receive_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.receive_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
msg = _('Vertcoin address where the payment should be received. Note that each payment request uses a different Vertcoin address.')
self.receive_address_label = HelpLabel(_('Receiving address'), msg)
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, -1)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, -1)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.fiat_receive_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_receive_e.setVisible(False)
grid.addWidget(self.fiat_receive_e, 2, 2, Qt.AlignLeft)
self.connect_fields(self, self.receive_amount_e, self.fiat_receive_e, None)
self.expires_combo = QComboBox()
self.expires_combo.addItems(map(lambda x:x[0], expiration_values))
self.expires_combo.setCurrentIndex(3)
self.expires_combo.setFixedWidth(self.receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them a signed payment request.'),
_('Expired requests have to be deleted manually from your list, in order to free the corresponding Vertcoin addresses.'),
_('The Vertcoin address never expires and will always be part of this Electrum wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
self.expires_label.setReadOnly(1)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.setProperty("primary", QVariant(True))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
self.receive_requests_label = QLabel(_('Requests'))
from request_list import RequestList
self.request_list = RequestList(self)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self.receive_qr)
w = QWidget()
w.searchable_list = self.request_list
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.request_list)
vbox.setStretchFactor(self.request_list, 1000)
return self.set_tab_property(w)
def delete_payment_request(self, addr):
self.wallet.remove_payment_request(addr, self.config)
self.request_list.update()
self.clear_receive_tab()
def get_request_URI(self, addr):
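# Build a BIP21-style URI for the request; creation time, expiry and a
# base58-encoded signature are appended as extra query parameters.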
req = self.wallet.receive_requests[addr]
message = self.wallet.labels.get(addr, '')
amount = req['amount']
URI = util.create_URI(addr, amount, message)
if req.get('time'):
URI += "&time=%d"%req.get('time')
if req.get('exp'):
URI += "&exp=%d"%req.get('exp')
if req.get('name') and req.get('sig'):
sig = req.get('sig').decode('hex')
sig = bitcoin.base_encode(sig, base=58)
URI += "&name=" + req['name'] + "&sig="+sig
return str(URI)
def sign_payment_request(self, addr):
alias = self.config.get('alias')
alias_privkey = None
if alias and self.alias_info:
alias_addr, alias_name, validated = self.alias_info
if alias_addr:
if self.wallet.is_mine(alias_addr):
msg = _('This payment request will be signed.') + '\n' + _('Please enter your password')
password = self.password_dialog(msg)
if password:
try:
self.wallet.sign_payment_request(addr, alias, alias_addr, password)
except Exception as e:
self.show_error(str(e))
return
else:
return
else:
return
def save_payment_request(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text())
if not message and not amount:
self.show_error(_('No message or amount'))
return False
i = self.expires_combo.currentIndex()
expiration = map(lambda x: x[1], expiration_values)[i]
req = self.wallet.make_payment_request(addr, amount, message, expiration)
self.wallet.add_payment_request(req, self.config)
self.sign_payment_request(addr)
self.request_list.update()
self.address_list.update()
self.save_request_button.setEnabled(False)
def view_and_paste(self, title, msg, data):
dialog = WindowModalDialog(self, title)
vbox = QVBoxLayout()
label = QLabel(msg)
label.setWordWrap(True)
vbox.addWidget(label)
pr_e = ShowQRTextEdit(text=data)
vbox.addWidget(pr_e)
vbox.addLayout(Buttons(CopyCloseButton(pr_e.text, self.app, dialog)))
dialog.setLayout(vbox)
dialog.exec_()
def export_payment_request(self, addr):
r = self.wallet.receive_requests.get(addr)
pr = paymentrequest.serialize_request(r).SerializeToString()
name = r['id'] + '.bip70'
fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
if fileName:
with open(fileName, "wb+") as f:
f.write(str(pr))
self.show_message(_("Request saved successfully"))
self.saved = True
def new_payment_request(self):
addr = self.wallet.get_unused_address()
if addr is None:
from electrum_vtc.wallet import Imported_Wallet
if not self.wallet.is_deterministic():
msg = [
_('No more addresses in your wallet.'),
_('You are using a non-deterministic wallet, which cannot create new addresses.'),
_('If you want to create new addresses, use a deterministic wallet instead.')
]
self.show_message(' '.join(msg))
return
if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
return
addr = self.wallet.create_new_address(False)
self.set_receive_address(addr)
self.expires_label.hide()
self.expires_combo.show()
self.new_request_button.setEnabled(False)
self.receive_message_e.setFocus(1)
def set_receive_address(self, addr):
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
addr = self.wallet.get_receiving_address()
if addr:
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
self.expires_label.hide()
self.expires_combo.show()
def toggle_qr_window(self):
import qrwindow
if not self.qr_window:
self.qr_window = qrwindow.QR_Window(self)
self.qr_window.setVisible(True)
self.qr_window_geometry = self.qr_window.geometry()
else:
if not self.qr_window.isVisible():
self.qr_window.setVisible(True)
self.qr_window.setGeometry(self.qr_window_geometry)
else:
self.qr_window_geometry = self.qr_window.geometry()
self.qr_window.setVisible(False)
self.update_receive_qr()
def show_send_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.send_tab))
def show_receive_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.receive_tab))
def receive_at(self, addr):
if not bitcoin.is_address(addr):
return
self.show_receive_tab()
self.receive_address_e.setText(addr)
self.new_request_button.setEnabled(True)
def update_receive_qr(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text()).encode('utf8')
self.save_request_button.setEnabled((amount is not None) or (message != ""))
uri = util.create_URI(addr, amount, message)
self.receive_qr.setData(uri)
if self.qr_window and self.qr_window.isVisible():
self.qr_window.set_content(addr, amount, message, uri)
def create_send_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.send_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
from paytoedit import PayToEdit
self.amount_e = BTCAmountEdit(self.get_decimal_point)
self.payto_e = PayToEdit(self)
msg = _('Recipient of the funds.') + '\n\n'\
+ _('You may enter a Vertcoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a Vertcoin address)')
payto_label = HelpLabel(_('Pay to'), msg)
grid.addWidget(payto_label, 1, 0)
grid.addWidget(self.payto_e, 1, 1, 1, -1)
completer = QCompleter()
completer.setCaseSensitivity(Qt.CaseInsensitive)
self.payto_e.setCompleter(completer)
completer.setModel(self.completions)
msg = _('Description of the transaction (not mandatory).') + '\n\n'\
+ _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
description_label = HelpLabel(_('Description'), msg)
grid.addWidget(description_label, 2, 0)
self.message_e = MyLineEdit()
grid.addWidget(self.message_e, 2, 1, 1, -1)
self.from_label = QLabel(_('From'))
grid.addWidget(self.from_label, 3, 0)
self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
self.from_list.setHeaderHidden(True)
self.from_list.setMaximumHeight(80)
grid.addWidget(self.from_list, 3, 1, 1, -1)
self.set_pay_from([])
msg = _('Amount to be sent.') + '\n\n' \
+ _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
+ _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
+ _('Keyboard shortcut: type "!" to send all your coins.')
amount_label = HelpLabel(_('Amount'), msg)
grid.addWidget(amount_label, 4, 0)
grid.addWidget(self.amount_e, 4, 1)
self.fiat_send_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_send_e.setVisible(False)
grid.addWidget(self.fiat_send_e, 4, 2)
self.amount_e.frozen.connect(
lambda: self.fiat_send_e.setFrozen(self.amount_e.isReadOnly()))
self.max_button = EnterButton(_("Max"), self.spend_max)
self.max_button.setFixedWidth(140)
grid.addWidget(self.max_button, 4, 3)
hbox = QHBoxLayout()
hbox.addStretch(1)
grid.addLayout(hbox, 4, 4)
msg = _('Vertcoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
+ _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
+ _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
self.fee_e_label = HelpLabel(_('Fee'), msg)
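# Slider callback: dynamic mode stores the selected fee level, static mode
# stores the selected fee rate under 'fee_per_kb'.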
def fee_cb(dyn, pos, fee_rate):
if dyn:
self.config.set_key('fee_level', pos, False)
else:
self.config.set_key('fee_per_kb', fee_rate, False)
self.spend_max() if self.is_max else self.update_fee()
self.fee_slider = FeeSlider(self, self.config, fee_cb)
self.fee_slider.setFixedWidth(140)
self.fee_e = BTCAmountEdit(self.get_decimal_point)
if not self.config.get('show_fee', False):
self.fee_e.setVisible(False)
self.fee_e.textEdited.connect(self.update_fee)
# This is so that when the user blanks the fee and moves on,
# we go back to auto-calculate mode and put a fee back.
self.fee_e.editingFinished.connect(self.update_fee)
self.connect_fields(self, self.amount_e, self.fiat_send_e, self.fee_e)
self.rbf_checkbox = QCheckBox(_('Replaceable'))
msg = [_('If you check this box, your transaction will be marked as non-final,'),
_('and you will have the possibility, while it is unconfirmed, to replace it with a transaction that pays a higher fee.'),
_('Note that some merchants do not accept non-final transactions until they are confirmed.')]
self.rbf_checkbox.setToolTip('<p>' + ' '.join(msg) + '</p>')
self.rbf_checkbox.setVisible(False)
grid.addWidget(self.fee_e_label, 5, 0)
grid.addWidget(self.fee_slider, 5, 1)
grid.addWidget(self.fee_e, 5, 2)
grid.addWidget(self.rbf_checkbox, 5, 3)
self.preview_button = EnterButton(_("Preview"), self.do_preview)
self.preview_button.setToolTip(_('Display the details of your transaction before signing it.'))
self.send_button = EnterButton(_("Send"), self.do_send)
self.send_button.setProperty("primary", QVariant(True))
self.clear_button = EnterButton(_("Clear"), self.do_clear)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.clear_button)
buttons.addWidget(self.preview_button)
buttons.addWidget(self.send_button)
grid.addLayout(buttons, 6, 1, 1, 3)
self.amount_e.shortcut.connect(self.spend_max)
self.payto_e.textChanged.connect(self.update_fee)
self.amount_e.textEdited.connect(self.update_fee)
def reset_max(t):
self.is_max = False
self.max_button.setEnabled(not bool(t))
self.amount_e.textEdited.connect(reset_max)
self.fiat_send_e.textEdited.connect(reset_max)
def entry_changed():
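# Colour-code the amount/fee edits: red when funds are insufficient,
# black for user-entered values, blue for auto-calculated ones.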
text = ""
if self.not_enough_funds:
amt_color, fee_color = RED_FG, RED_FG
text = _( "Not enough funds" )
c, u, x = self.wallet.get_frozen_balance()
if c+u+x:
text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
elif self.fee_e.isModified():
amt_color, fee_color = BLACK_FG, BLACK_FG
elif self.amount_e.isModified():
amt_color, fee_color = BLACK_FG, BLUE_FG
else:
amt_color, fee_color = BLUE_FG, BLUE_FG
self.statusBar().showMessage(text)
self.amount_e.setStyleSheet(amt_color)
self.fee_e.setStyleSheet(fee_color)
self.amount_e.textChanged.connect(entry_changed)
self.fee_e.textChanged.connect(entry_changed)
self.invoices_label = QLabel(_('Invoices'))
from invoice_list import InvoiceList
self.invoice_list = InvoiceList(self)
vbox0 = QVBoxLayout()
vbox0.addLayout(grid)
hbox = QHBoxLayout()
hbox.addLayout(vbox0)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.invoices_label)
vbox.addWidget(self.invoice_list)
vbox.setStretchFactor(self.invoice_list, 1000)
w.searchable_list = self.invoice_list
run_hook('create_send_tab', grid)
return self.set_tab_property(w)
def spend_max(self):
self.is_max = True
self.do_update_fee()
def update_fee(self):
self.require_fee_update = True
def get_payto_or_dummy(self):
r = self.payto_e.get_recipient()
if r:
return r
return (TYPE_ADDRESS, self.wallet.dummy_address())
def do_update_fee(self):
'''Recalculate the fee. If the fee was manually input, retain it, but
still build the TX to see if there are enough funds.
'''
if not self.config.get('offline') and self.config.is_dynfee() and not self.config.has_fee_estimates():
self.statusBar().showMessage(_('Waiting for fee estimates...'))
return False
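# A manually edited, non-empty (or focused) fee box "freezes" the fee:
# the user's value is kept instead of being overwritten by the estimate.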
freeze_fee = (self.fee_e.isModified()
and (self.fee_e.text() or self.fee_e.hasFocus()))
amount = '!' if self.is_max else self.amount_e.get_amount()
if amount is None:
if not freeze_fee:
self.fee_e.setAmount(None)
self.not_enough_funds = False
self.statusBar().showMessage('')
else:
fee = self.fee_e.get_amount() if freeze_fee else None
outputs = self.payto_e.get_outputs(self.is_max)
if not outputs:
_type, addr = self.get_payto_or_dummy()
outputs = [(_type, addr, amount)]
try:
tx = self.wallet.make_unsigned_transaction(self.get_coins(), outputs, self.config, fee)
self.not_enough_funds = False
except NotEnoughFunds:
self.not_enough_funds = True
if not freeze_fee:
self.fee_e.setAmount(None)
return
except BaseException:
return
if not freeze_fee:
fee = None if self.not_enough_funds else tx.get_fee()
self.fee_e.setAmount(fee)
if self.is_max:
amount = tx.output_value()
self.amount_e.setAmount(amount)
if fee is None:
return
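# rbf_policy decides whether the RBF checkbox is offered: 0 shows it
# unconditionally, 2 never shows it, and 1 shows it only when the fee rate
# maps back to certain dynamic fee levels (the -1/25 sentinels below).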
rbf_policy = self.config.get('rbf_policy', 2)
if rbf_policy == 0:
b = True
elif rbf_policy == 1:
fee_rate = fee * 1000 / tx.estimated_size()
try:
c = self.config.reverse_dynfee(fee_rate)
b = c in [-1, 25]
except:
b = False
elif rbf_policy == 2:
b = False
self.rbf_checkbox.setVisible(b)
self.rbf_checkbox.setChecked(b)
def from_list_delete(self, item):
i = self.from_list.indexOfTopLevelItem(item)
self.pay_from.pop(i)
self.redraw_from_list()
self.update_fee()
def from_list_menu(self, position):
item = self.from_list.itemAt(position)
menu = QMenu()
menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, coins):
self.pay_from = coins
self.redraw_from_list()
def redraw_from_list(self):
self.from_list.clear()
self.from_label.setHidden(len(self.pay_from) == 0)
self.from_list.setHidden(len(self.pay_from) == 0)
def format(x):
h = x.get('prevout_hash')
return h[0:10] + '...' + h[-10:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')
for item in self.pay_from:
self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))
def get_contact_payto(self, key):
_type, label = self.contacts.get(key)
return label + ' <' + key + '>' if _type == 'address' else key
def update_completions(self):
l = [self.get_contact_payto(key) for key in self.contacts.keys()]
self.completions.setStringList(l)
def protected(func):
'''Password request wrapper. The password is passed to the function
as the 'password' named argument. "None" indicates either an
unencrypted wallet, or the user cancelled the password request.
An empty input is passed as the empty string.'''
def request_password(self, *args, **kwargs):
parent = self.top_level_window()
password = None
while self.wallet.has_password():
password = self.password_dialog(parent=parent)
if password is None:
# User cancelled password input
return
try:
self.wallet.check_password(password)
break
except Exception as e:
self.show_error(str(e), parent=parent)
continue
kwargs['password'] = password
return func(self, *args, **kwargs)
return request_password
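# Usage sketch: decorate any method that takes a 'password' keyword and the
# prompt/validation loop above runs first, e.g.
#   @protected
#   def sign_tx(self, tx, callback, password): ...
# sign_tx() and show_seed_dialog() below are wrapped this way.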
def read_send_tab(self):
if self.payment_request and self.payment_request.has_expired():
self.show_error(_('Payment request has expired'))
return
label = unicode( self.message_e.text() )
if self.payment_request:
outputs = self.payment_request.get_outputs()
else:
errors = self.payto_e.get_errors()
if errors:
self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
return
outputs = self.payto_e.get_outputs(self.is_max)
if self.payto_e.is_alias and self.payto_e.validated is False:
alias = self.payto_e.toPlainText()
msg = _('WARNING: the alias "%s" could not be validated via an additional security check, DNSSEC, and thus may not be correct.') % alias + '\n'
msg += _('Do you wish to continue?')
if not self.question(msg):
return
if not outputs:
self.show_error(_('No outputs'))
return
for _type, addr, amount in outputs:
if addr is None:
self.show_error(_('Vertcoin Address is None'))
return
if _type == TYPE_ADDRESS and not bitcoin.is_address(addr):
self.show_error(_('Invalid Vertcoin Address'))
return
if amount is None:
self.show_error(_('Invalid Amount'))
return
freeze_fee = self.fee_e.isVisible() and self.fee_e.isModified() and (self.fee_e.text() or self.fee_e.hasFocus())
fee = self.fee_e.get_amount() if freeze_fee else None
coins = self.get_coins()
return outputs, fee, label, coins
def do_preview(self):
self.do_send(preview = True)
def do_send(self, preview = False):
if run_hook('abort_send', self):
return
r = self.read_send_tab()
if not r:
return
outputs, fee, tx_desc, coins = r
try:
tx = self.wallet.make_unsigned_transaction(coins, outputs, self.config, fee)
except NotEnoughFunds:
self.show_message(_("Insufficient funds"))
return
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
amount = tx.output_value() if self.is_max else sum(map(lambda x:x[2], outputs))
fee = tx.get_fee()
use_rbf = self.rbf_checkbox.isChecked()
if use_rbf:
tx.set_rbf(True)
if fee < tx.required_fee(self.wallet):
self.show_error(_("This transaction requires a higher fee, or it will not be propagated by the network"))
return
if preview:
self.show_transaction(tx, tx_desc)
return
# confirmation dialog
msg = [
_("Amount to be sent") + ": " + self.format_amount_and_units(amount),
_("Mining fee") + ": " + self.format_amount_and_units(fee),
]
x_fee = run_hook('get_tx_extra_fee', self.wallet, tx)
if x_fee:
x_fee_address, x_fee_amount = x_fee
msg.append( _("Additional fees") + ": " + self.format_amount_and_units(x_fee_amount) )
confirm_rate = 2 * self.config.max_fee_rate()
if fee > confirm_rate * tx.estimated_size() / 1000:
msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))
if self.wallet.has_password():
msg.append("")
msg.append(_("Enter your password to proceed"))
password = self.password_dialog('\n'.join(msg))
if not password:
return
else:
msg.append(_('Proceed?'))
password = None
if not self.question('\n'.join(msg)):
return
def sign_done(success):
if success:
if not tx.is_complete():
self.show_transaction(tx)
self.do_clear()
else:
self.broadcast_transaction(tx, tx_desc)
self.sign_tx_with_password(tx, sign_done, password)
@protected
def sign_tx(self, tx, callback, password):
self.sign_tx_with_password(tx, callback, password)
def sign_tx_with_password(self, tx, callback, password):
'''Sign the transaction in a separate thread. When done, calls
the callback with a success code of True or False.
'''
# call hook to see if plugin needs gui interaction
run_hook('sign_tx', self, tx)
def on_signed(result):
callback(True)
def on_failed(exc_info):
self.on_error(exc_info)
callback(False)
task = partial(self.wallet.sign_transaction, tx, password)
WaitingDialog(self, _('Signing transaction...'), task,
on_signed, on_failed)
def broadcast_transaction(self, tx, tx_desc):
def broadcast_thread():
# non-GUI thread
pr = self.payment_request
if pr and pr.has_expired():
self.payment_request = None
return False, _("Payment request has expired")
status, msg = self.network.broadcast(tx)
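# BIP70 path: a successful broadcast marks the invoice paid and sends a
# payment ACK (with a refund address) back to the requestor.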
if pr and status is True:
self.invoices.set_paid(pr, tx.txid())
self.invoices.save()
self.payment_request = None
refund_address = self.wallet.get_receiving_addresses()[0]
ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
if ack_status:
msg = ack_msg
return status, msg
# Capture current TL window; override might be removed on return
parent = self.top_level_window()
def broadcast_done(result):
# GUI thread
if result:
status, msg = result
if status:
if tx_desc is not None and tx.is_complete():
self.wallet.set_label(tx.txid(), tx_desc)
parent.show_message(_('Payment sent.') + '\n' + msg)
self.invoice_list.update()
self.do_clear()
else:
parent.show_error(msg)
WaitingDialog(self, _('Broadcasting transaction...'),
broadcast_thread, broadcast_done, self.on_error)
def query_choice(self, msg, choices):
# Needed by QtHandler for hardware wallets
dialog = WindowModalDialog(self.top_level_window())
clayout = ChoicesLayout(msg, choices)
vbox = QVBoxLayout(dialog)
vbox.addLayout(clayout.layout())
vbox.addLayout(Buttons(OkButton(dialog)))
if not dialog.exec_():
return None
return clayout.selected_index()
def lock_amount(self, b):
self.amount_e.setFrozen(b)
self.max_button.setEnabled(not b)
def prepare_for_payment_request(self):
self.show_send_tab()
self.payto_e.is_pr = True
for e in [self.payto_e, self.amount_e, self.message_e]:
e.setFrozen(True)
self.payto_e.setText(_("please wait..."))
return True
def delete_invoice(self, key):
self.invoices.remove(key)
self.invoice_list.update()
def payment_request_ok(self):
pr = self.payment_request
key = self.invoices.add(pr)
status = self.invoices.get_status(key)
self.invoice_list.update()
if status == PR_PAID:
self.show_message("invoice already paid")
self.do_clear()
self.payment_request = None
return
self.payto_e.is_pr = True
if not pr.has_expired():
self.payto_e.setGreen()
else:
self.payto_e.setExpired()
self.payto_e.setText(pr.get_requestor())
self.amount_e.setText(format_satoshis_plain(pr.get_amount(), self.decimal_point))
self.message_e.setText(pr.get_memo())
# signal to set fee
self.amount_e.textEdited.emit("")
def payment_request_error(self):
self.show_message(self.payment_request.error)
self.payment_request = None
self.do_clear()
def on_pr(self, request):
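# This callback may arrive from a non-GUI thread, so hand the result back
# to the GUI thread via Qt signals instead of touching widgets here.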
self.payment_request = request
if self.payment_request.verify(self.contacts):
self.emit(SIGNAL('payment_request_ok'))
else:
self.emit(SIGNAL('payment_request_error'))
def pay_to_URI(self, URI):
if not URI:
return
try:
out = util.parse_URI(unicode(URI), self.on_pr)
except BaseException as e:
self.show_error(_('Invalid Vertcoin URI:') + '\n' + str(e))
return
self.show_send_tab()
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if r or (name and sig):
self.prepare_for_payment_request()
return
address = out.get('address')
amount = out.get('amount')
label = out.get('label')
message = out.get('message')
# use label as description (not BIP21 compliant)
if label and not message:
message = label
if address:
self.payto_e.setText(address)
if message:
self.message_e.setText(message)
if amount:
self.amount_e.setAmount(amount)
self.amount_e.textEdited.emit("")
def do_clear(self):
self.is_max = False
self.not_enough_funds = False
self.payment_request = None
self.payto_e.is_pr = False
for e in [self.payto_e, self.message_e, self.amount_e, self.fiat_send_e, self.fee_e]:
e.setText('')
e.setFrozen(False)
self.set_pay_from([])
self.rbf_checkbox.setChecked(False)
self.update_status()
run_hook('do_clear', self)
def set_frozen_state(self, addrs, freeze):
self.wallet.set_frozen_state(addrs, freeze)
self.address_list.update()
self.utxo_list.update()
self.update_fee()
def create_list_tab(self, l):
w = QWidget()
w.searchable_list = l
vbox = QVBoxLayout()
w.setLayout(vbox)
vbox.setMargin(0)
vbox.setSpacing(0)
vbox.addWidget(l)
buttons = QWidget()
vbox.addWidget(buttons)
self.set_tab_property(l)
return w
def create_addresses_tab(self):
from address_list import AddressList
self.address_list = l = AddressList(self)
return self.create_list_tab(l)
def create_utxo_tab(self):
from utxo_list import UTXOList
self.utxo_list = l = UTXOList(self)
return self.create_list_tab(l)
def create_contacts_tab(self):
from contact_list import ContactList
self.contact_list = l = ContactList(self)
return self.create_list_tab(l)
def remove_address(self, addr):
if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
self.wallet.delete_address(addr)
self.address_list.update()
self.history_list.update()
def get_coins(self):
if self.pay_from:
return self.pay_from
else:
return self.wallet.get_spendable_coins(None, self.config)
def spend_coins(self, coins):
self.set_pay_from(coins)
self.show_send_tab()
self.update_fee()
def paytomany(self):
self.show_send_tab()
self.payto_e.paytomany()
msg = '\n'.join([
_('Enter a list of outputs in the \'Pay to\' field.'),
_('One output per line.'),
_('Format: address, amount'),
_('You may load a CSV file using the file icon.')
])
self.show_message(msg, title=_('Pay to many'))
def payto_contacts(self, labels):
paytos = [self.get_contact_payto(label) for label in labels]
self.show_send_tab()
if len(paytos) == 1:
self.payto_e.setText(paytos[0])
self.amount_e.setFocus()
else:
text = "\n".join([payto + ", 0" for payto in paytos])
self.payto_e.setText(text)
self.payto_e.setFocus()
def set_contact(self, label, address):
if not is_valid(address):
self.show_error(_('Invalid Address'))
self.contact_list.update() # Displays original unchanged value
return False
self.contacts[address] = ('address', label)
self.contact_list.update()
self.history_list.update()
self.update_completions()
return True
def delete_contacts(self, labels):
if not self.question(_("Remove %s from your list of contacts?")
% " + ".join(labels)):
return
for label in labels:
self.contacts.pop(label)
self.history_list.update()
self.contact_list.update()
self.update_completions()
def show_invoice(self, key):
pr = self.invoices.get(key)
pr.verify(self.contacts)
self.show_pr_details(pr)
def show_pr_details(self, pr):
key = pr.get_id()
d = WindowModalDialog(self, _("Invoice"))
vbox = QVBoxLayout(d)
grid = QGridLayout()
grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
grid.addWidget(QLabel(_("Amount") + ':'), 1, 0)
outputs_str = '\n'.join(map(lambda x: self.format_amount(x[2])+ self.base_unit() + ' @ ' + x[1], pr.get_outputs()))
grid.addWidget(QLabel(outputs_str), 1, 1)
expires = pr.get_expiration_date()
grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
grid.addWidget(QLabel(pr.get_memo()), 2, 1)
grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
if expires:
grid.addWidget(QLabel(_("Expires") + ':'), 4, 0)
grid.addWidget(QLabel(format_time(expires)), 4, 1)
vbox.addLayout(grid)
def do_export():
# use a save dialog (not an open dialog) so the user can pick a new file name
fn = self.getSaveFileName(_("Save invoice to file"), key + '.bip70', "*.bip70")
if not fn:
return
with open(fn, 'w') as f:
f.write(pr.raw)
self.show_message(_('Invoice saved as') + ' ' + fn)
exportButton = EnterButton(_('Save'), do_export)
def do_delete():
if self.question(_('Delete invoice?')):
self.invoices.remove(key)
self.history_list.update()
d.close()
deleteButton = EnterButton(_('Delete'), do_delete)
vbox.addLayout(Buttons(exportButton, deleteButton, CloseButton(d)))
d.exec_()
def do_pay_invoice(self, key):
pr = self.invoices.get(key)
self.payment_request = pr
self.prepare_for_payment_request()
if pr.verify(self.contacts):
self.payment_request_ok()
else:
self.payment_request_error()
def create_console_tab(self):
from console import Console
self.console = console = Console()
return self.set_tab_property(console)
def update_console(self):
console = self.console
console.history = self.config.get("console-history",[])
console.history_index = len(console.history)
console.updateNamespace({'wallet' : self.wallet,
'network' : self.network,
'plugins' : self.gui_object.plugins,
'window': self})
console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
c = commands.Commands(self.config, self.wallet, self.network, lambda: self.console.set_json(True))
methods = {}
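# mkfunc() binds 'method' per iteration (closure-factory idiom); a bare
# lambda in the loop would capture only the last method name.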
def mkfunc(f, method):
return lambda *args: f(method, args, self.password_dialog)
for m in dir(c):
if m[0]=='_' or m in ['network','wallet']: continue
methods[m] = mkfunc(c._run, m)
console.updateNamespace(methods)
def create_status_bar(self):
sb = QStatusBar()
sb.setFixedHeight(35)
qtVersion = qVersion()
self.balance_label = QLabel("")
sb.addWidget(self.balance_label)
self.search_box = QLineEdit()
self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
sb.addPermanentWidget(self.search_box)
self.lock_icon = QIcon()
self.password_button = StatusBarButton(self.lock_icon, _("Password"), self.change_password_dialog )
sb.addPermanentWidget(self.password_button)
sb.addPermanentWidget(StatusBarButton(QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
self.seed_button = StatusBarButton(QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
sb.addPermanentWidget(self.seed_button)
self.status_button = StatusBarButton(QIcon(":icons/status_disconnected.png"), _("Network"), lambda: self.gui_object.show_network_dialog(self))
sb.addPermanentWidget(self.status_button)
run_hook('create_status_bar', sb)
self.setStatusBar(sb)
def update_lock_icon(self):
icon = QIcon(":icons/lock.png") if self.wallet.has_password() else QIcon(":icons/unlock.png")
self.password_button.setIcon(icon)
def update_buttons_on_seed(self):
self.seed_button.setVisible(self.wallet.has_seed())
self.password_button.setVisible(self.wallet.can_change_password())
self.send_button.setVisible(not self.wallet.is_watching_only())
def change_password_dialog(self):
from password_dialog import ChangePasswordDialog
d = ChangePasswordDialog(self, self.wallet)
ok, password, new_password, encrypt_file = d.run()
if not ok:
return
try:
self.wallet.update_password(password, new_password, encrypt_file)
except BaseException as e:
self.show_error(str(e))
return
except:
traceback.print_exc(file=sys.stdout)
self.show_error(_('Failed to update password'))
return
msg = _('Password was updated successfully') if new_password else _('Password is disabled, this wallet is not protected')
self.show_message(msg, title=_("Success"))
self.update_lock_icon()
def toggle_search(self):
self.search_box.setHidden(not self.search_box.isHidden())
if not self.search_box.isHidden():
self.search_box.setFocus(1)
else:
self.do_search('')
def do_search(self, t):
tab = self.tabs.currentWidget()
if hasattr(tab, 'searchable_list'):
tab.searchable_list.filter(t)
def new_contact_dialog(self):
d = WindowModalDialog(self, _("New Contact"))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('New Contact') + ':'))
grid = QGridLayout()
line1 = QLineEdit()
line1.setFixedWidth(280)
line2 = QLineEdit()
line2.setFixedWidth(280)
grid.addWidget(QLabel(_("Address")), 1, 0)
grid.addWidget(line1, 1, 1)
grid.addWidget(QLabel(_("Name")), 2, 0)
grid.addWidget(line2, 2, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if d.exec_():
self.set_contact(unicode(line2.text()), str(line1.text()))
def show_master_public_keys(self):
dialog = WindowModalDialog(self, "Master Public Keys")
mpk_list = self.wallet.get_master_public_keys()
vbox = QVBoxLayout()
mpk_text = ShowQRTextEdit()
mpk_text.setMaximumHeight(100)
mpk_text.addCopyButton(self.app)
def show_mpk(index):
mpk_text.setText(mpk_list[index])
# only show the combobox in case multiple accounts are available
if len(mpk_list) > 1:
def label(key):
if isinstance(self.wallet, Multisig_Wallet):
return _("cosigner") + ' ' + str(i+1)
return ''
labels = [ label(i) for i in range(len(mpk_list))]
on_click = lambda clayout: show_mpk(clayout.selected_index())
labels_clayout = ChoicesLayout(_("Master Public Keys"), labels, on_click)
vbox.addLayout(labels_clayout.layout())
show_mpk(0)
vbox.addWidget(mpk_text)
vbox.addLayout(Buttons(CloseButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
@protected
def show_seed_dialog(self, password):
if not self.wallet.has_seed():
self.show_message(_('This wallet has no seed'))
return
keystore = self.wallet.get_keystore()
try:
seed = keystore.get_seed(password)
passphrase = keystore.get_passphrase(password)
except BaseException as e:
self.show_error(str(e))
return
from seed_dialog import SeedDialog
d = SeedDialog(self, seed, passphrase)
d.exec_()
def show_qrcode(self, data, title = _("QR code"), parent=None):
if not data:
return
d = QRDialog(data, parent or self, title)
d.exec_()
@protected
def show_private_key(self, address, password):
if not address:
return
try:
pk_list = self.wallet.get_private_key(address, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
d = WindowModalDialog(self, _("Private key"))
d.setMinimumSize(600, 200)
vbox = QVBoxLayout()
vbox.addWidget( QLabel(_("Address") + ': ' + address))
vbox.addWidget( QLabel(_("Private key") + ':'))
keys_e = ShowQRTextEdit(text='\n'.join(pk_list))
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
msg_sign = ("Signing with an address actually means signing with the corresponding "
"private key, and verifying with the corresponding public key. The "
"address you have entered does not have a unique public key, so these "
"operations cannot be performed.")
@protected
def do_sign(self, address, message, signature, password):
address = str(address.text()).strip()
message = unicode(message.toPlainText()).encode('utf-8').strip()
if not bitcoin.is_address(address):
self.show_message('Invalid Vertcoin address.')
return
if not bitcoin.is_p2pkh(address):
self.show_message('Cannot sign messages with this type of address.' + '\n\n' + self.msg_sign)
return
if not self.wallet.is_mine(address):
self.show_message('Address not in wallet.')
return
task = partial(self.wallet.sign_message, address, message, password)
def show_signed_message(sig):
signature.setText(base64.b64encode(sig))
self.wallet.thread.add(task, on_success=show_signed_message)
def do_verify(self, address, message, signature):
address = str(address.text()).strip()
message = unicode(message.toPlainText()).encode('utf-8').strip()
if not bitcoin.is_address(address):
self.show_message('Invalid Vertcoin address.')
return
if not bitcoin.is_p2pkh(address):
self.show_message('Cannot verify messages with this type of address.' + '\n\n' + self.msg_sign)
return
try:
# This can throw on invalid base64
sig = base64.b64decode(str(signature.toPlainText()))
verified = bitcoin.verify_message(address, sig, message)
except:
verified = False
if verified:
self.show_message(_("Signature verified"))
else:
self.show_error(_("Wrong signature"))
def sign_verify_message(self, address=''):
d = WindowModalDialog(self, _('Sign/verify Message'))
d.setMinimumSize(410, 290)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
address_e = QLineEdit()
address_e.setText(address)
layout.addWidget(QLabel(_('Address')), 2, 0)
layout.addWidget(address_e, 2, 1)
signature_e = QTextEdit()
layout.addWidget(QLabel(_('Signature')), 3, 0)
layout.addWidget(signature_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Sign"))
b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Verify"))
b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
cyphertext = str(encrypted_e.toPlainText())
task = partial(self.wallet.decrypt_message, str(pubkey_e.text()),
cyphertext, password)
self.wallet.thread.add(task, on_success=message_e.setText)
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
message = unicode(message_e.toPlainText())
message = message.encode('utf-8')
try:
encrypted = bitcoin.encrypt_message(message, str(pubkey_e.text()))
encrypted_e.setText(encrypted)
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def encrypt_message(self, address = ''):
d = WindowModalDialog(self, _('Encrypt/decrypt Message'))
d.setMinimumSize(610, 490)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
pubkey_e = QLineEdit()
if address:
pubkey = self.wallet.get_public_key(address)
pubkey_e.setText(pubkey)
layout.addWidget(QLabel(_('Public key')), 2, 0)
layout.addWidget(pubkey_e, 2, 1)
encrypted_e = QTextEdit()
layout.addWidget(QLabel(_('Encrypted')), 3, 0)
layout.addWidget(encrypted_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Encrypt"))
b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Decrypt"))
b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
def password_dialog(self, msg=None, parent=None):
from password_dialog import PasswordDialog
parent = parent or self
d = PasswordDialog(parent, msg)
return d.run()
def tx_from_text(self, txt):
from electrum_vtc.transaction import tx_from_str, Transaction
try:
tx = tx_from_str(txt)
return Transaction(tx)
except:
traceback.print_exc(file=sys.stdout)
self.show_critical(_("Electrum was unable to parse your transaction"))
return
def read_tx_from_qrcode(self):
from electrum_vtc import qrscanner
try:
data = qrscanner.scan_barcode(self.config.get_video_device())
except BaseException as e:
self.show_error(str(e))
return
if not data:
return
# if the user scanned a Vertcoin URI
if data.lower().startswith("vertcoin:"):
self.pay_to_URI(data)
return
# else if the user scanned an offline signed tx
# transactions are binary, but qrcode seems to return utf8...
data = data.decode('utf8')
z = bitcoin.base_decode(data, length=None, base=43)
data = ''.join(chr(ord(b)) for b in z).encode('hex')
tx = self.tx_from_text(data)
if not tx:
return
self.show_transaction(tx)
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error) as reason:
self.show_critical(_("Electrum was unable to open your transaction file") + "\n" + str(reason), title=_("Unable to read file or no transaction found"))
return
return self.tx_from_text(file_content)
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum_vtc import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
txid = str(txid).strip()
try:
r = self.network.synchronous_get(('blockchain.transaction.get',[txid]))
except BaseException as e:
self.show_message(str(e))
return
tx = transaction.Transaction(r)
self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
d = WindowModalDialog(self, _('Private keys'))
d.setMinimumSize(850, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-vtc-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
b = OkButton(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(Buttons(CancelButton(d), b))
private_keys = {}
addresses = self.wallet.get_addresses()
done = False
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done:
break
private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password))
d.emit(SIGNAL('computing_privkeys'))
d.emit(SIGNAL('show_privkeys'))
def show_privkeys():
s = "\n".join(map(lambda x: x[0] + "\t" + x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error) as reason:
txt = "\n".join([
_("Electrum was unable to produce a private key-export."),
str(reason)
])
self.show_critical(txt, title=_("Unable to create csv"))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
labelsFile = self.getOpenFileName(_("Open labels file"), "*.json")
if not labelsFile: return
try:
with open(labelsFile, 'r') as f:
data = f.read()
for key, value in json.loads(data).items():
self.wallet.set_label(key, value)
self.show_message(_("Your labels were imported from") + " '%s'" % str(labelsFile))
except (IOError, os.error) as reason:
self.show_critical(_("Electrum was unable to import your labels.") + "\n" + str(reason))
self.address_list.update()
self.history_list.update()
def do_export_labels(self):
labels = self.wallet.labels
try:
fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum-vtc_labels.json', "*.json")
if fileName:
with open(fileName, 'w+') as f:
json.dump(labels, f, indent=4, sort_keys=True)
self.show_message(_("Your labels were exported to") + " '%s'" % str(fileName))
except (IOError, os.error) as reason:
self.show_critical(_("Electrum was unable to export your labels.") + "\n" + str(reason))
def export_history_dialog(self):
d = WindowModalDialog(self, _('Export History'))
d.setMinimumSize(400, 200)
vbox = QVBoxLayout(d)
defaultname = os.path.expanduser('~/electrum-vtc-history.csv')
select_msg = _('Select file to export your wallet transactions to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
vbox.addStretch(1)
hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
vbox.addLayout(hbox)
run_hook('export_history_dialog', self, hbox)
self.update()
if not d.exec_():
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_history(self.wallet, filename, csv_button.isChecked())
except (IOError, os.error) as reason:
export_error_label = _("Electrum was unable to produce a transaction export.")
self.show_critical(export_error_label + "\n" + str(reason), title=_("Unable to export history"))
return
self.show_message(_("Your wallet history has been successfully exported."))
def plot_history_dialog(self):
if plot_history is None:
return
wallet = self.wallet
history = wallet.get_history()
if len(history) > 0:
plt = plot_history(self.wallet, history)
plt.show()
def do_export_history(self, wallet, fileName, is_csv):
history = wallet.get_history()
lines = []
for item in history:
tx_hash, height, confirmations, timestamp, value, balance = item
if height>0:
if timestamp is not None:
time_string = format_time(timestamp)
else:
time_string = _("unverified")
else:
time_string = _("unconfirmed")
if value is not None:
value_string = format_satoshis(value, True)
else:
value_string = '--'
if tx_hash:
label = wallet.get_label(tx_hash)
label = label.encode('utf-8')
else:
label = ""
if is_csv:
lines.append([tx_hash, label, confirmations, value_string, time_string])
else:
lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f, lineterminator='\n')
transaction.writerow(["transaction_hash","label", "confirmations", "value", "timestamp"])
for line in lines:
transaction.writerow(line)
else:
import json
f.write(json.dumps(lines, indent = 4))
def sweep_key_dialog(self):
d = WindowModalDialog(self, title=_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys:")))
keys_e = QTextEdit()
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
addresses = self.wallet.get_unused_addresses()
h, address_e = address_field(addresses)
vbox.addLayout(h)
vbox.addStretch(1)
button = OkButton(d, _('Sweep'))
vbox.addLayout(Buttons(CancelButton(d), button))
button.setEnabled(False)
def get_address():
addr = str(address_e.text()).strip()
if bitcoin.is_address(addr):
return addr
def get_pk():
text = str(keys_e.toPlainText())
return keystore.get_private_keys(text)
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
on_address = lambda text: address_e.setStyleSheet(BLACK_FG if get_address() else RED_FG)
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
address_e.textChanged.connect(on_address)
if not d.exec_():
return
try:
tx = self.wallet.sweep(get_pk(), self.network, self.config, get_address(), None)
except BaseException as e:
self.show_message(str(e))
return
self.warn_if_watching_only()
self.show_transaction(tx)
def _do_import(self, title, msg, func):
text = text_dialog(self, title, msg + ' :', _('Import'))
if not text:
return
bad = []
good = []
for key in str(text).split():
try:
addr = func(key)
good.append(addr)
except BaseException as e:
bad.append(key)
continue
if good:
self.show_message(_("The following addresses were added") + ':\n' + '\n'.join(good))
if bad:
self.show_critical(_("The following inputs could not be imported") + ':\n'+ '\n'.join(bad))
self.address_list.update()
self.history_list.update()
def import_addresses(self):
if not self.wallet.can_import_address():
return
title, msg = _('Import addresses'), _("Enter addresses")
self._do_import(title, msg, self.wallet.import_address)
@protected
def do_import_privkey(self, password):
if not self.wallet.can_import_privkey():
return
title, msg = _('Import private keys'), _("Enter private keys")
self._do_import(title, msg, lambda x: self.wallet.import_key(x, password))
def update_fiat(self):
b = self.fx and self.fx.is_enabled()
self.fiat_send_e.setVisible(b)
self.fiat_receive_e.setVisible(b)
self.history_list.refresh_headers()
self.history_list.update()
self.update_status()
def settings_dialog(self):
self.need_restart = False
d = WindowModalDialog(self, _('Preferences'))
vbox = QVBoxLayout()
tabs = QTabWidget()
gui_widgets = []
fee_widgets = []
tx_widgets = []
id_widgets = []
# language
lang_help = _('Select which language is used in the GUI (after restart).')
lang_label = HelpLabel(_('Language') + ':', lang_help)
lang_combo = QComboBox()
from electrum_vtc.i18n import languages
lang_combo.addItems(languages.values())
try:
index = languages.keys().index(self.config.get("language",''))
except Exception:
index = 0
lang_combo.setCurrentIndex(index)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
def on_lang(x):
lang_request = languages.keys()[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
self.need_restart = True
lang_combo.currentIndexChanged.connect(on_lang)
gui_widgets.append((lang_label, lang_combo))
nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
nz = QSpinBox()
nz.setMinimum(0)
nz.setMaximum(self.decimal_point)
nz.setValue(self.num_zeros)
if not self.config.is_modifiable('num_zeros'):
for w in [nz, nz_label]: w.setEnabled(False)
def on_nz():
value = nz.value()
if self.num_zeros != value:
self.num_zeros = value
self.config.set_key('num_zeros', value, True)
self.history_list.update()
self.address_list.update()
nz.valueChanged.connect(on_nz)
gui_widgets.append((nz_label, nz))
def on_dynfee(x):
self.config.set_key('dynamic_fees', x == Qt.Checked)
self.fee_slider.update()
update_maxfee()
dynfee_cb = QCheckBox(_('Use dynamic fees'))
dynfee_cb.setChecked(self.config.is_dynfee())
dynfee_cb.setToolTip(_("Use fees recommended by the server."))
fee_widgets.append((dynfee_cb, None))
dynfee_cb.stateChanged.connect(on_dynfee)
def on_maxfee(x):
m = maxfee_e.get_amount()
if m: self.config.set_key('max_fee_rate', m)
self.fee_slider.update()
def update_maxfee():
d = self.config.is_dynfee()
maxfee_e.setDisabled(d)
maxfee_label.setDisabled(d)
maxfee_label = HelpLabel(_('Max static fee'), _('Max value of the static fee slider'))
maxfee_e = BTCkBEdit(self.get_decimal_point)
maxfee_e.setAmount(self.config.max_fee_rate())
maxfee_e.textChanged.connect(on_maxfee)
update_maxfee()
fee_widgets.append((maxfee_label, maxfee_e))
feebox_cb = QCheckBox(_('Edit fees manually'))
feebox_cb.setChecked(self.config.get('show_fee', False))
feebox_cb.setToolTip(_("Show fee edit box in send tab."))
def on_feebox(x):
self.config.set_key('show_fee', x == Qt.Checked)
self.fee_e.setVisible(bool(x))
feebox_cb.stateChanged.connect(on_feebox)
fee_widgets.append((feebox_cb, None))
rbf_policy = self.config.get('rbf_policy', 2)
rbf_label = HelpLabel(_('Propose Replace-By-Fee') + ':', '')
rbf_combo = QComboBox()
rbf_combo.addItems([_('Always'), _('If the fee is low'), _('Never')])
rbf_combo.setCurrentIndex(rbf_policy)
def on_rbf(x):
self.config.set_key('rbf_policy', x)
rbf_combo.currentIndexChanged.connect(on_rbf)
fee_widgets.append((rbf_label, rbf_combo))
msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\
+ _('The following alias providers are available:') + '\n'\
+ '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\
+ 'For more information, see http://openalias.org'
alias_label = HelpLabel(_('OpenAlias') + ':', msg)
alias = self.config.get('alias','')
alias_e = QLineEdit(alias)
def set_alias_color():
if not self.config.get('alias'):
alias_e.setStyleSheet("")
return
if self.alias_info:
alias_addr, alias_name, validated = self.alias_info
alias_e.setStyleSheet(GREEN_BG if validated else RED_BG)
else:
alias_e.setStyleSheet(RED_BG)
def on_alias_edit():
alias_e.setStyleSheet("")
alias = str(alias_e.text())
self.config.set_key('alias', alias, True)
if alias:
self.fetch_alias()
set_alias_color()
self.connect(self, SIGNAL('alias_received'), set_alias_color)
alias_e.editingFinished.connect(on_alias_edit)
id_widgets.append((alias_label, alias_e))
# SSL certificate
msg = ' '.join([
_('SSL certificate used to sign payment requests.'),
_('Use setconfig to set ssl_chain and ssl_privkey.'),
])
if self.config.get('ssl_privkey') or self.config.get('ssl_chain'):
try:
SSL_identity = paymentrequest.check_ssl_config(self.config)
SSL_error = None
except BaseException as e:
SSL_identity = "error"
SSL_error = str(e)
else:
SSL_identity = ""
SSL_error = None
SSL_id_label = HelpLabel(_('SSL certificate') + ':', msg)
SSL_id_e = QLineEdit(SSL_identity)
SSL_id_e.setStyleSheet(RED_BG if SSL_error else GREEN_BG if SSL_identity else '')
if SSL_error:
SSL_id_e.setToolTip(SSL_error)
SSL_id_e.setReadOnly(True)
id_widgets.append((SSL_id_label, SSL_id_e))
units = ['VTC', 'mVTC', 'bits']
msg = _('Base unit of your wallet.')\
+ '\n1 VTC = 1000 mVTC.\n' \
+ _('These settings affect the fields in the Send tab') + ' '
unit_label = HelpLabel(_('Base unit') + ':', msg)
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
def on_unit(x):
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() == unit_result:
return
edits = self.amount_e, self.fee_e, self.receive_amount_e
amounts = [edit.get_amount() for edit in edits]
if unit_result == 'VTC':
self.decimal_point = 8
elif unit_result == 'mVTC':
self.decimal_point = 5
elif unit_result == 'bits':
self.decimal_point = 2
else:
raise Exception('Unknown base unit')
self.config.set_key('decimal_point', self.decimal_point, True)
self.history_list.update()
self.request_list.update()
self.address_list.update()
for edit, amount in zip(edits, amounts):
edit.setAmount(amount)
self.update_status()
unit_combo.currentIndexChanged.connect(on_unit)
gui_widgets.append((unit_label, unit_combo))
block_explorers = sorted(util.block_explorer_info().keys())
msg = _('Choose which online block explorer to use for functions that open a web browser')
block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_ex_combo.findText(util.block_explorer(self.config)))
def on_be(x):
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
block_ex_combo.currentIndexChanged.connect(on_be)
gui_widgets.append((block_ex_label, block_ex_combo))
from electrum_vtc import qrscanner
system_cameras = qrscanner._find_system_cameras()
qr_combo = QComboBox()
qr_combo.addItem("Default","default")
for camera, device in system_cameras.items():
qr_combo.addItem(camera, device)
#combo.addItem("Manually specify a device", config.get("video_device"))
index = qr_combo.findData(self.config.get("video_device"))
qr_combo.setCurrentIndex(index)
msg = _("Install the zbar package to enable this.")
qr_label = HelpLabel(_('Video Device') + ':', msg)
qr_combo.setEnabled(qrscanner.libzbar is not None)
on_video_device = lambda x: self.config.set_key("video_device", str(qr_combo.itemData(x).toString()), True)
qr_combo.currentIndexChanged.connect(on_video_device)
gui_widgets.append((qr_label, qr_combo))
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
def on_usechange(x):
usechange_result = x == Qt.Checked
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
multiple_cb.setEnabled(self.wallet.use_change)
usechange_cb.stateChanged.connect(on_usechange)
usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))
tx_widgets.append((usechange_cb, None))
def on_multiple(x):
multiple = x == Qt.Checked
if self.wallet.multiple_change != multiple:
self.wallet.multiple_change = multiple
self.wallet.storage.put('multiple_change', multiple)
multiple_change = self.wallet.multiple_change
multiple_cb = QCheckBox(_('Use multiple change addresses'))
multiple_cb.setEnabled(self.wallet.use_change)
multiple_cb.setToolTip('\n'.join([
_('In some cases, use up to 3 change addresses in order to break '
'up large coin amounts and obfuscate the recipient address.'),
_('This may result in higher transaction fees.')
]))
multiple_cb.setChecked(multiple_change)
multiple_cb.stateChanged.connect(on_multiple)
tx_widgets.append((multiple_cb, None))
def fmt_docs(key, klass):
lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]
return '\n'.join([key, "", " ".join(lines)])
choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
chooser_name = coinchooser.get_name(self.config)
msg = _('Choose coin (UTXO) selection method. The following are available:\n\n')
msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())
chooser_label = HelpLabel(_('Coin selection') + ':', msg)
chooser_combo = QComboBox()
chooser_combo.addItems(choosers)
i = choosers.index(chooser_name) if chooser_name in choosers else 0
chooser_combo.setCurrentIndex(i)
def on_chooser(x):
chooser_name = choosers[chooser_combo.currentIndex()]
self.config.set_key('coin_chooser', chooser_name)
chooser_combo.currentIndexChanged.connect(on_chooser)
tx_widgets.append((chooser_label, chooser_combo))
def on_unconf(x):
self.config.set_key('confirmed_only', bool(x))
conf_only = self.config.get('confirmed_only', False)
unconf_cb = QCheckBox(_('Spend only confirmed coins'))
unconf_cb.setToolTip(_('Spend only confirmed inputs.'))
unconf_cb.setChecked(conf_only)
unconf_cb.stateChanged.connect(on_unconf)
tx_widgets.append((unconf_cb, None))
# Fiat Currency
hist_checkbox = QCheckBox()
ccy_combo = QComboBox()
ex_combo = QComboBox()
def update_currencies():
if not self.fx: return
currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))
ccy_combo.clear()
ccy_combo.addItems([_('None')] + currencies)
if self.fx.is_enabled():
ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))
def update_history_cb():
if not self.fx: return
hist_checkbox.setChecked(self.fx.get_history_config())
hist_checkbox.setEnabled(self.fx.is_enabled())
def update_exchanges():
if not self.fx: return
b = self.fx.is_enabled()
ex_combo.setEnabled(b)
if b:
h = self.fx.get_history_config()
c = self.fx.get_currency()
exchanges = self.fx.get_exchanges_by_ccy(c, h)
else:
exchanges = self.fx.get_exchanges_by_ccy('USD', False)
ex_combo.clear()
ex_combo.addItems(sorted(exchanges))
ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))
def on_currency(hh):
if not self.fx: return
b = bool(ccy_combo.currentIndex())
ccy = str(ccy_combo.currentText()) if b else None
self.fx.set_enabled(b)
if b and ccy != self.fx.ccy:
self.fx.set_currency(ccy)
update_history_cb()
update_exchanges()
self.update_fiat()
def on_exchange(idx):
exchange = str(ex_combo.currentText())
if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():
self.fx.set_exchange(exchange)
def on_history(checked):
if not self.fx: return
self.fx.set_history_config(checked)
update_exchanges()
self.history_list.refresh_headers()
if self.fx.is_enabled() and checked:
# reset timeout to get historical rates
self.fx.timeout = 0
update_currencies()
update_history_cb()
update_exchanges()
ccy_combo.currentIndexChanged.connect(on_currency)
hist_checkbox.stateChanged.connect(on_history)
ex_combo.currentIndexChanged.connect(on_exchange)
fiat_widgets = []
fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))
fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))
fiat_widgets.append((QLabel(_('Source')), ex_combo))
tabs_info = [
(fee_widgets, _('Fees')),
(tx_widgets, _('Transactions')),
(gui_widgets, _('Appearance')),
(fiat_widgets, _('Fiat')),
(id_widgets, _('Identity')),
]
for widgets, name in tabs_info:
tab = QWidget()
grid = QGridLayout(tab)
grid.setColumnStretch(0,1)
for a,b in widgets:
i = grid.rowCount()
if b:
if a:
grid.addWidget(a, i, 0)
grid.addWidget(b, i, 1)
else:
grid.addWidget(a, i, 0, 1, 2)
tabs.addTab(tab, name)
vbox.addWidget(tabs)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
# run the dialog
d.exec_()
if self.fx:
self.fx.timeout = 0
self.disconnect(self, SIGNAL('alias_received'), set_alias_color)
run_hook('close_settings_dialog')
if self.need_restart:
self.show_warning(_('Please restart Electrum to activate the new GUI settings'), title=_('Success'))
def closeEvent(self, event):
# It seems in some rare cases this closeEvent() is called twice
if not self.cleaned_up:
self.cleaned_up = True
self.clean_up()
event.accept()
def clean_up(self):
self.wallet.thread.stop()
if self.network:
self.network.unregister_callback(self.on_network)
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.wallet.storage.put("winpos-qt", [g.left(),g.top(),
g.width(),g.height()])
self.config.set_key("console-history", self.console.history[-50:],
True)
if self.qr_window:
self.qr_window.close()
self.close_wallet()
self.gui_object.close_window(self)
def plugins_dialog(self):
self.pluginsdialog = d = WindowModalDialog(self, _('Electrum Plugins'))
plugins = self.gui_object.plugins
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(plugins.count() * 35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
settings_widgets = {}
def enable_settings_widget(p, name, i):
widget = settings_widgets.get(name)
if not widget and p and p.requires_settings():
widget = settings_widgets[name] = p.settings_widget(d)
grid.addWidget(widget, i, 1)
if widget:
widget.setEnabled(bool(p and p.is_enabled()))
def do_toggle(cb, name, i):
p = plugins.toggle(name)
cb.setChecked(bool(p))
enable_settings_widget(p, name, i)
run_hook('init_qt', self.gui_object)
for i, descr in enumerate(plugins.descriptions.values()):
name = descr['__name__']
p = plugins.get(name)
if descr.get('registers_keystore'):
continue
try:
cb = QCheckBox(descr['fullname'])
cb.setEnabled(plugins.is_available(name, self.wallet))
cb.setChecked(p is not None and p.is_enabled())
grid.addWidget(cb, i, 0)
enable_settings_widget(p, name, i)
cb.clicked.connect(partial(do_toggle, cb, name, i))
msg = descr['description']
if descr.get('requires'):
msg += '\n\n' + _('Requires') + ':\n' + '\n'.join(map(lambda x: x[1], descr.get('requires')))
grid.addWidget(HelpButton(msg), i, 2)
except Exception:
self.print_msg("error: cannot display plugin", name)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(i+1,1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def cpfp(self, parent_tx, new_tx):
total_size = parent_tx.estimated_size() + new_tx.estimated_size()
d = WindowModalDialog(self, _('Child Pays for Parent'))
vbox = QVBoxLayout(d)
msg = (
"A CPFP is a transaction that sends an unconfirmed output back to "
"yourself, with a high fee. The goal is to have miners confirm "
"the parent transaction in order to get the fee attached to the "
"child transaction.")
vbox.addWidget(WWLabel(_(msg)))
msg2 = ("The proposed fee is computed using your "
"fee/kB settings, applied to the total size of both child and "
"parent transactions. After you broadcast a CPFP transaction, "
"it is normal to see a new unconfirmed transaction in your history.")
vbox.addWidget(WWLabel(_(msg2)))
grid = QGridLayout()
grid.addWidget(QLabel(_('Total size') + ':'), 0, 0)
grid.addWidget(QLabel('%d bytes'% total_size), 0, 1)
max_fee = new_tx.output_value()
grid.addWidget(QLabel(_('Input amount') + ':'), 1, 0)
grid.addWidget(QLabel(self.format_amount(max_fee) + ' ' + self.base_unit()), 1, 1)
output_amount = QLabel('')
grid.addWidget(QLabel(_('Output amount') + ':'), 2, 0)
grid.addWidget(output_amount, 2, 1)
fee_e = BTCAmountEdit(self.get_decimal_point)
def f(x):
a = max_fee - fee_e.get_amount()
output_amount.setText((self.format_amount(a) + ' ' + self.base_unit()) if a else '')
fee_e.textChanged.connect(f)
fee = self.config.fee_per_kb() * total_size / 1000
fee_e.setAmount(fee)
grid.addWidget(QLabel(_('Fee') + ':'), 3, 0)
grid.addWidget(fee_e, 3, 1)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * total_size / 1000
fee = min(max_fee, fee)
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
fee_slider.update()
grid.addWidget(fee_slider, 4, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
fee = fee_e.get_amount()
if fee > max_fee:
self.show_error(_('Max fee exceeded'))
return
new_tx = self.wallet.cpfp(parent_tx, fee)
new_tx.set_rbf(True)
self.show_transaction(new_tx)
def bump_fee_dialog(self, tx):
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
tx_label = self.wallet.get_label(tx.txid())
tx_size = tx.estimated_size()
d = WindowModalDialog(self, _('Bump Fee'))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('Current fee') + ': %s'% self.format_amount(fee) + ' ' + self.base_unit()))
vbox.addWidget(QLabel(_('New fee') + ':'))
fee_e = BTCAmountEdit(self.get_decimal_point)
fee_e.setAmount(fee * 1.5)
vbox.addWidget(fee_e)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * tx_size / 1000
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
vbox.addWidget(fee_slider)
cb = QCheckBox(_('Final'))
vbox.addWidget(cb)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
is_final = cb.isChecked()
new_fee = fee_e.get_amount()
delta = new_fee - fee
if delta < 0:
self.show_error("fee too low")
return
try:
new_tx = self.wallet.bump_fee(tx, delta)
except BaseException as e:
self.show_error(str(e))
return
if is_final:
new_tx.set_rbf(False)
self.show_transaction(new_tx, tx_label) | PypiClean |
/parts/Mouth/Vomit.py | def MouthVomit():
return (
'<path fill-rule="evenodd" clip-rule="evenodd"'
' d="M34.0082 30.3979C35.128 19.9071 38.2345 11.0056 53.9962 11C69.7578 10.9944 72.9169 19.9575 73.9943 30.4952C74.081 31.3433 73.1739 32 72.037 32C65.3505 32 62.6703 30.5048 53.9894 30.5C45.3085 30.4952 40.7568 32 36.0925 32C34.949 32 33.8962 31.4475 34.0082 30.3979Z"'
' fill="black" fill-opacity="0.7" />'
'<mask id="mask0_0_694" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="34" y="11" width="40" height="21">'
' <path fill-rule="evenodd" clip-rule="evenodd"'
' d="M34.0082 30.3979C35.128 19.9071 38.2345 11.0056 53.9962 11C69.7578 10.9944 72.9169 19.9575 73.9943 30.4952C74.081 31.3433 73.1739 32 72.037 32C65.3505 32 62.6703 30.5048 53.9894 30.5C45.3085 30.4952 40.7568 32 36.0925 32C34.949 32 33.8962 31.4475 34.0082 30.3979Z"'
' fill="white" />'
'</mask>'
'<g mask="url(#mask0_0_694)">'
' <rect x="39" width="31" height="16" rx="5" fill="white" />'
'</g>'
'<g filter="url(#filter0_i_0_694)">'
' <path fill-rule="evenodd" clip-rule="evenodd"'
' d="M42 25C38.6863 25 36 27.6863 36 31V34V36V38C36 41.3137 38.6863 44 42 44C45.3137 44 48 41.3137 48 38V37V36H48.083C48.559 33.1623 51.027 31 54 31C56.973 31 59.441 33.1623 59.917 36H60C60 39.3137 62.6863 42 66 42C69.3137 42 72 39.3137 72 36V34V31C72 27.6863 69.3137 25 66 25H42Z"'
' fill="#88C553" />'
'</g>'
'<defs>'
' <filter id="filter0_i_0_694" x="36" y="25" width="36" height="19" filterUnits="userSpaceOnUse"'
' color-interpolation-filters="sRGB">'
' <feFlood flood-opacity="0" result="BackgroundImageFix" />'
' <feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape" />'
' <feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"'
' result="hardAlpha" />'
' <feOffset dy="-1" />'
' <feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1" />'
' <feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.1 0" />'
' <feBlend mode="normal" in2="shape" result="effect1_innerShadow_0_694" />'
' </filter>'
'</defs>'
) | PypiClean |
/Flask-S3-0.3.3.tar.gz/Flask-S3-0.3.3/flask_s3.py | import hashlib
import json
import logging
import os
import re
import gzip
import warnings
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import mimetypes
from collections import defaultdict
import boto3
import boto3.exceptions
from botocore.exceptions import ClientError
from flask import current_app
from flask import url_for as flask_url_for
import six
logger = logging.getLogger('flask_s3')
# Mapping for Header names to S3 parameters
header_mapping = {
'cache-control': 'CacheControl',
'content-disposition': 'ContentDisposition',
'content-encoding': 'ContentEncoding',
'content-language': 'ContentLanguage',
'content-length': 'ContentLength',
'content-md5': 'ContentMD5',
'content-type': 'ContentType',
'expires': 'Expires',
}
DEFAULT_SETTINGS = {'FLASKS3_USE_HTTPS': True,
'FLASKS3_ACTIVE': True,
'FLASKS3_DEBUG': False,
'FLASKS3_BUCKET_DOMAIN': 's3.amazonaws.com',
'FLASKS3_CDN_DOMAIN': '',
'FLASKS3_USE_CACHE_CONTROL': False,
'FLASKS3_HEADERS': {},
'FLASKS3_FILEPATH_HEADERS': {},
'FLASKS3_ONLY_MODIFIED': False,
'FLASKS3_URL_STYLE': 'host',
'FLASKS3_GZIP': False,
'FLASKS3_GZIP_ONLY_EXTS': [],
'FLASKS3_FORCE_MIMETYPE': False,
'FLASKS3_PREFIX': ''}
__version__ = (0, 3, 3)
def _get_statics_prefix(app):
"""
Get the complete prefix that should be used by static files.
"""
upload_prefix = app.config.get('FLASKS3_PREFIX', '')
return '/%s' % upload_prefix.lstrip('/').rstrip('/')
def split_metadata_params(headers):
"""
Given a dict of headers for S3, separates those that are boto3
parameters from those that must be sent as metadata
"""
params = {}
metadata = {}
for header_name in headers:
if header_name.lower() in header_mapping:
params[header_mapping[header_name.lower()]] = headers[header_name]
else:
metadata[header_name] = headers[header_name]
return metadata, params
def merge_two_dicts(x, y):
"""Given two dicts, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return z
def hash_file(filename):
"""
Generate a hash for the contents of a file
"""
hasher = hashlib.sha1()
with open(filename, 'rb') as f:
buf = f.read(65536)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(65536)
return hasher.hexdigest()
def _get_bucket_name(**values):
"""
Generates the bucket name for url_for.
"""
app = current_app
# manage other special values, all have no meaning for static urls
values.pop('_external', False) # external has no meaning here
values.pop('_anchor', None) # anchor as well
values.pop('_method', None) # method too
url_style = get_setting('FLASKS3_URL_STYLE', app)
if url_style == 'host':
url_format = '{bucket_name}.{bucket_domain}'
elif url_style == 'path':
url_format = '{bucket_domain}/{bucket_name}'
else:
raise ValueError('Invalid S3 URL style: "{}"'.format(url_style))
if get_setting('FLASKS3_CDN_DOMAIN', app):
bucket_path = '{}'.format(get_setting('FLASKS3_CDN_DOMAIN', app))
else:
bucket_path = url_format.format(
bucket_name=get_setting('FLASKS3_BUCKET_NAME', app),
bucket_domain=get_setting('FLASKS3_BUCKET_DOMAIN', app),
)
bucket_path += _get_statics_prefix(app).rstrip('/')
return bucket_path, values
def url_for(endpoint, **values):
"""
Generates a URL to the given endpoint.
If the endpoint is for a static resource then an Amazon S3 URL is
generated, otherwise the call is passed on to `flask.url_for`.
Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
`flask.url_for` in templates automatically. It is unlikely that this
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
of your templates.
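
Example (a sketch; the exact URL depends on your bucket and domain
settings)::

url_for('static', filename='css/style.css')
# e.g. 'https://mybucket.s3.amazonaws.com/static/css/style.css'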
"""
app = current_app
if app.config.get('TESTING', False) and not app.config.get('FLASKS3_OVERRIDE_TESTING', True):
return flask_url_for(endpoint, **values)
if 'FLASKS3_BUCKET_NAME' not in app.config:
raise ValueError("FLASKS3_BUCKET_NAME not found in app configuration.")
if endpoint == 'static' or endpoint.endswith('.static'):
scheme = 'https'
if not app.config.get("FLASKS3_USE_HTTPS", True):
scheme = 'http'
# allow per url override for scheme
scheme = values.pop('_scheme', scheme)
bucket_path, values = _get_bucket_name(**values)
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
built = urls.build(endpoint, values=values, force_external=True)
return built
return flask_url_for(endpoint, **values)
def _bp_static_url(blueprint):
""" builds the absolute url path for a blueprint's static folder """
u = six.u('%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or ''))
return u
def _gather_files(app, hidden, filepath_filter_regex=None):
""" Gets all files in static folders and returns in dict."""
dirs = [(six.u(app.static_folder), app.static_url_path)]
if hasattr(app, 'blueprints'):
blueprints = app.blueprints.values()
bp_details = lambda x: (x.static_folder, _bp_static_url(x))
dirs.extend([bp_details(x) for x in blueprints if x.static_folder])
valid_files = defaultdict(list)
for static_folder, static_url_loc in dirs:
if not os.path.isdir(static_folder):
logger.warning("WARNING - [%s does not exist]" % static_folder)
else:
logger.debug("Checking static folder: %s" % static_folder)
for root, _, files in os.walk(static_folder):
relative_folder = re.sub(r'^/',
'',
root.replace(static_folder, ''))
files = [os.path.join(root, x)
for x in files if (
(hidden or x[0] != '.') and
# Skip this file if the filter regex is
# defined, and this file's path is a
# negative match.
(filepath_filter_regex is None or re.search(
filepath_filter_regex,
os.path.join(relative_folder, x))))]
if files:
valid_files[(static_folder, static_url_loc)].extend(files)
return valid_files
def _path_to_relative_url(path):
""" Converts a folder and filename into a ralative url path """
return os.path.splitdrive(path)[1].replace('\\', '/')
def _static_folder_path(static_url, static_folder, static_asset):
"""
Returns a path to a file based on the static folder, and not on the
filesystem holding the file.
Returns a path relative to static_url for static_asset
"""
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# sub-directory then file etc.
if not static_asset.startswith(static_folder):
raise ValueError("%s static asset must be under %s static folder" %
(static_asset, static_folder))
rel_asset = static_asset[len(static_folder):]
# Now bolt the static url path and the relative asset location together
return '%s/%s' % (static_url.rstrip('/'), rel_asset.lstrip('/'))
def _write_files(s3, app, static_url_loc, static_folder, files, bucket,
ex_keys=None, hashes=None):
""" Writes all the files inside a static folder to S3. """
should_gzip = app.config.get('FLASKS3_GZIP')
add_mime = app.config.get('FLASKS3_FORCE_MIMETYPE')
gzip_include_only = app.config.get('FLASKS3_GZIP_ONLY_EXTS')
new_hashes = []
static_folder_rel = _path_to_relative_url(static_folder)
for file_path in files:
per_file_should_gzip = should_gzip
asset_loc = _path_to_relative_url(file_path)
full_key_name = _static_folder_path(static_url_loc, static_folder_rel,
asset_loc)
key_name = full_key_name.lstrip("/")
logger.debug("Uploading {} to {} as {}".format(file_path, bucket, key_name))
exclude = False
if app.config.get('FLASKS3_ONLY_MODIFIED', False):
file_hash = hash_file(file_path)
new_hashes.append((full_key_name, file_hash))
if hashes and hashes.get(full_key_name, None) == file_hash:
exclude = True
if (ex_keys and full_key_name in ex_keys) or exclude:
logger.debug("%s excluded from upload" % key_name)
else:
h = {}
# Set more custom headers if the filepath matches certain
# configured regular expressions.
filepath_headers = app.config.get('FLASKS3_FILEPATH_HEADERS')
if filepath_headers:
for filepath_regex, headers in six.iteritems(filepath_headers):
if re.search(filepath_regex, file_path):
for header, value in six.iteritems(headers):
h[header] = value
# check for extension, only if there are extensions provided
if per_file_should_gzip and gzip_include_only:
if os.path.splitext(file_path)[1] not in gzip_include_only:
per_file_should_gzip = False
if per_file_should_gzip:
h["content-encoding"] = "gzip"
if (add_mime or per_file_should_gzip) and "content-type" not in h:
# When we use GZIP we have to explicitly set the content type
# or if the mime flag is True
(mimetype, encoding) = mimetypes.guess_type(file_path,
False)
if mimetype:
h["content-type"] = mimetype
else:
logger.warn("Unable to detect mimetype for %s" %
file_path)
file_mode = 'rb' if six.PY3 else 'r'
with open(file_path, file_mode) as fp:
merged_dicts = merge_two_dicts(get_setting('FLASKS3_HEADERS', app), h)
metadata, params = split_metadata_params(merged_dicts)
if per_file_should_gzip:
compressed = six.BytesIO()
z = gzip.GzipFile(os.path.basename(file_path), 'wb', 9,
compressed)
z.write(fp.read())
z.close()
data = compressed.getvalue()
else:
data = fp.read()
s3.put_object(Bucket=bucket,
Key=key_name,
Body=data,
ACL="public-read",
Metadata=metadata,
**params)
return new_hashes
def _upload_files(s3, app, files_, bucket, hashes=None):
new_hashes = []
prefix = _get_statics_prefix(app)
for (static_folder, static_url), names in six.iteritems(files_):
static_upload_url = '%s/%s' % (prefix.rstrip('/'), static_url.lstrip('/'))
new_hashes.extend(_write_files(s3, app, static_upload_url, static_folder,
names, bucket, hashes=hashes))
return new_hashes
def get_setting(name, app=None):
"""
Returns the value for the `name` setting (looks in the `app` config, then in
DEFAULT_SETTINGS). Returns None if not set.
:param name: (str) name of a setting (e.g. FLASKS3_URL_STYLE)
:param app: Flask app instance
:return: setting value or None
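
Example::

get_setting('FLASKS3_URL_STYLE')  # -> 'host' (the default)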
"""
default_value = DEFAULT_SETTINGS.get(name, None)
return app.config.get(name, default_value) if app else default_value
def create_all(app, user=None, password=None, bucket_name=None,
location=None, include_hidden=False,
filepath_filter_regex=None, put_bucket_acl=True):
"""
Uploads all of the static assets associated with a Flask application to
Amazon S3.
All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
the bucket does not exist then it is created.
Flask-S3 creates the same relative static asset folder structure on
S3 as can be found within your Flask application.
Many of the optional arguments to `create_all` can be specified
instead in your application's configuration using the Flask-S3
`configuration`_ variables.
:param app: a :class:`flask.Flask` application object.
:param user: an AWS Access Key ID. You can find this key in the
Security Credentials section of your AWS account.
:type user: `basestring` or None
:param password: an AWS Secret Access Key. You can find this key in
the Security Credentials section of your AWS
account.
:type password: `basestring` or None
:param bucket_name: the name of the bucket you wish to serve your
static assets from. **Note**: while a valid
character, it is recommended that you do not
include periods in bucket_name if you wish to
serve over HTTPS. See Amazon's `bucket
restrictions`_ for more details.
:type bucket_name: `basestring` or None
:param location: the AWS region to host the bucket in; an empty
string indicates the default region should be used,
which is the US Standard region. Possible location
values include: `'DEFAULT'`, `'EU'`, `'us-east-1'`,
`'us-west-1'`, `'us-west-2'`, `'ap-south-1'`,
`'ap-northeast-2'`, `'ap-southeast-1'`,
`'ap-southeast-2'`, `'ap-northeast-1'`,
`'eu-central-1'`, `'eu-west-1'`, `'sa-east-1'`
:type location: `basestring` or None
:param include_hidden: by default Flask-S3 will not upload hidden
files. Set this to true to force the upload of hidden files.
:type include_hidden: `bool`
:param filepath_filter_regex: if specified, then the upload of
static assets is limited to only those files whose relative path
matches this regular expression string. For example, to only
upload files within the 'css' directory of your app's static
store, set to r'^css'.
:type filepath_filter_regex: `basestring` or None
:param put_bucket_acl: by default Flask-S3 will set the bucket ACL
to public. Set this to false to leave the policy unchanged.
:type put_bucket_acl: `bool`
.. _bucket restrictions: http://docs.amazonwebservices.com/AmazonS3\
/latest/dev/BucketRestrictions.html
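
Example (a minimal sketch; assumes AWS credentials are available via
the application config or the environment)::

from flask import Flask
from flask_s3 import create_all

app = Flask(__name__)
app.config['FLASKS3_BUCKET_NAME'] = 'my-static-assets'
create_all(app)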
"""
user = user or app.config.get('AWS_ACCESS_KEY_ID')
password = password or app.config.get('AWS_SECRET_ACCESS_KEY')
bucket_name = bucket_name or app.config.get('FLASKS3_BUCKET_NAME')
if not bucket_name:
raise ValueError("No bucket name provided.")
location = location or app.config.get('FLASKS3_REGION')
# build list of static files
all_files = _gather_files(app, include_hidden,
filepath_filter_regex=filepath_filter_regex)
logger.debug("All valid files: %s" % all_files)
# connect to s3
s3 = boto3.client("s3",
region_name=location or None,
aws_access_key_id=user,
aws_secret_access_key=password)
# get_or_create bucket
try:
s3.head_bucket(Bucket=bucket_name)
except ClientError as e:
if int(e.response['Error']['Code']) == 404:
# Create the bucket
bucket = s3.create_bucket(Bucket=bucket_name)
else:
raise
if put_bucket_acl:
s3.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
if get_setting('FLASKS3_ONLY_MODIFIED', app):
try:
hashes_object = s3.get_object(Bucket=bucket_name, Key='.file-hashes')
hashes = json.loads(str(hashes_object['Body'].read().decode()))
except ClientError as e:
logger.warn("No file hashes found: %s" % e)
hashes = None
new_hashes = _upload_files(s3, app, all_files, bucket_name, hashes=hashes)
try:
s3.put_object(Bucket=bucket_name,
Key='.file-hashes',
Body=json.dumps(dict(new_hashes)),
ACL='private')
except boto3.exceptions.S3UploadFailedError as e:
logger.warn("Unable to upload file hashes: %s" % e)
else:
_upload_files(s3, app, all_files, bucket_name)
class FlaskS3(object):
"""
The FlaskS3 object allows your application to use Flask-S3.
When initialising a FlaskS3 object you may optionally provide your
:class:`flask.Flask` application object if it is ready. Otherwise,
you may provide it later by using the :meth:`init_app` method.
:param app: optional :class:`flask.Flask` application object
:type app: :class:`flask.Flask` or None
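
Example::

from flask import Flask
from flask_s3 import FlaskS3

app = Flask(__name__)
s3 = FlaskS3(app)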
"""
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
"""
An alternative way to pass your :class:`flask.Flask` application
object to Flask-S3. :meth:`init_app` also takes care of some
default `settings`_.
:param app: the :class:`flask.Flask` application object.
"""
for k, v in DEFAULT_SETTINGS.items():
app.config.setdefault(k, v)
if app.debug and not get_setting('FLASKS3_DEBUG', app):
app.config['FLASKS3_ACTIVE'] = False
if get_setting('FLASKS3_ACTIVE', app):
app.jinja_env.globals['url_for'] = url_for
if get_setting('FLASKS3_USE_CACHE_CONTROL', app) and app.config.get('FLASKS3_CACHE_CONTROL'):
cache_control_header = get_setting('FLASKS3_CACHE_CONTROL', app)
app.config['FLASKS3_HEADERS']['Cache-Control'] = cache_control_header | PypiClean |
/OBP_reliability_pillar-0.2.0.tar.gz/OBP_reliability_pillar-0.2.0/README.md | # OBP_Reliability_Pillar
The OBP_Reliability_Pillar package can be used to get compliance details for the Operational Best Practices of the AWS Well-Architected Reliability Pillar.
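
A hypothetical usage sketch is shown below; the import path and function name are assumptions for illustration, not the package's documented API:

```python
# Hypothetical sketch: the module alias and entry-point name below are
# assumed, not taken from the package's documentation.
import OBP_reliability_pillar as obp

# Assumed entry point that evaluates the Reliability Pillar checks
# (e.g. DbInstanceBackupEnabled) and returns compliance details.
report = obp.get_compliance_details()
print(report)
```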
Release Notes:
# v0.0.8
1. Added the following compliance checks:
- DbInstanceBackupEnabled
- S3BucketDefaultLockEnabled
- LambdaInsideVpc
- GuarddutyEnabledCentralized
- DynamodbPitrEnabled
- RedshiftClusterMaintenancesettingsCheck
2. Minor bug fixes
# v0.0.7
1. Added the following compliance checks:
- Elasticsearch in VPC only
- ELB deletion protection enabled
- Instance in VPC
- Lambda DLQ check
# v0.0.4
1. Added a Description column to the compliance data set
2. Added a method for S3 bucket replication compliance
3. Fixed bugs
| PypiClean |
/COMPAS-1.17.5.tar.gz/COMPAS-1.17.5/src/compas/colors/color.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
basestring
except NameError:
basestring = str
import colorsys
import re
from compas.colors.html_colors import HTML_TO_RGB255
from compas.data import Data
BASE16 = "0123456789abcdef"
try:
HEX_DEC = {v: int(v, base=16) for v in [x + y for x in BASE16 for y in BASE16]}
except Exception:
HEX_DEC = {v: int(v, 16) for v in [x + y for x in BASE16 for y in BASE16]}
class ColorError(Exception):
"""Raise if color input is not a color."""
class Color(Data):
"""Class for working with colors.
Parameters
----------
red : float
The red component in the range of 0-1.
green : float
The green component in the range of 0-1.
blue : float
The blue component in the range of 0-1.
alpha : float, optional
Transparency setting.
If ``alpha = 0.0``, the color is fully transparent.
If ``alpha = 1.0``, the color is fully opaque.
Other Parameters
----------------
**kwargs : dict, optional
See :class:`Data` for more information.
Attributes
----------
r : float
Red component of the color in RGB1 color space.
g : float
Green component of the color in RGB1 color space.
b : float
Blue component of the color in RGB1 color space.
rgb : tuple[float, float, float]
RGB1 color tuple, with components in the range 0-1.
rgb255 : tuple[int, int, int]
RGB255 color tuple, with components in the range 0-255.
hex : str
Hexadecimal color string.
hls : tuple[float, float, float]
Hue, Lightness, Saturation.
hsv : tuple[float, float, float]
Hue, Saturation, Value / Brightness.
lightness : float
How much white the color appears to contain.
This is the "Lightness" in HLS.
Making a color "lighter" is like adding more white.
brightness : float
How well-lit the color appears to be.
This is the "Value" in HSV.
Making a color "brighter" is like shining a stronger light on it, or illuminating it better.
is_light : bool
If True, the color is considered light.
Examples
--------
>>> Color(1, 0, 0)
Color(1.0, 0.0, 0.0, 1.0)
>>> Color.red()
Color(1.0, 0.0, 0.0, 1.0)
>>> Color(1, 0, 0) == Color.red()
True
>>> Color.magenta()
Color(1.0, 0.0, 1.0, 1.0)
>>> Color.lime()
Color(0.5, 1.0, 0.0, 1.0)
>>> Color.navy()
Color(0.0, 0.0, 0.5, 1.0)
>>> Color.olive()
Color(0.5, 0.5, 0.0, 1.0)
>>> Color.lime().is_light
True
>>> Color.navy().is_light
False
"""
def __init__(self, red, green, blue, alpha=1.0, **kwargs):
super(Color, self).__init__(**kwargs)
self._r = 1.0
self._g = 1.0
self._b = 1.0
self._a = 1.0
self.r = red
self.g = green
self.b = blue
self.a = alpha
# --------------------------------------------------------------------------
# data
# --------------------------------------------------------------------------
@property
def data(self):
return {"red": self.r, "green": self.g, "blue": self.b, "alpha": self.a}
@data.setter
def data(self, data):
self.r = data["red"]
self.g = data["green"]
self.b = data["blue"]
self.a = data["alpha"]
@classmethod
def from_data(cls, data):
return cls(data["red"], data["green"], data["blue"], data["alpha"])
# --------------------------------------------------------------------------
# properties
# --------------------------------------------------------------------------
@property
def r(self):
return self._r
@r.setter
def r(self, red):
if red > 1.0 or red < 0.0:
raise ValueError("Components of an RGBA color should be in the range 0-1.")
self._r = float(red)
@property
def g(self):
return self._g
@g.setter
def g(self, green):
if green > 1.0 or green < 0.0:
raise ValueError("Components of an RGBA color should be in the range 0-1.")
self._g = float(green)
@property
def b(self):
return self._b
@b.setter
def b(self, blue):
if blue > 1.0 or blue < 0.0:
raise ValueError("Components of an RGBA color should be in the range 0-1.")
self._b = float(blue)
@property
def a(self):
return self._a
@a.setter
def a(self, alpha):
if alpha > 1.0 or alpha < 0.0:
raise ValueError("Components of an RGBA color should be in the range 0-1.")
self._a = float(alpha)
@property
def rgb(self):
r = self.r
g = self.g
b = self.b
return r, g, b
@property
def rgb255(self):
r = int(self.r * 255)
g = int(self.g * 255)
b = int(self.b * 255)
return r, g, b
@property
def rgba(self):
r, g, b = self.rgb
a = self.a
return r, g, b, a
@property
def rgba255(self):
r, g, b = self.rgb255
a = int(self.a * 255)
return r, g, b, a
@property
def hex(self):
return "#{0:02x}{1:02x}{2:02x}".format(*self.rgb255)
@property
def hls(self):
return colorsys.rgb_to_hls(*self.rgb)
@property
def hsv(self):
return colorsys.rgb_to_hsv(*self.rgb)
@property
def lightness(self):
return self.hls[1]
@property
def brightness(self):
return self.hsv[2]
@property
def is_light(self):
return self.luminance > 0.179
@property
def yuv(self):
"""tuple[float, float, float] :
Luma and chroma components, with chroma defined by the blue and red projections.
"""
y = self.luma
u, v = self.chroma
return y, u, v
@property
def luma(self):
"""float :
The brightness of a yuv signal.
"""
return 0.299 * self.r + 0.587 * self.g + 0.114 * self.b
@property
def chroma(self):
"""tuple[float, float] :
The color of a yuv signal.
"How different from a grey of the same lightness the color appears to be."
"""
y = self.luma
u = 0.492 * (self.b - y)
v = 0.877 * (self.r - y)
return u, v
@property
def luminance(self):
"""float :
The amount of light that passes through, is emitted from, or is reflected from a particular area.
Here, it expresses the perceived brightness of the color.
Note that this is not the same as the "Lightness" of HLS or the "Value/Brightness" of HSV.
"""
return 0.2126 * self.r + 0.7152 * self.g + 0.0722 * self.b
@property
def saturation(self):
"""float : The perceived freedom of whiteness."""
maxval = max(self.r, self.g, self.b)
minval = min(self.r, self.g, self.b)
return (maxval - minval) / maxval
# --------------------------------------------------------------------------
# descriptor
# --------------------------------------------------------------------------
def __set_name__(self, owner, name):
self.public_name = name
self.private_name = "_" + name
def __get__(self, obj, otype=None):
return getattr(obj, self.private_name, None) or self
def __set__(self, obj, value):
if not obj:
return
if not value:
return
if Color.is_rgb255(value):
value = Color.from_rgb255(value[0], value[1], value[2])
elif Color.is_hex(value):
value = Color.from_hex(value)
else:
value = Color(value[0], value[1], value[2])
setattr(obj, self.private_name, value)
# --------------------------------------------------------------------------
# customization
# --------------------------------------------------------------------------
def __repr__(self):
return "Color({}, {}, {}, {})".format(self.r, self.g, self.b, self.a)
def __getitem__(self, key):
if key == 0:
return self.r
if key == 1:
return self.g
if key == 2:
return self.b
raise KeyError
def __len__(self):
return 3
def __iter__(self):
return iter(self.rgb)
def __eq__(self, other):
return all(a == b for a, b in zip(self, other))
# --------------------------------------------------------------------------
# constructors
# --------------------------------------------------------------------------
@classmethod
def from_rgb255(cls, r, g, b):
"""Construct a color from RGB255 components.
Parameters
----------
r : int & valuerange[0, 255]
Red component.
g : int & valuerange[0, 255]
Green component.
b : int & valuerange[0, 255]
Blue component.
Returns
-------
:class:`~compas.colors.Color`
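
Examples
--------
>>> Color.from_rgb255(255, 0, 0)
Color(1.0, 0.0, 0.0, 1.0)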
"""
return cls(r / 255, g / 255, b / 255)
@classmethod
def from_hls(cls, h, l, s): # noqa: E741
"""Construct a color from Hue, Luminance, and Saturation.
Parameters
----------
h : float
Hue.
l : float
Luminance.
s : float
Saturation.
Returns
-------
:class:`~compas.colors.Color`
See Also
--------
https://en.wikipedia.org/wiki/HSL_and_HSV
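
Examples
--------
>>> Color.from_hls(0.0, 0.5, 1.0)
Color(1.0, 0.0, 0.0, 1.0)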
"""
r, g, b = colorsys.hls_to_rgb(h, l, s)
return cls(r, g, b)
@classmethod
def from_hsv(cls, h, s, v):
"""Construct a color from Hue, Saturation, and Value.
Parameters
----------
h : float
Hue.
s : float
Saturation.
v : float
Value.
Returns
-------
:class:`~compas.colors.Color`
See Also
--------
https://en.wikipedia.org/wiki/HSL_and_HSV
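
Examples
--------
>>> Color.from_hsv(0.0, 1.0, 1.0)
Color(1.0, 0.0, 0.0, 1.0)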
"""
r, g, b = colorsys.hsv_to_rgb(h, s, v)
return cls(r, g, b)
@classmethod
def from_yiq(cls, y, i, q):
"""Construct a color from components in the YIQ color space.
Parameters
----------
y : float
Luma.
i : float
Orange-blue chroma.
q : float
Purple-green chroma.
Returns
-------
:class:`~compas.colors.Color`
See Also
--------
https://en.wikipedia.org/wiki/YIQ
"""
r, g, b = colorsys.yiq_to_rgb(y, i, q)
return cls(r, g, b)
@classmethod
def from_yuv(cls, y, u, v):
"""Construct a color from components in the YUV color space.
Parameters
----------
y : float
Luma.
u : float
Blue projection chroma.
v : float
Red projection chroma.
Returns
-------
:class:`~compas.colors.Color`
See Also
--------
https://en.wikipedia.org/wiki/YUV
"""
r = y + 1.140 * v
g = y - 0.395 * u - 0.581 * v
b = y + 2.032 * u
return cls(r, g, b)
@classmethod
def from_i(cls, i):
"""Construct a color from a single number in the range 0-1.
Parameters
----------
i : float
Number in the range 0-1, representing the color.
Returns
-------
:class:`~compas.colors.Color`
"""
if i == 0.0:
r, g, b = 0, 0, 255
elif 0.0 < i < 0.25:
r, g, b = 0, int(255 * (4 * i)), 255
elif i == 0.25:
r, g, b = 0, 255, 255
elif 0.25 < i < 0.5:
r, g, b = 0, 255, int(255 - 255 * 4 * (i - 0.25))
elif i == 0.5:
r, g, b = 0, 255, 0
elif 0.5 < i < 0.75:
r, g, b = int(0 + 255 * 4 * (i - 0.5)), 255, 0
elif i == 0.75:
r, g, b = 255, 255, 0
elif 0.75 < i < 1.0:
            r, g, b = 255, int(255 - 255 * 4 * (i - 0.75)), 0
elif i == 1.0:
r, g, b = 255, 0, 0
else:
r, g, b = 0, 0, 0
return cls(r / 255.0, g / 255.0, b / 255.0)
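    # from_i maps the 0-1 range onto a blue-green-red spectrum, for example:
    #
    #     Color.from_i(0.0)   # -> Color(0.0, 0.0, 1.0)  blue
    #     Color.from_i(0.5)   # -> Color(0.0, 1.0, 0.0)  green
    #     Color.from_i(1.0)   # -> Color(1.0, 0.0, 0.0)  red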
@classmethod
def from_hex(cls, value):
"""Construct a color from a hexadecimal color value.
Parameters
----------
value : str
The hexadecimal color.
Returns
-------
:class:`~compas.colors.Color`
"""
value = value.lstrip("#").lower()
r = HEX_DEC[value[0:2]]
g = HEX_DEC[value[2:4]]
b = HEX_DEC[value[4:6]]
return cls(r / 255.0, g / 255.0, b / 255.0)
@classmethod
def from_name(cls, name):
"""Construct a color from a name in the extended color table of HTML/CSS/SVG.
Parameters
----------
name : str
The color name. The name is case-insensitive.
Returns
-------
:class:`~compas.colors.Color`
See Also
--------
https://www.w3.org/TR/css-color-3/#svg-color
"""
rgb255 = HTML_TO_RGB255.get(name.lower())
if rgb255 is None:
raise ValueError("Color name not found.")
return cls.from_rgb255(*rgb255)
# --------------------------------------------------------------------------
# presets
# --------------------------------------------------------------------------
@classmethod
def white(cls):
"""Construct the color white.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 1.0, 1.0)
@classmethod
def black(cls):
"""Construct the color black.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 0.0, 0.0)
@classmethod
def grey(cls):
"""Construct the color grey.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.5, 0.5)
@classmethod
def red(cls):
"""Construct the color red.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 0.0, 0.0)
@classmethod
def orange(cls):
"""Construct the color orange.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 0.5, 0.0)
@classmethod
def yellow(cls):
"""Construct the color yellow.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 1.0, 0.0)
@classmethod
def lime(cls):
"""Construct the color lime (or chartreuse green).
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 1.0, 0.0)
@classmethod
def green(cls):
"""Construct the color green.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 1.0, 0.0)
@classmethod
def mint(cls):
"""Construct the color mint (or spring green).
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 1.0, 0.5)
@classmethod
def cyan(cls):
"""Construct the color cyan.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 1.0, 1.0)
@classmethod
def azure(cls):
"""Construct the color azure.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 0.5, 1.0)
@classmethod
def blue(cls):
"""Construct the color blue.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 0.0, 1.0)
@classmethod
def violet(cls):
"""Construct the color violet.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.0, 1.0)
@classmethod
def magenta(cls):
"""Construct the color magenta.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 0.0, 1.0)
@classmethod
def pink(cls):
"""Construct the color pink.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(1.0, 0.0, 0.5)
# --------------------------------------------------------------------------
# other presets
# --------------------------------------------------------------------------
@classmethod
def maroon(cls):
"""Construct the color maroon.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.0, 0.0)
@classmethod
def brown(cls):
"""Construct the color brown.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.25, 0.0)
@classmethod
def olive(cls):
"""Construct the color olive.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.5, 0.0)
@classmethod
def teal(cls):
"""Construct the color teal.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 0.5, 0.5)
@classmethod
def navy(cls):
"""Construct the color navy.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.0, 0.0, 0.5)
@classmethod
def purple(cls):
"""Construct the color purple.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.5, 0.0, 0.5)
@classmethod
def silver(cls):
"""Construct the color silver.
Returns
-------
:class:`~compas.colors.Color`
"""
return cls(0.75, 0.75, 0.75)
# ochre
# beige
# bordeaux
# hotpink
# steel
# midnight
# salmon
# --------------------------------------------------------------------------
# methods
# --------------------------------------------------------------------------
@staticmethod
def coerce(color):
"""Coerce a color input into a color.
Parameters
----------
color : str | tuple[int, int, int] | tuple[float, float, float] | :class:`~compas.colors.Color`
The color input.
Returns
-------
:class:`~compas.colors.Color` | None
Raises
------
ColorError
"""
if not color:
return
if Color.is_rgb255(color):
return Color.from_rgb255(*list(color))
if Color.is_hex(color):
return Color.from_hex(color)
if Color.is_rgb1(color):
return Color(*list(color))
raise ColorError
@staticmethod
def is_rgb1(color):
"""Verify that the color is in the RGB 1 color space.
Returns
-------
bool
"""
        return bool(color) and all(isinstance(c, float) and 0 <= c <= 1 for c in color)
@staticmethod
def is_rgb255(color):
"""Verify that the color is in the RGB 255 color space.
Returns
-------
bool
"""
        return bool(color) and all(isinstance(c, int) and 0 <= c <= 255 for c in color)
@staticmethod
def is_hex(color):
"""Verify that the color is in hexadecimal format.
Returns
-------
bool
"""
if isinstance(color, basestring):
match = re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", color)
if match:
return True
return False
return False
def lighten(self, factor=10):
"""Lighten the color.
Parameters
----------
factor : float, optional
Percentage of lightness increase.
Returns
-------
None
Raises
------
ValueError
If the percentage of lightness increase is not in the range 0-100.
"""
if factor > 100 or factor < 0:
raise ValueError("Percentage of increased lightness should be in the range 0-100.")
factor = 1.0 + factor / 100
h, l, s = self.hls
r, g, b = colorsys.hls_to_rgb(h, min(1.0, l * factor), s)
self.r = r
self.g = g
self.b = b
def lightened(self, factor=10):
"""Return a lightened copy of the color.
Parameters
----------
factor : float, optional
Percentage of lightness increase.
Returns
-------
:class:`~compas.colors.Color`
Raises
------
ValueError
If the percentage of lightness increase is not in the range 0-100.
"""
color = self.copy()
color.lighten(factor=factor)
return color
def darken(self, factor=10):
"""Darken the color.
Parameters
----------
factor : float, optional
Percentage of lightness reduction.
Returns
-------
None
Raises
------
ValueError
If the percentage of lightness reduction is not in the range 0-100.
"""
if factor > 100 or factor < 0:
raise ValueError("Percentage of reduced lightness should be in the range 0-100.")
factor = 1.0 - factor / 100
h, l, s = self.hls
r, g, b = colorsys.hls_to_rgb(h, max(0.0, l * factor), s)
self.r = r
self.g = g
self.b = b
def darkened(self, factor=10):
"""Return a darkened copy of the color.
Parameters
----------
factor : float, optional
Percentage of lightness reduction.
Returns
-------
:class:`~compas.colors.Color`
Raises
------
ValueError
If the percentage of lightness reduction is not in the range 0-100.
"""
color = self.copy()
color.darken(factor=factor)
return color
def invert(self):
"""Invert the current color wrt to the RGB color circle.
Returns
-------
None
"""
self.r = 1.0 - self.r
self.g = 1.0 - self.g
self.b = 1.0 - self.b
def inverted(self):
"""Return an inverted copy of the color.
Returns
-------
:class:`~compas.colors.Color`
"""
color = self.copy()
color.invert()
return color
def saturate(self, factor=10):
"""Saturate the color by a given percentage.
Parameters
----------
factor : float, optional
Percentage of saturation increase.
Returns
-------
None
Raises
------
ValueError
If the percentage of saturation is not in the range 0-100.
"""
if factor > 100 or factor < 0:
raise ValueError("Percentage of saturation should be in the range 0-100.")
factor = 1.0 + factor / 100
h, l, s = self.hls
r, g, b = colorsys.hls_to_rgb(h, l, min(1.0, s * factor))
self.r = r
self.g = g
self.b = b
def saturated(self, factor=10):
"""Return a saturated copy of the color.
Parameters
----------
factor : float, optional
Percentage of saturation increase.
Returns
-------
:class:`~compas.colors.Color`
Raises
------
ValueError
            If the percentage of saturation increase is not in the range 0-100.
"""
color = self.copy()
color.saturate(factor=factor)
return color
def desaturate(self, factor=10):
"""Desaturate the color by a given percentage.
Parameters
----------
factor : float, optional
Percentage of saturation reduction.
Returns
-------
None
Raises
------
ValueError
If the percentage of desaturation is not in the range 0-100.
"""
if factor > 100 or factor < 0:
raise ValueError("Percentage of desaturation should be in the range 0-100.")
factor = 1.0 - factor / 100
h, l, s = self.hls
r, g, b = colorsys.hls_to_rgb(h, l, max(0.0, s * factor))
self.r = r
self.g = g
self.b = b
def desaturated(self, factor=10):
"""Return a desaturated copy of the color.
Parameters
----------
factor : float, optional
Percentage of saturation reduction.
Returns
-------
:class:`~compas.colors.Color`
Raises
------
ValueError
If the percentage of desaturation is not in the range 0-100.
"""
color = self.copy()
color.desaturate(factor=factor)
return color | PypiClean |
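# Minimal usage sketch for the Color class above. It exercises only the
# constructors and methods shown in this module; copy() is assumed to come
# from the full class definition. Run the module directly to try it.
if __name__ == "__main__":
    base = Color.from_hex("#ff8000")        # orange from a hex string
    print(base)                             # repr with r, g, b, a
    print(base.saturation)                  # 1.0 for a fully saturated color
    print(base.lightened(20))               # copy, 20 percent lighter
    print(base.darkened(20))                # copy, 20 percent darker
    print(Color.coerce((0, 128, 255)))      # coerced from an RGB255 tuple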
/ConfigFramework-4.0.2.tar.gz/ConfigFramework-4.0.2/config_framework/loaders/toml_read_only.py | import tomllib as toml_loader_lib
from functools import partial
from os import PathLike
from pathlib import Path
from typing import Union, Optional, MutableMapping, Any, Callable, Dict
from config_framework.types.abstract import AbstractLoader
class TomlReadOnly(AbstractLoader):
path: Union[PathLike, Path]
encoding: str
toml_loader: Callable
toml_dumper: Callable
def __init__(
self, data: MutableMapping[str, Any],
defaults: MutableMapping[str, Any],
path: Union[PathLike, Path],
encoding: str,
toml_loader: Callable,
toml_dumper: Callable
):
super().__init__(data, defaults)
self.path = path
self.encoding = encoding
setattr(self, "toml_loader", toml_loader)
setattr(self, "toml_dumper", toml_dumper)
@classmethod
def load(
cls, path: Union[PathLike, Path],
defaults: Optional[MutableMapping[str, Any]] = None,
loader_kwargs: Optional[Dict[Any, Any]] = None,
dumper_kwargs: Optional[Dict[Any, Any]] = None,
encoding: str = "utf8",
):
"""
Initializes loader for read only toml.
:param path: path that is used to load config.
:param defaults: default values.
:param loader_kwargs: used for specifying parameters, according to tomllib documentation of `tomllib.load`
function.
:param dumper_kwargs: not used.
:param encoding: which encoding should be used for a file.
:return: instance of TomlReadOnly class.
"""
        # tomllib requires a binary file handle, so the file is opened in
        # "rb" mode; the encoding argument is kept for interface
        # compatibility but is not used when reading.
        with open(path, mode="rb") as data_f:
            data = toml_loader_lib.load(data_f)
if loader_kwargs is None:
loader_kwargs = dict()
return cls(
data=data, defaults=defaults or {},
path=path, encoding=encoding,
toml_loader=partial(toml_loader_lib.load, **loader_kwargs),
toml_dumper=lambda *args, **kwargs: None # it doesn't work for this class
)
def dump(self, include_defaults: bool = False) -> None:
raise RuntimeError(
"You don't have dependency installed to write to toml files."
) | PypiClean |
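# Minimal usage sketch, assuming a "config.toml" file exists on disk; the
# defaults mapping and the key names are illustrative, and attribute access
# follows the AbstractLoader base class:
#
#     loader = TomlReadOnly.load(
#         path="config.toml",
#         defaults={"app": {"debug": False}},
#     )
#     loader.dump()  # raises RuntimeError: this loader is read only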
/HydDown-0.16.2.tar.gz/HydDown-0.16.2/scripts/streamlit_app.py |
import streamlit as st
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from PIL import Image
import base64
try:
from hyddown import HydDown
except ImportError:
import sys
import os
hyddown_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),"..","src")
sys.path.append(os.path.abspath(hyddown_path))
from hyddown import HydDown
def get_table_download_link(df,filename):
"""
Generates a link allowing the data in a given panda dataframe to be downloaded
in: dataframe
out: href string
"""
csv = df.to_csv(index=False)
b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here
filename=filename+'.csv'
return f'<a href="data:application/octet-stream;base64,{b64}" download={filename}>Download csv file</a>'
def read_input():
sideb = st.sidebar
with sideb:
try:
image_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),"..","docs","img","Sketch.png")
icon = Image.open(image_path)
st.image(icon, use_column_width=True, caption="HydDown")
        except Exception:
pass
with st.form(key='my_form'):
submit_button = st.form_submit_button(label='Run calculation')
heattran = st.checkbox("Include heat transfer",value=True)
c1,c2 = st.columns(2)
with c2:
length = st.text_input('Vessel length (m):',0.463)
diam = st.text_input('Vessel diam (m):',0.254)
                thk = st.text_input('Vessel thickness (m):',0.016)
orientation = st.selectbox('Vessel orientation', ('horizontal', 'vertical'))
orifice_diam = st.text_input('Orifice diam (mm):',0.40)
orifice_diam = float(orifice_diam)/1000
tstep = st.text_input('Time step (s):',1.0)
with c1:
pres = st.text_input('Initial pressure (bar):', 50.)
pres = float(pres)*1e5
back_pressure = st.text_input('Fill/back pres. (bar):',240)
back_pressure= float(back_pressure)*1e5
fluid = st.selectbox('Select fluid', ('H2', 'He', 'N2', 'air', 'CH4'))
mode = st.selectbox('Select mode', ('filling', 'discharge'))
temp = st.text_input('Initial temp. (C):',25)
temp = float(temp)+273.15
end_time = st.text_input('End time (s):',240)
density = st.text_input('Vessel material density (kg/m3):',7740)
density= float(density)
cp = st.text_input('Vessel material heat capacity (J/kg K):',470)
cp= float(cp)
input={}
input['calculation'] = {}
input['vessel'] = {}
input['initial'] = {}
input['valve'] = {}
input['heat_transfer'] = {}
input['calculation']['type'] = 'energybalance'
input['calculation']['time_step'] = float(tstep)
input['calculation']['end_time'] = float(end_time)
input['vessel']['length'] = float(length)
input['vessel']['diameter'] = float(diam)
input['vessel']['heat_capacity']=cp
input['vessel']['density']=density
input['vessel']['orientation']=orientation
input['vessel']['thickness']=float(thk)
input['initial']['pressure'] = pres
input['initial']['temperature'] = temp
input['initial']['fluid'] = fluid
input['valve']['flow'] = mode
input['valve']['type'] = 'orifice'
input['valve']['diameter'] = float(orifice_diam)
input['valve']['discharge_coef'] = 0.84
input['valve']['back_pressure'] = back_pressure
#input['valve']['end_pressure']=end_pressure
input['heat_transfer']['type']='specified_h'
input['heat_transfer']['temp_ambient']=298
input['heat_transfer']['h_outer']=5
if heattran == True:
input['heat_transfer']['h_inner']='calc'
else:
input['heat_transfer']['h_inner']=0.0
input['heat_transfer']['D_throat']=float(diam)
return input
if __name__ == "__main__":
#matplotlib.use('TkAgg')
st.set_page_config(layout='wide')
input = read_input()
hdown=HydDown(input)
with st.spinner('Calculating, please wait....'):
hdown.run(disable_pbar=True)
st.title('HydDown rigorous demo')
st.subheader(r'https://github.com/andr1976/HydDown')
my_expander = st.expander("Description")
my_expander.write('Real gas vessel pressurisation/depressurisation with heat transfer from gas to vessel and ambient and vice versa. Orifice size (Cd = 0.84) is specified for desired pressurisation/depressurisation rate.')
my_expander.write('For more information about the calculations and validation of the code please refer to the [manual](https://github.com/andr1976/HydDown/raw/main/docs/MANUAL.pdf)')
df=hdown.get_dataframe()
file_name=st.text_input('Filename for saving data:','saved_data')
st.markdown(get_table_download_link(df,file_name), unsafe_allow_html=True)
col1, col2= st.columns(2)
if input['valve']['flow']=='discharge':
temp_data = pd.DataFrame({'Time (s)': hdown.time_array, 'Fluid temperature (C)': hdown.T_fluid-273.15, 'Wall temperature (C)': hdown.T_vessel-273.15, 'Vent temperature (C)': hdown.T_vent-273.15})
else:
temp_data = pd.DataFrame({'Time (s)': hdown.time_array, 'Fluid temperature (C)': hdown.T_fluid-273.15, 'Wall temperature (C)': hdown.T_vessel-273.15})
pres_data = pd.DataFrame({'Time (s)': hdown.time_array, 'Pressure (bar)': hdown.P/1e5})
col1.line_chart(pres_data.rename(columns={'Time (s)':'index'}).set_index('index'))
col1.text('Time (s)')
col2.line_chart(temp_data.rename(columns={'Time (s)':'index'}).set_index('index'))
col2.text('Time (s)')
mdot_data = pd.DataFrame({'Time (s)': hdown.time_array, 'Mass rate (kg/s)': hdown.mass_rate})
mass_data = pd.DataFrame({'Time (s)': hdown.time_array, 'Fluid inventory (kg)': hdown.mass_fluid})
col1.line_chart(mdot_data.rename(columns={'Time (s)':'index'}).set_index('index'))
col1.text('Time (s)')
col2.line_chart(mass_data.rename(columns={'Time (s)':'index'}).set_index('index'))
col2.text('Time (s)') | PypiClean |
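# The same calculation can also be run headless, without streamlit; a
# minimal sketch reusing only the HydDown calls exercised above (the YAML
# input file name is illustrative, and its contents must follow the
# input-dict schema built in read_input()):
#
#     import yaml
#     with open('input.yml') as fp:
#         input = yaml.safe_load(fp)
#     hdown = HydDown(input)
#     hdown.run(disable_pbar=True)
#     hdown.get_dataframe().to_csv('results.csv', index=False)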
/FP-SMC-ALS-test1-0.0.1.tar.gz/FP-SMC-ALS-test1-0.0.1/smc/vpn/policy.py | from smc.base.model import Element, ElementCreator, SubElement, ElementRef
from smc.api.exceptions import (
CreatePolicyFailed,
CreateElementFailed,
PolicyCommandFailed,
ElementNotFound,
)
from smc.base.collection import sub_collection
from smc.vpn.elements import VPNProfile, VPNSite
from smc.base.decorators import cached_property
from smc.base.util import element_resolver
from smc.core.engine import InternalEndpoint
class PolicyVPN(Element):
"""
Create a new VPN Policy.
::
>>> PolicyVPN.create(name='myvpn')
PolicyVPN(name=myvpn)
>>> v = PolicyVPN('myvpn')
>>> print(v.vpn_profile)
VPNProfile(name=VPN-A Suite)
When making VPN Policy modifications, you must first call :func:`open`,
make your modifications and then call :func:`save` followed by
:func:`close`.
:ivar VPNProfile vpn_profile: VPN Profile used by this Policy VPN
"""
typeof = "vpn"
vpn_profile = ElementRef("vpn_profile")
@classmethod
def create(cls, name, nat=False, mobile_vpn_toplogy_mode=None, vpn_profile=None):
"""
Create a new policy based VPN
:param name: name of vpn policy
:param bool nat: whether to apply NAT to the VPN (default False)
:param mobile_vpn_toplogy_mode: whether to allow remote vpn
:param VPNProfile vpn_profile: reference to VPN profile, or uses default
:rtype: PolicyVPN
"""
vpn_profile = element_resolver(vpn_profile) or VPNProfile("VPN-A Suite").href
json = {
"mobile_vpn_topology_mode": mobile_vpn_toplogy_mode,
"name": name,
"nat": nat,
"vpn_profile": vpn_profile,
}
try:
return ElementCreator(cls, json)
except CreateElementFailed as err:
raise CreatePolicyFailed(err)
@property
def nat(self):
"""
Is NAT enabled on this vpn policy
:return: NAT enabled
:rtype: bool
"""
return self.data.get("nat")
def enable_disable_nat(self):
"""
Enable or disable NAT on this policy. If NAT is disabled, it
will be enabled and vice versa.
:return: None
"""
if self.nat:
self.data["nat"] = False
else:
self.data["nat"] = True
@property
def central_gateway_node(self):
"""
Central Gateway Node acts as the hub of a hub-spoke VPN.
:rtype: SubElementCollection(GatewayNode)
"""
return sub_collection(
self.get_relation("central_gateway_node"),
type("CentralGatewayNode", (GatewayNode,), {}),
)
@property
def satellite_gateway_node(self):
"""
Node level settings for configured satellite gateways
:rtype: SubElementCollection(GatewayNode)
"""
return sub_collection(
self.get_relation("satellite_gateway_node"),
type("SatelliteGatewayNode", (GatewayNode,), {}),
)
@property
def mobile_gateway_node(self):
"""
Mobile Gateway's are represented by client endpoints connecting
to the policy based VPN.
:rtype: SubElementCollection(GatewayNode)
"""
return sub_collection(
self.get_relation("mobile_gateway_node"), type("MobileGatewayNode", (GatewayNode,), {})
)
@property
def tunnels(self):
"""
Return all tunnels for this VPN. A tunnel is defined as two end
points within the VPN topology. Endpoints are automatically
        configured based on whether they are a central gateway or
satellite gateway. This provides access to enabling/disabling
and setting the preshared key for the linked endpoints.
List all tunnel mappings for this policy vpn::
for tunnel in policy.tunnels:
tunnela = tunnel.tunnel_side_a
tunnelb = tunnel.tunnel_side_b
print(tunnela.gateway)
print(tunnelb.gateway)
:rtype: SubElementCollection(GatewayTunnel)
"""
return sub_collection(self.get_relation("gateway_tunnel"), GatewayTunnel)
def open(self):
"""
Open the policy for editing. This is only a valid method for
SMC version <= 6.1
:raises PolicyCommandFailed: couldn't open policy with reason
:return: None
"""
self.make_request(PolicyCommandFailed, method="create", resource="open")
def save(self):
"""
Save the policy after editing. This is only a valid method for
SMC version <= 6.1
:raises PolicyCommandFailed: save failed with reason
:return: None
"""
self.make_request(PolicyCommandFailed, method="create", resource="save")
self._del_cache()
def close(self):
"""
Close the policy. This is only a valid method for
SMC version <= 6.1
:raises PolicyCommandFailed: close failed with reason
:return: None
"""
self.make_request(PolicyCommandFailed, method="create", resource="close")
def validate(self):
"""
Return a validation string from the SMC after running validate on
this VPN policy.
:return: status as dict
:rtype: dict
"""
return self.make_request(resource="validate")
@property
def mobile_vpn_topology(self):
"""
Is the policy VPN configured for mobile VPN gateways.
Valid modes: 'Selected Gateways below', 'Only central Gateways from overall topology',
'All Gateways from overall topology', 'None'
"""
return self.data.get("mobile_vpn_topology_mode")
def add_mobile_gateway(self, all_central_gateways=False, all_gateways=False, gateways=None):
"""
Add a mobile VPN gateway to this policy VPN. You can select all central
gateways, all gateways in overall topology or specify a list of gateways
to allow for mobile VPN.
Example of adding or removing a mobile VPN gateway::
policy_vpn = PolicyVPN('myvpn')
policy_vpn.update(mobile_vpn_topology_mode='Selected Gateways below')
policy_vpn.open()
policy_vpn.add_mobile_vpn_gateway(gateways=Engine('azure'))
policy_vpn.save()
policy_vpn.close()
:param Engine,ExternalGateway gateway: An external gateway, engine or
href for the mobile gateway
:raises PolicyCommandFailed: could not add gateway
:rtype: None
"""
if all_central_gateways:
self.update(mobile_vpn_topology_mode="Only central Gateways from overall topology")
elif all_gateways:
self.update(mobile_vpn_topology_mode="All Gateways from overall topology")
if gateways and self.mobile_vpn_topology != "Selected Gateways below":
raise PolicyCommandFailed(
"You must first update the policy VPN with "
"the Selected Gateways below setting before adding members"
)
if gateways:
try:
gateway = gateways.vpn.internal_gateway.href # Engine
except AttributeError:
raise PolicyCommandFailed(
"VPN endpoint does not appear to " "be a managed engine: %s" % gateways
)
self.make_request(
PolicyCommandFailed,
method="create",
resource="mobile_gateway_node",
json={"gateway": gateway, "node_usage": "mobile"},
)
def add_central_gateway(self, gateway):
"""
Add SMC managed internal gateway to the Central Gateways of this VPN
:param Engine,ExternalGateway gateway: An external gateway, engine or
href for the central gateway
:raises PolicyCommandFailed: could not add gateway
:return: None
"""
try:
gateway = gateway.vpn.internal_gateway.href
except AttributeError:
gateway = element_resolver(gateway)
self.make_request(
PolicyCommandFailed,
method="create",
resource="central_gateway_node",
json={"gateway": gateway, "node_usage": "central"},
)
def add_satellite_gateway(self, gateway):
"""
Add gateway node as a satellite gateway for this VPN. You must first
have the gateway object created. This is typically used when you either
        want a hub-and-spoke topology or the external gateway is a non-SMC
managed device.
:param Engine,ExternalGateway gateway: An external gateway, engine or
href for the central gateway
:raises PolicyCommandFailed: could not add gateway
:return: None
"""
try:
gateway = gateway.vpn.internal_gateway.href
except AttributeError:
gateway = element_resolver(gateway)
self.make_request(
PolicyCommandFailed,
method="create",
resource="satellite_gateway_node",
json={"gateway": gateway, "node_usage": "satellite"},
)
@staticmethod
def add_internal_gateway_to_vpn(internal_gateway_href, vpn_policy, vpn_role="central"):
"""
Add an internal gateway (managed engine node) to a VPN policy
based on the internal gateway href.
:param str internal_gateway_href: href for engine internal gw
:param str vpn_policy: name of vpn policy
:param str vpn_role: central|satellite
:return: True for success
:rtype: bool
"""
try:
vpn = PolicyVPN(vpn_policy)
vpn.open()
if vpn_role == "central":
vpn.add_central_gateway(internal_gateway_href)
else:
vpn.add_satellite_gateway(internal_gateway_href)
vpn.save()
vpn.close()
except ElementNotFound:
return False
return True
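# Illustrative sketch (not part of the SMC API): enumerate the tunnels of a
# policy VPN using only the methods defined above. On SMC <= 6.1 the
# open()/close() bracket is required around read-modify operations:
#
#     vpn = PolicyVPN('corporate-vpn')   # name is illustrative
#     vpn.open()
#     for tunnel in vpn.tunnels:
#         print(tunnel.tunnel_side_a.name, '<->',
#               tunnel.tunnel_side_b.name, 'enabled:', tunnel.enabled)
#     vpn.close()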
class GatewayNode(SubElement):
"""
Top level VPN gateway node operations. A gateway node is characterized
by a Central Gateway, Satellite Gateway or Mobile Gateway node.
This template class will return these as a collection. Gateway Node
references need to be obtained from a VPN Policy reference::
>>> vpn = PolicyVPN('sg_vm_vpn')
>>> vpn.open()
>>> for gw in vpn.central_gateway_node.all():
... list(gw.enabled_sites)
...
[GatewayTreeNode(name=Automatic Site for sg_vm_vpn)]
>>> vpn.close()
"""
@cached_property
def gateway(self):
"""
The VPN gateway for this node. This is either an internal gateway
or an external gateway
:return: the VPN gateway
:rtype: Element
"""
return Element.from_href(self.data["gateway"])
@property
def name(self):
"""
Get the name from the gateway_profile reference
"""
return self.gateway.name
@property
def enabled_sites(self):
"""
Return a collection of VPN Site elements that are enabled
for this VPN gateway.
:rtype: SubElementCollection(VPNSite)
"""
return sub_collection(self.get_relation("enabled_vpn_site"), GatewayTreeNode)
@property
def disabled_sites(self):
"""
Return a collection of VPN Site elements that are disabled
for this VPN gateway.
:rtype: SubElementCollection(VPNSite)
"""
return sub_collection(self.get_relation("disabled_vpn_site"), GatewayTreeNode)
class GatewayTreeNode(SubElement):
"""
Gateway Tree node is a list of VPN Site elements returned when retrieving
a VPN policies enabled or disabled site list. These provide an
enable_disable link to the VPN site.
::
for gw in policy.central_gateway_node.all():
for site in list(gw.enabled_sites):
site.enable_disable()
"""
@property
def name(self):
return self.vpn_site.name
"""
A gateway tree node is a VPN site within either the central or
satellite gateway configuration within a VPN.
"""
def enable_disable(self):
"""
Enable or disable this VPN Site from within the VPN policy context.
:raises PolicyCommandFailed: enabling or disabling failed
:return: None
"""
self.make_request(PolicyCommandFailed, method="delete", resource="self")
@property
def vpn_site(self):
"""
The VPN Site element associated with this gateway
:return VPNSite element
:rtype: VPNSite
"""
return VPNSite(href=self.data.get("vpn_site"))
def __str__(self):
return "{0}(name={1})".format(self.__class__.__name__, self.name)
def __repr__(self):
return str(self)
class GatewayTunnel(SubElement):
"""
A gateway tunnel represents the point to point connection
between two IPSEC endpoints in a PolicyVPN configuration.
The tunnel arrangement is based on whether the nodes are placed
as a central gateway or a satellite gateway. This provides access
to see the point to point connections, whether the link is enabled,
    and setting the preshared key.
.. note:: Setting the preshared key is only required if using an
ExternalGateway element as one side of the VPN. Preshared keys
are generated automatically but read only, therefore if two
gateways are internally managed by SMC, the key is generated and
shared between the gateways automatically. However for external
gateways, you must set a new key to provide the same value to
the remote gateway.
"""
def enable_disable(self):
"""
Enable or disable the tunnel link between endpoints.
:raises UpdateElementFailed: failed with reason
:return: None
"""
if self.enabled:
self.update(enabled=False)
else:
self.update(enabled=True)
@property
def enabled(self):
"""
Whether the VPN link between endpoints is enabled
:rtype: bool
"""
return self.data.get("enabled", False)
def preshared_key(self, key):
"""
Set a new preshared key for the IPSEC endpoints.
:param str key: shared secret key to use
:raises UpdateElementFailed: fail with reason
:return: None
"""
self.update(preshared_key=key)
@property
def tunnel_side_a(self):
"""
Return the gateway node for tunnel side A. This will
be an instance of GatewayNode.
:rtype: GatewayNode
"""
return type("TunnelSideA", (GatewayNode,), {"href": self.data.get("gateway_node_1")})()
@property
def tunnel_side_b(self):
"""
Return the gateway node for tunnel side B. This will
be an instance of GatewayNode.
:rtype: GatewayNode
"""
return type("TunnelSideB", (GatewayNode,), {"href": self.data.get("gateway_node_2")})()
@property
def endpoint_tunnels(self):
"""
Return all Endpoint tunnels for this gateway tunnel. A tunnel
is defined as two end points within the VPN topology.
        Endpoints are automatically configured based on whether they
are a central gateway or satellite gateway. This provides
access to enabling/disabling and setting the preshared key
for the linked endpoints. List all Endpoint tunnel mappings
for this policy vpn::
for tunnel in policy.tunnels:
tunnela = tunnel.tunnel_side_a
tunnelb = tunnel.tunnel_side_b
print(tunnela.gateway)
print(tunnelb.gateway)
for endpointtunnel in tunnel.endpoint_tunnels:
print(endpointtunnel)
:rtype: SubElementCollection(GatewayTunnel)
"""
return sub_collection(self.get_relation("gateway_endpoint_tunnel"), EndpointTunnel)
def __str__(self):
return "{0}(tunnel_side_a={1},tunnel_side_b={2})".format(
self.__class__.__name__, self.tunnel_side_a.name, self.tunnel_side_b.name
)
def __repr__(self):
return str(self)
class ClientGateway(Element):
typeof = "client_gateway"
class EndpointTunnel(SubElement):
"""
An Endpoint tunnel represents the point to point connection
between two IPSEC endpoints in a PolicyVPN configuration.
The tunnel arrangement is based on whether the nodes are placed
as a central gateway or a satellite gateway. This provides access
to see the point to point connections, whether the link is enabled,
    and setting the preshared key.
"""
def enable_disable(self):
"""
Enable or disable the tunnel link between endpoints.
:raises UpdateElementFailed: failed with reason
:return: None
"""
if self.enabled:
self.update(enabled=False)
else:
self.update(enabled=True)
@property
def enabled(self):
"""
Whether the VPN link between endpoints is enabled
:rtype: bool
"""
return self.data.get("enabled", False)
@property
def internal_endpoint_side_a(self):
"""
Return the Internal Endpoint for tunnel side A. This will
be an instance of InternalEndpoint.
:rtype: InternalEndpoint
"""
return type(
"EndpointTunnelSideA", (InternalEndpoint,), {"href": self.data.get("endpoint_1")}
)()
@property
def internal_endpoint_side_b(self):
"""
Return the Internal Endpoint for tunnel side B. This will
be an instance of InternalEndpoint.
:rtype: InternalEndpoint
"""
return type(
"EndpointTunnelSideB", (InternalEndpoint,), {"href": self.data.get("endpoint_2")}
)()
def __str__(self):
return "{0}(name={1})".format(self.__class__.__name__, self.name)
def __repr__(self):
return str(self) | PypiClean |
/BEETools-4.4.0.tar.gz/BEETools-4.4.0/src/beetools/beescript.py | import configparser
import os
from pathlib import Path
import shlex
import subprocess
import sys
from termcolor import colored
from beetools.beearchiver import msg_info
from beetools import beeutils, beevenv
_PROJ_DESC = __doc__.split('\n')[0]
_PROJ_PATH = Path(__file__)
_PROJ_NAME = _PROJ_PATH.stem
_PROJ_VERSION = '3.2.0'
def exec_batch_in_session(
p_script_cmds,
p_switches=None,
p_script_name=False,
p_verbose=False,
p_shell=False,
) -> int:
"""Execute a script in the same session
    Useful when commands have to be executed in one session, for instance
    when a virtual environment is invoked and subsequent commands must be
    executed inside that virtual environment.
Parameters
----------
p_script_cmds
Commands to execute in a list
p_switches
Switches for the bash script.
Default is None.
p_script_name
Name of the script to use
        Default is False, in which case "exec_batch_in_session_temp" is used
p_verbose
Give feedback (or not)
Default is False
p_shell
Run the script in a shell. See https://docs.python.org/3.9/library/subprocess.html#subprocess.run
Default is False
Returns
-------
    int
        The return code of the executed script; 0 indicates success.
Examples
--------
# No proper doctest (<<<) because it is os dependent
tmp_test = get_tmp_dir() / 'test'
tmp_t1 = tmp_test / 't1'
cmd = ['mkdir -p {}'.format(tmp_t1), 'ls -l {}'.format(tmp_test), 'rm -R {}'.format(tmp_test)]
exec_batch_in_session(cmd)
"""
if isinstance(p_switches, list):
switches = p_switches
elif isinstance(p_switches, str):
        switches = shlex.split(p_switches)  # split on shell-like word boundaries
else:
switches = []
if beeutils.get_os() in [beeutils.LINUX, beeutils.MACOS]:
script = ['bash'] + switches
ext = 'sh'
contents = '#!/bin/bash\n'
elif beeutils.get_os() == beeutils.WINDOWS:
script = []
ext = 'bat'
contents = ''
else:
print(
colored(
'Unknown OS ({})\nSystem terminated!'.format(beeutils.get_os()), 'red'
)
)
sys.exit()
if not p_script_name:
p_script_name = 'exec_batch_in_session_temp'
batch_pth = beeutils.get_tmp_dir() / Path('{}.{}'.format(p_script_name, ext))
script.append(str(batch_pth))
contents += write_script(batch_pth, p_script_cmds)
if beeutils.get_os() == beeutils.MACOS:
batch_pth.chmod(0o777)
if p_verbose:
print(
msg_info(
'==[Start {0}]====\n{1}==[ End {0} ]===='.format(batch_pth, contents)
)
)
rc = exec_cmd(script, p_verbose=p_verbose, p_shell=p_shell)
if os.path.isfile(batch_pth):
os.remove(batch_pth)
return rc
def exec_batch(p_batch: list, p_verbose: bool = False) -> list:
"""Execute a batch of commands independnatly.
Each command will be executed independantly of the previous one i.e it
will be in a different session.
:param p_batch:
List of the independent commands to execute.
:param p_verbose:
Write output to console.
:return:
list
A list with the return code for each batch command.
See https://docs.python.org/3.9/library/subprocess.html#subprocess.CompletedProcess
:example:
>>> from beetools import exec_batch
>>> exec_batch([[ 'echo', 'Hello'],['echo','Goodbye']])
    [0, 0]
"""
rc = []
for cmd in p_batch:
rc.append(exec_cmd(cmd, p_verbose=p_verbose))
return rc
def exec_cmd(p_cmd, p_shell=None, p_verbose=True) -> int:
"""Execute a command line instruction on tools.LINUX or tools.WINDOWS
Parameters
----------
p_cmd
Command to execute. See See https://docs.python.org/3.9/library/subprocess.html#subprocess.run
p_shell
Run the script in a shell. See https://docs.python.org/3.9/library/subprocess.html#subprocess.run
Default is None
p_verbose
Give feedback (or not)
        Default is True
Returns
-------
    int
        The return code of the executed command; 0 indicates success and
        any non-zero value is the failing command's return code.
Examples
--------
>>> from beetools import exec_cmd
>>> exec_cmd([ 'echo', 'Hello'])
    0
"""
p_cmd = [str(s) for s in p_cmd]
inst_str = ' '.join(p_cmd)
if p_verbose:
print(msg_info('{}'.format(inst_str)))
if beeutils.get_os() in [beeutils.LINUX, beeutils.MACOS] and not p_shell:
shell = False
elif beeutils.get_os() == beeutils.WINDOWS and not p_shell:
shell = True
elif beeutils.get_os() not in [beeutils.WINDOWS, beeutils.LINUX, beeutils.MACOS]:
        print(colored(f'Unknown OS ({beeutils.get_os()})\nSystem terminated!', 'red'))
sys.exit()
else:
shell = p_shell
    comp_proc = subprocess.run(
        p_cmd, capture_output=False, shell=shell, check=False
    )
    try:
        comp_proc.check_returncode()
    except subprocess.CalledProcessError:
        if p_verbose:
            print('\nCmd:\t{}\nrc:\t{}'.format(inst_str, comp_proc.returncode))
    # returning from a finally block would swallow unrelated exceptions,
    # so the return code is returned normally instead
    return comp_proc.returncode
def write_script(p_pth, p_contents):
"""Write a script to disk
Parameters
----------
p_pth
Path to the script
p_contents
Contents of the script
Returns
-------
    str
        The contents that were written to the script file.
    Examples
    --------
    >>> from pathlib import Path
    >>> from beetools import write_script
    >>> write_script(Path('/tmp/demo.sh'), [['echo', 'Hello']])
    'echo Hello\n'
"""
contents = ''
for line in p_contents:
if isinstance(line, list):
contents += ' '.join(line) + '\n'
else:
contents += '{}\n'.format(line)
p_pth.write_text(contents)
return contents
def example_scripting():
"""Standard example to illustrate standard use.
Parameters
----------
Returns
-------
bool
Successful execution [ b_tls.archive_path | False ]
Examples
--------
"""
success = True
# Run a few commands in a script. Useful when executing commands in a
# venv in the same session.
tmp_test = beeutils.get_tmp_dir() / 'test'
tmp_t1 = tmp_test / 'T1'
if beeutils.get_os() == beeutils.WINDOWS:
batch = [
'md {}'.format(tmp_t1),
'dir /B {}'.format(tmp_test),
]
else:
batch = [
'mkdir -p {}'.format(tmp_t1),
'ls -l {}'.format(tmp_test),
]
if exec_batch_in_session(batch, p_verbose=False) != 0:
success = False
# Execute some commands in a batch
if beeutils.get_os() == beeutils.WINDOWS:
cmds = [
['rd', '/S', '/Q', '{}'.format(tmp_t1)],
['md', '{}'.format(tmp_t1)],
['dir', '/B', '{}'.format(tmp_test)],
]
else:
cmds = [
['mkdir', '-p', '{}'.format(tmp_t1)],
['ls', '-l', '{}'.format(tmp_test)],
]
if exec_batch(cmds) != [0, 0, 0]:
success = False
# Write a script
script_pth = beeutils.get_tmp_dir() / _PROJ_NAME
cmds = [
['echo', 'Hello'],
['echo', 'Goodbye'],
]
contents = write_script(script_pth, cmds)
print(contents)
# Create a few files to the previous example and the remove the tree
t_file = tmp_test / Path('t.tmp')
t_file.touch(mode=0o666, exist_ok=True)
t_file = tmp_t1 / Path('t.tmp')
t_file.touch(mode=0o666, exist_ok=True)
success = beeutils.rm_tree(tmp_test, p_crash=True) and success
# Attempt to remove a temporary locked file.
venv_name = 'new-project'
success = (
beeutils.rm_temp_locked_file(beevenv.get_dir(beeutils.get_tmp_dir(), venv_name))
and success
)
# Read an option from an ini for a particular os and setup
cnf = configparser.ConfigParser()
cnf.read_dict(
{
'Folders': {
'windows1_MyFolderOnSystem': 'c:\\Program Files',
'windows2_MyFolderOnSystem': 'c:\\Program Files (x86)',
'linux1_MyFolderOnSystem': '/usr/local/bin',
'linux2_MyFolderOnSystem': '/bin',
'macos1_MyFolderOnSystem': '/System',
'macos2_MyFolderOnSystem': '/Application',
}
}
)
os_system_flder = beeutils.select_os_dir_from_config(
cnf, 'Folders', 'MyFolderOnSystem'
)
print(os_system_flder)
if not os_system_flder:
success = False
beeutils.result_rep(success, p_comment='Done')
return success
def do_examples(p_cls=True):
"""Example to illustrate usage
Parameters
----------
p_cls
Clear the screen before start
Default is True
Returns
-------
bool
Successful execution [ b_tls.archive_path | False ]
Examples
--------
"""
# Initiate the Archiver
success = True
b_tls = beeutils.Archiver(_PROJ_DESC, _PROJ_PATH)
b_tls.print_header(p_cls=p_cls)
success = example_scripting() and success
b_tls.print_footer()
if success:
return True
return False
if __name__ == '__main__':
do_examples()
# end __main__ | PypiClean |
/FreeClimb-4.5.0-py3-none-any.whl/freeclimb/model/update_call_request.py | import re # noqa: F401
import sys # noqa: F401
from freeclimb.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from freeclimb.exceptions import ApiAttributeError
def lazy_import():
from freeclimb.model.update_call_request_status import UpdateCallRequestStatus
globals()['UpdateCallRequestStatus'] = UpdateCallRequestStatus
class UpdateCallRequest(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'status': (UpdateCallRequestStatus,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'status': 'status', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, status, *args, **kwargs): # noqa: E501
"""UpdateCallRequest - a model defined in OpenAPI
Args:
status (UpdateCallRequestStatus):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, status, *args, **kwargs): # noqa: E501
"""UpdateCallRequest - a model defined in OpenAPI
Args:
status (UpdateCallRequestStatus):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | PypiClean |
/Flappy-0.3.7.tar.gz/Flappy-0.3.7/samples/parasprites.py |
import flappy
from flappy.display import Sprite, Bitmap, BitmapData, Tilesheet
from flappy.events import Event, MouseEvent
from flappy.geom import Rectangle, Point
import math
import random
from time import time
WIDTH = 1000
HEIGHT = 600
START_PARASPRITES = 5
MAX_PARASPRITES = 1000
class ParaspritesExample(Sprite):
def __init__(self):
super(ParaspritesExample, self).__init__()
background = Bitmap(BitmapData.load('./resources/ponyville.jpg'))
background.width = WIDTH
background.height = HEIGHT
self.addChild(background)
self.tile_layer = Sprite()
self.addChild(self.tile_layer)
parasprites_bd = BitmapData.load('./resources/parasprite_sheet.png')
self.parasprites_ts = Tilesheet(parasprites_bd)
twidth = parasprites_bd.width / 4.0
theight = parasprites_bd.height / 3.0
for i in range(3):
for j in range(4):
self.parasprites_ts.addTileRect(
Rectangle(twidth * j, theight * i, twidth, theight),
Point(twidth * 0.5, theight * 0.5))
self.parasprites = [ ]
for i in range(START_PARASPRITES):
self.parasprites.append(
Parasprite(
random.uniform(WIDTH * 0.1, WIDTH * 0.9),
random.uniform(HEIGHT * 0.1, HEIGHT * 0.9),
))
self.addEventListener(Event.ENTER_FRAME, self.on_enter_frame)
self.tile_layer.addEventListener(MouseEvent.CLICK, self.on_click)
self.old_time = time()
def on_enter_frame(self, event):
new_time = time()
dt = new_time - self.old_time
tilesheet_data = []
for parasprite in self.parasprites:
parasprite.process(dt)
tilesheet_data += parasprite.get_tile_data()
self.tile_layer.graphics.clear()
self.tile_layer.graphics.drawTiles(
self.parasprites_ts, tilesheet_data,
Tilesheet.TILE_ROTATION | \
Tilesheet.TILE_SCALE | \
Tilesheet.TILE_SMOOTH)
self.old_time = new_time
def on_click(self, event):
if len(self.parasprites) <= MAX_PARASPRITES:
self.parasprites.append(Parasprite(event.stageX, event.stageY))
class Parasprite(object):
ANIM_TIME = 0.025
NFRAMES = 4
RADIUS = 25.0
def __init__(self, x, y):
self.x, self.y = x, y
self.frame_time = 0.0
self.frame = 0
self.anim_dir = 1
self.tileset = random.randint(0, 2)
self.speed = random.uniform(80.0, 250.0)
direction_angle = random.uniform(0.0, math.pi * 2.0)
self.direction_x = math.cos(direction_angle)
self.direction_y = math.sin(direction_angle)
self.rotation = random.uniform(-math.pi * 0.2, math.pi * 0.2)
self.scale = random.uniform(1.0, 1.2)
self.radius = self.RADIUS * self.scale
def process(self, dt):
self.frame_time += dt
if self.frame_time >= self.ANIM_TIME:
self.frame_time = 0.0
self.frame += self.anim_dir
if not (0 <= self.frame < self.NFRAMES):
self.frame -= self.anim_dir
self.anim_dir = -self.anim_dir
self.x += self.speed * dt * self.direction_x
self.y += self.speed * dt * self.direction_y
if not (self.radius < self.x < (WIDTH - self.radius)):
self.direction_x = -self.direction_x
self.x = max(self.radius, min(WIDTH - self.radius, self.x))
if not (self.radius < self.y < (HEIGHT - self.radius)):
self.direction_y = -self.direction_y
self.y = max(self.radius, min(HEIGHT - self.radius, self.y))
def get_tile_data(self):
return [
self.x,
self.y,
self.tileset * self.NFRAMES + self.frame,
self.scale,
self.rotation,
]
if __name__ == '__main__':
flappy.start(ParaspritesExample, width=WIDTH,
height=HEIGHT, title=__file__) | PypiClean |
/CherryMusic-0.41.3.tar.gz/CherryMusic-0.41.3/res/bootstrap3/js/carousel.js | +function ($) { "use strict";
// CAROUSEL CLASS DEFINITION
// =========================
var Carousel = function (element, options) {
this.$element = $(element)
this.$indicators = this.$element.find('.carousel-indicators')
this.options = options
this.paused =
this.sliding =
this.interval =
this.$active =
this.$items = null
this.options.pause == 'hover' && this.$element
.on('mouseenter', $.proxy(this.pause, this))
.on('mouseleave', $.proxy(this.cycle, this))
}
Carousel.DEFAULTS = {
interval: 5000
, pause: 'hover'
, wrap: true
}
Carousel.prototype.cycle = function (e) {
e || (this.paused = false)
this.interval && clearInterval(this.interval)
this.options.interval
&& !this.paused
&& (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
return this
}
Carousel.prototype.getActiveIndex = function () {
this.$active = this.$element.find('.item.active')
this.$items = this.$active.parent().children()
return this.$items.index(this.$active)
}
Carousel.prototype.to = function (pos) {
var that = this
var activeIndex = this.getActiveIndex()
if (pos > (this.$items.length - 1) || pos < 0) return
if (this.sliding) return this.$element.one('slid', function () { that.to(pos) })
if (activeIndex == pos) return this.pause().cycle()
return this.slide(pos > activeIndex ? 'next' : 'prev', $(this.$items[pos]))
}
Carousel.prototype.pause = function (e) {
e || (this.paused = true)
if (this.$element.find('.next, .prev').length && $.support.transition.end) {
this.$element.trigger($.support.transition.end)
this.cycle(true)
}
this.interval = clearInterval(this.interval)
return this
}
Carousel.prototype.next = function () {
if (this.sliding) return
return this.slide('next')
}
Carousel.prototype.prev = function () {
if (this.sliding) return
return this.slide('prev')
}
Carousel.prototype.slide = function (type, next) {
var $active = this.$element.find('.item.active')
var $next = next || $active[type]()
var isCycling = this.interval
var direction = type == 'next' ? 'left' : 'right'
var fallback = type == 'next' ? 'first' : 'last'
var that = this
if (!$next.length) {
if (!this.options.wrap) return
$next = this.$element.find('.item')[fallback]()
}
this.sliding = true
isCycling && this.pause()
var e = $.Event('slide.bs.carousel', { relatedTarget: $next[0], direction: direction })
if ($next.hasClass('active')) return
if (this.$indicators.length) {
this.$indicators.find('.active').removeClass('active')
this.$element.one('slid', function () {
var $nextIndicator = $(that.$indicators.children()[that.getActiveIndex()])
$nextIndicator && $nextIndicator.addClass('active')
})
}
if ($.support.transition && this.$element.hasClass('slide')) {
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$next.addClass(type)
$next[0].offsetWidth // force reflow
$active.addClass(direction)
$next.addClass(direction)
$active
.one($.support.transition.end, function () {
$next.removeClass([type, direction].join(' ')).addClass('active')
$active.removeClass(['active', direction].join(' '))
that.sliding = false
setTimeout(function () { that.$element.trigger('slid') }, 0)
})
.emulateTransitionEnd(600)
} else {
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$active.removeClass('active')
$next.addClass('active')
this.sliding = false
this.$element.trigger('slid')
}
isCycling && this.cycle()
return this
}
// CAROUSEL PLUGIN DEFINITION
// ==========================
var old = $.fn.carousel
$.fn.carousel = function (option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.carousel')
var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option)
var action = typeof option == 'string' ? option : options.slide
if (!data) $this.data('bs.carousel', (data = new Carousel(this, options)))
if (typeof option == 'number') data.to(option)
else if (action) data[action]()
else if (options.interval) data.pause().cycle()
})
}
$.fn.carousel.Constructor = Carousel
// CAROUSEL NO CONFLICT
// ====================
$.fn.carousel.noConflict = function () {
$.fn.carousel = old
return this
}
// CAROUSEL DATA-API
// =================
$(document).on('click.bs.carousel.data-api', '[data-slide], [data-slide-to]', function (e) {
var $this = $(this), href
var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
var options = $.extend({}, $target.data(), $this.data())
var slideIndex = $this.attr('data-slide-to')
if (slideIndex) options.interval = false
$target.carousel(options)
if (slideIndex = $this.attr('data-slide-to')) {
$target.data('bs.carousel').to(slideIndex)
}
e.preventDefault()
})
$(window).on('load', function () {
$('[data-ride="carousel"]').each(function () {
var $carousel = $(this)
$carousel.carousel($carousel.data())
})
})
}(window.jQuery); | PypiClean |
/freespeak-0.2.0.tar.gz/freespeak-0.2.0/freespeak/ui/settings.py |
import gtk
from freespeak import utils
import freespeak.ui.utils as uiutils
from freespeak.ui.translation_box import TranslatorCombo
class Settings(gtk.Dialog):
@utils.syncronized
def __init__(self, application):
gtk.Dialog.__init__ (self, _('Preferences'), application.main_window, 0,
(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
self.application = application
self.set_border_width (6)
self.set_modal (True)
self.setup_layout ()
self.setup_clipboard ()
self.setup_translator ()
self.connect ('response', self.on_response)
self.show ()
def setup_layout (self):
self.vbox.set_spacing (6)
def setup_clipboard (self):
frame = uiutils.Frame(_('Clipboard preferences'))
frame.show ()
vbox = gtk.VBox (spacing=6)
vbox.show ()
self.w_clipboard_get = gtk.CheckButton (_("_Get text from clipboard automatically"))
self.w_clipboard_get.set_active (self.application.config.get ('get_clipboard'))
self.w_clipboard_get.show ()
vbox.pack_start(self.w_clipboard_get, False)
self.w_clipboard_set = gtk.CheckButton (_("_Save translated text to clipboard"))
self.w_clipboard_set.set_active (self.application.config.get ('set_clipboard'))
self.w_clipboard_set.show ()
vbox.pack_start(self.w_clipboard_set, False)
frame.add (vbox)
frame.show ()
self.vbox.pack_start (frame)
def setup_translator (self):
frame = uiutils.Frame (_("Translator preferences"))
frame.show ()
vbox = gtk.VBox (spacing=6)
vbox.show ()
hbox = gtk.HBox(spacing=4)
hbox.show ()
        label = gtk.Label (_("_Preferred Translator"))
label.set_use_underline (True)
label.show ()
hbox.pack_start (label, False)
self.w_preferred_translator = TranslatorCombo (self.application)
self.w_preferred_translator.show ()
label.set_mnemonic_widget (self.w_preferred_translator)
hbox.pack_start (self.w_preferred_translator)
vbox.pack_start(hbox)
frame.add (vbox)
frame.show ()
self.vbox.pack_start (frame)
def on_response (self, dialog, response):
self.application.config.set ('get_clipboard', self.w_clipboard_get.get_active ())
self.application.config.set ('set_clipboard', self.w_clipboard_set.get_active ())
translator = self.w_preferred_translator.get_active_translator ()
if translator:
self.application.config.set ('default_translator', translator.module_name)
else:
self.application.config.set ('default_translator', '')
self.destroy() | PypiClean |
/FastGets-0.3.5.tar.gz/FastGets-0.3.5/fastgets/web/static/dist/plugins/advlist/plugin.min.js | !(function () { var a = {}, b = function (b) { for (var c = a[b], e = c.deps, f = c.defn, g = e.length, h = new Array(g), i = 0; i < g; ++i)h[i] = d(e[i]); var j = f.apply(null, h); if (void 0 === j) throw 'module [' + b + '] returned undefined'; c.instance = j; }, c = function (b, c, d) { if (typeof b !== 'string') throw 'module id must be a string'; if (void 0 === c) throw 'no dependencies for ' + b; if (void 0 === d) throw 'no definition function for ' + b; a[b] = {deps: c, defn: d, instance: void 0}; }, d = function (c) { var d = a[c]; if (void 0 === d) throw 'module [' + c + '] was undefined'; return void 0 === d.instance && b(c), d.instance; }, e = function (a, b) { for (var c = a.length, e = new Array(c), f = 0; f < c; ++f)e[f] = d(a[f]); b.apply(null, e); }, f = {}; f.bolt = {module: {api: {define: c, require: e, demand: d}}}; var g = c, h = function (a, b) { g(a, [], function () { return b; }); }; h('5', tinymce.util.Tools.resolve), g('1', ['5'], function (a) { return a('tinymce.PluginManager'); }), g('2', ['5'], function (a) { return a('tinymce.util.Tools'); }), g('6', [], function () { var a = function (a, b, c) { var d = b === 'UL' ? 'InsertUnorderedList' : 'InsertOrderedList'; a.execCommand(d, !1, c === !1 ? null : {'list-style-type': c}); }; return {applyListFormat: a}; }), g('3', ['6'], function (a) { var b = function (b) { b.addCommand('ApplyUnorderedListStyle', function (c, d) { a.applyListFormat(b, 'UL', d['list-style-type']); }), b.addCommand('ApplyOrderedListStyle', function (c, d) { a.applyListFormat(b, 'OL', d['list-style-type']); }); }; return {register: b}; }), g('7', [], function () { var a = function (a) { var b = a.getParam('advlist_number_styles', 'default,lower-alpha,lower-greek,lower-roman,upper-alpha,upper-roman'); return b ? b.split(/[ ,]/) : []; }, b = function (a) { var b = a.getParam('advlist_bullet_styles', 'default,circle,disc,square'); return b ? b.split(/[ ,]/) : []; }; return {getNumberStyles: a, getBulletStyles: b}; }), g('8', [], function () { var a = function (a, b) { return a.$.contains(a.getBody(), b); }, b = function (b) { return function (c) { return c && /^(OL|UL|DL)$/.test(c.nodeName) && a(b, c); }; }, c = function (a) { var b = a.dom.getParent(a.selection.getNode(), 'ol,ul'); return a.dom.getStyle(b, 'listStyleType') || ''; }; return {isListNode: b, getSelectedStyleType: c}; }), g('9', ['2'], function (a) { var b = function (a) { return a.replace(/\-/g, ' ').replace(/\b\w/g, function (a) { return a.toUpperCase(); }); }, c = function (c) { return a.map(c, function (a) { var c = b(a), d = a === 'default' ? 
'' : a; return {text: c, data: d}; }); }; return {toMenuItems: c}; }), g('4', ['2', '7', '6', '8', '9'], function (a, b, c, d, e) { var f = function (b, c) { return function (e) { var f = e.control; b.on('NodeChange', function (e) { var g = a.grep(e.parents, d.isListNode(b)); f.active(g.length > 0 && g[0].nodeName === c); }); }; }, g = function (a) { return function (b) { var c = d.getSelectedStyleType(a); b.control.items().each(function (a) { a.active(a.settings.data === c); }); }; }, h = function (a, b, d, h, i, j) { a.addButton(b, {type: 'splitbutton', tooltip: d, menu: e.toMenuItems(j), onPostRender: f(a, i), onshow: g(a), onselect: function (b) { c.applyListFormat(a, i, b.control.settings.data); }, onclick: function () { a.execCommand(h); }}); }, i = function (a, b, c, d, e, g) { a.addButton(b, {type: 'button', tooltip: c, onPostRender: f(a, e), onclick: function () { a.execCommand(d); }}); }, j = function (a, b, c, d, e, f) { f.length > 0 ? h(a, b, c, d, e, f) : i(a, b, c, d, e, f); }, k = function (a) { j(a, 'numlist', 'Numbered list', 'InsertOrderedList', 'OL', b.getNumberStyles(a)), j(a, 'bullist', 'Bullet list', 'InsertUnorderedList', 'UL', b.getBulletStyles(a)); }; return {register: k}; }), g('0', ['1', '2', '3', '4'], function (a, b, c, d) { return a.add('advlist', function (a) { var e = function (a, c) { var d = a.settings.plugins ? a.settings.plugins : ''; return b.inArray(d.split(/[ ,]/), c) !== -1; }; e(a, 'lists') && (d.register(a), c.register(a)); }), function () {}; }), d('0')(); }()); | PypiClean |
/LAD_SOMPY-1.1-py3-none-any.whl/sompy/visualization/plot_tools2.py | import math
import numpy as np
from matplotlib import cm, pyplot as plt
from matplotlib.collections import RegularPolyCollection
from mpl_toolkits.axes_grid1 import make_axes_locatable
def plot_rect_map(d_matrix, titles=[], colormap=cm.gray, shape=[1, 1], comp_width=5, hex_shrink=1.0, fig=None,
colorbar=True):
"""
    Plot a rectangular map in which each neuron is represented by a square
    whose colour is given by that neuron's value in the distance matrix
    (D-matrix).
    Args:
    - d_matrix: array containing the distances between each neuron
    - titles: list of titles, one per component plane
    - colormap: matplotlib colormap used to colour the squares
    - shape: [rows, columns] layout of the subplot grid
    - comp_width: width of each component plane in inches
    Returns the Matplotlib SubAxis instance and the grid centres
"""
d_matrix = np.flip(d_matrix, axis=0)
    def create_grid_coordinates(x, y):
        # Row-major (column, row) centres; rows step downwards by
        # sqrt(3)/2, a leftover of the hexagonal layout this was adapted from.
        coordinates = [point for row in -1 * np.arange(x)
                       for point in zip(np.arange(y), [0.8660254 * row] * y)]
        return (np.array(list(reversed(coordinates))), x, y)
if d_matrix.ndim < 3:
d_matrix = np.expand_dims(d_matrix, 2)
if len(titles) != d_matrix.shape[2]:
titles = [""] * d_matrix.shape[2]
n_centers, x, y = create_grid_coordinates(*d_matrix.shape[:2])
# Size of figure in inches
if fig is None:
xinch, yinch = comp_width * shape[1], comp_width * (x / y) * shape[0]
fig = plt.figure(figsize=(xinch, yinch), dpi=72.)
for comp, title in zip(range(d_matrix.shape[2]), titles):
ax = fig.add_subplot(shape[0], shape[1], comp + 1, aspect='equal')
# Get pixel size between two data points
xpoints = n_centers[:, 0]
ypoints = n_centers[:, 1]
ax.scatter(xpoints, ypoints, s=0.0, marker='s')
ax.axis([min(xpoints) - 1., max(xpoints) + 1.,
min(ypoints) - 1., max(ypoints) + 1.])
xy_pixels = ax.transData.transform(np.vstack([xpoints, ypoints]).T)
xpix, ypix = xy_pixels.T
        # Compute the square size from the horizontal spacing between points
apothem = hex_shrink * (xpix[1] - xpix[0]) / math.sqrt(3)
area_inner_circle = math.pi * (apothem ** 2)
dm = d_matrix[:, :, comp].reshape(np.multiply(*d_matrix.shape[:2]))
collection_bg = RegularPolyCollection(
numsides=4, # a square
rotation=np.pi/4,
sizes=(area_inner_circle,),
array=dm,
cmap=colormap,
offsets=n_centers,
transOffset=ax.transData,
)
ax.add_collection(collection_bg, autolim=True)
ax.axis('off')
ax.autoscale_view()
ax.set_title(title)#, fontdict={"fontsize": 3 * comp_width})
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
cbar = plt.colorbar(collection_bg, cax=cax)
if not colorbar:
cbar.remove()
#cbar.ax.tick_params(labelsize=3 * comp_width)
return ax, list(reversed(n_centers)) | PypiClean |
/Kivy-2.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/kivy/uix/spinner.py | __all__ = ('Spinner', 'SpinnerOption')
from kivy.compat import string_types
from kivy.factory import Factory
from kivy.properties import ListProperty, ObjectProperty, BooleanProperty
from kivy.uix.button import Button
from kivy.uix.dropdown import DropDown
class SpinnerOption(Button):
'''Special button used in the :class:`Spinner` dropdown list. By default,
this is just a :class:`~kivy.uix.button.Button` with a size_hint_y of None
and a height of :meth:`48dp <kivy.metrics.dp>`.
'''
pass
class Spinner(Button):
'''Spinner class, see module documentation for more information.
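    A minimal usage sketch (mirroring the module documentation)::

        from kivy.base import runTouchApp
        from kivy.uix.spinner import Spinner

        spinner = Spinner(
            text='Home',
            values=('Home', 'Work', 'Other', 'Custom'),
            size_hint=(None, None), size=(100, 44))

        def show_selected_value(spinner, text):
            print('The spinner', spinner, 'has text', text)

        spinner.bind(text=show_selected_value)
        runTouchApp(spinner)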
'''
values = ListProperty()
'''Values that can be selected by the user. It must be a list of strings.
:attr:`values` is a :class:`~kivy.properties.ListProperty` and defaults to
[].
'''
text_autoupdate = BooleanProperty(False)
'''Indicates if the spinner's :attr:`text` should be automatically
updated with the first value of the :attr:`values` property.
Setting it to True will cause the spinner to update its :attr:`text`
property every time attr:`values` are changed.
.. versionadded:: 1.10.0
:attr:`text_autoupdate` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
option_cls = ObjectProperty(SpinnerOption)
'''Class used to display the options within the dropdown list displayed
under the Spinner. The `text` property of the class will be used to
represent the value.
The option class requires:
- a `text` property, used to display the value.
- an `on_release` event, used to trigger the option when pressed/touched.
- a :attr:`~kivy.uix.widget.Widget.size_hint_y` of None.
- the :attr:`~kivy.uix.widget.Widget.height` to be set.
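    A sketch of a custom option class (the colour value is illustrative)::

        class MySpinnerOption(SpinnerOption):
            background_color = [1, 0, 0, 1]

    which can then be passed as ``Spinner(option_cls=MySpinnerOption)``.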
:attr:`option_cls` is an :class:`~kivy.properties.ObjectProperty` and
defaults to :class:`SpinnerOption`.
.. versionchanged:: 1.8.0
If you set a string, the :class:`~kivy.factory.Factory` will be used to
resolve the class.
'''
dropdown_cls = ObjectProperty(DropDown)
'''Class used to display the dropdown list when the Spinner is pressed.
:attr:`dropdown_cls` is an :class:`~kivy.properties.ObjectProperty` and
defaults to :class:`~kivy.uix.dropdown.DropDown`.
.. versionchanged:: 1.8.0
If set to a string, the :class:`~kivy.factory.Factory` will be used to
resolve the class name.
'''
is_open = BooleanProperty(False)
'''By default, the spinner is not open. Set to True to open it.
:attr:`is_open` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
.. versionadded:: 1.4.0
'''
sync_height = BooleanProperty(False)
'''Each element in a dropdown list uses a default/user-supplied height.
Set to True to propagate the Spinner's height value to each dropdown
list element.
.. versionadded:: 1.10.0
:attr:`sync_height` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
def __init__(self, **kwargs):
self._dropdown = None
super(Spinner, self).__init__(**kwargs)
fbind = self.fbind
build_dropdown = self._build_dropdown
fbind('on_release', self._toggle_dropdown)
fbind('dropdown_cls', build_dropdown)
fbind('option_cls', build_dropdown)
fbind('values', self._update_dropdown)
fbind('size', self._update_dropdown_size)
fbind('text_autoupdate', self._update_dropdown)
build_dropdown()
def _build_dropdown(self, *largs):
if self._dropdown:
self._dropdown.unbind(on_select=self._on_dropdown_select)
self._dropdown.unbind(on_dismiss=self._close_dropdown)
self._dropdown.dismiss()
self._dropdown = None
cls = self.dropdown_cls
if isinstance(cls, string_types):
cls = Factory.get(cls)
self._dropdown = cls()
self._dropdown.bind(on_select=self._on_dropdown_select)
self._dropdown.bind(on_dismiss=self._close_dropdown)
self._update_dropdown()
def _update_dropdown_size(self, *largs):
if not self.sync_height:
return
dp = self._dropdown
if not dp:
return
container = dp.container
if not container:
return
h = self.height
for item in container.children[:]:
item.height = h
def _update_dropdown(self, *largs):
dp = self._dropdown
cls = self.option_cls
values = self.values
text_autoupdate = self.text_autoupdate
if isinstance(cls, string_types):
cls = Factory.get(cls)
dp.clear_widgets()
for value in values:
item = cls(text=value)
item.height = self.height if self.sync_height else item.height
item.bind(on_release=lambda option: dp.select(option.text))
dp.add_widget(item)
if text_autoupdate:
if values:
if not self.text or self.text not in values:
self.text = values[0]
else:
self.text = ''
def _toggle_dropdown(self, *largs):
if self.values:
self.is_open = not self.is_open
def _close_dropdown(self, *largs):
self.is_open = False
def _on_dropdown_select(self, instance, data, *largs):
self.text = data
self.is_open = False
def on_is_open(self, instance, value):
if value:
self._dropdown.open(self)
else:
if self._dropdown.attach_to:
self._dropdown.dismiss() | PypiClean |
/Mopidy-Radio-Rough-3.14.15.tar.gz/Mopidy-Radio-Rough-3.14.15/README.rst | ****************************
Radio Rough
****************************
Radio rough is a computer program for listening to internet.
For those who care, it is based on the excellent `Mopidy <https://www.mopidy.com/>`_ framework, written in Python 2.7, using tkinter for the front end. It has only been tested on Linux-based machines; we would love to hear from anyone who gets it working on Windows or OS X.
Once installed it will let you search YouTube, browse thousands of internet radio streams (via TuneIn) or podcasts published on iTunes. It will also support any additional Mopidy extensions you care to install (look `here <https://docs.mopidy.com/en/latest/ext/backends/>`_ for what's available, much interesting stuff like internet archive, spotify, soma fm ... ).
It's lightweight and perfect for Raspberry Pi and runs happily even on Pi Zero :) (tested extensively under Ubuntu too)
The look and feel are based on the principles of rough design. It serves its purpose without trying to sell anything, including itself.
Installation
============
On Raspbian machines download `installation script from here <https://raw.githubusercontent.com/unusualcomputers/unusualcomputers/master/code/mopidy/mopidyradiorough/rr.desktop>`_ (right click on the link and choose 'save as') and place it on the desktop (has to be on the desktop), then double click it. It will install all sorts of dependencies and will ask you if that's ok a couple of times along the way.
This will create an entry in the start menu in "Audio & Video" section, click on it and enjoy.
To download the above script from the command line run:
::
wget https://goo.gl/gBdWGw -O ~/Desktop/rr.desktop
This should work on all debian based machines. It downloads and runs this `shell script <https://github.com/unusualcomputers/unusualcomputers/blob/master/code/mopidy/mopidyradiorough/rasp_radio_rough_install.sh>`_
If you already have `mopidy installed <https://docs.mopidy.com/en/latest/installation/>`_ and running, you can install radio rough just by running on the command line:
::
sudo pip install Mopidy-Radio-Rough
Configuration
=============
Once installed it will just work; you can disable it by setting ``enabled = false`` in the ``[radio_rough]`` section of `mopidy.conf <https://docs.mopidy.com/en/latest/config/>`_.
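For example, a minimal snippet in the standard Mopidy ini-style config format:

::

    [radio_rough]
    enabled = false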
How to use radio rough
======================
Radio rough looks like this
.. image:: https://github.com/unusualcomputers/unusualcomputers/blob/master/writing/pics/radio_rough_start.png
The most interesting part is the list. By double-clicking on it you navigate through sources of sound. Directories of things have square brackets around the name, albums have round brackets, files on your local disk have a star in front of them.
Right click opens up a menu that tells you what you can do, it changes a bit depending on where you are in the lists.
All this can be done using keyboard too, see the list of shortcuts below.
.. image:: https://github.com/unusualcomputers/unusualcomputers/blob/master/writing/pics/radio_rough_menu.png
Double click plays the stream or the file (give it a couple of seconds, it needs to be fetched from somewhere on the internet).
If what you are listening to has a start and an end, once a playback starts the pretty orange line will show you where you are. You can click on it to skip or rewind. Buttons on the right do what you think they do, the small slider changes volume.
When you float the mouse pointer over something in the list radio rough will get what information it can about it and show it in a tool tip. Very handy when choosing podcasts.
.. image:: https://github.com/unusualcomputers/unusualcomputers/blob/master/writing/pics/radio_rough_tooltip.png
While playing tracks this information will also be shown in the bottom part of the screen.
.. image:: https://github.com/unusualcomputers/unusualcomputers/blob/master/writing/pics/radio_rough_podcast.png
As long as it is connected to the internet it will happily stream content directly.
For when you are not it can download podcasts for you.
If you subscribe to a podcast channel it will check for new episodes daily and download the latest one. It may also delete some old ones unless you choose to keep them; there is a 'keep' option in the menu for this. (Mind you, if you will always be online when using radio rough it is much better to just mark them as favourites.)
You can also mark things as favourites and you can queue tracks to be played in order and play good stuff in loops.
Radio rough does not support playlists yet, mostly because I don't ever use them. If you would like to have them drop me a line on unusual.computers(at)gmail.com, we can design them together and I will implement them - or you can if you like.
Finally, there are a few keyboard shortcuts:
================ ========================
Return Select item in the list
Backspace Back one level
Space Play/Pause
Control-a Select all in the list
Control-Shift-a Deselect all in the list
Control-s Search
Up/Down Move up/down in the list
Left/Right Volume up/down
Menu button Same as right-click
================ ========================
Data
====
Radio rough will keep a history of played tracks only for the duration of a session; once you switch it off it will all be gone. It will never create or emit any information on the internet other than what is required by the providers it gets tracks from - the usual metadata that sites like YouTube, iTunes etc. hoover up. It will keep track of your favourites and subscriptions on disk even when switched off - the files are saved in the ~/.rough folder.
Feedback
========
We love to hear about bugs, poor solutions and missing features, write to us: unusual.computers(at)gmail.com.
It is also nice to hear about how well it works, just saying.
`unusual computers collective <https://unusualcomputerscollective.org/>`_
| PypiClean |
/LFake-18.9.0.tar.gz/LFake-18.9.0/lfake/providers/person/th_TH/__init__.py | from collections import OrderedDict
from .. import Provider as PersonProvider
class Provider(PersonProvider):
# weights are arbitrarily assigned
formats_female = OrderedDict(
(
("{{first_name_female}} {{last_name}}", 0.97),
("{{prefix_female}}{{first_name_female}} {{last_name}}", 0.015),
("{{first_name_female}} {{last_name}} {{suffix_female}}", 0.001),
("{{prefix_female}}{{first_name_female}} {{last_name}} {{suffix}}", 0.001),
)
)
formats_male = OrderedDict(
(
("{{first_name_male}} {{last_name}}", 0.97),
("{{prefix_male}}{{first_name_male}} {{last_name}}", 0.015),
("{{first_name_male}} {{last_name}} {{suffix_male}}", 0.001),
("{{prefix_male}}{{first_name_male}} {{last_name}} {{suffix}}", 0.001),
)
)
formats_nonbinary = OrderedDict(
(
("{{first_name_nonbinary}} {{last_name}}", 0.97),
("{{prefix_nonbinary}}{{first_name_nonbinary}} {{last_name}}", 0.015),
("{{first_name_nonbinary}} {{last_name}} {{suffix_nonbinary}}", 0.001),
(
"{{prefix_nonbinary}}{{first_name_nonbinary}} {{last_name}} {{suffix}}",
0.001,
),
)
)
# Thai prefix, adapted from
# http://www.stou.ac.th/thai/grad_stdy/Apply/prefix.asp
# weights are arbitrarily assigned
prefixes_female = OrderedDict(
(
("นาง", 0.3),
("น.ส.", 0.2),
("นางสาว", 0.15),
("ด.ญ.", 0.15),
("เด็กหญิง", 0.05),
("จ.ต.", 0.001),
("จ.ท.", 0.001),
("จ.ส.ต.", 0.001),
("จ.ส.ท.", 0.001),
("จ.ส.อ.", 0.001),
("จ.อ.", 0.001),
("ด.ต.", 0.001),
("น.ต.", 0.001),
("น.ท.", 0.001),
("น.อ.", 0.001),
("พ.จ.ต.", 0.001),
("พ.จ.ท.", 0.001),
("พ.จ.อ.", 0.001),
("พ.ต.", 0.001),
("พ.ต.ต.", 0.001),
("พ.ต.ท.", 0.001),
("พ.ต.อ.", 0.001),
("พ.ท.", 0.001),
("พ.อ.", 0.001),
("พ.อ.ต.", 0.001),
("พ.อ.ท.", 0.001),
("พ.อ.อ.", 0.001),
("ร.ต.", 0.001),
("ร.ต.ต.", 0.001),
("ร.ต.ท.", 0.001),
("ร.ต.อ.", 0.001),
("ร.ท.", 0.001),
("ร.อ.", 0.001),
("ส.ต.", 0.001),
("ส.ต.ต.", 0.001),
("ส.ต.ท.", 0.001),
("ส.ต.อ.", 0.001),
("ส.ท.", 0.001),
("ส.อ.", 0.001),
("พล.ต.", 0.0001),
("พล.ต.ต.", 0.0001),
("พล.ต.ท.", 0.0001),
("พล.ต.อ.", 0.0001),
("พล.ท.", 0.0001),
("พล.ร.ต.", 0.0001),
("พล.ร.ท.", 0.0001),
("พล.ร.อ.", 0.0001),
("พล.อ.", 0.0001),
("พล.อ.ต.", 0.0001),
("พล.อ.ท.", 0.0001),
("พล.อ.อ.", 0.0001),
("ม.ร.ว.", 0.0001),
("ม.ล.", 0.0001),
("หม่อมราชวงศ์", 0.0001),
("หม่อมหลวง", 0.0001),
)
)
prefixes_male = OrderedDict(
(
("นาย", 0.6),
("ด.ช.", 0.3),
("จ.ต.", 0.001),
("จ.ท.", 0.001),
("จ.ส.ต.", 0.001),
("จ.ส.ท.", 0.001),
("จ.ส.อ.", 0.001),
("จ.อ.", 0.001),
("ด.ต.", 0.001),
("น.ต.", 0.001),
("น.ท.", 0.001),
("น.อ.", 0.001),
("พ.จ.ต.", 0.001),
("พ.จ.ท.", 0.001),
("พ.จ.อ.", 0.001),
("พ.ต.", 0.001),
("พ.ต.ต.", 0.001),
("พ.ต.ท.", 0.001),
("พ.ต.อ.", 0.001),
("พ.ท.", 0.001),
("พ.อ.", 0.001),
("พ.อ.ต.", 0.001),
("พ.อ.ท.", 0.001),
("พ.อ.อ.", 0.001),
("ร.ต.", 0.001),
("ร.ต.ต.", 0.001),
("ร.ต.ท.", 0.001),
("ร.ต.อ.", 0.001),
("ร.ท.", 0.001),
("ร.อ.", 0.001),
("ส.ต.", 0.001),
("ส.ต.ต.", 0.001),
("ส.ต.ท.", 0.001),
("ส.ต.อ.", 0.001),
("ส.ท.", 0.001),
("ส.อ.", 0.001),
("พล.ต.", 0.0001),
("พล.ต.ต.", 0.0001),
("พล.ต.ท.", 0.0001),
("พล.ต.อ.", 0.0001),
("พล.ท.", 0.0001),
("พล.ร.ต.", 0.0001),
("พล.ร.ท.", 0.0001),
("พล.ร.อ.", 0.0001),
("พล.อ.", 0.0001),
("พล.อ.ต.", 0.0001),
("พล.อ.ท.", 0.0001),
("พล.อ.อ.", 0.0001),
("ม.ร.ว.", 0.0001),
("ม.ล.", 0.0001),
("หม่อมราชวงศ์", 0.0001),
("หม่อมหลวง", 0.0001),
("พระ", 0.0001),
("สามเณร", 0.001),
("พระครูธรรมธร", 0.00001),
("พระครูปลัด", 0.00001),
("พระครูวินัยธร", 0.00001),
("พระครูสมุห์", 0.00001),
("พระครูใบฎีกา", 0.00001),
("พระปลัด", 0.00001),
("พระมหา", 0.00001),
("พระสมุห์", 0.00001),
("พระอธิการ", 0.00001),
("พระใบฎีกา", 0.00001),
("เจ้าอธิการ", 0.00001),
)
)
prefixes_nonbinary = OrderedDict(
(
("จ.ต.", 0.001),
("จ.ท.", 0.001),
("จ.ส.ต.", 0.001),
("จ.ส.ท.", 0.001),
("จ.ส.อ.", 0.001),
("จ.อ.", 0.001),
("ด.ต.", 0.001),
("น.ต.", 0.001),
("น.ท.", 0.001),
("น.อ.", 0.001),
("พ.จ.ต.", 0.001),
("พ.จ.ท.", 0.001),
("พ.จ.อ.", 0.001),
("พ.ต.", 0.001),
("พ.ต.ต.", 0.001),
("พ.ต.ท.", 0.001),
("พ.ต.อ.", 0.001),
("พ.ท.", 0.001),
("พ.อ.", 0.001),
("พ.อ.ต.", 0.001),
("พ.อ.ท.", 0.001),
("พ.อ.อ.", 0.001),
("ร.ต.", 0.001),
("ร.ต.ต.", 0.001),
("ร.ต.ท.", 0.001),
("ร.ต.อ.", 0.001),
("ร.ท.", 0.001),
("ร.อ.", 0.001),
("ส.ต.", 0.001),
("ส.ต.ต.", 0.001),
("ส.ต.ท.", 0.001),
("ส.ต.อ.", 0.001),
("ส.ท.", 0.001),
("ส.อ.", 0.001),
("พล.ต.", 0.0001),
("พล.ต.ต.", 0.0001),
("พล.ต.ท.", 0.0001),
("พล.ต.อ.", 0.0001),
("พล.ท.", 0.0001),
("พล.ร.ต.", 0.0001),
("พล.ร.ท.", 0.0001),
("พล.ร.อ.", 0.0001),
("พล.อ.", 0.0001),
("พล.อ.ต.", 0.0001),
("พล.อ.ท.", 0.0001),
("พล.อ.อ.", 0.0001),
("ม.ร.ว.", 0.0001),
("ม.ล.", 0.0001),
("หม่อมราชวงศ์", 0.0001),
("หม่อมหลวง", 0.0001),
)
)
prefixes = prefixes_female.copy()
prefixes.update(prefixes_male)
    # ``prefixes_nonbinary`` intentionally keeps only the gender-neutral titles defined above.
# get 250 female names and 250 male names randomly
# (with approximate fair distribution of length) from
# https://github.com/PyThaiNLP/pythainlp/blob/dev/pythainlp/corpus/person_names_female_th.txt
# https://github.com/PyThaiNLP/pythainlp/blob/dev/pythainlp/corpus/person_names_male_th.txt
first_names_female = (
"กนกเนตร",
"กวาง",
"กองสิน",
"กะดิรัตน์",
"กันตวรรณ",
"กิ่งแก้ว",
"กิติกานต์",
"กิติยาธรณ์",
"กุลปรียา",
"กุลภาวลัย",
"เกศรา",
"เกษรา",
"แกมแพร",
"ใกล้รุ่ง",
"ขอดิเยาะ",
"เขมจิรา",
"คณภรณ์",
"คมคาย",
"คำ",
"จณิตตา",
"จณิสตา",
"จรรยพร",
"จริยฉัตร",
"จักรีรัตน์",
"จันทนา",
"จันทภา",
"จิณภัทตา",
"จิตตานันท์",
"จิตรลดา",
"จินต์จุฑา",
"จิราภรณ์",
"จิฬาภรณ์",
"จีราภรณ์",
"จุฑาภรณ์",
"จุฑารัตน์",
"ฉัตรปรียา",
"ชนิศา",
"ชรินทร์ทิพย์",
"ชลิดา",
"ชัญญานุนาย",
"ชัฎชา",
"ชิดชนก",
"ซูรัยดา",
"ซูไรดา",
"ซูฮัยดา",
"ฐิตาพร",
"ฐิติกุล",
"ฐิติณัฐฐา",
"ฐิติยาพร",
"ณภัทร",
"ณัฐญาดา",
"ณัฐติญา",
"ณัฐธภรณ์",
"ณัฐธิตา",
"ณัฐพิชา",
"ณัฐวรินทร",
"ณาร์รีมาน",
"ณิชนันท์",
"ณิชาภัทร",
"ดวงจันทร์",
"ดวงพร",
"ดวงสมร",
"ดารุนี",
"ตรีนุช",
"ทองสิริ",
"ทับทิม",
"ทานตะวัน",
"ทินพร",
"ทิพย์วารี",
"ทิพรดา",
"ทิมาภรณ์",
"เทพนารี",
"ธมลพรรณ",
"ธัชญา",
"ธัญญกัญญา",
"ธัญญามาศ",
"ธีริสรา",
"นพรัตน์",
"นพวรรณ",
"นภัสรินทร์",
"นราวรรณ",
"นรีกานต์",
"นรีรัตน์",
"นวรรษนันท์",
"นันทวรรณ",
"นันทิกานต์",
"นาตยา",
"นารดา",
"นาวีตา",
"น้ำเพชร",
"นิติยา",
"นิภา",
"นิวิลดาน",
"นุจรี",
"เนตรฤดี",
"บุญทิวา",
"บุญเทียน",
"บุญพา",
"เบญญาทิพย์",
"ปฐวีกานต์",
"ปภาวรินทร์",
"ประจิน",
"ประไพพักตร์",
"ประภัทร์สรณ์",
"ปริญญา",
"ปัญญาพร",
"ปัณณธร",
"ปาริตา",
"ปิ่นบุญญา",
"ปิยนาฎ",
"ปิยนุช",
"ปิยวดี",
"ปิยะชาติ",
"ผกาทิพย์",
"พชรภรณ์",
"พรชนก",
"พรชีวิน",
"พรเบญญา",
"พรปราณี",
"พรพิไล",
"พรรณปพร",
"พรสวรรค์",
"พลานุช",
"พัชรีนิษฐ์",
"พันเกล้า",
"พัสวี",
"พาดีล๊ะ",
"พาสุข",
"พิชญ์สินี",
"พิมพกานต์",
"พิมพ์ประภา",
"พิมพ์พิชญา",
"พิมพ์สุดา",
"พิมพ์สุตา",
"พิไลพร",
"พิศพรรณ",
"พีรภัทร์",
"เพชรมณี",
"เพ็ญพรรษา",
"เพ็ญยุภา",
"เพียงกมล",
"ฟารินี",
"ฟิรยา",
"ภัคชัญญา",
"ภัคศุภางค์",
"ภัทรนาฎ",
"ภัทราวุธ",
"ภานิณี",
"ภารวี",
"ภาสินี",
"มณียา",
"มนรัตน์",
"มนัญชยา",
"มลิวรรณ",
"มะลีวัลย์",
"มัตติกา",
"มาซีเตาะ",
"มารีนี",
"มาสิตะ",
"เมทนี",
"เมษา",
"ยนงคราญ",
"ยุภา",
"ยุลิน",
"เยาวรัตน์",
"โยธิการ์",
"รมิตา",
"รวิวาน",
"รอกีเย๊าะ",
"รอซีด๊ะ",
"รักชนก",
"รังสินี",
"ราณี",
"รูไกยะฮ์",
"โรสชา",
"ลักษมี",
"ลัดดา",
"วณัฐดา",
"วนาลี",
"วรดาพร",
"วรนาฎ",
"วรรณกร",
"วรรณนิสา",
"วรรณรัตน์",
"วรรณาต",
"วสิตา",
"วันชนก",
"วัลยา",
"วิเชียร",
"วีร์สุดา",
"ศจีกาญจน์",
"ศรินยา",
"ศศิธร",
"ศศินา",
"ศศิยา",
"ศศิรินทร์",
"ศิริเกศ",
"ศิริญา",
"ศิรินันท์",
"ศุภกรชนา",
"ศุภนุนาย",
"สมใจ",
"สมมล",
"สราญจิตต์",
"สโรชา",
"สหัสมณี",
"สายสุรีย์",
"สิราพร",
"สิริกานต์",
"สิริลัดดา",
"สิริ",
"สุกฤษตา",
"สุธาวี",
"สุธินันท์",
"สุปรานี",
"สุพัตร",
"สุพัตรา",
"สุภัทริดา",
"สุภาพร",
"สุภาลินี",
"สุมัชญา",
"สุรการณ์",
"สุรนีย์",
"โสภณิตา",
"โสภา",
"หรรษา",
"หฤทัย",
"อณัฐตา",
"อธิตยา",
"อเนชา",
"อรจิรา",
"อรพิณ",
"อริสรา",
"อรุณี",
"อลิษา",
"อัญชัญ",
"อัญชิษฐา",
"อัญธิกา",
"อัญพัชร์",
"อันธิกา",
"อาซือมะ",
"อาภัศรา",
"อารีย์",
"อาแอเสาะ",
"อำพร",
"อำไพ",
"อุดมลักษณ์",
"อุลัยพร",
"อุษณีย์",
"ฮามีย๊ะ",
)
first_names_male = (
"กรพนธ์",
"กระสุน",
"กฤตพร",
"กฤตเมธ",
"กวีฉัฏฐ",
"กษิดิฐ",
"กิติชัย",
"กิติวัฒน์",
"กุลเชษฐ",
"กุลดิลก",
"เกริกพล",
"เกษตร",
"เกษมชัย",
"เกียรติก้อง",
"เกียรติศักดิ์",
"โกมล",
"โกวิทย์",
"ขวัญรุ้ง",
"เขียว",
"คมกริบ",
"คมกฤชญ์",
"คมสัน",
"คำปลิว",
"คำมั่น",
"จด",
"จักรกฤนาย",
"จักรชัย",
"จักรพันธ์",
"จำรัส",
"จิม",
"จิรวิทย์",
"จีรยุทธ",
"เจตธนากร",
"เจตพินิษฐ์",
"เจษฎากร",
"เจษฏาภรณ์",
"ใจกลาง",
"ฉลองชัย",
"เฉลิมพล",
"เฉลิมรัฐ",
"เฉลิมรัตน์",
"ชัชนันท์",
"ชัชเวศย์",
"เชิงชาย",
"โชคภาดล",
"โชติวุฒิ",
"ไชยภพ",
"ซุกรี",
"ฌาฆีภัตฐ์",
"ญาณพันธุ์",
"ฐิติวุฒิ",
"ณปภัช",
"ณัฐจศักดิ์",
"ณัฐศักดิ์",
"ณิชเชฏฐ์",
"ดิลก",
"ตอฮา",
"ถนอมชัย",
"เถลิงยศ",
"ทรรศนชัย",
"ทวีวัฒน์",
"ทองรัตน์",
"ทัตธน",
"ทินวัฒน์",
"เทพณรงค์",
"เทอดศักดิ์",
"เทียมศักดิ์",
"ธนกิตต์",
"ธนนนท์",
"ธนภณ",
"ธนวันต์",
"ธเนษฐ",
"ธมน",
"ธราวิทญ์",
"ธวัศชา",
"ธารา",
"ธาเอก",
"ธีร์ธวันาย",
"ธีรลักษณ์",
"ธีรวัช",
"ธีรวุฒิ",
"ธีราทัต",
"นนทกาญจน์",
"นพ",
"นภนต์",
"นัฐพล",
"นันทวุฒิ",
"นัสรุน",
"นาทภูวพัฒน์",
"นาย",
"นิชนันท์",
"นิติ",
"นิมุ",
"นิรันดร์",
"นิรุตต์",
"เนติลักษณ์",
"บุญเกิด",
"บุญญกัลป์",
"บุญญามี",
"บุญนพ",
"บุญเอก",
"ปฐม",
"ปรมินทร์",
"ประเดิม",
"ประยุทธ์",
"ประวี",
"ประสิทธิ์",
"ประเสริฐ",
"ปรายกานต์",
"ปวีณ",
"ปัณณวัชร",
"ปัตถพงษ์",
"ปิยบุตร",
"ปิยวัจน์",
"ปิยะนันท์",
"ปุณณรัตน์",
"แปลง",
"ผดุงชาติ",
"ผดุงพล",
"พงษ์นเรศ",
"พลภูมิ",
"พศร",
"พัชรพร",
"พันเทพ",
"พันธุ์เทพ",
"พิชาภพ",
"พิพิธธน",
"พีรพัฒน์",
"พีระพงศ์พันธ์",
"พุทธ",
"พุทธิพงษ์",
"เพทาย",
"ไพสิฐ",
"ภควัฒน์",
"ภัคชนน",
"ภานุพล",
"ภานุวัตร",
"ภาสวุฒิ",
"ภูมิปัญญา",
"ภูวรา",
"ภูวฤณ",
"ภูวัน",
"ภูวิช",
"มนัส",
"มะสูเกียน",
"มาโนชญ์",
"มารุด",
"มูฮัมหมัดอิมรอน",
"มูฮำมัด",
"ไมล์",
"ยศพงศ์",
"ยศพนต์",
"ยศวัฒน์",
"ยอดแมน",
"ยุศรอน",
"ยูซุฟ",
"รชตกร",
"รภัสพงษ์",
"รัฐพงษ์",
"ราชพฤกษ์",
"ราชันทร์",
"ราชัน",
"เรืองเกียรติ",
"ฤทธิ์ชกร",
"เลิศเดช",
"วรปรัชญ์",
"วรรณชนะชัย",
"วรศาสส์",
"วรินทธิ์ธร",
"วันฉัตร",
"วัลลภ",
"วาร์มูฮำหมัด",
"วาสุเทพ",
"วิกิจ",
"วิชชากร",
"วิชา",
"วิถี",
"วิทูลย์",
"วิพุธ",
"วิรชัย",
"วิรศักดิ์",
"วิสาร",
"วีรชัย",
"วีระโชติ",
"วีระวัฒน์",
"วุฒิ",
"ไวพจน์",
"ศดิศ",
"ศภัคชคง",
"ศรลักษณ์",
"ศรายุธ",
"ศรีสวัสดิ์",
"ศิรณัฐ",
"ศุภชัย",
"ศุภาศิล",
"สนั่น",
"สมเกียรติ",
"สมนึก",
"สมปอง",
"สมพิศ",
"สมหมาย",
"สรรเพชญ์",
"สรายุทธ",
"สัญชาน",
"สันชัย",
"สันติราษฎร์",
"สิทธัญ",
"สิทธิชัย",
"สินสมุทร",
"สิรวัฒน์",
"สิริรัตน์",
"สีหราช",
"สุชิน",
"สุทกร",
"สุทธิณัฐ",
"สุทธิพจน์",
"สุพนธ์",
"สุรธัช",
"สุรนัย",
"สุรวัช",
"สุไฮลัน",
"เสรี",
"โสภณ",
"หรรษธร",
"หลักทรัพย์",
"หล้า",
"หลี",
"อชิตะวีร์",
"อณาวิน",
"อดิสรณ์",
"อธิวัตร",
"อนิวัฒน์",
"อนุบาล",
"อนุวัช",
"อภิลักษณ์",
"อมัด",
"อรรจน์",
"อัครพนธ์",
"อับดุลเลาะห์",
"อัษฏา",
"อาฮามัด",
"อินทรีย์",
"อิสรันดร์",
"เอกชัย",
"เอกวิทย์",
"เอกอธิพงษ์",
"เอนกพงศ์",
"โอภาส",
"ฮานาฟี",
"ฮาฟิต",
)
first_names = first_names_male + first_names_female
first_names_nonbinary = first_names_male + first_names_female
    # last names that were granted by senior officers
# http://www.reurnthai.com/index.php?topic=5273.45
# also partially from
# https://github.com/PyThaiNLP/pythainlp/blob/dev/pythainlp/corpus/family_names_th.txt
last_names = (
"กุมารบุญ",
"แก้วชลคราม",
"แก้วอยู่",
"ขอหมั่นกลาง",
"ขันธุลา",
"ขำเอนก",
"ขุนดำ",
"เขียวขุ้ย",
"เขียวอ่อน",
"คณานุรักษ์",
"คำลือ",
"งามพิเชษฐ์",
"จ้อยนุแสง",
"จันทา",
"จันอ้น",
"เจริญรัมย์",
"แจ้งสว่าง",
"ฉัตรอภิเที่ยงค่ำ",
"ฉัพพรรณธนกูร",
"ฉายแสง",
"ฉิมพาลี",
"ชำนาญวาด",
"ชุมวระ",
"เช้าวันดี",
"ไชยภา",
"ซาซุม",
"ซูสารอ",
"เณรานุสนธิ์",
"ดวงทับทิม",
"ด้วงโสน",
"ดัตพันธุ์",
"ดาตู",
"ดาบเงิน",
"ดาบเพ็ชร์",
"ดาวกระจาย",
"ดาวอร่าม",
"ดำริห์ชอบ",
"ดิศดใน",
"ดิสกะประกาย",
"ดีตพันธุ์",
"ดุริยพันธุ์",
"ดุษฎีวนิช",
"เดชคุ้ม",
"เดชวา",
"ตระกูลบุญ",
"ตระกูลไม้เรียง",
"ตราชู",
"ตรีครุธพันธุ์",
"ตรีเภรินทร์",
"ตวงทอง",
"ตวันเยี่ยม",
"ตะละภัฏ",
"ตั้งกุลงาม",
"ตั้งเผ่า",
"ตั้งรบ",
"ตัณฑนุช",
"ตัณสถิตย์",
"ตันตราจิณ",
"ตันเผ่า",
"ตันยา",
"ติณรัตน์",
"ติระคมน์",
"เตชะกำพุ",
"เตมิยะเดช",
"แต้กุล",
"ไตรบรรพ",
"ถนอมกุลบุตร",
"ถนอมพลกรัง",
"ถนอมพล",
"ถนอมมนุษย์",
"ถนัดกลึง",
"ถนัดการเขียน",
"ถนัดการยนต์",
"ถนัดเดินข่าว",
"ถนัดพิมพการ",
"ถนัดภาษา",
"ถนัดรบ",
"ถนัดรักษา",
"ถนัดหัตถกรรม",
"ถนัดอักษร",
"ถนัดอาวุธ",
"ถนิมมาศ",
"ถมปัด",
"ถมังรักษสัตว์",
"ถ้วนศรี",
"ถะเกิงชศ",
"ถาวรรัตน",
"ถาวระวรณ์",
"ถาวรายุศม์",
"ถิรสวัสดิ์",
"ถุงเงิน",
"แถมธน",
"ทรงโกมล",
"ทรัพย์ธำรงค์",
"ทรัพย์สาร",
"ทวนไชย์",
"ทวนทอง",
"ทวีเดช",
"ทศโยธิน",
"ทหารแท้",
"ทองแท้",
"ทองเนื้อดี",
"ทองประดิฐ",
"ทองปากน้ำ",
"ทองลาภ",
"ทองสินธุ์",
"ทองสีไพล",
"ทองสุกเลิศ",
"ทองอยู่",
"ทันยุค",
"ทับทิมไทย",
"ทัศนสุทธิ",
"ทำประดู่",
"ทีฆะ",
"ทุมะบุตร์",
"แท่นทอง",
"ไทไชโย",
"ไทนิยม",
"ไทยแท้",
"ไทยสุชาต",
"ธนประทีป",
"ธนรักษ์",
"ธนูปกรณ์",
"ธรรมทินนา",
"ธรรมนิยม",
"ธรรมเมธา",
"ธรรมฤดี",
"ธรรมสถิตไพศาล",
"ธัญเสถียร",
"ธัญาโภชน์",
"ธาราธร",
"ธีวร",
"ธุวะนุติ์",
"ธูปหอม",
"ธูปะวิโรจน์",
"เธียรายัน",
"นกทอง",
"นครเทพ",
"นพคเชนทร์",
"นพตระกูล",
"นรวิทย์โชติกุล",
"นฤทุกข์",
"นฤภัย",
"นวลฉวี",
"นวลเพ็ง",
"นะวะมันดร",
"นักรบ",
"นักสำหรวจ",
"นับเนื่องนอ",
"นากกนก",
"นาคพันธุ์",
"นาควงษ์",
"นาคสุทิน",
"นาคะนคร",
"นาฏคายี",
"นาถะเดชะ",
"นาถะพินธุ",
"นานายน",
"นามขำ",
"นามเสวตร",
"น้ำทิพย์",
"นำธวัช",
"นิติสาขา",
"นิยมเซียม",
"นิยมธรรม",
"นิยมสำหรวจ",
"นิระหานี",
"นิลวรรณ",
"นิลวิมล",
"นิลสลัว",
"นิลสุวรรณ์",
"นิลเสนา",
"นิละทัต",
"นิษประปัญจ์",
"นุชแนวนุ่ม",
"นุตตาร",
"นุ่มกัน",
"เนตร์มณี",
"เนื่องนนท์",
"เนื้อนุ่ม",
"แน่นดุจป้อม",
"แนวพญา",
"แนวพนิช",
"บัวเผื่อน",
"บินดี",
"บุญญาภิรมย์",
"บุญญาไลย์",
"บุญบำรุง",
"บุญศล",
"บุญส่ง",
"บุณยะภาชน์",
"บุตดา",
"บุตราช",
"บุนยะตีรณะ",
"บุนยะศัพท์",
"บุนยาภิสนท์",
"ประจันตะเสน",
"ปรีชากุลเศรษฐ์",
"ปานสุวรรณ",
"ผลบุญ",
"พงศ์ฉบับนภา",
"พรมอ่อน",
"พรรษาสกุล",
"พรสีมา",
"พานเกล้า",
"พีระเพ็ญกุล",
"เพียยา",
"โพธิสัตย์",
"ภูภักดี",
"มนทอง",
"มิ่งขวัญ",
"เมืองสุข",
"ไม้แดง",
"ยะผา",
"ยางสวย",
"ยาปะโลหิต",
"เยาวธนโชค",
"ร่มธิติรัตน์",
"ราชมณี",
"เลขะพันธุ์",
"เลิศกิ่ง",
"วะคีมัน",
"วาทา",
"วิลาสินี",
"วุฑฒยากร",
"เวียงจันทึก",
"ศรทอง",
"ศรีตะวัน",
"ศรีธนะเวทย์",
"ศรีเผด็จ",
"ศรีวงค์",
"ศรีสัตย์",
"ศรีอุ่น",
"ศาสตร์ศิลป์",
"ศิวะวรเวท",
"สงประเสริฐ",
"สังข์กรด",
"สันตะวงศ์",
"สาระพันธ์",
"สุวรรณหงษ์",
"ไสยกิจ",
"หนักแน่น",
"หนุนสุข",
"หอมพิกุล",
"หอมสิน",
"หิรัญสาลี",
"แหยมศิริ",
"อัตตนาถ",
"อุ่นอก",
"อุลหัสสา",
)
    # Thai suffixes that were granted by the king
# https://th.wikipedia.org/wiki/%E0%B8%99%E0%B8%B2%E0%B8%A1%E0%B8%AA%E0%B8%81%E0%B8%B8%E0%B8%A5%E0%B8%9E%E0%B8%A3%E0%B8%B0%E0%B8%A3%E0%B8%B2%E0%B8%8A%E0%B8%97%E0%B8%B2%E0%B8%99
suffixes = (
"ณ กาฬสินธุ์",
"ณ จัมปาศักดิ์",
"ณ เชียงใหม่",
"ณ ตะกั่วทุ่ง",
"ณ ถลาง",
"ณ นคร",
"ณ น่าน",
"ณ บางช้าง",
"ณ ป้อมเพชร์",
"ณ พัทลุง",
"ณ พิศณุโลก",
"ณ มโนรม",
"ณ มหาไชย",
"ณ ร้อยเอ็จ",
"ณ ระนอง",
"ณ ลำปาง",
"ณ ลำพูน",
"ณ วิเชียร",
"ณ สงขลา",
"ณ หนองคาย",
"ณ อุบล",
) | PypiClean |
/MonQueue-0.3.3.tar.gz/MonQueue-0.3.3/README.rst | .. image:: https://img.shields.io/travis/rexzhang/monqueue.svg
:target: https://travis-ci.org/rexzhang/monqueue
.. image:: https://img.shields.io/coveralls/rexzhang/monqueue.svg
:target: https://coveralls.io/github/rexzhang/monqueue?branch=master
.. image:: https://img.shields.io/pypi/v/monqueue.svg
:target: https://pypi.org/project/MonQueue/
.. image:: https://img.shields.io/pypi/pyversions/monqueue.svg
:target: https://pypi.org/project/MonQueue/
.. image:: https://img.shields.io/pypi/dm/monqueue.svg
:target: https://pypi.org/project/MonQueue/
========
MonQueue
========
:Info: MonQueue is a Python library that allows you to use MongoDB as a message queue.
:Author: Rex Zhang (http://github.com/rexzhang)
Install
=======
To install it, just run:
.. code-block:: console
pip install -U monqueue
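Usage
=====

The snippet below is only a sketch -- the import path, constructor and method names are assumptions based on typical Python queue libraries, so check the package documentation for the real API:

.. code-block:: python

    from monqueue import MonQueue  # assumed import path

    queue = MonQueue("tasks")               # assumed constructor
    queue.put({"job": "resize", "id": 42})  # assumed method name
    message = queue.get()                   # assumed method name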
Coverage Report
===============
.. code-block:: console
py.test --cov=. --cov-report html
Contributing
============
The source is available on `GitHub <http://github.com/rexzhang/monqueue>`_. To contribute to the project, fork it on GitHub and send a pull request; all contributions and suggestions are welcome.
| PypiClean |
/Bis-Miner-3.11.1.tar.gz/Bis-Miner-3.11.0/Orange/widgets/model/owadaboost.py | from AnyQt.QtCore import Qt
from Orange.base import Learner
from Orange.data import Table
from Orange.modelling import SklAdaBoostLearner, SklTreeLearner
from Orange.widgets import gui
from Orange.widgets.settings import Setting
from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
from Orange.widgets.widget import Msg, Input
class OWAdaBoost(OWBaseLearner):
name = "AdaBoost"
description = "一个结合弱学习器的集合元算法,适应每个训练样本的'复杂度'"
icon = "icons/AdaBoost.svg"
replaces = [
"Orange.widgets.classify.owadaboost.OWAdaBoostClassification",
"Orange.widgets.regression.owadaboostregression.OWAdaBoostRegression",
]
priority = 80
LEARNER = SklAdaBoostLearner
class Inputs(OWBaseLearner.Inputs):
learner = Input("学习器", Learner)
#: Algorithms for classification problems
algorithms = ["SAMME", "SAMME.R"]
#: Losses for regression problems
losses = ["Linear", "Square", "Exponential"]
n_estimators = Setting(50)
learning_rate = Setting(1.)
algorithm_index = Setting(1)
loss_index = Setting(0)
use_random_seed = Setting(False)
random_seed = Setting(0)
DEFAULT_BASE_ESTIMATOR = SklTreeLearner()
class Error(OWBaseLearner.Error):
        no_weight_support = Msg('The base learner does not support weights.')
def add_main_layout(self):
box = gui.widgetBox(self.controlArea, "Parameters")
self.base_estimator = self.DEFAULT_BASE_ESTIMATOR
self.base_label = gui.label(
box, self, "Base estimator: " + self.base_estimator.name.title())
self.n_estimators_spin = gui.spin(
box, self, "n_estimators", 1, 100, label="Number of estimators:",
alignment=Qt.AlignRight, controlWidth=80,
callback=self.settings_changed)
self.learning_rate_spin = gui.doubleSpin(
box, self, "learning_rate", 1e-5, 1.0, 1e-5,
label="Learning rate:", decimals=5, alignment=Qt.AlignRight,
controlWidth=80, callback=self.settings_changed)
self.random_seed_spin = gui.spin(
box, self, "random_seed", 0, 2 ** 31 - 1, controlWidth=80,
label="Fixed seed for random generator:", alignment=Qt.AlignRight,
callback=self.settings_changed, checked="use_random_seed",
checkCallback=self.settings_changed)
# Algorithms
box = gui.widgetBox(self.controlArea, "Boosting method")
self.cls_algorithm_combo = gui.comboBox(
box, self, "algorithm_index", label="Classification algorithm:",
items=self.algorithms,
orientation=Qt.Horizontal, callback=self.settings_changed)
self.reg_algorithm_combo = gui.comboBox(
box, self, "loss_index", label="Regression loss function:",
items=self.losses,
orientation=Qt.Horizontal, callback=self.settings_changed)
def create_learner(self):
if self.base_estimator is None:
return None
return self.LEARNER(
base_estimator=self.base_estimator,
n_estimators=self.n_estimators,
learning_rate=self.learning_rate,
random_state=self.random_seed,
preprocessors=self.preprocessors,
algorithm=self.algorithms[self.algorithm_index],
loss=self.losses[self.loss_index].lower())
@Inputs.learner
def set_base_learner(self, learner):
self.Error.no_weight_support.clear()
if learner and not learner.supports_weights:
            # Show the error and invalidate the base learner
self.Error.no_weight_support()
self.base_estimator = None
self.base_label.setText("Base estimator: INVALID")
else:
self.base_estimator = learner or self.DEFAULT_BASE_ESTIMATOR
self.base_label.setText(
"Base estimator: %s" % self.base_estimator.name.title())
if self.auto_apply:
self.apply()
def get_learner_parameters(self):
return (("Base estimator", self.base_estimator),
("Number of estimators", self.n_estimators),
("Algorithm (classification)", self.algorithms[
self.algorithm_index].capitalize()),
("Loss (regression)", self.losses[
self.loss_index].capitalize()))
if __name__ == "__main__":
import sys
from AnyQt.QtWidgets import QApplication
a = QApplication(sys.argv)
ow = OWAdaBoost()
ow.resetSettings()
ow.set_data(Table(sys.argv[1] if len(sys.argv) > 1 else 'iris'))
ow.show()
a.exec_()
ow.saveSettings() | PypiClean |
/CleanAdminDjango-1.5.3.1.tar.gz/CleanAdminDjango-1.5.3.1/django/contrib/contenttypes/generic.py | from __future__ import unicode_literals
from collections import defaultdict
from functools import partial
from django.core.exceptions import ObjectDoesNotExist
from django.db import connection
from django.db.models import signals
from django.db import models, router, DEFAULT_DB_ALIAS
from django.db.models.fields.related import RelatedField, Field, ManyToManyRel
from django.db.models.loading import get_model
from django.forms import ModelForm
from django.forms.models import BaseModelFormSet, modelformset_factory, save_instance
from django.contrib.admin.options import InlineModelAdmin, flatten_fieldsets
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import smart_text
class GenericForeignKey(object):
"""
Provides a generic relation to any object through content-type/object-id
fields.
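
    A minimal sketch of the usual setup (using the default field names)::

        from django.contrib.contenttypes.models import ContentType
        from django.contrib.contenttypes.generic import GenericForeignKey
        from django.db import models

        class TaggedItem(models.Model):
            tag = models.SlugField()
            content_type = models.ForeignKey(ContentType)
            object_id = models.PositiveIntegerField()
            content_object = GenericForeignKey()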
"""
def __init__(self, ct_field="content_type", fk_field="object_id"):
self.ct_field = ct_field
self.fk_field = fk_field
def contribute_to_class(self, cls, name):
self.name = name
self.model = cls
self.cache_attr = "_%s_cache" % name
cls._meta.add_virtual_field(self)
# For some reason I don't totally understand, using weakrefs here doesn't work.
signals.pre_init.connect(self.instance_pre_init, sender=cls, weak=False)
# Connect myself as the descriptor for this field
setattr(cls, name, self)
def instance_pre_init(self, signal, sender, args, kwargs, **_kwargs):
"""
Handles initializing an object with the generic FK instaed of
content-type/object-id fields.
"""
if self.name in kwargs:
value = kwargs.pop(self.name)
kwargs[self.ct_field] = self.get_content_type(obj=value)
kwargs[self.fk_field] = value._get_pk_val()
def get_content_type(self, obj=None, id=None, using=None):
# Convenience function using get_model avoids a circular import when
# using this model
ContentType = get_model("contenttypes", "contenttype")
if obj:
return ContentType.objects.db_manager(obj._state.db).get_for_model(obj)
elif id:
return ContentType.objects.db_manager(using).get_for_id(id)
else:
# This should never happen. I love comments like this, don't you?
raise Exception("Impossible arguments to GFK.get_content_type!")
def get_prefetch_query_set(self, instances):
# For efficiency, group the instances by content type and then do one
# query per model
fk_dict = defaultdict(set)
# We need one instance for each group in order to get the right db:
instance_dict = {}
ct_attname = self.model._meta.get_field(self.ct_field).get_attname()
for instance in instances:
# We avoid looking for values if either ct_id or fkey value is None
ct_id = getattr(instance, ct_attname)
if ct_id is not None:
fk_val = getattr(instance, self.fk_field)
if fk_val is not None:
fk_dict[ct_id].add(fk_val)
instance_dict[ct_id] = instance
ret_val = []
for ct_id, fkeys in fk_dict.items():
instance = instance_dict[ct_id]
ct = self.get_content_type(id=ct_id, using=instance._state.db)
ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))
# For doing the join in Python, we have to match both the FK val and the
# content type, so we use a callable that returns a (fk, class) pair.
def gfk_key(obj):
ct_id = getattr(obj, ct_attname)
if ct_id is None:
return None
else:
model = self.get_content_type(id=ct_id,
using=obj._state.db).model_class()
return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)),
model)
return (ret_val,
lambda obj: (obj._get_pk_val(), obj.__class__),
gfk_key,
True,
self.cache_attr)
def is_cached(self, instance):
return hasattr(instance, self.cache_attr)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
return getattr(instance, self.cache_attr)
except AttributeError:
rel_obj = None
# Make sure to use ContentType.objects.get_for_id() to ensure that
# lookups are cached (see ticket #5570). This takes more code than
# the naive ``getattr(instance, self.ct_field)``, but has better
# performance when dealing with GFKs in loops and such.
f = self.model._meta.get_field(self.ct_field)
ct_id = getattr(instance, f.get_attname(), None)
if ct_id:
ct = self.get_content_type(id=ct_id, using=instance._state.db)
try:
rel_obj = ct.get_object_for_this_type(pk=getattr(instance, self.fk_field))
except ObjectDoesNotExist:
pass
setattr(instance, self.cache_attr, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
ct = None
fk = None
if value is not None:
ct = self.get_content_type(obj=value)
fk = value._get_pk_val()
setattr(instance, self.ct_field, ct)
setattr(instance, self.fk_field, fk)
setattr(instance, self.cache_attr, value)
class GenericRelation(RelatedField, Field):
"""Provides an accessor to generic related objects (e.g. comments)"""
def __init__(self, to, **kwargs):
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = GenericRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', True))
# Override content-type/object-id field names on the related class
self.object_id_field_name = kwargs.pop("object_id_field", "object_id")
self.content_type_field_name = kwargs.pop("content_type_field", "content_type")
kwargs['blank'] = True
kwargs['editable'] = False
kwargs['serialize'] = False
Field.__init__(self, **kwargs)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def value_to_string(self, obj):
qs = getattr(obj, self.name).all()
return smart_text([instance._get_pk_val() for instance in qs])
def m2m_db_table(self):
return self.rel.to._meta.db_table
def m2m_column_name(self):
return self.object_id_field_name
def m2m_reverse_name(self):
return self.rel.to._meta.pk.column
def m2m_target_field_name(self):
return self.model._meta.pk.name
def m2m_reverse_target_field_name(self):
return self.rel.to._meta.pk.name
def contribute_to_class(self, cls, name):
super(GenericRelation, self).contribute_to_class(cls, name)
# Save a reference to which model this class is on for future use
self.model = cls
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseGenericRelatedObjectsDescriptor(self))
def contribute_to_related_class(self, cls, related):
pass
def set_attributes_from_rel(self):
pass
def get_internal_type(self):
return "ManyToManyField"
def db_type(self, connection):
# Since we're simulating a ManyToManyField, in effect, best return the
# same db_type as well.
return None
def extra_filters(self, pieces, pos, negate):
"""
Return an extra filter to the queryset so that the results are filtered
on the appropriate content type.
"""
if negate:
return []
ContentType = get_model("contenttypes", "contenttype")
content_type = ContentType.objects.get_for_model(self.model)
prefix = "__".join(pieces[:pos + 1])
return [("%s__%s" % (prefix, self.content_type_field_name),
content_type)]
def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS):
"""
Return all objects related to ``objs`` via this ``GenericRelation``.
"""
return self.rel.to._base_manager.db_manager(using).filter(**{
"%s__pk" % self.content_type_field_name:
ContentType.objects.db_manager(using).get_for_model(self.model).pk,
"%s__in" % self.object_id_field_name:
[obj.pk for obj in objs]
})
class ReverseGenericRelatedObjectsDescriptor(object):
"""
This class provides the functionality that makes the related-object
managers available as attributes on a model class, for fields that have
multiple "remote" values and have a GenericRelation defined in their model
(rather than having another model pointed *at* them). In the example
"article.publications", the publications attribute is a
ReverseGenericRelatedObjectsDescriptor instance.
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, instance_type=None):
if instance is None:
return self
# This import is done here to avoid circular import importing this module
from django.contrib.contenttypes.models import ContentType
# Dynamically create a class that subclasses the related model's
# default manager.
rel_model = self.field.rel.to
superclass = rel_model._default_manager.__class__
RelatedManager = create_generic_related_manager(superclass)
qn = connection.ops.quote_name
content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(instance)
manager = RelatedManager(
model = rel_model,
instance = instance,
symmetrical = (self.field.rel.symmetrical and instance.__class__ == rel_model),
source_col_name = qn(self.field.m2m_column_name()),
target_col_name = qn(self.field.m2m_reverse_name()),
content_type = content_type,
content_type_field_name = self.field.content_type_field_name,
object_id_field_name = self.field.object_id_field_name,
prefetch_cache_name = self.field.attname,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
manager.clear()
for obj in value:
manager.add(obj)
def create_generic_related_manager(superclass):
"""
Factory function for a manager that subclasses 'superclass' (which is a
Manager) and adds behavior for generic related objects.
"""
class GenericRelatedObjectManager(superclass):
def __init__(self, model=None, instance=None, symmetrical=None,
source_col_name=None, target_col_name=None, content_type=None,
content_type_field_name=None, object_id_field_name=None,
prefetch_cache_name=None):
super(GenericRelatedObjectManager, self).__init__()
self.model = model
self.content_type = content_type
self.symmetrical = symmetrical
self.instance = instance
self.source_col_name = source_col_name
self.target_col_name = target_col_name
self.content_type_field_name = content_type_field_name
self.object_id_field_name = object_id_field_name
self.prefetch_cache_name = prefetch_cache_name
self.pk_val = self.instance._get_pk_val()
self.core_filters = {
'%s__pk' % content_type_field_name: content_type.id,
'%s__exact' % object_id_field_name: instance._get_pk_val(),
}
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).get_query_set().using(db).filter(**self.core_filters)
def get_prefetch_query_set(self, instances):
db = self._db or router.db_for_read(self.model, instance=instances[0])
query = {
'%s__pk' % self.content_type_field_name: self.content_type.id,
'%s__in' % self.object_id_field_name:
set(obj._get_pk_val() for obj in instances)
}
qs = super(GenericRelatedObjectManager, self).get_query_set().using(db).filter(**query)
# We (possibly) need to convert object IDs to the type of the
# instances' PK in order to match up instances:
object_id_converter = instances[0]._meta.pk.to_python
return (qs,
lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)),
lambda obj: obj._get_pk_val(),
False,
self.prefetch_cache_name)
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
setattr(obj, self.content_type_field_name, self.content_type)
setattr(obj, self.object_id_field_name, self.pk_val)
obj.save()
add.alters_data = True
def remove(self, *objs):
db = router.db_for_write(self.model, instance=self.instance)
for obj in objs:
obj.delete(using=db)
remove.alters_data = True
def clear(self):
db = router.db_for_write(self.model, instance=self.instance)
for obj in self.all():
obj.delete(using=db)
clear.alters_data = True
def create(self, **kwargs):
kwargs[self.content_type_field_name] = self.content_type
kwargs[self.object_id_field_name] = self.pk_val
db = router.db_for_write(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
create.alters_data = True
return GenericRelatedObjectManager
class GenericRel(ManyToManyRel):
def __init__(self, to, related_name=None, limit_choices_to=None, symmetrical=True):
self.to = to
self.related_name = related_name
self.limit_choices_to = limit_choices_to or {}
self.symmetrical = symmetrical
self.multiple = True
self.through = None
class BaseGenericInlineFormSet(BaseModelFormSet):
"""
A formset for generic inline objects to a parent.
"""
def __init__(self, data=None, files=None, instance=None, save_as_new=None,
prefix=None, queryset=None):
# Avoid a circular import.
from django.contrib.contenttypes.models import ContentType
opts = self.model._meta
self.instance = instance
self.rel_name = '-'.join((
opts.app_label, opts.object_name.lower(),
self.ct_field.name, self.ct_fk_field.name,
))
if self.instance is None or self.instance.pk is None:
qs = self.model._default_manager.none()
else:
if queryset is None:
queryset = self.model._default_manager
qs = queryset.filter(**{
self.ct_field.name: ContentType.objects.get_for_model(self.instance),
self.ct_fk_field.name: self.instance.pk,
})
super(BaseGenericInlineFormSet, self).__init__(
queryset=qs, data=data, files=files,
prefix=prefix
)
@classmethod
def get_default_prefix(cls):
opts = cls.model._meta
return '-'.join((opts.app_label, opts.object_name.lower(),
cls.ct_field.name, cls.ct_fk_field.name,
))
def save_new(self, form, commit=True):
# Avoid a circular import.
from django.contrib.contenttypes.models import ContentType
kwargs = {
self.ct_field.get_attname(): ContentType.objects.get_for_model(self.instance).pk,
self.ct_fk_field.get_attname(): self.instance.pk,
}
new_obj = self.model(**kwargs)
return save_instance(form, new_obj, commit=commit)
def generic_inlineformset_factory(model, form=ModelForm,
formset=BaseGenericInlineFormSet,
ct_field="content_type", fk_field="object_id",
fields=None, exclude=None,
extra=3, can_order=False, can_delete=True,
max_num=None,
formfield_callback=None):
"""
Returns a ``GenericInlineFormSet`` for the given kwargs.
You must provide ``ct_field`` and ``fk_field`` if they are different from
the defaults ``content_type`` and ``object_id`` respectively.
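
    For example, a sketch with a hypothetical ``Image`` model that uses the
    default field names::

        ImageFormSet = generic_inlineformset_factory(Image, extra=1)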
"""
opts = model._meta
# Avoid a circular import.
from django.contrib.contenttypes.models import ContentType
# if there is no field called `ct_field` let the exception propagate
ct_field = opts.get_field(ct_field)
if not isinstance(ct_field, models.ForeignKey) or ct_field.rel.to != ContentType:
raise Exception("fk_name '%s' is not a ForeignKey to ContentType" % ct_field)
fk_field = opts.get_field(fk_field) # let the exception propagate
if exclude is not None:
exclude = list(exclude)
exclude.extend([ct_field.name, fk_field.name])
else:
exclude = [ct_field.name, fk_field.name]
FormSet = modelformset_factory(model, form=form,
formfield_callback=formfield_callback,
formset=formset,
extra=extra, can_delete=can_delete, can_order=can_order,
fields=fields, exclude=exclude, max_num=max_num)
FormSet.ct_field = ct_field
FormSet.ct_fk_field = fk_field
return FormSet
class GenericInlineModelAdmin(InlineModelAdmin):
ct_field = "content_type"
ct_fk_field = "object_id"
formset = BaseGenericInlineFormSet
def get_formset(self, request, obj=None, **kwargs):
if self.declared_fieldsets:
fields = flatten_fieldsets(self.declared_fieldsets)
else:
fields = None
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# GenericInlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"ct_field": self.ct_field,
"fk_field": self.ct_fk_field,
"form": self.form,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"formset": self.formset,
"extra": self.extra,
"can_delete": can_delete,
"can_order": False,
"fields": fields,
"max_num": self.max_num,
"exclude": exclude
}
defaults.update(kwargs)
return generic_inlineformset_factory(self.model, **defaults)
class GenericStackedInline(GenericInlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class GenericTabularInline(GenericInlineModelAdmin):
template = 'admin/edit_inline/tabular.html' | PypiClean |
/MozPhab-1.4.3-py3-none-any.whl/mozphab/updater.py |
import json
import sys
import time
import urllib.request
from typing import Optional
from setuptools import Distribution
from packaging.version import Version
from pathlib import Path
from pkg_resources import parse_version
from mozphab import environment
from .config import config
from .environment import MOZPHAB_VERSION
from .exceptions import Error
from .logger import logger
from .subprocess_wrapper import check_call
SELF_UPDATE_FREQUENCY = 24 * 3 # hours
def get_pypi_json() -> dict:
"""Get data about `MozPhab` from the JSON API endpoint."""
url = "https://pypi.org/pypi/MozPhab/json"
output = urllib.request.urlopen(urllib.request.Request(url), timeout=30).read()
response = json.loads(output.decode("utf-8"))
return response
def get_simple_json() -> dict:
"""Get data about `MozPhab` from the `simple` API endpoint."""
url = "https://pypi.org/simple/MozPhab"
request = urllib.request.Request(
url, headers={"Accept": "application/vnd.pypi.simple.v1+json"}
)
output = urllib.request.urlopen(request, timeout=30).read()
return json.loads(output.decode("utf-8"))
def parse_latest_prerelease_version(simple_json: dict) -> str:
"""Parse PyPI's API response for `moz-phab` to determine the latest version."""
# Get all the returned `.tar.gz` file entries.
filenames = map(lambda entry: entry["filename"], simple_json["files"])
# The format is `MozPhab-<version>.tar.gz`, so remove the prefix and
# suffix to get the version strings.
versions = (
filename[len("MozPhab-") :][: -len(".tar.gz")]
for filename in filenames
if filename.startswith("MozPhab-") and filename.endswith(".tar.gz")
)
# Turn each version string into a `Version`, get the max, then return as `str`.
return str(max(Version(version) for version in versions))
def log_windows_update_message():
"""Log a message telling users to update `moz-phab` to the latest version."""
logger.warning(
"Please run `pip install --upgrade MozPhab` to ugprade to the latest version."
)
def check_for_updates(force_check: bool = False) -> Optional[str]:
"""Check if an update is available for `moz-phab`.
Log a message about the new version, return the version as a `str` if it is
found or return `None`. Use `force_check` to check for updates even when the
usual conditions aren't met.
"""
self_update_disabled = config.self_last_check < 0
last_check_before_frequency = (
time.time() - config.self_last_check <= SELF_UPDATE_FREQUENCY * 60 * 60
)
# Return if our check conditions aren't met.
    if not force_check and (self_update_disabled or last_check_before_frequency):
return
config.self_last_check = int(time.time())
current_version = MOZPHAB_VERSION
pypi_json = get_pypi_json()
pypi_info = pypi_json["info"]
if not config.get_pre_releases:
# Use the latest full release.
pypi_version = pypi_info["version"]
else:
# Find the latest pre-release version manually since the "version" key
# only contains the latest full release on PyPI.
simple_json = get_simple_json()
pypi_version = parse_latest_prerelease_version(simple_json)
logger.debug(f"Versions - local: {current_version}, PyPI: {pypi_version}")
# convert ">=3.6" to (3, 6)
try:
required_python_version = tuple(
[int(i) for i in pypi_info["requires_python"][2:].split(".")]
)
except ValueError:
required_python_version = ()
if sys.version_info < required_python_version:
raise Error(
"Unable to upgrade to version {}.\n"
"MozPhab requires Python in version {}".format(
pypi_version, pypi_info["requires_python"]
)
)
config.write()
if parse_version(current_version) >= parse_version(pypi_version):
logger.debug("update check not required")
return
logger.warning(f"Version {pypi_version} of `moz-phab` is now available")
return pypi_version
def self_upgrade():
"""Upgrade ourselves with pip."""
# Run pip using the current python executable to accommodate for virtualenvs
command = (
[sys.executable]
+ ["-m", "pip"]
+ ["install", "MozPhab"]
+ ["--upgrade"]
+ ["--no-cache-dir"]
+ ["--disable-pip-version-check"]
)
if config.get_pre_releases:
command += ["--pre"]
if not environment.DEBUG:
command += ["--quiet"]
# `sys.argv[0]` is the path to the script that was used to start python.
# For example: `/home/connor/.virtualenvs/moz-phab-dev/bin/moz-phab`. Run
# `.resolve()` to make sure we have a full path, and then the `parent` is
# the directory for the script.
script_dir = Path(sys.argv[0]).resolve().parent
# If moz-phab was installed with --user, we need to pass it to pip
# Create "install" setuptools command with --user to find the scripts_path
d = Distribution()
d.parse_config_files()
i = d.get_command_obj("install", create=True)
# Forcing the environment detected by Distribution to the --user one
i.user = True
i.prefix = i.exec_prefix = i.home = i.install_base = i.install_platbase = None
i.finalize_options()
# Checking if the moz-phab script is installed in user's scripts directory
user_dir = Path(i.install_scripts).resolve()
if script_dir == user_dir:
command.append("--user")
if environment.IS_WINDOWS:
# Windows does not allow to remove the exe file of the running process.
# Renaming the `moz-phab.exe` file to allow pip to install a new version.
temp_exe = script_dir / "moz-phab-temp.exe"
try:
temp_exe.unlink()
except FileNotFoundError:
pass
exe = script_dir / "moz-phab.exe"
exe.rename(temp_exe)
try:
check_call(command)
except Exception:
temp_exe.rename(exe)
raise
if not exe.is_file():
# moz-phab.exe is not created - install wasn't needed.
temp_exe.rename(exe)
else:
check_call(command) | PypiClean |
/Flask-State-1.1.4.tar.gz/Flask-State-1.1.4/src/flask_state/migrations/versions/b6b1ecfc9524_.py | import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from flask_state.conf.config import Config
# revision identifiers, used by Alembic.
revision = "b6b1ecfc9524"
down_revision = "637920a840f7"
branch_labels = None
depends_on = None
def upgrade(engine_name):
if engine_name != Config.DEFAULT_BIND_SQLITE:
return
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
if engine_name != Config.DEFAULT_BIND_SQLITE:
return
globals()["downgrade_%s" % engine_name]()
def upgrade_flask_state_sqlite():
op.add_column(
"flask_state_io",
sa.Column(
"packets_recv",
mysql.BIGINT(unsigned=True),
nullable=True,
server_default=sa.text("0"),
),
)
op.add_column(
"flask_state_io",
sa.Column(
"packets_sent",
mysql.BIGINT(unsigned=True),
nullable=True,
server_default=sa.text("0"),
),
)
op.add_column(
"flask_state_io",
sa.Column(
"read_count",
mysql.BIGINT(unsigned=True),
nullable=True,
server_default=sa.text("0"),
),
)
op.add_column(
"flask_state_io",
sa.Column(
"write_count",
mysql.BIGINT(unsigned=True),
nullable=True,
server_default=sa.text("0"),
),
)
def downgrade_flask_state_sqlite():
op.create_table(
"flask_state_io_dg_tmp",
sa.Column(
"id",
mysql.INTEGER(unsigned=True),
autoincrement=True,
nullable=False,
),
sa.Column(
"create_time",
mysql.DATETIME(),
server_default=sa.text("(CURRENT_TIMESTAMP)"),
nullable=True,
),
sa.Column(
"update_time",
mysql.DATETIME(),
server_default=sa.text("(CURRENT_TIMESTAMP)"),
nullable=True,
),
sa.Column(
"net_sent",
mysql.BIGINT(unsigned=True),
server_default=sa.text("0"),
nullable=True,
),
sa.Column(
"net_recv",
mysql.BIGINT(unsigned=True),
server_default=sa.text("0"),
nullable=True,
),
sa.Column(
"disk_read",
mysql.BIGINT(unsigned=True),
server_default=sa.text("0"),
nullable=True,
),
sa.Column(
"disk_write",
mysql.BIGINT(unsigned=True),
server_default="",
nullable=True,
),
sa.Column(
"ts",
mysql.BIGINT(unsigned=True),
server_default=sa.text("0"),
nullable=True,
),
sa.PrimaryKeyConstraint("id"),
)
op.execute(
"insert into flask_state_io_dg_tmp(id, create_time, update_time, net_sent, net_recv, disk_read, disk_write, ts) select id, create_time, update_time, net_sent, net_recv, disk_read, disk_write, ts from flask_state_io;"
)
op.drop_table("flask_state_io")
op.rename_table("flask_state_io_dg_tmp", "flask_state_io")
op.create_index(
"idx_io_ts", "flask_state_io", [sa.text("ts DESC")], unique=False
) | PypiClean |
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/37721.28b126da78bbc7db9fa3.min.js | (self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[37721],{37721:function(e){var n="[0-9](_*[0-9])*",a=`\\.(${n})`,s="[0-9a-fA-F](_*[0-9a-fA-F])*",i={className:"number",variants:[{begin:`(\\b(${n})((${a})|\\.)?|(${a}))[eE][+-]?(${n})[fFdD]?\\b`},{begin:`\\b(${n})((${a})[fFdD]?\\b|\\.([fFdD]\\b)?)`},{begin:`(${a})[fFdD]?\\b`},{begin:`\\b(${n})[fFdD]\\b`},{begin:`\\b0[xX]((${s})\\.?|(${s})?\\.(${s}))[pP][+-]?(${n})[fFdD]?\\b`},{begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:`\\b0[xX](${s})[lL]?\\b`},{begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}],relevance:0};e.exports=function(e){var n="[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",a=n+"(<"+n+"(\\s*,\\s*"+n+")*>)?",s="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",r={className:"meta",begin:"@"+n,contains:[{begin:/\(/,end:/\)/,contains:["self"]}]};const t=i;return{name:"Java",aliases:["jsp"],keywords:s,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),{begin:/import java\.[a-z]+\./,keywords:"import",relevance:2},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface enum",end:/[{;=]/,excludeEnd:!0,relevance:1,keywords:"class interface enum",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"class",begin:"record\\s+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,excludeEnd:!0,end:/[{;=]/,keywords:s,contains:[{beginKeywords:"record"},{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:s,relevance:0,contains:[e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"function",begin:"("+a+"\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:s,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:s,relevance:0,contains:[r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,t,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},t,r]}}}}]);
//# sourceMappingURL=37721.28b126da78bbc7db9fa3.min.js.map | PypiClean |
/MikeT_messenger_client-0.4.1.tar.gz/MikeT_messenger_client-0.4.1/client/client/main_window.py | import json
import logging
import base64
from PyQt5.QtWidgets import QMainWindow, qApp, QMessageBox, QApplication, QListView
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QBrush, QColor
from PyQt5.QtCore import pyqtSlot, QEvent, Qt
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from client.main_window_conv import Ui_MainClientWindow
from client.add_contact import AddContactDialog
from client.del_contact import DelContactDialog
from common.errors import ServerError
from common.variables import *
logger = logging.getLogger('client')
class ClientMainWindow(QMainWindow):
"""
Класс - основное окно пользователя.
Содержит всю основную логику работы клиентского модуля.
Конфигурация окна создана в QTDesigner и загружается из
конвертированого файла main_window_conv.py
"""
def __init__(self, database, transport, keys):
super().__init__()
self.database = database
self.transport = transport
self.decrypter = PKCS1_OAEP.new(keys)
self.ui = Ui_MainClientWindow()
self.ui.setupUi(self)
self.ui.menu_exit.triggered.connect(qApp.exit)
self.ui.btn_send.clicked.connect(self.send_message)
self.ui.btn_add_contact.clicked.connect(self.add_contact_window)
self.ui.menu_add_contact.triggered.connect(self.add_contact_window)
self.ui.btn_remove_contact.clicked.connect(self.delete_contact_window)
self.ui.menu_del_contact.triggered.connect(self.delete_contact_window)
self.contacts_model = None
self.history_model = None
self.messages = QMessageBox()
self.current_chat = None
self.current_chat_key = None
self.encryptor = None
self.ui.list_messages.setHorizontalScrollBarPolicy(
Qt.ScrollBarAlwaysOff)
self.ui.list_messages.setWordWrap(True)
self.ui.list_contacts.doubleClicked.connect(self.select_active_user)
self.clients_list_update()
self.set_disabled_input()
self.show()
def set_disabled_input(self):
""" Метод делающий поля ввода неактивными"""
self.ui.label_new_message.setText(
'Для выбора получателя дважды кликните на нем в окне контактов.')
self.ui.text_message.clear()
if self.history_model:
self.history_model.clear()
self.ui.btn_clear.setDisabled(True)
self.ui.btn_send.setDisabled(True)
self.ui.text_message.setDisabled(True)
self.encryptor = None
self.current_chat = None
self.current_chat_key = None
def history_list_update(self):
"""
Метод заполняющий соответствующий QListView
историей переписки с текущим собеседником.
"""
list = sorted(
self.database.get_history(
self.current_chat),
key=lambda item: item[3])
if not self.history_model:
self.history_model = QStandardItemModel()
self.ui.list_messages.setModel(self.history_model)
self.history_model.clear()
length = len(list)
start_index = 0
if length > 20:
start_index = length - 20
for i in range(start_index, length):
item = list[i]
if item[1] == 'in':
mess = QStandardItem(
                    f'Incoming from {item[3].replace(microsecond=0)}:\n {item[2]}')
mess.setEditable(False)
mess.setBackground(QBrush(QColor(255, 213, 213)))
mess.setTextAlignment(Qt.AlignLeft)
self.history_model.appendRow(mess)
else:
mess = QStandardItem(
                    f'Outgoing from {item[3].replace(microsecond=0)}:\n {item[2]}')
mess.setEditable(False)
mess.setTextAlignment(Qt.AlignRight)
mess.setBackground(QBrush(QColor(204, 255, 204)))
self.history_model.appendRow(mess)
self.ui.list_messages.scrollToBottom()
def select_active_user(self):
"""Метод обработчик события двойного клика по списку контактов."""
self.current_chat = self.ui.list_contacts.currentIndex().data()
self.set_active_user()
def set_active_user(self):
"""Метод активации чата с собеседником."""
try:
self.current_chat_key = self.transport.key_request(
self.current_chat)
            logger.debug(f'Loaded public key for {self.current_chat}')
if self.current_chat_key:
self.encryptor = PKCS1_OAEP.new(
RSA.import_key(self.current_chat_key))
except (OSError, json.JSONDecodeError):
self.current_chat_key = None
self.encryptor = None
            logger.debug(f'Failed to fetch the key for {self.current_chat}')
if not self.current_chat_key:
self.messages.warning(
                self, 'Error', 'The selected user has no encryption key.')
return
self.ui.label_new_message.setText(
            f'Enter a message for {self.current_chat}:')
self.ui.btn_clear.setDisabled(False)
self.ui.btn_send.setDisabled(False)
self.ui.text_message.setDisabled(False)
self.history_list_update()
def clients_list_update(self):
"""Метод обновляющий список контактов."""
contacts_list = self.database.get_contacts()
self.contacts_model = QStandardItemModel()
for i in sorted(contacts_list):
item = QStandardItem(i)
item.setEditable(False)
self.contacts_model.appendRow(item)
self.ui.list_contacts.setModel(self.contacts_model)
def add_contact_window(self):
"""Метод создающий окно - диалог добавления контакта"""
global select_dialog
select_dialog = AddContactDialog(self.transport, self.database)
select_dialog.btn_ok.clicked.connect(
lambda: self.add_contact_action(select_dialog))
select_dialog.show()
def add_contact_action(self, item):
"""Метод обработчк нажатия кнопки "Добавить\""""
new_contact = item.selector.currentText()
self.add_contact(new_contact)
item.close()
def add_contact(self, new_contact):
"""
Метод добавляющий контакт в серверную и клиентсткую BD.
После обновления баз данных обновляет и содержимое окна.
"""
try:
self.transport.add_contact(new_contact)
except ServerError as err:
            self.messages.critical(self, 'Server error', err.text)
        except OSError as err:
            if err.errno:
                self.messages.critical(
                    self, 'Error', 'Connection to the server lost!')
                self.close()
            else:
                self.messages.critical(self, 'Error', 'Connection timeout!')
else:
            self.database.add_contact(new_contact)
            logger.info(f'Contact {new_contact} added successfully')
            item = QStandardItem(new_contact)
            item.setEditable(False)
            self.contacts_model.appendRow(item)
            self.messages.information(
                self, 'Success', 'Contact added successfully.')
def delete_contact_window(self):
"""Метод создающий окно удаления контакта."""
global remove_dialog
remove_dialog = DelContactDialog(self.database)
remove_dialog.btn_ok.clicked.connect(
lambda: self.delete_contact(remove_dialog))
remove_dialog.show()
def delete_contact(self, item):
"""
Метод удаляющий контакт из серверной и клиентсткой BD.
После обновления баз данных обновляет и содержимое окна.
"""
selected = item.selector.currentText()
try:
self.transport.remove_contact(selected)
except ServerError as err:
            self.messages.critical(self, 'Server error', err.text)
        except OSError as err:
            if err.errno:
                self.messages.critical(
                    self, 'Error', 'Connection to the server lost!')
                self.close()
            else:
                self.messages.critical(self, 'Error', 'Connection timeout!')
else:
self.database.del_contact(selected)
self.clients_list_update()
            logger.info(f'Contact {selected} deleted successfully')
            self.messages.information(self, 'Success', 'Contact deleted successfully.')
item.close()
if selected == self.current_chat:
self.current_chat = None
self.set_disabled_input()
def send_message(self):
"""
Функция отправки сообщения текущему собеседнику.
Реализует шифрование сообщения и его отправку.
"""
message_text = self.ui.text_message.toPlainText()
self.ui.text_message.clear()
if not message_text:
return
message_text_encrypted = self.encryptor.encrypt(
message_text.encode('utf8'))
message_text_encrypted_base64 = base64.b64encode(
message_text_encrypted)
        try:
            self.transport.send_message(
                self.current_chat,
                message_text_encrypted_base64.decode('ascii'))
        except ServerError as err:
            self.messages.critical(self, 'Error', err.text)
        except OSError as err:
            if err.errno:
                self.messages.critical(
                    self, 'Error', 'Connection to the server lost!')
                self.close()
            else:
                self.messages.critical(self, 'Error', 'Connection timeout!')
        except (ConnectionResetError, ConnectionAbortedError):
            self.messages.critical(
                self, 'Error', 'Connection to the server lost!')
            self.close()
else:
self.database.save_message(self.current_chat, 'out', message_text)
            logger.debug(
                f'Message sent to {self.current_chat}: {message_text}')
self.history_list_update()
@pyqtSlot(dict)
def message(self, message):
"""
Слот обработчик поступаемых сообщений, выполняет дешифровку
поступаемых сообщений и их сохранение в истории сообщений.
Запрашивает пользователя если пришло сообщение не от текущего
собеседника. При необходимости меняет собеседника.
"""
encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
try:
decrypted_message = self.decrypter.decrypt(encrypted_message)
except (ValueError, TypeError):
self.messages.warning(
self, 'Ошибка', 'Не удалось декодировать сообщение.')
return
self.database.save_message(
self.current_chat,
'in',
decrypted_message.decode('utf8'))
sender = message[SENDER]
if sender == self.current_chat:
self.history_list_update()
else:
if self.database.check_contact(sender):
if self.messages.question(
self,
                    'New message',
                    f'New message received from {sender}, open a chat with them?',
QMessageBox.Yes,
QMessageBox.No) == QMessageBox.Yes:
self.current_chat = sender
self.set_active_user()
else:
                if self.messages.question(
                        self,
                        'New message',
                        f'New message received from {sender}.\n '
                        f'This user is not in your contact list.\n '
                        f'Add them to contacts and open a chat?',
                        QMessageBox.Yes,
                        QMessageBox.No) == QMessageBox.Yes:
                    self.add_contact(sender)
                    self.current_chat = sender
                    self.set_active_user()
@pyqtSlot()
def connection_lost(self):
"""
Слот обработчик потери соеднинения с сервером.
Выдаёт окно предупреждение и завершает работу приложения.
"""
self.messages.warning(
self,
'Сбой соединения',
'Потеряно соединение с сервером. ')
self.close()
@pyqtSlot()
def sig_205(self):
"""
Слот выполняющий обновление баз данных по команде сервера.
"""
if self.current_chat and not self.database.check_user(
self.current_chat):
self.messages.warning(
self,
                'Sorry',
                'Unfortunately, this contact has been removed from the server.')
self.set_disabled_input()
self.current_chat = None
self.clients_list_update()
def make_connection(self, trans_obj):
"""Метод обеспечивающий соединение сигналов и слотов."""
trans_obj.new_message.connect(self.message)
trans_obj.connection_lost.connect(self.connection_lost)
trans_obj.message_205.connect(self.sig_205) | PypiClean |
/Daarmaan-0.2.2.tar.gz/Daarmaan-0.2.2/docs/build/html/_static/doctools.js | * select a different prefix for underscore
*/
$u = _.noConflict();
/**
* make the code below compatible with browsers without
* an installed firebug like debugger
if (!window.console || !console.firebug) {
var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
"dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
"profile", "profileEnd"];
window.console = {};
for (var i = 0; i < names.length; ++i)
window.console[names[i]] = function() {};
}
*/
/**
* small helper function to urldecode strings
*/
jQuery.urldecode = function(x) {
return decodeURIComponent(x).replace(/\+/g, ' ');
}
/**
* small helper function to urlencode strings
*/
jQuery.urlencode = encodeURIComponent;
/**
* This function returns the parsed url parameters of the
* current request. Multiple values per key are supported,
* it will always return arrays of strings for the value parts.
*/
jQuery.getQueryParameters = function(s) {
if (typeof s == 'undefined')
s = document.location.search;
var parts = s.substr(s.indexOf('?') + 1).split('&');
var result = {};
for (var i = 0; i < parts.length; i++) {
var tmp = parts[i].split('=', 2);
var key = jQuery.urldecode(tmp[0]);
var value = jQuery.urldecode(tmp[1]);
if (key in result)
result[key].push(value);
else
result[key] = [value];
}
return result;
};
/**
* small function to check if an array contains
* a given item.
*/
jQuery.contains = function(arr, item) {
for (var i = 0; i < arr.length; i++) {
if (arr[i] == item)
return true;
}
return false;
};
/**
* highlight a given string on a jquery object by wrapping it in
* span elements with the given class name.
*/
jQuery.fn.highlightText = function(text, className) {
function highlight(node) {
if (node.nodeType == 3) {
var val = node.nodeValue;
var pos = val.toLowerCase().indexOf(text);
if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
var span = document.createElement("span");
span.className = className;
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
node.parentNode.insertBefore(span, node.parentNode.insertBefore(
document.createTextNode(val.substr(pos + text.length)),
node.nextSibling));
node.nodeValue = val.substr(0, pos);
}
}
else if (!jQuery(node).is("button, select, textarea")) {
jQuery.each(node.childNodes, function() {
highlight(this);
});
}
}
return this.each(function() {
highlight(this);
});
};
/**
* Small JavaScript module for the documentation.
*/
var Documentation = {
init : function() {
this.fixFirefoxAnchorBug();
this.highlightSearchWords();
this.initIndexTable();
},
/**
* i18n support
*/
TRANSLATIONS : {},
PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
LOCALE : 'unknown',
// gettext and ngettext don't access this so that the functions
// can safely bound to a different name (_ = Documentation.gettext)
gettext : function(string) {
var translated = Documentation.TRANSLATIONS[string];
if (typeof translated == 'undefined')
return string;
return (typeof translated == 'string') ? translated : translated[0];
},
ngettext : function(singular, plural, n) {
var translated = Documentation.TRANSLATIONS[singular];
if (typeof translated == 'undefined')
return (n == 1) ? singular : plural;
    return translated[Documentation.PLURAL_EXPR(n)];
},
addTranslations : function(catalog) {
for (var key in catalog.messages)
this.TRANSLATIONS[key] = catalog.messages[key];
this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
this.LOCALE = catalog.locale;
},
/**
* add context elements like header anchor links
*/
addContextElements : function() {
$('div[id] > :header:first').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', _('Permalink to this headline')).
appendTo(this);
});
$('dt[id]').each(function() {
$('<a class="headerlink">\u00B6</a>').
attr('href', '#' + this.id).
attr('title', _('Permalink to this definition')).
appendTo(this);
});
},
/**
* workaround a firefox stupidity
*/
fixFirefoxAnchorBug : function() {
if (document.location.hash && $.browser.mozilla)
window.setTimeout(function() {
document.location.href += '';
}, 10);
},
/**
* highlight the search words provided in the url in the text
*/
highlightSearchWords : function() {
var params = $.getQueryParameters();
var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
if (terms.length) {
var body = $('div.body');
window.setTimeout(function() {
$.each(terms, function() {
body.highlightText(this.toLowerCase(), 'highlighted');
});
}, 10);
$('<p class="highlight-link"><a href="javascript:Documentation.' +
'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
.appendTo($('#searchbox'));
}
},
/**
* init the domain index toggle buttons
*/
initIndexTable : function() {
var togglers = $('img.toggler').click(function() {
var src = $(this).attr('src');
var idnum = $(this).attr('id').substr(7);
$('tr.cg-' + idnum).toggle();
if (src.substr(-9) == 'minus.png')
$(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
else
$(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
}).css('display', '');
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
togglers.click();
}
},
/**
* helper function to hide the search marks again
*/
hideSearchWords : function() {
$('#searchbox .highlight-link').fadeOut(300);
$('span.highlighted').removeClass('highlighted');
},
/**
* make the url absolute
*/
makeURL : function(relativeURL) {
return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
},
/**
* get the current relative url
*/
getCurrentURL : function() {
var path = document.location.pathname;
var parts = path.split(/\//);
$.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
if (this == '..')
parts.pop();
});
var url = parts.join('/');
return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
}
};
// quick alias for translations
_ = Documentation.gettext;
$(document).ready(function() {
Documentation.init();
}); | PypiClean |
/Euphorie-15.0.2.tar.gz/Euphorie-15.0.2/src/euphorie/client/resources/oira/script/chunks/94470.f77a609f5da063b460fa.min.js | (self.webpackChunk_patternslib_patternslib=self.webpackChunk_patternslib_patternslib||[]).push([[94470],{94470:function(e,a,r){!function(e){"use strict";
//! moment.js locale configuration
var a=/^(janvier|février|mars|avril|mai|juin|juillet|août|septembre|octobre|novembre|décembre)/i,r=/(janv\.?|févr\.?|mars|avr\.?|mai|juin|juil\.?|août|sept\.?|oct\.?|nov\.?|déc\.?)/i,i=/(janv\.?|févr\.?|mars|avr\.?|mai|juin|juil\.?|août|sept\.?|oct\.?|nov\.?|déc\.?|janvier|février|mars|avril|mai|juin|juillet|août|septembre|octobre|novembre|décembre)/i,s=[/^janv/i,/^févr/i,/^mars/i,/^avr/i,/^mai/i,/^juin/i,/^juil/i,/^août/i,/^sept/i,/^oct/i,/^nov/i,/^déc/i];e.defineLocale("fr",{months:"janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre".split("_"),monthsShort:"janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.".split("_"),monthsRegex:i,monthsShortRegex:i,monthsStrictRegex:a,monthsShortStrictRegex:r,monthsParse:s,longMonthsParse:s,shortMonthsParse:s,weekdays:"dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi".split("_"),weekdaysShort:"dim._lun._mar._mer._jeu._ven._sam.".split("_"),weekdaysMin:"di_lu_ma_me_je_ve_sa".split("_"),weekdaysParseExact:!0,longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD/MM/YYYY",LL:"D MMMM YYYY",LLL:"D MMMM YYYY HH:mm",LLLL:"dddd D MMMM YYYY HH:mm"},calendar:{sameDay:"[Aujourd’hui à] LT",nextDay:"[Demain à] LT",nextWeek:"dddd [à] LT",lastDay:"[Hier à] LT",lastWeek:"dddd [dernier à] LT",sameElse:"L"},relativeTime:{future:"dans %s",past:"il y a %s",s:"quelques secondes",ss:"%d secondes",m:"une minute",mm:"%d minutes",h:"une heure",hh:"%d heures",d:"un jour",dd:"%d jours",w:"une semaine",ww:"%d semaines",M:"un mois",MM:"%d mois",y:"un an",yy:"%d ans"},dayOfMonthOrdinalParse:/\d{1,2}(er|)/,ordinal:function(e,a){switch(a){case"D":return e+(1===e?"er":"");default:case"M":case"Q":case"DDD":case"d":return e+(1===e?"er":"e");case"w":case"W":return e+(1===e?"re":"e")}},week:{dow:1,doy:4}})}(r(30381))}}]);
//# sourceMappingURL=94470.f77a609f5da063b460fa.min.js.map | PypiClean |
/FFmpyg_DavidRodriguezSoaresCUI-0.0.6.tar.gz/FFmpyg_DavidRodriguezSoaresCUI-0.0.6/src/FFmpyg/ffmpeg_encoder.py | import logging
import re
from pathlib import Path
# import re
from typing import Any, Callable, Dict, List, Optional, Union
import yaml
from DRSlib.cli_ui import choose_from_list, user_input
from DRSlib.utils import assertTrue
from .encoder_spec import (
ffmpeg_supported_encoders,
get_codec_from_encoder,
read_encoder_parameters,
)
from .enums import StreamType
LOG = logging.getLogger(__file__)
AVAILABLE_ENCODERS: Dict[Union[str, Path], List[str]] = {}
def is_available_encoder(encoder: str, ffmpeg: Union[str, Path]) -> bool:
"""Memoized function that check if encoder is available for given FFMPEG"""
if ffmpeg not in AVAILABLE_ENCODERS:
_available_encoders = []
for _encoders in ffmpeg_supported_encoders(ffmpeg).values():
_available_encoders.extend(_encoders)
AVAILABLE_ENCODERS[ffmpeg] = _available_encoders
return encoder in AVAILABLE_ENCODERS[ffmpeg]
class Encoder:
"""Validates encoder parameters and writes FFMPEG-compliant command parts
Initialization requires calling ffmpeg to check for avaiability of given encoder
h264_encoder = Encoder('libx264', threads=4)
h264_encoder.set_parameters({
'crf': 24,
'preset': 'slow'
})
"""
def __init__(
self,
encoder: str,
ffmpeg: Union[str, Path] = "ffmpeg",
threads: Optional[int] = None,
) -> None:
self.is_copy = encoder == "copy"
assertTrue(
self.is_copy or is_available_encoder(encoder, ffmpeg),
"Encoder '{}' not available; available encoders: {}",
encoder,
AVAILABLE_ENCODERS[ffmpeg],
)
self.encoder = encoder
self.parameters = {}
self.spec = (
read_encoder_parameters(self.encoder, ffmpeg) if encoder != "copy" else {}
)
self.codec = None if self.is_copy else get_codec_from_encoder(ffmpeg, encoder)
self.stream_type = None if self.is_copy else self.spec["type"]
# Set threads if supported
if threads is not None:
if "threads" not in self.spec.get("capabilities", {}):
LOG.warning("Encoder '%s' doesn't support '-threads'", self.encoder)
else:
self.parameters["threads"] = threads
@classmethod
def build_interactively(cls, ffmpeg: Union[str, Path] = "ffmpeg") -> "Encoder":
"""Asks user to build encoder"""
_type = StreamType.from_ffmpeg_code(
user_input(
"Encoder type [V=video,A=audio,S=subtitle]", accepted=["V", "A", "S"]
)
)
_available_encoders = list(sorted(ffmpeg_supported_encoders(ffmpeg)[_type]))
_encoder = choose_from_list(choices=_available_encoders)
_encoder_info = read_encoder_parameters(_encoder, ffmpeg)
_supports_threads = "threads" in _encoder_info.get("capabilities", {})
_threads = None
if _supports_threads:
_threads = int(
user_input(
"Threads",
accepted=lambda s: len(s) > 0 and s.isdigit(),
default="4",
)
)
_supported_parameters = list(sorted(_encoder_info.get("options", {}).keys()))
print(f"Valid parameters : {_supported_parameters}")
_parameters = {}
while True:
_tmp = user_input(
"Parameter (leave blank to stop)", accepted=lambda _: True, default=""
).strip()
if len(_tmp) == 0:
break
_tmp2 = _tmp.split()
if len(_tmp2) < 2:
print(
f"Invalid input '{_tmp}' doesn't contain space-separated key-value"
)
continue
_par, _val = _tmp.split()
if _par not in _supported_parameters:
print(f"Invalid parameter '{_par}'")
continue
_parameters[_par] = _val
res = Encoder(encoder=_encoder, ffmpeg=ffmpeg, threads=_threads)
res.set_parameters(**_parameters)
_encoder_save_name = user_input(
"Save encoder configuration with name (leve blank to skip)",
accepted=lambda _: True,
).strip()
if len(_encoder_save_name):
res.save(_encoder_save_name, overwrite=True)
return res
def save(self, name: str, overwrite: bool) -> None:
"""Save to YAML file"""
_param = dict(self.parameters)
_threads = _param.pop("threads", None)
_spec = {"encoder": self.encoder, "parameters": _param, "threads": _threads}
_save_file = Encoder.yaml_file_path(name).resolve()
if _save_file.exists() and not overwrite:
LOG.info("Did not overwrite '%s'", _save_file)
return
with _save_file.open("w", encoding="utf8") as f:
yaml.dump(_spec, f, encoding="utf8")
LOG.info("Saved encoder to '%s'", _save_file)
@classmethod
def load(cls, name: str, ffmpeg: Union[str, Path] = "ffmpeg") -> "Encoder":
"""Load from YAML file"""
with Encoder.yaml_file_path(name).open("r", encoding="utf8") as f:
_spec = yaml.safe_load(f)
_encoder = Encoder(_spec["encoder"], ffmpeg, _spec["threads"])
_encoder.set_parameters(**_spec["parameters"])
return _encoder
@staticmethod
def yaml_file_path(name: str) -> Path:
"""Returns save YAML file path with given name"""
return Path(".") / f"Encoder.{name}.yaml"
@staticmethod
def available_configs() -> Dict[str, Path]:
"""Returns list of save YAML files path that are available to be loaded"""
extract_name_pattern = re.compile(r".+?\.(.+)\.yaml")
_configs = {}
for _item in Path(__file__).parent.glob(Path(__file__).stem + ".*.yaml"):
if not _item.is_file():
continue
_match = re.match(extract_name_pattern, _item.name)
if not _match:
continue
_configs[_match.group(1)] = _item
return _configs
def set_parameters(self, **kwargs) -> None:
"""Sets encoder-specific parameters. Check yaml files or run
`ffmpeg --help encoder=<encoder>` for details."""
available_parameters = {
opt: spec.get("type") for opt, spec in self.spec.get("options", {}).items()
}
if "threads" in self.parameters:
available_parameters["threads"] = int
for p_name, p_value in kwargs.items():
if p_name not in available_parameters:
print(f"WARNING: Unknown parameter '{p_name}'")
continue
caster: Callable = available_parameters[p_name]
try:
self.parameters[p_name] = p_value if caster is None else caster(p_value)
except ValueError as e:
print(
f"WARNING: Couldn't cast value '{p_value}' to type {caster} for parameter {p_name} : {e}"
)
def to_command_parts(self) -> List[Any]:
"""FFMPEG-compatible CLI arguments for encoding stream"""
command: List[Any] = []
for k, v in self.parameters.items():
command.append("-" + k)
if v:
command.append(v)
return command | PypiClean |
/Kamaelia-0.6.0.tar.gz/Kamaelia-0.6.0/Examples/TCP_Systems/HTTP/cookbook_2.py |
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import SimpleServer
from Kamaelia.Protocol.HTTP.HTTPServer import HTTPServer
from Kamaelia.Protocol.HTTP.Handlers.Minimal import Minimal
import Kamaelia.Protocol.HTTP.ErrorPages as ErrorPages
from Kamaelia.Chassis.Pipeline import Pipeline
homedirectory = "/srv/www/htdocs/"
indexfilename = "index.html"
def requestHandlers(URLHandlers):
def createRequestHandler(request):
if request.get("bad"):
return ErrorPages.websiteErrorPage(400, request.get("errormsg",""))
else:
for (prefix, handler) in URLHandlers:
if request["raw-uri"][:len(prefix)] == prefix:
request["uri-prefix-trigger"] = prefix
request["uri-suffix"] = request["raw-uri"][len(prefix):]
return handler(request)
return ErrorPages.websiteErrorPage(404, "No resource handlers could be found for the requested URL.")
return createRequestHandler
class HelloHandler(Axon.Component.component):
def __init__(self, request):
super(HelloHandler, self).__init__()
self.request = request
def main(self):
resource = {
"type" : "text/html",
"statuscode" : "200",
"length": len("<html><body><h1>Hello World</h1><P> Game Over!! </body></html>"),
}
self.send(resource, "outbox"); yield 1
page = {
"data" : "<html><body><h1>Hello World</h1><P> Game Over!! </body></html>"
}
self.send(page, "outbox"); yield 1
self.send(Axon.Ipc.producerFinished(self), "signal")
yield 1
class Cat(Axon.Component.component):
def __init__(self, *args):
super(Cat, self).__init__()
self.args = args
def main(self):
self.send(self.args, "outbox")
self.send(Axon.Ipc.producerFinished(self), "signal")
yield 1
class ExampleWrapper(Axon.Component.component):
def main(self):
# Tell the browser the type of data we're sending!
resource = {
"type" : "text/html",
"statuscode" : "200",
}
self.send(resource, "outbox"); yield 1
# Send the header
header = {
"data" : "<html><body>"
}
self.send(header, "outbox"); yield 1
# Wait for it....
while not self.dataReady("inbox"):
self.pause()
yield 1
        # Send the data we receive as the page body
while self.dataReady("inbox"):
pageData = {
"data" : str(self.recv("inbox"))
}
self.send(pageData, "outbox"); yield 1
# send a footer
footer = {
"data" : "</body></html>"
}
self.send(footer, "outbox"); yield 1
# and shutdown nicely
self.send(Axon.Ipc.producerFinished(self), "signal")
yield 1
def EchoHandler(request):
return Pipeline ( Cat(request), ExampleWrapper() )
def servePage(request):
return Minimal(request=request,
homedirectory=homedirectory,
indexfilename=indexfilename)
def HTTPProtocol():
return HTTPServer(requestHandlers([
["/echo", EchoHandler ],
["/hello", HelloHandler ],
["/", servePage ],
]))
SimpleServer(protocol=HTTPProtocol,
port=8082,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) ).run() | PypiClean |
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/gis/db/backends/oracle/operations.py | import re
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.oracle.operations import DatabaseOperations
DEFAULT_TOLERANCE = "0.05"
class SDOOperator(SpatialOperator):
sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'"
class SDODWithin(SpatialOperator):
sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'"
class SDODisjoint(SpatialOperator):
sql_template = (
"SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'"
% DEFAULT_TOLERANCE
)
class SDORelate(SpatialOperator):
sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'"
def check_relate_argument(self, arg):
masks = (
"TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|"
"CONTAINS|COVERS|ANYINTERACT|ON"
)
mask_regex = re.compile(r"^(%s)(\+(%s))*$" % (masks, masks), re.I)
if not isinstance(arg, str) or not mask_regex.match(arg):
raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg)
def as_sql(self, connection, lookup, template_params, sql_params):
template_params["mask"] = sql_params[-1]
return super().as_sql(connection, lookup, template_params, sql_params[:-1])
class OracleOperations(BaseSpatialOperations, DatabaseOperations):
name = "oracle"
oracle = True
disallowed_aggregates = (models.Collect, models.Extent3D, models.MakeLine)
Adapter = OracleSpatialAdapter
extent = "SDO_AGGR_MBR"
unionagg = "SDO_AGGR_UNION"
from_text = "SDO_GEOMETRY"
function_names = {
"Area": "SDO_GEOM.SDO_AREA",
"AsGeoJSON": "SDO_UTIL.TO_GEOJSON",
"AsWKB": "SDO_UTIL.TO_WKBGEOMETRY",
"AsWKT": "SDO_UTIL.TO_WKTGEOMETRY",
"BoundingCircle": "SDO_GEOM.SDO_MBC",
"Centroid": "SDO_GEOM.SDO_CENTROID",
"Difference": "SDO_GEOM.SDO_DIFFERENCE",
"Distance": "SDO_GEOM.SDO_DISTANCE",
"Envelope": "SDO_GEOM_MBR",
"FromWKB": "SDO_UTIL.FROM_WKBGEOMETRY",
"FromWKT": "SDO_UTIL.FROM_WKTGEOMETRY",
"Intersection": "SDO_GEOM.SDO_INTERSECTION",
"IsValid": "SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT",
"Length": "SDO_GEOM.SDO_LENGTH",
"NumGeometries": "SDO_UTIL.GETNUMELEM",
"NumPoints": "SDO_UTIL.GETNUMVERTICES",
"Perimeter": "SDO_GEOM.SDO_LENGTH",
"PointOnSurface": "SDO_GEOM.SDO_POINTONSURFACE",
"Reverse": "SDO_UTIL.REVERSE_LINESTRING",
"SymDifference": "SDO_GEOM.SDO_XOR",
"Transform": "SDO_CS.TRANSFORM",
"Union": "SDO_GEOM.SDO_UNION",
}
# We want to get SDO Geometries as WKT because it is much easier to
# instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings.
# However, this adversely affects performance (i.e., Java is called
# to convert to WKT on every query). If someone wishes to write a
# SDO_GEOMETRY(...) parser in Python, let me know =)
select = "SDO_UTIL.TO_WKBGEOMETRY(%s)"
gis_operators = {
"contains": SDOOperator(func="SDO_CONTAINS"),
"coveredby": SDOOperator(func="SDO_COVEREDBY"),
"covers": SDOOperator(func="SDO_COVERS"),
"disjoint": SDODisjoint(),
"intersects": SDOOperator(
func="SDO_OVERLAPBDYINTERSECT"
), # TODO: Is this really the same as ST_Intersects()?
"equals": SDOOperator(func="SDO_EQUAL"),
"exact": SDOOperator(func="SDO_EQUAL"),
"overlaps": SDOOperator(func="SDO_OVERLAPS"),
"same_as": SDOOperator(func="SDO_EQUAL"),
# Oracle uses a different syntax, e.g., 'mask=inside+touch'
"relate": SDORelate(),
"touches": SDOOperator(func="SDO_TOUCH"),
"within": SDOOperator(func="SDO_INSIDE"),
"dwithin": SDODWithin(),
}
unsupported_functions = {
"AsKML",
"AsSVG",
"Azimuth",
"ForcePolygonCW",
"GeoHash",
"GeometryDistance",
"IsEmpty",
"LineLocatePoint",
"MakeValid",
"MemSize",
"Scale",
"SnapToGrid",
"Translate",
}
def geo_quote_name(self, name):
return super().geo_quote_name(name).upper()
def convert_extent(self, clob):
if clob:
# Generally, Oracle returns a polygon for the extent -- however,
# it can return a single point if there's only one Point in the
# table.
ext_geom = GEOSGeometry(memoryview(clob.read()))
gtype = str(ext_geom.geom_type)
if gtype == "Polygon":
# Construct the 4-tuple from the coordinates in the polygon.
shell = ext_geom.shell
ll, ur = shell[0][:2], shell[2][:2]
elif gtype == "Point":
ll = ext_geom.coords[:2]
ur = ll
else:
raise Exception(
"Unexpected geometry type returned for extent: %s" % gtype
)
xmin, ymin = ll
xmax, ymax = ur
return (xmin, ymin, xmax, ymax)
else:
return None
def geo_db_type(self, f):
"""
Return the geometry database type for Oracle. Unlike other spatial
backends, no stored procedure is necessary and it's the same for all
geometry types.
"""
return "MDSYS.SDO_GEOMETRY"
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters given the value and the lookup type.
On Oracle, geometry columns with a geodetic coordinate system behave
implicitly like a geography column, and thus meters will be used as
the distance parameter on them.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
# dwithin lookups on Oracle require a special string parameter
# that starts with "distance=".
if lookup_type == "dwithin":
dist_param = "distance=%s" % dist_param
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
if value is None:
return "NULL"
return super().get_geom_placeholder(f, value, compiler)
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL name.
"""
agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns
return OracleGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys
return OracleSpatialRefSys
def modify_insert_params(self, placeholder, params):
"""Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
backend due to #10888.
"""
if placeholder == "NULL":
return []
return super().modify_insert_params(placeholder, params)
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class)
if srid:
geom.srid = srid
return geom
return converter
def get_area_att_for_field(self, field):
return "sq_m" | PypiClean |
/Djax-0.8.6.tar.gz/Djax-0.8.6/README.rst | Djax
====
**Django / ACE Integration**
Djax integrates the Django web framework with `Axilent
ACE <http://www.axilent.com/products/ace/>`__. ACE is a sophisticated
content targeting system that can be used for product recommendations,
related content, personalization and contextual advertising.
Djax links Django models with ACE content types, enabling the use of ACE
as a CMS for a Django website. It also provides easy integration with
ACE's targeting Content Channels, and provides integration with ACE's
user profiling system.
Installation
~~~~~~~~~~~~
To install Djax with Pip:
::
pip install Djax
Then, add ``djax`` to your ``INSTALLED_APPS``.
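For reference, here is a minimal sketch of that settings change (only the
``djax`` app label comes from Djax itself; the rest of the list stands in
for your own project's configuration):
::
    # settings.py
    INSTALLED_APPS = [
        # ... your existing apps ...
        'djax',
    ]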
Finally, you will need to ``syncdb`` to generate Djax's tables.
Integrating ACE Published Content With Django
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to use content that is authored or sourced in ACE in a
website, integrate the desired Django model with Djax using the
ACEContent mixin.
::
# your-app/models.py
from django.db import models
from djax.content import ACEContent
class Article(models.Model,ACEContent):
title = models.CharField(max_length=100)
body = models.TextField()
class ACE:
content_type = 'Article'
field_map = {
'title':'title',
'body':'body',
}
Several important things are happening here:
1. In addition to inheriting from ``models.Model`` like an ordinary
Django model, the Article class also inherits from ``ACEContent``.
This will allow Djax to identify it as a local type of content that
should be bound to an ACE Content Type.
2. In the ``ACE`` inner class, the ``content_type`` attribute identifies
the ACE Content Type with which this model should be associated.
3. In the ``ACE inner class`` the ``field_map`` dictionary defines the
mappings between the ACE Content Type fields (the keys in the
dictionary) and the local model's fields (the values in the
dictionary).
When Djax syncs with ACE, it will create or update this model with the
mapped content from ACE.
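As a quick illustration, a single item can also be refreshed
programmatically with the ``sync_with_axilent`` method documented in the
"ACEContent Methods" section below (the article lookup here is
hypothetical):
::
    article = Article.objects.get(pk=1)
    article.sync_with_axilent()  # does nothing if no matching ACE item exists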
Managing Foreign Key Relations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ACE is not a relational database, and accordingly does not hold content
to the same level of integral rigor as an RDBMS. However, it does
provide some means to directly link one content item to another using a
field type called a **Content Link**.
Djax provides a way to convert an ACE Content Link into a Django foreign
key relationship. Let's say you have a local model that has an Author
model and an Article model. The Article model has a foreign key field
that points to the Author model. In ACE, the Article Content Type would
have a Content Link field that would be used to point at an author.
The integration can be implemented without any special work using Djax:
::
class Author(models.Model,ACEContent):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
class ACE:
content_type = 'Author'
field_map = {
'first_name':'first_name',
'last_name':'last_name',
}
class Article(models.Model,ACEContent):
author = models.ForeignKey(model=Author,related_name='articles')
title = models.CharField(max_length=100)
body = models.TextField()
class ACE:
content_type = 'Article'
field_map = {
'author':'author',
'title':'title',
'body':'body',
}
During a sync, incoming Content Link data from ACE will be enough to
alert Djax to look for a local model-to-ACE Content Type mapping, and
create a foreign key association in the local models.
Because the local model Article does not allow Article objects to exist
in the database without an associated Author, it is important to ensure
that the Author object is sync'd to the local database first. In a bulk
sync this will be taken care of automatically, but when syncing once
content item at a time, an error will occur if the Article object is
sync'd before the associated Author object.
Nullable Foreign Key Relations
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
What if a foreign key relationship is nullable? In the example given
above, what if not all Articles have Authors? It's not a problem in ACE,
just leave the appropriate Content Link field empty. But an additional
step is required with Djax integration:
::
class Article(models.Model,ACEContent):
author = models.ForeignKey(model=Author,null=True,related_name='articles')
title = models.CharField(max_length=100)
body = models.TextField()
class ACE:
content_type = 'Article'
field_map = {
'author':NullableForeignKeyConverter('author'),
'title':'title',
'body':'body',
}
There are two changes in the Article model. First the author field has
been marked ``null=True`` to indicate to Django that the Article model
may not have an Author.
Secondly, the simple string ('author') indicating that the author field
in the incoming content from ACE should be mapped to the local author
field has been replaced by a ``NullableForeignKeyConverter`` object.
This is an indication to Djax that it should apply a special process to
the incoming data: either find a local model that corresponds to the
supplied Content Link data, or leave the field null.
Managing Many-to-Many Relations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ACE can also handle many-to-many relations using the Content Link List
field type. Let's say we have a local model that defines a many-to-many
relation between Publication and Author objects. In ACE, the Author
object would have a publication field that was a Content Link List that
would be used to associate it with Publications.
To implement the integration in Djax we would do this:
::
class Publication(models.Model,ACEContent):
name = models.CharField(max_length=100)
class ACE:
content_type = 'Publication'
field_map = {
'name':'name',
}
class Author(models.Model,ACEContent):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
publications = models.ManyToManyField(Publication,related_name='authors')
class ACE:
content_type = 'Author'
field_map = {
'first_name':'first_name',
'last_name':'last_name',
'publications':M2MFieldConverter('publications'),
}
In the Author model's ``ACE`` inner class, we have specified the
``M2MFieldConverter`` for the publications field. This lets Djax know to
convert incoming Content Link List data into a local many-to-many
relation.
Implementing Your Own Field Converters
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The default behavior of a field map is to simply take the value from the
incoming ACE content and assign that value to the recipient local model.
This behavior can be overridden with the use of a *FieldConverter*.
A FieldConverter is an object that is placed as a value to the
corresponding ACE content field key, within the field map. The
FieldConverter is just an object (it does not require any particular
parent class). Djax will look for two specific methods on the field
converter object: ``to_local_model`` and ``to_ace``, and the name of the
local model field, defined as ``field``.
Simple Example:
::
class AuthorFieldConverter(object):
"""Field converter changes string to related author (for article) and vice versa."""
field = 'author'
def to_local_model(self,ace_content,ace_field_value):
"""String to related model."""
return Author.objects.get(name=ace_field_value)
def to_ace(self,local_model):
"""Related model to string."""
return local_model.author.name
In this case the field converter looks up a related model by name and
returns the related model as the value to assign to the local model.
A field converter may be marked as **deferred**, in which case Djax will
ensure that the local model is created *before* the conversion method is
called, and will pass the local model into the conversion method as an
argument.
With deferred converters, the return value for the ``to_local_model``
method is ignored. It is up to the method to associate the value to the
local model.
Parent / Child Deferred Example:
::
class MusicLabelCatalogConverter(object):
"""Converts the bands signed to the parent label."""
field = 'bands'
deferred = True
def to_local_model(self,ace_content,ace_field_value,local_model):
"""Gets or creates associated local band objects. Ace provides a list of band names."""
for band_name in ace_field_value:
Band.objects.get_or_create(label=local_model,name=band_name)
# clean up unassociated bands
[band.delete() for band in local_model.bands.exclude(name__in=ace_field_value)]
def to_ace(self,local_model):
"""Returns a list of band names for ace."""
return [band.name for band in local_model.bands.all()]
ACEContent Methods
~~~~~~~~~~~~~~~~~~
A Django model that also inherits from ACEContent will have several
additional methods that allow it to be programmatically managed from a
Django app, if desired.
ACEContent.get_axilent_content_key
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Returns the local model's ACE content key. If the content does not exist
within the ACE account, it will return None. The content key is a GUID
rendered in hex format.
ACEContent.get_axilent_content_type
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Returns the name of the ACE Content Type for the model.
ACEContent.sync_with_axilent
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Forces the local model to update from content from ACE. If there is no
corresponding content item in the ACE account, this method will do
nothing.
ACEContent.to_content_dict
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Returns content values as a dictionary according to the ``field_map``.
ACEContent.push_to_library
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Pushes the local values of the content into the associated ACE library.
This method returns a 2-tuple of booleans, indicating 1. if the library
was updated and 2. if a new content item was created in the library.
ACEContent.push_to_graphstack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Pushes the local values of the content directly into the associated
GraphStack. A GraphStack in ACE is a logical container for deployed or
published content.
ACEContent.archive
^^^^^^^^^^^^^^^^^^
Removes the content from any GraphStack where it has been deployed.
ACEContent.live_delete
^^^^^^^^^^^^^^^^^^^^^^^
Removes the associated ACE content item from the active GraphStack where
it is deployed.
ACEContent.tag
^^^^^^^^^^^^^^
Tags the content item within the associated ACE library.
ACEContent.detag
^^^^^^^^^^^^^^^^
De-tags the content item within the associated ACE library.
ACEContent.live_tag
^^^^^^^^^^^^^^^^^^^^
Tags the content item where it has been deployed in the associated
GraphStack.
ACEContent.live_detag
^^^^^^^^^^^^^^^^^^^^^^
De-tags the content item where it has been deployed in the associated
GraphStack.
ACEContent.reindex_search
^^^^^^^^^^^^^^^^^^^^^^^^^^
Forces search re-indexing of the deployed associated content.
ACEContent.trigger_affinity
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Sends an affinity trigger for this content to ACE.
ACEContent.trigger_ban
^^^^^^^^^^^^^^^^^^^^^^^
Sends a ban trigger for this content to ACE.
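To tie a few of these methods together, here is a hedged usage sketch
(the model instance and tag name are hypothetical, and ``tag`` is
assumed to take the tag string as its argument):
::
    article = Article.objects.get(pk=1)
    # push_to_library returns (library updated?, new item created?)
    updated, created = article.push_to_library()
    article.tag('featured')       # tag within the ACE library
    article.push_to_graphstack()  # deploy the current values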
Setting Up Djax and ACE to Handle User-Generated Content
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A common scenario is User Generated Content (UGC), in which user's of
the website create content, in the form of Django models, which then
needs to be pushed back into the ACE library for administrative review.
Djax and ACE now support this round-trip model for content.
In the ACE project, first create a new **Content Source** for the
Content Type that you want to round-trip. Content Sources are found in
the settings panel, for each content type under the Content Types
section of the ACE project.
The new Content Source should be of the type **Djax User Generated
Content**. When creating the Content Source, you will need to set the
refresh interval, the URL pointing to the Djax install, and an auth
token.
In your code, you set up your model as ``ACEContent`` as usual, defining
the ACE content type and the field map in the ``ACE`` subclass.
Everytime the Content Source passes the refresh interval, it will query
your Djax install. At this point the Djax install will push the content
into the ACE library, either creating new content items or updating
existing ones.
| PypiClean |
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/postgres/apps.py | from django.apps import AppConfig
from django.core.signals import setting_changed
from django.db import connections
from django.db.backends.postgresql.psycopg_any import RANGE_TYPES
from django.db.backends.signals import connection_created
from django.db.migrations.writer import MigrationWriter
from django.db.models import CharField, OrderBy, TextField
from django.db.models.functions import Collate
from django.db.models.indexes import IndexExpression
from django.utils.translation import gettext_lazy as _
from .indexes import OpClass
from .lookups import (
SearchLookup,
TrigramSimilar,
TrigramStrictWordSimilar,
TrigramWordSimilar,
Unaccent,
)
from .serializers import RangeSerializer
from .signals import register_type_handlers
def uninstall_if_needed(setting, value, enter, **kwargs):
"""
Undo the effects of PostgresConfig.ready() when django.contrib.postgres
is "uninstalled" by override_settings().
"""
if (
not enter
and setting == "INSTALLED_APPS"
and "django.contrib.postgres" not in set(value)
):
connection_created.disconnect(register_type_handlers)
CharField._unregister_lookup(Unaccent)
TextField._unregister_lookup(Unaccent)
CharField._unregister_lookup(SearchLookup)
TextField._unregister_lookup(SearchLookup)
CharField._unregister_lookup(TrigramSimilar)
TextField._unregister_lookup(TrigramSimilar)
CharField._unregister_lookup(TrigramWordSimilar)
TextField._unregister_lookup(TrigramWordSimilar)
CharField._unregister_lookup(TrigramStrictWordSimilar)
TextField._unregister_lookup(TrigramStrictWordSimilar)
# Disconnect this receiver until the next time this app is installed
# and ready() connects it again to prevent unnecessary processing on
# each setting change.
setting_changed.disconnect(uninstall_if_needed)
MigrationWriter.unregister_serializer(RANGE_TYPES)
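# Example (illustrative): the receiver above typically fires from tests that
# temporarily drop this app with override_settings, e.g.:
#
#     from django.test import override_settings
#
#     with override_settings(INSTALLED_APPS=["django.contrib.contenttypes"]):
#         ...  # postgres lookups and type handlers are unregistered here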
class PostgresConfig(AppConfig):
name = "django.contrib.postgres"
verbose_name = _("PostgreSQL extensions")
def ready(self):
setting_changed.connect(uninstall_if_needed)
# Connections may already exist before we are called.
for conn in connections.all(initialized_only=True):
if conn.vendor == "postgresql":
conn.introspection.data_types_reverse.update(
{
3904: "django.contrib.postgres.fields.IntegerRangeField",
3906: "django.contrib.postgres.fields.DecimalRangeField",
3910: "django.contrib.postgres.fields.DateTimeRangeField",
3912: "django.contrib.postgres.fields.DateRangeField",
3926: "django.contrib.postgres.fields.BigIntegerRangeField",
}
)
if conn.connection is not None:
register_type_handlers(conn)
connection_created.connect(register_type_handlers)
CharField.register_lookup(Unaccent)
TextField.register_lookup(Unaccent)
CharField.register_lookup(SearchLookup)
TextField.register_lookup(SearchLookup)
CharField.register_lookup(TrigramSimilar)
TextField.register_lookup(TrigramSimilar)
CharField.register_lookup(TrigramWordSimilar)
TextField.register_lookup(TrigramWordSimilar)
CharField.register_lookup(TrigramStrictWordSimilar)
TextField.register_lookup(TrigramStrictWordSimilar)
MigrationWriter.register_serializer(RANGE_TYPES, RangeSerializer)
IndexExpression.register_wrappers(OrderBy, OpClass, Collate) | PypiClean |
/observations-0.1.4.tar.gz/observations-0.1.4/observations/r/snow_deaths.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def snow_deaths(path):
"""John Snow's Map and Data on the 1854 London Cholera Outbreak
The `Snow` data consists of the relevant 1854 London streets, the
location of 578 deaths from cholera, and the position of 13 water pumps
(wells) that can be used to re-create John Snow's map showing deaths
from cholera in the area surrounding Broad Street, London in the 1854
outbreak. Another data frame provides boundaries of a tessellation of the
map into Thiessen (Voronoi) regions which include all cholera deaths
nearer to a given pump than to any other.
The apocryphal story of the significance of Snow's map is that, by
closing the Broad Street pump (by removing its handle), Dr. Snow stopped
the epidemic, and demonstrated that cholera is a waterborne disease.
The method of contagion of cholera was not previously understood. Snow's
map is the most famous and classical example in the field of medical
cartography, even if it didn't happen exactly this way. (The apocryphal
part is that the epidemic ended when the pump handle was removed.) At
any rate, the map, together with various statistical annotations, is
compelling because it points to the Broad Street pump as the source of
the outbreak.
`Snow.deaths`: A data frame with 578 observations on the following 3
variables, giving the address of a person who died from cholera. When
many points are associated with a single street address, they are
"stacked" in a line away from the street so that they are more easily
visualized. This is how they are displayed on John Snow's original map.
The dates of the deaths are not individually recorded in this data set.
`case`
Sequential case number, in some arbitrary, randomized order
`x`
x coordinate
`y`
y coordinate
`Snow.pumps`: A data frame with 13 observations on the following 4
variables, giving the locations of water pumps within the boundaries of
the map.
`pump`
pump number
`label`
pump label: `Briddle St` `Broad St` ... `Warwick`
`x`
x coordinate
`y`
y coordinate
`Snow.streets`: A data frame with 1241 observations on the following 4
variables, giving coordinates used to draw the 528 street segment lines
within the boundaries of the map. The map is created by drawing lines
connecting the `n` points in each street segment.
`street`
street segment number: `1:528`
`n`
number of points in this street line segment
`x`
x coordinate
`y`
y coordinate
`Snow.polygons`: A list of 13 data frames, giving the vertices of
Thiessen (Voronoi) polygons containing each pump. Their boundaries
define the area that is closest to each pump relative to all other
pumps. They are mathematically defined by the perpendicular bisectors of
the lines between all pumps. Each data frame contains:
`x`
x coordinate
`y`
y coordinate
`Snow.deaths2`: An alternative version of `Snow.deaths` correcting
some possible duplicate and missing cases, as described in
`vignette("Snow_deaths-duplicates")`.
`Snow.dates`: A data frame of 44 observations and 3 variables from
Table 1 of Snow (1855), giving the number of fatal attacks and number of
deaths by date from Aug. 19 – Sept. 30, 1854. There are a total of 616
deaths represented in both columns `attacks` and `deaths`; of these,
the date of the attack is unknown for 45 cases.
Tobler, W. (1994). Snow's Cholera Map,
`http://www.ncgia.ucsb.edu/pubs/snow/snow.html`; data files were
obtained from `http://ncgia.ucsb.edu/Publications/Software/cholera/`,
but these sites seem to be down.
The data in these files were first digitized in 1992 by Rusty Dodson of
the NCGIA, Santa Barbara, from the map included in the book by John
Snow: "Snow on Cholera...", London, Oxford University Press, 1936.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `snow_deaths.csv`.
Returns:
Tuple of np.ndarray `x_train` with 578 rows and 3 columns and
dictionary `metadata` of column headers (feature names).
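
  Example (illustrative):

    >>> x_train, metadata = snow_deaths('~/data')
    >>> x_train.shape
    (578, 3)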
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'snow_deaths.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/HistData/Snow.deaths.csv'
maybe_download_and_extract(path, url,
save_file_name='snow_deaths.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata | PypiClean |
/Kahi_openalex_subjects-0.0.2a0-py3-none-any.whl/kahi_openalex_subjects/Kahi_openalex_subjects.py | from kahi.KahiBase import KahiBase
from pymongo import MongoClient, TEXT
from time import time
from joblib import Parallel, delayed
def process_relation(sub, url, db_name):
client = MongoClient(url)
db = client[db_name]
collection = db["subjects"]
relations = []
for rel in sub["related_concepts"]:
sub_db = collection.find_one(
{"external_ids.id": rel["id"]})
if sub_db:
name = sub_db["names"][0]["name"]
for n in sub_db["names"]:
if n["lang"] == "en":
name = n["name"]
break
rel_entry = {
"id": sub_db["_id"],
"name": name,
"level": sub_db["level"]
}
relations.append(rel_entry)
else:
print("Could not find related concept in colombia db")
for rel in sub["ancestors"]:
sub_db = collection.find_one(
{"external_ids.id": rel["id"]})
if sub_db:
name = sub_db["names"][0]["name"]
for n in sub_db["names"]:
if n["lang"] == "en":
name = n["name"]
break
rel_entry = {
"id": sub_db["_id"],
"name": name,
"level": sub_db["level"]
}
relations.append(rel_entry)
else:
print("Could not find related concept in colombia db")
if len(relations) > 0:
collection.update_one({"external_ids.id": sub["id"]}, {
"$set": {"relations": relations}})
class Kahi_openalex_subjects(KahiBase):
config = {}
def __init__(self, config):
self.config = config
self.mongodb_url = config["database_url"]
self.client = MongoClient(self.mongodb_url)
self.db = self.client[config["database_name"]]
self.collection = self.db["subjects"]
self.collection.create_index("external_ids.id")
self.collection.create_index([("names.name", TEXT)])
self.openalex_client = MongoClient(
config["openalex_subjects"]["database_url"])
if config["openalex_subjects"]["database_name"] not in list(self.openalex_client.list_database_names()):
raise Exception("Database not found in mongodb client")
self.openalex_db = self.openalex_client[config["openalex_subjects"]
["database_name"]]
if config["openalex_subjects"]["collection_name"] not in list(self.openalex_db.list_collection_names()):
raise Exception("Collection not found in openalex database")
self.openalex_collection = self.openalex_db[config["openalex_subjects"]
["collection_name"]]
self.n_jobs = config["openalex_subjects"]["num_jobs"]
self.inserted_concepts = []
self.inserted_concepts_ids_tuples = []
self.relations_inserted_ids = []
        for reg in self.collection.find():
oa_id = ""
for ext in reg["external_ids"]:
if ext["sources"] == "openalex":
oa_id = ext["id"]
break
if oa_id != "":
self.inserted_concepts.append(oa_id)
self.inserted_concepts_ids_tuples.append((reg["_id"], oa_id))
if reg["relations"] != []:
self.relations_inserted_ids.append(oa_id)
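    # Example (illustrative) of the config structure this class expects;
    # the values shown here are placeholders:
    #
    #     config = {
    #         "database_url": "mongodb://localhost:27017/",
    #         "database_name": "kahi",
    #         "openalex_subjects": {
    #             "database_url": "mongodb://localhost:27017/",
    #             "database_name": "openalex",
    #             "collection_name": "concepts",
    #             "num_jobs": 4,
    #         },
    #     }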
def process_openalex(self):
openalex_subjects = list(self.openalex_collection.find(
{"id": {"$nin": self.inserted_concepts}}))
for sub in openalex_subjects:
if sub["id"] in self.inserted_concepts:
continue
db_reg = self.collection.find_one(
{"external_ids.id": sub["id"]})
if db_reg:
self.inserted_concepts.append(sub["id"])
self.inserted_concepts_ids_tuples.append(
(db_reg["_id"], sub["id"]))
continue
entry = self.empty_subjects()
entry["updated"] = [
{"source": "openalex", "time": int(time())}]
sources_inserted_ids = []
entry["external_ids"].append(
{"source": "openalex", "id": sub["id"]})
sources_inserted_ids.append("openalex")
for source, idx in sub["ids"].items():
if source in sources_inserted_ids:
continue
entry["external_ids"].append({"source": source, "id": idx})
sources_inserted_ids.append(source)
entry["level"] = sub["level"]
entry["names"].append(
{"name": sub["display_name"], "lang": "en"})
inserted_lang_names = ["en"]
if sub["international"]:
if sub["international"]["display_name"]:
for lang, name in sub["international"]["display_name"].items():
if lang in inserted_lang_names:
continue
entry["names"].append({"name": name, "lang": lang})
inserted_lang_names.append(lang)
if sub["description"]:
entry["descriptions"].append(
{"description": sub["description"], "lang": "en"})
if sub["wikidata"]:
entry["external_urls"].append(
{"source": "wikidata", "url": sub["wikidata"]})
if sub["image_url"]:
entry["external_urls"].append(
{"source": "image", "url": sub["image_url"]})
response = self.collection.insert_one(entry)
self.inserted_concepts.append(sub["id"])
self.inserted_concepts_ids_tuples.append(
(response.inserted_id, sub["id"]))
def process_relations(self):
openalex_data = list(self.openalex_collection.find(
{"id": {"$nin": self.relations_inserted_ids}}, {"id": 1, "ancestors": 1, "related_concepts": 1}))
Parallel(
n_jobs=self.n_jobs,
backend="multiprocessing",
verbose=10
)(delayed(process_relation)(sub, self.config["database_url"], self.config["database_name"]) for sub in openalex_data)
def run(self):
print("Inserting the subjects")
self.process_openalex()
print("Creating relations")
self.process_relations()
return 0 | PypiClean |
/NeuroTS-3.4.0-py3-none-any.whl/neurots/morphmath/rotation.py |
# Copyright (C) 2021 Blue Brain Project, EPFL
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import math
import numpy as np
from neurots.morphmath.utils import norm
def spherical_from_vector(vect):
"""Return the spherical coordinates of a vector: phi, theta."""
x, y, z = vect
phi = np.arctan2(y, x)
theta = np.arccos(z / norm(vect))
return phi, theta
def vector_from_spherical(phi, theta):
"""Return a normalized vector from the spherical angles: phi, theta."""
x = np.sin(theta) * np.cos(phi)
y = np.sin(theta) * np.sin(phi)
z = np.cos(theta)
return x, y, z
def rotation_around_axis(axis, angle):
"""Return a normalized vector rotated around the selected axis by an angle."""
d = np.array(axis, dtype=float) / np.linalg.norm(axis)
sn = np.sin(angle)
cs = np.cos(angle)
eye = np.eye(3, dtype=float)
# ddt = np.outer(d, d)
skew = np.array([[0, -d[2], d[1]], [d[2], 0, -d[0]], [-d[1], d[0], 0]], dtype=float)
# mtx = ddt + cs * (eye - ddt) + sn * skew
# mtx = cs * eye + sn * skew + (1. - cs) * ddt
mtx = eye + sn * skew + (1.0 - cs) * np.linalg.matrix_power(skew, 2)
return mtx
def angle3D(v1, v2):
"""Return the angle between v1, v2."""
v1 = np.array(v1)
v2 = np.array(v2)
return math.acos(v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2)))
def rotate_vector(vec, axis, angle):
"""Rotate the input vector vec by a selected angle around a specific axis."""
return np.dot(rotation_around_axis(axis, angle), vec)
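# Example (illustrative): rotating the x unit vector by pi/2 around the
# z axis yields the y unit vector, up to floating point error:
#
#     rotate_vector([1.0, 0.0, 0.0], [0.0, 0.0, 1.0], np.pi / 2)
#     # ~ array([0., 1., 0.])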
def rotation_matrix_from_vectors(vec1, vec2):
"""Find the rotation matrix that aligns vec1 to vec2.
Picked from morph_tool.transform
Picked from: https://stackoverflow.com/a/59204638/3868743
Args:
vec1: A 3d "source" vector
vec2: A 3d "destination" vector
Returns:
A transform matrix (3x3) which when applied to vec1, aligns it with vec2.
"""
vec1, vec2 = vec1 / np.linalg.norm(vec1), vec2 / np.linalg.norm(vec2)
v_cross = np.cross(vec1, vec2)
v_cross_norm = np.linalg.norm(v_cross)
if v_cross_norm == 0:
return np.eye(3)
kmat = np.array(
[
[0.0, -v_cross[2], v_cross[1]],
[v_cross[2], 0.0, -v_cross[0]],
[-v_cross[1], v_cross[0], 0.0],
]
)
return np.eye(3) + kmat + kmat.dot(kmat) * ((1 - np.dot(vec1, vec2)) / (v_cross_norm**2)) | PypiClean |
/Cantiz-PyChromecast-3.2.2.tar.gz/Cantiz-PyChromecast-3.2.2/pychromecast/cast_channel_pb2.py |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='cast_channel.proto',
package='extensions.api.cast_channel',
syntax='proto2',
serialized_pb=_b('\n\x12\x63\x61st_channel.proto\x12\x1b\x65xtensions.api.cast_channel\"\xe3\x02\n\x0b\x43\x61stMessage\x12R\n\x10protocol_version\x18\x01 \x02(\x0e\x32\x38.extensions.api.cast_channel.CastMessage.ProtocolVersion\x12\x11\n\tsource_id\x18\x02 \x02(\t\x12\x16\n\x0e\x64\x65stination_id\x18\x03 \x02(\t\x12\x11\n\tnamespace\x18\x04 \x02(\t\x12J\n\x0cpayload_type\x18\x05 \x02(\x0e\x32\x34.extensions.api.cast_channel.CastMessage.PayloadType\x12\x14\n\x0cpayload_utf8\x18\x06 \x01(\t\x12\x16\n\x0epayload_binary\x18\x07 \x01(\x0c\"!\n\x0fProtocolVersion\x12\x0e\n\nCASTV2_1_0\x10\x00\"%\n\x0bPayloadType\x12\n\n\x06STRING\x10\x00\x12\n\n\x06\x42INARY\x10\x01\"\xce\x01\n\rAuthChallenge\x12]\n\x13signature_algorithm\x18\x01 \x01(\x0e\x32/.extensions.api.cast_channel.SignatureAlgorithm:\x0fRSASSA_PKCS1v15\x12\x14\n\x0csender_nonce\x18\x02 \x01(\x0c\x12H\n\x0ehash_algorithm\x18\x03 \x01(\x0e\x32*.extensions.api.cast_channel.HashAlgorithm:\x04SHA1\"\xb0\x02\n\x0c\x41uthResponse\x12\x11\n\tsignature\x18\x01 \x02(\x0c\x12\x1f\n\x17\x63lient_auth_certificate\x18\x02 \x02(\x0c\x12 \n\x18intermediate_certificate\x18\x03 \x03(\x0c\x12]\n\x13signature_algorithm\x18\x04 \x01(\x0e\x32/.extensions.api.cast_channel.SignatureAlgorithm:\x0fRSASSA_PKCS1v15\x12\x14\n\x0csender_nonce\x18\x05 \x01(\x0c\x12H\n\x0ehash_algorithm\x18\x06 \x01(\x0e\x32*.extensions.api.cast_channel.HashAlgorithm:\x04SHA1\x12\x0b\n\x03\x63rl\x18\x07 \x01(\x0c\"\xa3\x01\n\tAuthError\x12\x44\n\nerror_type\x18\x01 \x02(\x0e\x32\x30.extensions.api.cast_channel.AuthError.ErrorType\"P\n\tErrorType\x12\x12\n\x0eINTERNAL_ERROR\x10\x00\x12\n\n\x06NO_TLS\x10\x01\x12#\n\x1fSIGNATURE_ALGORITHM_UNAVAILABLE\x10\x02\"\xc6\x01\n\x11\x44\x65viceAuthMessage\x12=\n\tchallenge\x18\x01 \x01(\x0b\x32*.extensions.api.cast_channel.AuthChallenge\x12;\n\x08response\x18\x02 \x01(\x0b\x32).extensions.api.cast_channel.AuthResponse\x12\x35\n\x05\x65rror\x18\x03 \x01(\x0b\x32&.extensions.api.cast_channel.AuthError*J\n\x12SignatureAlgorithm\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x13\n\x0fRSASSA_PKCS1v15\x10\x01\x12\x0e\n\nRSASSA_PSS\x10\x02*%\n\rHashAlgorithm\x12\x08\n\x04SHA1\x10\x00\x12\n\n\x06SHA256\x10\x01\x42\x02H\x03')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_SIGNATUREALGORITHM = _descriptor.EnumDescriptor(
name='SignatureAlgorithm',
full_name='extensions.api.cast_channel.SignatureAlgorithm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RSASSA_PKCS1v15', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RSASSA_PSS', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1292,
serialized_end=1366,
)
_sym_db.RegisterEnumDescriptor(_SIGNATUREALGORITHM)
SignatureAlgorithm = enum_type_wrapper.EnumTypeWrapper(_SIGNATUREALGORITHM)
_HASHALGORITHM = _descriptor.EnumDescriptor(
name='HashAlgorithm',
full_name='extensions.api.cast_channel.HashAlgorithm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SHA1', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SHA256', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1368,
serialized_end=1405,
)
_sym_db.RegisterEnumDescriptor(_HASHALGORITHM)
HashAlgorithm = enum_type_wrapper.EnumTypeWrapper(_HASHALGORITHM)
UNSPECIFIED = 0
RSASSA_PKCS1v15 = 1
RSASSA_PSS = 2
SHA1 = 0
SHA256 = 1
_CASTMESSAGE_PROTOCOLVERSION = _descriptor.EnumDescriptor(
name='ProtocolVersion',
full_name='extensions.api.cast_channel.CastMessage.ProtocolVersion',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CASTV2_1_0', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=335,
serialized_end=368,
)
_sym_db.RegisterEnumDescriptor(_CASTMESSAGE_PROTOCOLVERSION)
_CASTMESSAGE_PAYLOADTYPE = _descriptor.EnumDescriptor(
name='PayloadType',
full_name='extensions.api.cast_channel.CastMessage.PayloadType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRING', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BINARY', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=370,
serialized_end=407,
)
_sym_db.RegisterEnumDescriptor(_CASTMESSAGE_PAYLOADTYPE)
_AUTHERROR_ERRORTYPE = _descriptor.EnumDescriptor(
name='ErrorType',
full_name='extensions.api.cast_channel.AuthError.ErrorType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='INTERNAL_ERROR', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NO_TLS', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGNATURE_ALGORITHM_UNAVAILABLE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1009,
serialized_end=1089,
)
_sym_db.RegisterEnumDescriptor(_AUTHERROR_ERRORTYPE)
_CASTMESSAGE = _descriptor.Descriptor(
name='CastMessage',
full_name='extensions.api.cast_channel.CastMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='protocol_version', full_name='extensions.api.cast_channel.CastMessage.protocol_version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='source_id', full_name='extensions.api.cast_channel.CastMessage.source_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='destination_id', full_name='extensions.api.cast_channel.CastMessage.destination_id', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='namespace', full_name='extensions.api.cast_channel.CastMessage.namespace', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='payload_type', full_name='extensions.api.cast_channel.CastMessage.payload_type', index=4,
number=5, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='payload_utf8', full_name='extensions.api.cast_channel.CastMessage.payload_utf8', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='payload_binary', full_name='extensions.api.cast_channel.CastMessage.payload_binary', index=6,
number=7, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_CASTMESSAGE_PROTOCOLVERSION,
_CASTMESSAGE_PAYLOADTYPE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=52,
serialized_end=407,
)
_AUTHCHALLENGE = _descriptor.Descriptor(
name='AuthChallenge',
full_name='extensions.api.cast_channel.AuthChallenge',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='signature_algorithm', full_name='extensions.api.cast_channel.AuthChallenge.signature_algorithm', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sender_nonce', full_name='extensions.api.cast_channel.AuthChallenge.sender_nonce', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hash_algorithm', full_name='extensions.api.cast_channel.AuthChallenge.hash_algorithm', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=410,
serialized_end=616,
)
_AUTHRESPONSE = _descriptor.Descriptor(
name='AuthResponse',
full_name='extensions.api.cast_channel.AuthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='signature', full_name='extensions.api.cast_channel.AuthResponse.signature', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='client_auth_certificate', full_name='extensions.api.cast_channel.AuthResponse.client_auth_certificate', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='intermediate_certificate', full_name='extensions.api.cast_channel.AuthResponse.intermediate_certificate', index=2,
number=3, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='signature_algorithm', full_name='extensions.api.cast_channel.AuthResponse.signature_algorithm', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sender_nonce', full_name='extensions.api.cast_channel.AuthResponse.sender_nonce', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hash_algorithm', full_name='extensions.api.cast_channel.AuthResponse.hash_algorithm', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crl', full_name='extensions.api.cast_channel.AuthResponse.crl', index=6,
number=7, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=619,
serialized_end=923,
)
_AUTHERROR = _descriptor.Descriptor(
name='AuthError',
full_name='extensions.api.cast_channel.AuthError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error_type', full_name='extensions.api.cast_channel.AuthError.error_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_AUTHERROR_ERRORTYPE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=926,
serialized_end=1089,
)
_DEVICEAUTHMESSAGE = _descriptor.Descriptor(
name='DeviceAuthMessage',
full_name='extensions.api.cast_channel.DeviceAuthMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='challenge', full_name='extensions.api.cast_channel.DeviceAuthMessage.challenge', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='response', full_name='extensions.api.cast_channel.DeviceAuthMessage.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='error', full_name='extensions.api.cast_channel.DeviceAuthMessage.error', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1092,
serialized_end=1290,
)
_CASTMESSAGE.fields_by_name['protocol_version'].enum_type = _CASTMESSAGE_PROTOCOLVERSION
_CASTMESSAGE.fields_by_name['payload_type'].enum_type = _CASTMESSAGE_PAYLOADTYPE
_CASTMESSAGE_PROTOCOLVERSION.containing_type = _CASTMESSAGE
_CASTMESSAGE_PAYLOADTYPE.containing_type = _CASTMESSAGE
_AUTHCHALLENGE.fields_by_name['signature_algorithm'].enum_type = _SIGNATUREALGORITHM
_AUTHCHALLENGE.fields_by_name['hash_algorithm'].enum_type = _HASHALGORITHM
_AUTHRESPONSE.fields_by_name['signature_algorithm'].enum_type = _SIGNATUREALGORITHM
_AUTHRESPONSE.fields_by_name['hash_algorithm'].enum_type = _HASHALGORITHM
_AUTHERROR.fields_by_name['error_type'].enum_type = _AUTHERROR_ERRORTYPE
_AUTHERROR_ERRORTYPE.containing_type = _AUTHERROR
_DEVICEAUTHMESSAGE.fields_by_name['challenge'].message_type = _AUTHCHALLENGE
_DEVICEAUTHMESSAGE.fields_by_name['response'].message_type = _AUTHRESPONSE
_DEVICEAUTHMESSAGE.fields_by_name['error'].message_type = _AUTHERROR
DESCRIPTOR.message_types_by_name['CastMessage'] = _CASTMESSAGE
DESCRIPTOR.message_types_by_name['AuthChallenge'] = _AUTHCHALLENGE
DESCRIPTOR.message_types_by_name['AuthResponse'] = _AUTHRESPONSE
DESCRIPTOR.message_types_by_name['AuthError'] = _AUTHERROR
DESCRIPTOR.message_types_by_name['DeviceAuthMessage'] = _DEVICEAUTHMESSAGE
DESCRIPTOR.enum_types_by_name['SignatureAlgorithm'] = _SIGNATUREALGORITHM
DESCRIPTOR.enum_types_by_name['HashAlgorithm'] = _HASHALGORITHM
CastMessage = _reflection.GeneratedProtocolMessageType('CastMessage', (_message.Message,), dict(
DESCRIPTOR = _CASTMESSAGE,
__module__ = 'cast_channel_pb2'
# @@protoc_insertion_point(class_scope:extensions.api.cast_channel.CastMessage)
))
_sym_db.RegisterMessage(CastMessage)
AuthChallenge = _reflection.GeneratedProtocolMessageType('AuthChallenge', (_message.Message,), dict(
DESCRIPTOR = _AUTHCHALLENGE,
__module__ = 'cast_channel_pb2'
# @@protoc_insertion_point(class_scope:extensions.api.cast_channel.AuthChallenge)
))
_sym_db.RegisterMessage(AuthChallenge)
AuthResponse = _reflection.GeneratedProtocolMessageType('AuthResponse', (_message.Message,), dict(
DESCRIPTOR = _AUTHRESPONSE,
__module__ = 'cast_channel_pb2'
# @@protoc_insertion_point(class_scope:extensions.api.cast_channel.AuthResponse)
))
_sym_db.RegisterMessage(AuthResponse)
AuthError = _reflection.GeneratedProtocolMessageType('AuthError', (_message.Message,), dict(
DESCRIPTOR = _AUTHERROR,
__module__ = 'cast_channel_pb2'
# @@protoc_insertion_point(class_scope:extensions.api.cast_channel.AuthError)
))
_sym_db.RegisterMessage(AuthError)
DeviceAuthMessage = _reflection.GeneratedProtocolMessageType('DeviceAuthMessage', (_message.Message,), dict(
DESCRIPTOR = _DEVICEAUTHMESSAGE,
__module__ = 'cast_channel_pb2'
# @@protoc_insertion_point(class_scope:extensions.api.cast_channel.DeviceAuthMessage)
))
_sym_db.RegisterMessage(DeviceAuthMessage)
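# Example (illustrative): building and serializing a message with the
# generated classes (field values here are arbitrary):
#
#     msg = CastMessage()
#     msg.protocol_version = CastMessage.CASTV2_1_0
#     msg.source_id = 'sender-0'
#     msg.destination_id = 'receiver-0'
#     msg.namespace = 'urn:x-cast:com.google.cast.tp.connection'
#     msg.payload_type = CastMessage.STRING
#     msg.payload_utf8 = '{"type": "CONNECT"}'
#     data = msg.SerializeToString()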
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\003'))
# @@protoc_insertion_point(module_scope) | PypiClean |
/APASVO-0.0.6.tar.gz/APASVO-0.0.6/README.rst | ######
APASVO
######
*A graphical tool to perform event detection/picking in seismic traces.*
**Main Features**
* Three different picking algorithms available: STA-LTA [1]_, AMPA [2]_ and Takanami's autoregressive method [3]_.
* Proper functionality from DSP tools: scrolling, zooming, panning, playbacking...
* Signal envelope, spectrogram and estimated characteristic function visualization.
* Manually editing of picked seismic events or picking new ones.
* Detect mode: Find all characteristic function's peaks which value is over a threshold value.
* Support for text/binary files containing seismic traces.
* Save picked events to CSV format, and characteristic function to text/binary file format.
* Two additional command line tools: An event picking/detection tool and a synthetic earthquake generator [4]_.
.. contents:: **Table of Contents**
:local:
:backlinks: none
============
Installation
============
-------
Windows
-------
A prebuilt version of APASVO for Windows is available, compatible with 32-bit and 64-bit machines. You can download it `here`_.
Prebuilt package contains all the required software dependencies to work. Just unzip its contents into a directory of your choice and then you can start using the application.
.. _here: https://github.com/jemromerol/apasvo/releases
-----
Linux
-----
~~~~~~~~~~~~~~~~~
Prebuilt packages
~~~~~~~~~~~~~~~~~
Prebuilt distributions are the recommended installation method because they don't require installing any extra software. Just download the appropriate package for your architecture, unzip its contents into the directory of your choice and you can start using the application.
Prebuilt packages of APASVO for Linux are available for both 32-bit and 64-bit architectures. You can download them `here`_.
.. warning::
Prebuilt packages for Linux require GLIBC version 2.13 or newer to work. You can check your GLIBC version with:
::
$ ldd --version
.. _here: https://github.com/jemromerol/apasvo/releases
~~~~~~~~~~~~~~~~~~~~~~
Installation from Pypi
~~~~~~~~~~~~~~~~~~~~~~
.. warning::
Installing from PyPI is a long and delicate process that involves installing several large libraries and their dependencies, so it is discouraged unless you are confident about installing python applications with multiple dependencies from source. In any case, PREBUILT PACKAGES ARE THE RECOMMENDED WAY OF INSTALLING APASVO.
*************
Prerequisites
*************
Make sure you have Python 2.7.x installed. Then, install the latest `pip`_ distribution.
*************************************
Installation of required dependencies
*************************************
APASVO depends on a list of Python packages, which you can check in the project's `requirements.txt`_ file. These packages are automatically installed when APASVO is installed from Python repositories by using ``pip`` or from source code via `setuptools`_.
However, some of these packages, namely Matplotlib and PySide, require installation of a number of additional dependencies. If you're on a Debian / Ubuntu system, you can install these dependencies using the command:
::
$ sudo apt-get build-dep python-pyside python-matplotlib
Or if you are in Fedora/RedHat, first install ``yum-builddep`` and then use the command:
::
$ su -c "yum-builddep python-pyside python-matplotlib"
*******
Install
*******
You can install the latest version of APASVO from Python repositories by using the command:
::
$ pip install --use-wheel apasvo
~~~~~~~~~~~~~~~~~~~~~~~~
Installation from source
~~~~~~~~~~~~~~~~~~~~~~~~
First, make sure you meet the requirements explained in `Prerequisites`_ and install the needed dependencies as explained in `Installation of required dependencies`_ section.
Then, download the latest version from `GitHub`_. If you have ``git`` installed, you can use the following command:
::
$ git clone https://github.com/jemromerol/apasvo.git
Finally, enter the newly created directory containing the source code and run:
::
$ python setup.py install
.. _pip: http://pip.readthedocs.org/en/latest/installing.html
.. _requirements.txt: https://github.com/jemromerol/apasvo/blob/master/requirements.txt
.. _setuptools: https://pythonhosted.org/an_example_pypi_project/setuptools.html#using-setup-py
.. _GitHub: https://github.com/jemromerol/apasvo
----
OS X
----
Sorry, but no precompiled version for OS X is available yet. You can try to install it from Python repositories or from source by following a similar procedure to that described for `Linux`_.
===========
Screenshots
===========
* http://jemromerol.github.io/media/apasvo-screenshot-1.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-2.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-3.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-4.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-5.jpg
* http://jemromerol.github.io/media/apasvo-screenshot-6.jpg
=======
License
=======
Licensed under the `GPLv3`_ license.
.. _GPLv3: http://www.gnu.org/licenses/gpl-3.0.html
=======
Authors
=======
José Emilio Romero López. [email protected]
==========
References
==========
.. [1] Trnkoczy, A. (2002). Understanding and parameter setting of STA/LTA trigger
algorithm. IASPEI New Manual of Seismological Observatory Practice, 2, 1-19.
.. [2] Álvarez, I., García, L., Mota, S., Cortés, G., Benítez, C., & De la Torre, A. (2013).
An Automatic P-Phase Picking Algorithm Based on Adaptive Multiband Processing.
Geoscience and Remote Sensing Letters, IEEE, Volume: 10, Issue: 6, pp. 1488 - 1492
.. [3] Takanami, T., & Kitagawa, G. (1988).
A new efficient procedure for the estimation of onset times of seismic waves.
Journal of Physics of the Earth, 36(6), 267-290.
.. [4] Peterson, Jon. "Observations and modeling of seismic background noise." (1993): 93-95.
=========
Changelog
=========
* 0.0.6 (2016-02-07)
* Add bandpass filtering options
* 0.0.5 (2015-11-30)
* Add a trace selector window to handle multitrace files. It also allows opening multiple
  files and switching between them.
* Fix several bugs.
* 0.0.4 (2015-11-09)
* Refactor apasvo classes to use the Obspy library. Thanks to Obspy, the application now supports multiple input
  formats (wav, sac, mseed, segy, ...) besides binary & text, multiple event export formats (NonLinLoc, QuakeML...)
and (virtually) support for multitrace files.
* Redesign apasvo-detector to detect events for multitrace files in batch.
* Fix several bugs
* 0.0.3 (2014-08-16)
* Fixed several bugs.
* 0.0.2 (2014-06-02)
* Fixed several bugs.
* Improve installation files.
* 0.0.1 (2014-05-16)
| PypiClean |
/MambuPy-2.0.0b22.tar.gz/MambuPy-2.0.0b22/mambupy/rest/mamburoles.py | from ..mambugeturl import getrolesurl
from .mambustruct import MambuStruct
from .mamburestutils import MambuStructIterator
mod_urlfunc = getrolesurl
class MambuRole(MambuStruct):
"""A Role from Mambu."""
def __init__(self, urlfunc=mod_urlfunc, entid="", *args, **kwargs):
"""Tasks done here:
Just initializes the MambuStruct.
"""
MambuStruct.__init__(self, urlfunc, entid, *args, **kwargs)
def __repr__(self):
"""Instead of the default id given by the parent class, shows
the rolename
"""
return self.__class__.__name__ + " - rolename: '%s'" % self["name"]
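# Example (an illustrative sketch; assumes Mambu API credentials are
# already configured for MambuPy):
#
#     roles = MambuRoles()
#     for role in roles:
#         print(role)   # MambuRole - rolename: '...'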
class MambuRoles(MambuStruct):
"""A list of Roles from Mambu.
With the default urlfunc, entid argument must be empty at
instantiation time to retrieve all the roles according to any other
filter you send to the urlfunc.
itemclass argument allows you to pass some other class as the
elements for the list. Why is this useful? You may wish to override
    several behaviours by creating your own MambuRole subclass. Pass
that to the itemclass argument here and voila, you get a list of
YourMambuRole class using MambuRoles instead of plain old MambuRole
elements.
If you wish to specialize other Mambu objects on MambuPy you may
    do that. Mind that if you want the iterable version of it
    to have elements of your specialized class, you need to change
the logic of the constructor and the convert_dict_to_attrs method in
the iterable class to use some sort of itemclass there too.
Don't forget to submit the change on a pull request when done
;-)
"""
def __init__(
self, urlfunc=mod_urlfunc, entid="", itemclass=MambuRole, *args, **kwargs
):
"""entid argument is empty. That makes perfect
        sense: you always get several roles from the Mambu REST API
"""
self.itemclass = itemclass
MambuStruct.__init__(self, urlfunc, entid, *args, **kwargs)
def __iter__(self):
return MambuStructIterator(self.attrs)
def convert_dict_to_attrs(self, *args, **kwargs):
"""The trick for iterable Mambu Objects comes here:
You iterate over each element of the responded List from Mambu,
and create a Mambu Role (or your own itemclass) object for each
one, initializing them one at a time, and changing the attrs
attribute (which just holds a list of plain dictionaries) with a
MambuUser (or your own itemclass) just created.
"""
for n, u in enumerate(self.attrs):
try:
params = self.params
except AttributeError:
params = {}
kwargs.update(params)
try:
self.mamburoleclass
except AttributeError:
self.mamburoleclass = MambuRole
role = self.mamburoleclass(urlfunc=None, entid=None, *args, **kwargs)
role.init(u, *args, **kwargs)
role._MambuStruct__urlfunc = getrolesurl
self.attrs[n] = role | PypiClean |
/dyson-1.1.2.tar.gz/dyson-1.1.2/docs/storing_variables.md | Storing Variables
=================
Storing variables is a way to capture values returned from specific steps
so that you can run validations, or perform other actions, on them later.
## Examples
```yaml
---
- goto: url=https://google.com
- get_attribute:
of: name=btnK
attribute: value
store: button_value
- validate: "'{{ button_value }}' is 'Google Search'"
```
```yaml
---
- goto: url=http://localhost:3000
- get_attribute:
of: css=a[href*='test/']
attribute: href
store: the_url
- goto: "url={{ the_url }}"
```
| PypiClean |
/CsuPMTD-1.0.27.tar.gz/CsuPMTD-1.0.27/PMTD/maskrcnn_benchmark/layers/sigmoid_focal_loss.py | import torch
from torch import nn
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from PMTD.maskrcnn_benchmark import _C
# TODO: Use JIT to replace CUDA implementation in the future.
class _SigmoidFocalLoss(Function):
@staticmethod
def forward(ctx, logits, targets, gamma, alpha):
ctx.save_for_backward(logits, targets)
num_classes = logits.shape[1]
ctx.num_classes = num_classes
ctx.gamma = gamma
ctx.alpha = alpha
losses = _C.sigmoid_focalloss_forward(
logits, targets, num_classes, gamma, alpha
)
return losses
@staticmethod
@once_differentiable
def backward(ctx, d_loss):
logits, targets = ctx.saved_tensors
num_classes = ctx.num_classes
gamma = ctx.gamma
alpha = ctx.alpha
d_loss = d_loss.contiguous()
d_logits = _C.sigmoid_focalloss_backward(
logits, targets, d_loss, num_classes, gamma, alpha
)
return d_logits, None, None, None, None
sigmoid_focal_loss_cuda = _SigmoidFocalLoss.apply
def sigmoid_focal_loss_cpu(logits, targets, gamma, alpha):
num_classes = logits.shape[1]
    # Accept both plain scalars and one-element sequences for gamma/alpha,
    # so the CPU path also works with the float values that
    # SigmoidFocalLoss stores.
    if not isinstance(gamma, (int, float)):
        gamma = gamma[0]
    if not isinstance(alpha, (int, float)):
        alpha = alpha[0]
dtype = targets.dtype
device = targets.device
class_range = torch.arange(1, num_classes+1, dtype=dtype, device=device).unsqueeze(0)
t = targets.unsqueeze(1)
p = torch.sigmoid(logits)
term1 = (1 - p) ** gamma * torch.log(p)
term2 = p ** gamma * torch.log(1 - p)
return -(t == class_range).float() * term1 * alpha - ((t != class_range) * (t >= 0)).float() * term2 * (1 - alpha)
class SigmoidFocalLoss(nn.Module):
def __init__(self, gamma, alpha):
super(SigmoidFocalLoss, self).__init__()
self.gamma = gamma
self.alpha = alpha
def forward(self, logits, targets):
device = logits.device
if logits.is_cuda:
loss_func = sigmoid_focal_loss_cuda
else:
loss_func = sigmoid_focal_loss_cpu
loss = loss_func(logits, targets, self.gamma, self.alpha)
return loss.sum()
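    # Example (illustrative):
    #
    #     loss_fn = SigmoidFocalLoss(gamma=2.0, alpha=0.25)
    #     logits = torch.randn(8, 80)            # (N, num_classes)
    #     targets = torch.randint(1, 81, (8,))   # 1-based class ids; 0 = background
    #     loss = loss_fn(logits, targets)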
def __repr__(self):
tmpstr = self.__class__.__name__ + "("
tmpstr += "gamma=" + str(self.gamma)
tmpstr += ", alpha=" + str(self.alpha)
tmpstr += ")"
return tmpstr | PypiClean |
/netket-3.9.2.tar.gz/netket-3.9.2/netket/utils/model_frameworks/haiku.py |
import sys
from flax import serialization
from flax.core import freeze
from .base import ModuleFramework, framework
# expose jax-stax as a flax module
class HaikuWrapper:
def __init__(self, transformed):
self.transformed = transformed
def init(self, rng, *args, **kwargs):
variables = self.transformed.init(rng["params"], *args, **kwargs)
return freeze({"params": variables})
def apply(
self,
variables,
*args,
rngs=None,
method=None, # noqa: W0613
mutable=False,
**kwargs,
):
if mutable is not False:
raise ValueError("Not implemented")
return self.transformed.apply(variables["params"], rngs, *args, **kwargs)
def unwrap_params(self, variables):
return variables["params"]
def __repr__(self):
return f"HaikuWrapper({self.transformed})"
@framework
class HaikuFramework(ModuleFramework):
name: str = "Haiku"
@staticmethod
def is_loaded() -> bool:
# this should be not necessary, as netket requires and loads
# Flax, but let's set a good example
return "haiku" in sys.modules
@staticmethod
def is_my_module(module) -> bool:
# this will only get called if the module is loaded
import haiku # noqa: E0401
# jax modules are tuples
if isinstance(module, haiku.Transformed):
return True
return False
@staticmethod
def wrap(module):
register_serialization_functions()
return HaikuWrapper(module)
@staticmethod
def wrap_params(variables):
return freeze({"params": variables})
@staticmethod
def unwrap_params(wrapped_variables):
return wrapped_variables["params"]
already_registered = False
# Haiku uses FlatMapping objects instead of FrozenDict when freezing dicts.
# They are functionally equivalent but we must teach flax how to serialize them.
def register_serialization_functions():
global already_registered # noqa: W0603
if not already_registered:
already_registered = True
import haiku # noqa: E0401
FlatMappingType = type(haiku.data_structures.to_immutable_dict({"ciao": 1}))
def serialize_flat_mapping(flat_mapping):
return dict(flat_mapping)
def deserialize_flat_mapping(flat_mapping, _):
return haiku.data_structures.to_immutable_dict(flat_mapping)
serialization.register_serialization_state(
FlatMappingType,
serialize_flat_mapping,
deserialize_flat_mapping,
) | PypiClean |
/MorningstarAutoTestingFramework-0.1.2.zip/MorningstarAutoTestingFramework-0.1.2/Src/Database/SQLServerHelper.py | import pymssql
class SQLServerHelper:
server = ""
user = ""
password = ""
def __init__(self, server_instance, login_user, login_password):
global server
server = server_instance
global user
user = login_user
global password
password = login_password
    def execute_sql_script(self, database_name, script):
        """
        :param database_name: name of the database to connect to, e.g. tempdb
        :param script: SQL command text to execute. For example:
            IF OBJECT_ID('persons', 'U') IS NOT NULL
                DROP TABLE persons
            CREATE TABLE persons (
                id INT NOT NULL,
                name VARCHAR(100),
                salesrep VARCHAR(100),
                PRIMARY KEY(id)
            )
        :return: None
        """
        conn = pymssql.connect(self.server, self.user, self.password, database_name)
        cursor = conn.cursor()
        cursor.execute(script)
        conn.commit()
        conn.close()
    def execute_sql_query(self, database_name, script, output_query_to_xml):
        """
        :param database_name: name of the database to connect to, e.g. tempdb
        :param script: SQL query text to execute, e.g.
            SELECT * FROM persons WHERE salesrep='John Doe'
        :param output_query_to_xml: whether to output the result as XML
            (accepted for compatibility; not implemented yet)
        :return: list of rows returned by the query
        """
        conn = pymssql.connect(self.server, self.user, self.password, database_name)
        cursor = conn.cursor()
        cursor.execute(script)
        rows = cursor.fetchall()
        conn.close()
        return rows
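# Example (illustrative):
#
#     helper = SQLServerHelper("localhost", "sa", "secret")
#     helper.execute_sql_script("tempdb", "CREATE TABLE t (id INT)")
#     rows = helper.execute_sql_query("tempdb", "SELECT * FROM t", False)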
| PypiClean |
/BuildStream-2.0.1-cp39-cp39-manylinux_2_28_x86_64.whl/buildstream/_artifactelement.py |
from typing import TYPE_CHECKING, Optional, Dict
from contextlib import suppress
from . import Element
from . import _cachekey
from ._artifactproject import ArtifactProject
from ._exceptions import ArtifactElementError
from ._loader import LoadElement
from .node import Node
if TYPE_CHECKING:
from ._context import Context
from ._state import Task
# ArtifactElement()
#
# Object to be used for directly processing an artifact
#
# Args:
# context (Context): The Context object
# ref (str): The artifact ref
#
class ArtifactElement(Element):
# A hash of ArtifactElement by ref
__instantiated_artifacts: Dict[str, "ArtifactElement"] = {}
def __init__(self, context, ref):
project_name, element_name, key = verify_artifact_ref(ref)
project = ArtifactProject(project_name, context)
load_element = LoadElement(Node.from_dict({}), element_name, project.loader) # NOTE element has no .bst suffix
super().__init__(context, project, load_element, None, artifact_key=key)
########################################################
# Public API #
########################################################
# new_from_artifact_name():
#
# Recursively instantiate a new ArtifactElement instance, and its
# dependencies from an artifact name
#
# Args:
# artifact_name: The artifact name
# context: The Context object
# task: A task object to report progress to
#
# Returns:
# (ArtifactElement): A newly created Element instance
#
@classmethod
def new_from_artifact_name(cls, artifact_name: str, context: "Context", task: Optional["Task"] = None):
# Initial lookup for already loaded artifact.
with suppress(KeyError):
return cls.__instantiated_artifacts[artifact_name]
# Instantiate the element, this can result in having a different
# artifact name, if we loaded the artifact by it's weak key then
# we will have the artifact loaded via it's strong key.
element = ArtifactElement(context, artifact_name)
artifact_name = element.get_artifact_name()
# Perform a second lookup, avoid loading the same artifact
# twice, even if we've loaded it both with weak and strong keys.
with suppress(KeyError):
return cls.__instantiated_artifacts[artifact_name]
# Now cache the loaded artifact
cls.__instantiated_artifacts[artifact_name] = element
# Walk the dependencies and load recursively
artifact = element._get_artifact()
for dep_artifact_name in artifact.get_dependency_artifact_names():
dependency = ArtifactElement.new_from_artifact_name(dep_artifact_name, context, task)
element._add_build_dependency(dependency)
return element
# clear_artifact_name_cache()
#
# Clear the internal artifact refs cache
#
# When loading ArtifactElements from artifact refs, we cache already
# instantiated ArtifactElements in order to not have to load the same
# ArtifactElements twice. This clears the cache.
#
# It should be called whenever we are done loading all artifacts in order
# to save memory.
#
@classmethod
def clear_artifact_name_cache(cls):
cls.__instantiated_artifacts = {}
########################################################
# Override internal Element methods #
########################################################
def _load_artifact(self, *, pull, strict=None): # pylint: disable=useless-super-delegation
# Always operate in strict mode as artifact key has been specified explicitly.
return super()._load_artifact(pull=pull, strict=True)
# Once we've finished loading an artifact, we assume the
# state of the loaded artifact. This is also used if the
# artifact is loaded after pulling.
#
def _load_artifact_done(self):
self._mimic_artifact()
super()._load_artifact_done()
########################################################
# Implement Element abstract methods #
########################################################
def configure(self, node):
pass
def preflight(self):
pass
def configure_sandbox(self, sandbox):
install_root = self.get_variable("install-root")
# Tell the sandbox to mount the build root and install root
sandbox.mark_directory(install_root)
# verify_artifact_ref()
#
# Verify that a ref string matches the format of an artifact
#
# Args:
# ref (str): The artifact ref
#
# Returns:
# project (str): The project's name
# element (str): The element's name
# key (str): The cache key
#
# Raises:
# ArtifactElementError if the ref string does not match
# the expected format
#
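# Example (illustrative): a well-formed ref looks like
# "myproject/hello/<64-character cache key>".
#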
def verify_artifact_ref(ref):
try:
project, element, key = ref.split("/", 2) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if not _cachekey.is_key(key):
raise ValueError
except ValueError:
raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
return project, element, key | PypiClean |
/Kivy-2.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/kivy/factory_registers.py |
from kivy.factory import Factory
r = Factory.register
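# Example (illustrative): once registered, classes resolve lazily through
# the Factory, importing their module only on first access:
#
#     from kivy.factory import Factory
#     btn = Factory.Button(text='hello')   # imports kivy.uix.button on demand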
r('Animation', module='kivy.animation')
r('AnimationTransition', module='kivy.animation')
r('ExceptionHandler', module='kivy.base')
r('Cache', module='kivy.cache')
r('ClockBase', module='kivy.clock')
r('ColorPicker', module='kivy.uix.colorpicker')
r('ColorWheel', module='kivy.uix.colorpicker')
r('ConfigParser', module='kivy.config')
r('EventDispatcher', module='kivy.event')
r('Observable', module='kivy.event')
r('FactoryException', module='kivy.factory')
r('Gesture', module='kivy.gesture')
r('GestureDatabase', module='kivy.gesture')
r('GesturePoint', module='kivy.gesture')
r('GestureStroke', module='kivy.gesture')
r('Parser', module='kivy.lang.parser')
r('LoaderBase', module='kivy.loader')
r('ProxyImage', module='kivy.loader')
r('LoggerHistory', module='kivy.logger')
r('NumericProperty', module='kivy.properties')
r('StringProperty', module='kivy.properties')
r('ListProperty', module='kivy.properties')
r('ObjectProperty', module='kivy.properties')
r('BooleanProperty', module='kivy.properties')
r('BoundedNumericProperty', module='kivy.properties')
r('OptionProperty', module='kivy.properties')
r('ReferenceListProperty', module='kivy.properties')
r('AliasProperty', module='kivy.properties')
r('NumericProperty', module='kivy.properties')
r('DictProperty', module='kivy.properties')
r('VariableListProperty', module='kivy.properties')
r('ConfigParserProperty', module='kivy.properties')
r('ColorProperty', module='kivy.properties')
r('Property', module='kivy.properties')
r('SafeList', module='kivy.utils')
r('Vector', module='kivy.vector')
r('Color', module='kivy.graphics.context_instructions')
r('BindTexture', module='kivy.graphics.context_instructions')
r('PushMatrix', module='kivy.graphics.context_instructions')
r('PopMatrix', module='kivy.graphics.context_instructions')
r('Rotate', module='kivy.graphics.context_instructions')
r('Scale', module='kivy.graphics.context_instructions')
r('Translate', module='kivy.graphics.context_instructions')
r('Transform', module='kivy.graphics.context_instructions')
r('MatrixInstruction', module='kivy.graphics.context_instructions')
r('Fbo', module='kivy.graphics.fbo')
r('Instruction', module='kivy.graphics.instructions')
r('InstructionGroup', module='kivy.graphics.instructions')
r('ContextInstruction', module='kivy.graphics.instructions')
r('VertexInstruction', module='kivy.graphics.instructions')
r('Canvas', module='kivy.graphics.instructions')
r('CanvasBase', module='kivy.graphics.instructions')
r('Callback', module='kivy.graphics.instructions')
r('RenderContext', module='kivy.graphics.instructions')
r('Shader', module='kivy.graphics.shader')
r('Texture', module='kivy.graphics.texture')
r('TextureRegion', module='kivy.graphics.texture')
r('Matrix', module='kivy.graphics.transformation')
r('VBO', module='kivy.graphics.vbo')
r('VertexBatch', module='kivy.graphics.vbo')
r('StencilPush', module='kivy.graphics.stencil_instructions')
r('StencilPop', module='kivy.graphics.stencil_instructions')
r('StencilUse', module='kivy.graphics.stencil_instructions')
r('StencilUnUse', module='kivy.graphics.stencil_instructions')
r('ScissorPush', module='kivy.graphics.scissor_instructions')
r('ScissorPop', module='kivy.graphics.scissor_instructions')
r('Triangle', module='kivy.graphics.vertex_instructions')
r('Quad', module='kivy.graphics.vertex_instructions')
r('Rectangle', module='kivy.graphics.vertex_instructions')
r('RoundedRectangle', module='kivy.graphics.vertex_instructions')
r('BorderImage', module='kivy.graphics.vertex_instructions')
r('Ellipse', module='kivy.graphics.vertex_instructions')
r('Line', module='kivy.graphics.vertex_instructions')
r('SmoothLine', module='kivy.graphics.vertex_instructions')
r('Point', module='kivy.graphics.vertex_instructions')
r('Bezier', module='kivy.graphics.vertex_instructions')
r('Mesh', module='kivy.graphics.vertex_instructions')
r('Svg', module='kivy.graphics.svg')
r('BoxShadow', module='kivy.graphics.boxshadow')
r('MotionEventFactory', module='kivy.input.factory')
r('MotionEventProvider', module='kivy.input.provider')
r('Shape', module='kivy.input.shape')
r('ShapeRect', module='kivy.input.shape')
r('ActionBar', module='kivy.uix.actionbar')
r('ActionItem', module='kivy.uix.actionbar')
r('ActionButton', module='kivy.uix.actionbar')
r('ActionToggleButton', module='kivy.uix.actionbar')
r('ActionCheck', module='kivy.uix.actionbar')
r('ActionSeparator', module='kivy.uix.actionbar')
r('ActionDropDown', module='kivy.uix.actionbar')
r('ActionGroup', module='kivy.uix.actionbar')
r('ActionOverflow', module='kivy.uix.actionbar')
r('ActionView', module='kivy.uix.actionbar')
r('ContextualActionView', module='kivy.uix.actionbar')
r('AnchorLayout', module='kivy.uix.anchorlayout')
r('BoxLayout', module='kivy.uix.boxlayout')
r('GridLayout', module='kivy.uix.gridlayout')
r('PageLayout', module='kivy.uix.pagelayout')
r('Accordion', module='kivy.uix.accordion')
r('AccordionItem', module='kivy.uix.accordion')
r('Button', module='kivy.uix.button')
r('ButtonBehavior', module='kivy.uix.behaviors.button')
r('ToggleButtonBehavior', module='kivy.uix.behaviors.togglebutton')
r('DragBehavior', module='kivy.uix.behaviors.drag')
r('FocusBehavior', module='kivy.uix.behaviors.focus')
r('CompoundSelectionBehavior', module='kivy.uix.behaviors.compoundselection')
r('KNSpaceBehavior', module='kivy.uix.behaviors.knspace')
r('CodeNavigationBehavior', module='kivy.uix.behaviors.codenavigation')
r('TouchRippleBehavior', module='kivy.uix.behaviors.touchripple')
r('TouchRippleButtonBehavior', module='kivy.uix.behaviors.touchripple')
r('EmacsBehavior', module='kivy.uix.behaviors.emacs')
r('CoverBehavior', module='kivy.uix.behaviors.cover')
r('Bubble', module='kivy.uix.bubble')
r('BubbleButton', module='kivy.uix.bubble')
r('Camera', module='kivy.uix.camera')
r('Carousel', module='kivy.uix.carousel')
r('CodeInput', module='kivy.uix.codeinput')
r('CheckBox', module='kivy.uix.checkbox')
r('DropDown', module='kivy.uix.dropdown')
r('EffectWidget', module='kivy.uix.effectwidget')
r('FloatLayout', module='kivy.uix.floatlayout')
r('RelativeLayout', module='kivy.uix.relativelayout')
r('ScatterLayout', module='kivy.uix.scatterlayout')
r('ScatterPlaneLayout', module='kivy.uix.scatterlayout')
r('FileChooserListView', module='kivy.uix.filechooser')
r('FileChooserIconView', module='kivy.uix.filechooser')
r('FileChooser', module='kivy.uix.filechooser')
r('Image', module='kivy.uix.image')
r('AsyncImage', module='kivy.uix.image')
r('Label', module='kivy.uix.label')
r('Layout', module='kivy.uix.layout')
r('ModalView', module='kivy.uix.modalview')
r('ProgressBar', module='kivy.uix.progressbar')
r('Popup', module='kivy.uix.popup')
r('Scatter', module='kivy.uix.scatter')
r('ScatterPlane', module='kivy.uix.scatter')
r('ScrollView', module='kivy.uix.scrollview')
r('Settings', module='kivy.uix.settings')
r('Slider', module='kivy.uix.slider')
r('Screen', module='kivy.uix.screenmanager')
r('ScreenManager', module='kivy.uix.screenmanager')
r('Spinner', module='kivy.uix.spinner')
r('Splitter', module='kivy.uix.splitter')
r('StackLayout', module='kivy.uix.stacklayout')
r('StencilView', module='kivy.uix.stencilview')
r('Switch', module='kivy.uix.switch')
r('TabbedPanel', module='kivy.uix.tabbedpanel')
r('TabbedPanelHeader', module='kivy.uix.tabbedpanel')
r('TextInput', module='kivy.uix.textinput')
r('ToggleButton', module='kivy.uix.togglebutton')
r('TreeView', module='kivy.uix.treeview')
r('TreeViewLabel', module='kivy.uix.treeview')
r('TreeViewNode', module='kivy.uix.treeview')
r('ShaderTransition', module='kivy.uix.screenmanager')
r('SlideTransition', module='kivy.uix.screenmanager')
r('SwapTransition', module='kivy.uix.screenmanager')
r('WipeTransition', module='kivy.uix.screenmanager')
r('FadeTransition', module='kivy.uix.screenmanager')
r('Sandbox', module='kivy.uix.sandbox')
r('Video', module='kivy.uix.video')
r('VideoPlayer', module='kivy.uix.videoplayer')
r('VideoPlayerVolume', module='kivy.uix.videoplayer')
r('VideoPlayerStop', module='kivy.uix.videoplayer')
r('VideoPlayerPlayPause', module='kivy.uix.videoplayer')
r('VideoPlayerProgressBar', module='kivy.uix.videoplayer')
r('VKeyboard', module='kivy.uix.vkeyboard')
r('Widget', module='kivy.uix.widget')
r('WidgetException', module='kivy.uix.widget')
r('RstDocument', module='kivy.uix.rst')
r('KineticEffect', module='kivy.effects.kinetic')
r('ScrollEffect', module='kivy.effects.scroll')
r('DampedScrollEffect', module='kivy.effects.dampedscroll')
r('OpacityScrollEffect', module='kivy.effects.opacityscroll')
r('Recognizer', module='kivy.multistroke')
r('MultistrokeGesture', module='kivy.multistroke')
r('UnistrokeTemplate', module='kivy.multistroke')
r('ProgressTracker', module='kivy.multistroke')
r('GestureSurface', module='kivy.uix.gesturesurface')
r('GestureContainer', module='kivy.uix.gesturesurface')
r('RecycleViewBehavior', module='kivy.uix.recycleview.__init__')
r('RecycleView', module='kivy.uix.recycleview.__init__')
r('LayoutSelectionBehavior', module='kivy.uix.recycleview.layout')
r('RecycleLayoutManagerBehavior', module='kivy.uix.recycleview.layout')
r('RecycleDataViewBehavior', module='kivy.uix.recycleview.views')
r('RecycleKVIDsDataViewBehavior', module='kivy.uix.recycleview.views')
r('RecycleDataAdapter', module='kivy.uix.recycleview.views')
r('RecycleDataModelBehavior', module='kivy.uix.recycleview.datamodel')
r('RecycleDataModel', module='kivy.uix.recycleview.datamodel')
r('RecycleLayout', module='kivy.uix.recyclelayout')
r('RecycleGridLayout', module='kivy.uix.recyclegridlayout')
r('RecycleBoxLayout', module='kivy.uix.recycleboxlayout')
/Biomatters_Azimuth-0.2.6-py3-none-any.whl/azimuth/corrstats.py
__author__ = 'psinger'
import numpy as np
from scipy.stats import t, norm
from math import atanh, pow
from numpy import tanh
def rz_ci(r, n, conf_level = 0.95):
    """Confidence interval for a correlation via Fisher's r-to-z transform."""
    zr_se = pow(1/(n - 3), .5)
moe = norm.ppf(1 - (1 - conf_level)/float(2)) * zr_se
zu = atanh(r) + moe
zl = atanh(r) - moe
return tanh((zl, zu))
def rho_rxy_rxz(rxy, rxz, ryz):
    """Correlation between the sample correlations rxy and rxz (used by Zou's CI)."""
num = (ryz-1/2.*rxy*rxz)*(1-pow(rxy,2)-pow(rxz,2)-pow(ryz,2))+pow(ryz,3)
den = (1 - pow(rxy,2)) * (1 - pow(rxz,2))
return num/float(den)
def dependent_corr(xy, xz, yz, n, twotailed=True, conf_level=0.95, method='steiger'):
"""
    Calculates the statistical significance between two dependent correlation coefficients
    @param xy: correlation coefficient between x and y
    @param xz: correlation coefficient between x and z
    @param yz: correlation coefficient between y and z
    @param n: number of elements in x, y and z
    @param twotailed: whether to calculate a one- or two-tailed test, only works for 'steiger' method
    @param conf_level: confidence level, only works for 'zou' method
    @param method: defines the method used, 'steiger' or 'zou'
    @return: t and p-val
"""
if method == 'steiger':
d = xy - xz
determin = 1 - xy * xy - xz * xz - yz * yz + 2 * xy * xz * yz
av = (xy + xz)/2
cube = (1 - yz) * (1 - yz) * (1 - yz)
t2 = d * np.sqrt((n - 1) * (1 + yz)/(((2 * (n - 1)/(n - 3)) * determin + av * av * cube)))
        p = 1 - t.cdf(abs(t2), n - 3)  # Williams' t has n - 3 degrees of freedom
if twotailed:
p *= 2
return t2, p
elif method == 'zou':
L1 = rz_ci(xy, n, conf_level=conf_level)[0]
U1 = rz_ci(xy, n, conf_level=conf_level)[1]
L2 = rz_ci(xz, n, conf_level=conf_level)[0]
U2 = rz_ci(xz, n, conf_level=conf_level)[1]
rho_r12_r13 = rho_rxy_rxz(xy, xz, yz)
lower = xy - xz - pow((pow((xy - L1), 2) + pow((U2 - xz), 2) - 2 * rho_r12_r13 * (xy - L1) * (U2 - xz)), 0.5)
upper = xy - xz + pow((pow((U1 - xy), 2) + pow((xz - L2), 2) - 2 * rho_r12_r13 * (U1 - xy) * (xz - L2)), 0.5)
return lower, upper
else:
        raise ValueError("Unknown method; expected 'steiger' or 'zou'")
def independent_corr(xy, ab, n, n2 = None, twotailed=True, conf_level=0.95, method='fisher'):
"""
    Calculates the statistical significance between two independent correlation coefficients
    @param xy: correlation coefficient between x and y
    @param ab: correlation coefficient between a and b
    @param n: number of elements in xy
    @param n2: number of elements in ab (if distinct from n)
    @param twotailed: whether to calculate a one- or two-tailed test, only works for 'fisher' method
    @param conf_level: confidence level, only works for 'zou' method
    @param method: defines the method used, 'fisher' or 'zou'
    @return: z and p-val
"""
if method == 'fisher':
        xy_z = 0.5 * np.log((1 + xy)/(1 - xy))
        ab_z = 0.5 * np.log((1 + ab)/(1 - ab))
        if n2 is None:
            n2 = n
        se_diff_r = np.sqrt(1/(n - 3) + 1/(n2 - 3))
        diff = xy_z - ab_z
z = abs(diff / se_diff_r)
p = (1 - norm.cdf(z))
if twotailed:
p *= 2
return z, p
elif method == 'zou':
L1 = rz_ci(xy, n, conf_level=conf_level)[0]
U1 = rz_ci(xy, n, conf_level=conf_level)[1]
L2 = rz_ci(ab, n2, conf_level=conf_level)[0]
U2 = rz_ci(ab, n2, conf_level=conf_level)[1]
lower = xy - ab - pow((pow((xy - L1), 2) + pow((U2 - ab), 2)), 0.5)
upper = xy - ab + pow((pow((U1 - xy), 2) + pow((ab - L2), 2)), 0.5)
return lower, upper
else:
        raise ValueError("Unknown method; expected 'fisher' or 'zou'")
#print dependent_corr(.396, .179, .088, 200, method='steiger')
#print independent_corr(.560, .588, 100, 353, method='fisher')
#print dependent_corr(.396, .179, .088, 200, method='zou')
#print independent_corr(.560, .588, 100, 353, method='zou')
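

# A minimal runnable sketch mirroring the commented examples above; the
# correlation values and sample sizes are illustrative, not real data.
if __name__ == '__main__':
    t2, p = dependent_corr(.396, .179, .088, 200, method='steiger')
    print("Steiger: t = %.3f, p = %.4f" % (t2, p))
    z, p = independent_corr(.560, .588, 100, 353, method='fisher')
    print("Fisher: z = %.3f, p = %.4f" % (z, p))
    lower, upper = dependent_corr(.396, .179, .088, 200, method='zou')
    print("Zou CI for r_xy - r_xz: [%.3f, %.3f]" % (lower, upper))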
/Djblets-3.3.tar.gz/Djblets-3.3/docs/releasenotes/0.6.16.rst
============================
Djblets 0.6.16 Release Notes
============================
**Release date**: February 26, 2012
djblets.datagrid
================
* Removed an extraneous ``</span>`` in the paginator.
* Fixed a compatibility issue with Django 1.4 in the queries.
djblets.util
============
* Fixed parsing of tokens in block tags under Django 1.4.
We were failing to pass a tuple to parser.parse() when specifying the
end tags for a block tag. Somehow this never bit us before, but a
seemingly unrelated change in Django 1.4 triggered this broken
behavior, making all custom blocktags break.
  This led to some false positives in other fixes, where it seemed that
removing custom tags starting with "if" solved things. In reality, it
didn't solve anything. It just masked the problem.
This change should be compatible with all versions of Django.
djblets.webapi
==============
* Added support for resource-specific mimetypes.
A common method for REST services is to have payloads returned with
resource-specific mimetypes. These can help to identify the format of
a payload (useful for automatic bindings) without having to inspect
the URI.
This adds support for easily telling a resource to return such
mimetypes. If :py:attr:`mimetype_vendor` is specified,
:py:class:`WebAPIResource` can populate the accepted mimetypes list with
some vendor/resource-specific mimetypes that will be used by default for any
responses.
When there's a vendor mimetype, the resources will use the resource
names for the mimetypes, but these can be overridden using
:py:attr:`mimetype_list_resource_name` and
:py:attr:`mimetype_item_resource_name`.
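
  For example (an illustrative resource subclass; only the attribute
  names come from this release):

  .. code-block:: python

     from djblets.webapi.resources import WebAPIResource


     class SampleResource(WebAPIResource):
         name = 'sample'
         mimetype_vendor = 'example.com'

         # Optional overrides for the resource names used in the
         # generated vendor mimetypes:
         mimetype_list_resource_name = 'samples'
         mimetype_item_resource_name = 'sample'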
* Accepted ``true`` as a valid boolean value in the web API.
Patch by Jim Chen.
Contributors
============
* Christian Hammond
* David Trowbridge
* Jim Chen
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/utils/Utils.py
import functools
import os
import sys
import time
from contextlib import contextmanager
def getOS():
if os.name == "nt":
return "Windows"
elif os.name == "posix":
result = os.uname()[0]
# Handle msys2 posix nature still meaning it's Windows.
if result.startswith(("MSYS_NT-", "MINGW64_NT-")):
result = "Windows"
return result
else:
assert False, os.name
_linux_distribution_info = None
def _parseOsReleaseFileContents(filename):
result = None
base = None
version = None
from .FileOperations import getFileContentByLine
for line in getFileContentByLine(filename):
if line.startswith("PRETTY_NAME=") and "/sid" in line:
version = "sid"
if line.startswith("ID="):
result = line[3:].strip('"')
if line.startswith("ID_LIKE="):
base = line[8:].strip('"').lower()
if "ubuntu" in base:
base = "Ubuntu"
elif "debian" in base:
base = "Debian"
elif "fedora" in base:
base = "Fedora"
if line.startswith("VERSION="):
version = line[8:].strip('"')
if "SUSE Linux Enterprise Server" in line:
result = "SLES" # spell-checker: ignore SLES
return result, base, version
def getLinuxDistribution():
"""Name of the Linux distribution.
We should usually avoid this, and rather test for the feature,
but in some cases it's hard to manage that.
"""
if getOS() != "Linux":
return None, None, None
# singleton, pylint: disable=global-statement
global _linux_distribution_info
if _linux_distribution_info is None:
result = None
base = None
version = None
if os.path.exists("/etc/os-release"):
result, base, version = _parseOsReleaseFileContents("/etc/os-release")
elif os.path.exists("/etc/SuSE-release"):
result, base, version = _parseOsReleaseFileContents("/etc/SuSE-release")
elif os.path.exists("/etc/issue"):
result, base, version = _parseOsReleaseFileContents("/etc/issue")
if result is None:
from .Execution import check_output
try:
result = check_output(["lsb_release", "-i", "-s"], shell=False)
if str is not bytes:
result = result.decode("utf8")
except OSError:
pass
if result is None:
from nuitka.Tracing import general
general.warning(
"Cannot detect Linux distribution, this may prevent optimization."
)
result = "Unknown"
# Change e.g. "11 (Bullseye)"" to "11".
if version is not None and version.strip():
version = version.split()[0]
_linux_distribution_info = result.title(), base, version
return _linux_distribution_info
def getWindowsRelease():
if not isWin32OrPosixWindows():
return None
import platform
return platform.release()
def isDebianBasedLinux():
dist_name, base, _dist_version = getLinuxDistribution()
# False alarm, pylint: disable=superfluous-parens
return (base or dist_name) in ("Debian", "Ubuntu")
def isFedoraBasedLinux():
dist_name, base, _dist_version = getLinuxDistribution()
return (base or dist_name) == "Fedora"
def isWin32Windows():
"""The Win32 variants of Python does have win32 only, not posix."""
return os.name == "nt"
def isPosixWindows():
"""The MSYS2 variant of Python does have posix only, not Win32."""
return os.name == "posix" and getOS() == "Windows"
def isWin32OrPosixWindows():
return isWin32Windows() or isPosixWindows()
def isLinux():
"""The Linux OS."""
return getOS() == "Linux"
def isMacOS():
"""The macOS platform."""
return getOS() == "Darwin"
def isNetBSD():
"""The NetBSD OS."""
return getOS() == "NetBSD"
def isFreeBSD():
"""The FreeBSD OS."""
return getOS() == "FreeBSD"
def isOpenBSD():
"""The FreeBSD OS."""
return getOS() == "OpenBSD"
_is_alpine = None
def isAlpineLinux():
if os.name == "posix":
# Avoid repeated file system lookup, pylint: disable=global-statement
global _is_alpine
if _is_alpine is None:
_is_alpine = os.path.isfile("/etc/alpine-release")
return _is_alpine
else:
return False
def getArchitecture():
if getOS() == "Windows":
if "AMD64" in sys.version:
return "x86_64"
else:
return "x86"
else:
return os.uname()[4]
def getCPUCoreCount():
cpu_count = 0
if getOS() != "Windows":
# Try to sum up the CPU cores, if the kernel shows them, getting the number
# of logical processors
try:
# Encoding is not needed, pylint: disable=unspecified-encoding
with open("/proc/cpuinfo") as cpuinfo_file:
cpu_count = cpuinfo_file.read().count("processor\t:")
except IOError:
pass
# Multiprocessing knows the way.
if not cpu_count:
import multiprocessing
cpu_count = multiprocessing.cpu_count()
return cpu_count
def encodeNonAscii(var_name):
"""Encode variable name that is potentially not ASCII to ASCII only.
For Python3, unicode identifiers can be used, but these are not
possible in C, so we need to replace them.
"""
if str is bytes:
return var_name
else:
# Using a escaping here, because that makes it safe in terms of not
# to occur in the encoding escape sequence for unicode use.
var_name = var_name.replace("$$", "$_$")
var_name = var_name.encode("ascii", "xmlcharrefreplace")
var_name = var_name.decode("ascii")
return var_name.replace("&#", "$$").replace(";", "")
def hasOnefileSupportedOS():
return getOS() in ("Linux", "Windows", "Darwin", "FreeBSD")
def hasStandaloneSupportedOS():
return getOS() in ("Linux", "Windows", "Darwin", "FreeBSD", "OpenBSD")
def getUserName():
"""Return the user name.
Notes: Currently doesn't work on Windows.
"""
import pwd # pylint: disable=I0021,import-error
# spell-checker: ignore getpwuid,getuid
return pwd.getpwuid(os.getuid())[0]
@contextmanager
def withWarningRemoved(category):
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=category)
# These do not inherit from DeprecationWarning by some decision we
# are not to care about.
if "pkg_resources" in sys.modules and category is DeprecationWarning:
try:
from pkg_resources import PkgResourcesDeprecationWarning
except ImportError:
pass
else:
warnings.filterwarnings(
"ignore", category=PkgResourcesDeprecationWarning
)
yield
@contextmanager
def withNoDeprecationWarning():
with withWarningRemoved(DeprecationWarning):
yield
@contextmanager
def withNoSyntaxWarning():
with withWarningRemoved(SyntaxWarning):
yield
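

# Usage sketch (illustrative call site, not part of this module):
#
#     with withNoDeprecationWarning():
#         value = call_known_to_emit_deprecation_warning()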
def decoratorRetries(
logger, purpose, consequence, attempts=5, sleep_time=1, exception_type=OSError
):
"""Make retries for errors on Windows.
This executes a decorated function multiple times, and imposes a delay and
a virus checker warning.
"""
def inner(func):
if os.name != "nt":
return func
@functools.wraps(func)
def retryingFunction(*args, **kwargs):
for attempt in range(1, attempts + 1):
try:
result = func(*args, **kwargs)
except exception_type as e:
if not isinstance(e, OSError):
logger.warning(
"""\
Failed to %s in attempt %d due to %s.
Disable Anti-Virus, e.g. Windows Defender for build folders. Retrying after a second of delay."""
% (purpose, attempt, str(e))
)
else:
if isinstance(e, OSError) and e.errno in (110, 13):
logger.warning(
"""\
Failed to %s in attempt %d.
Disable Anti-Virus, e.g. Windows Defender for build folders. Retrying after a second of delay."""
% (purpose, attempt)
)
else:
logger.warning(
"""\
Failed to %s in attempt %d with error code %d.
Disable Anti-Virus, e.g. Windows Defender for build folders. Retrying after a second of delay."""
% (purpose, attempt, e.errno)
)
time.sleep(sleep_time)
continue
else:
if attempt != 1:
logger.warning(
"Succeeded with %s in attempt %d." % (purpose, attempt)
)
return result
logger.sysexit("Failed to %s, %s." % (purpose, consequence))
return retryingFunction
    return inner
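

if __name__ == "__main__":
    # Minimal demonstration sketch of decoratorRetries (not part of the
    # original module). The logger shim below only provides the two
    # methods the decorator calls; on non-Windows systems the decorator
    # returns the function unchanged, so the demo simply runs once.
    import tempfile

    class _DemoLogger:
        def warning(self, message):
            print("WARNING:", message)

        def sysexit(self, message):
            raise SystemExit(message)

    @decoratorRetries(
        _DemoLogger(),
        purpose="create a temporary file",
        consequence="demo aborted",
        attempts=3,
        sleep_time=0.1,
    )
    def _createTempFile():
        with tempfile.NamedTemporaryFile(delete=True):
            pass

    _createTempFile()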
/Neodroid-0.4.9-py36-none-any.whl/neodroid/environments/networking_environment.py
import logging
from neodroid.messaging import ClientEvents, message_client_event
from neodroid.messaging.message_client import MessageClient
from neodroid.utilities.unity_specifications.environment_snapshot import (
EnvironmentSnapshot,
)
__author__ = "Christian Heider Nielsen"
import time
from abc import ABC, abstractmethod
from .environment import Environment
__all__ = ["NetworkingEnvironment"]
class NetworkingEnvironment(Environment, ABC):
"""
"""
def __init__(
self,
*,
ip: str = "localhost",
port: int = 6969,
connect_to_running: bool = False,
on_connected_callback: callable = None,
on_disconnected_callback: callable = None,
on_timeout_callback: callable = None,
retries: int = 10,
connect_try_interval: float = 0.1,
**kwargs,
):
super().__init__(**kwargs)
# Networking
self._ip = ip
self._port = port
self._connect_to_running = connect_to_running
self._external_on_connected_callback = on_connected_callback
self._external_on_disconnected_callback = on_disconnected_callback
self._external_on_timeout_callback = on_timeout_callback
self._retries = retries
self._connect_try_interval = connect_try_interval
def __next__(self) -> EnvironmentSnapshot:
if not self._is_connected_to_server:
raise StopIteration
return self.react()
def _setup_connection(self, auto_describe: bool = True):
        connect_tries = iter(range(self._retries))  # iter() so next() below works; a bare range is not an iterator
self._message_server = MessageClient(
self._ip,
self._port,
on_timeout_callback=self.__on_timeout_callback__,
on_connected_callback=self.__on_connected_callback__,
on_disconnected_callback=self.__on_disconnected_callback__,
)
if auto_describe:
while self.description is None:
self.describe()
time.sleep(self._connect_try_interval)
logging.info(
f"Connecting, please make sure that the ip {self._ip} "
f"and port {self._port} "
f"are cd correct"
)
            n = next(connect_tries, self._retries)  # default once retries are exhausted
if n == self._retries:
raise ConnectionError
self._is_connected_to_server = True
else:
self._is_connected_to_server = True
@message_client_event(event=ClientEvents.CONNECTED)
def __on_connected_callback__(self):
"""
"""
if self._external_on_connected_callback:
self._external_on_connected_callback()
@message_client_event(event=ClientEvents.DISCONNECTED)
def __on_disconnected_callback__(self):
"""
"""
self._is_connected_to_server = False
if self._external_on_disconnected_callback:
self._external_on_disconnected_callback()
@message_client_event(event=ClientEvents.TIMEOUT)
def __on_timeout_callback__(self):
"""
"""
if self._external_on_timeout_callback:
self._external_on_timeout_callback()
@property
def is_connected(self):
"""
        @return: Whether a connection to the server is currently established.
        @rtype: bool
"""
return self._is_connected_to_server
@abstractmethod
def _close(self, *args, **kwargs):
raise NotImplementedError
def __enter__(self):
self.reset()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def close(self, *args, **kwargs):
"""
        @param args: Positional arguments forwarded to the subclass _close.
        @type args: tuple
        @param kwargs: Keyword arguments forwarded to the subclass _close.
        @type kwargs: dict
        @return: Whatever the subclass _close implementation returns.
        @rtype: object
"""
self._message_server.teardown()
return self._close(*args, **kwargs)
def __repr__(self):
return (
f"<NetworkingEnvironment>\n"
f" <IsConnected>{self.is_connected}</IsConnected>\n"
f"</NetworkingEnvironment>"
        )
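

# A usage sketch for a hypothetical concrete subclass (this class is
# abstract; the subclass name and the reset()/react() semantics are
# assumptions based on the methods referenced above):
#
#     with SomeConcreteEnvironment(ip="localhost", port=6969) as env:
#         for snapshot in env:  # __next__ delegates to react()
#             ...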
/Ibid-0.1.1.tar.gz/Ibid-0.1.1/ibid/plugins/oeis.py
from urllib2 import urlopen
import re
import logging
from ibid.compat import defaultdict
from ibid.plugins import Processor, match
from ibid.utils import plural
log = logging.getLogger('plugins.oeis')
features = {'oeis': {
'description': 'Query the Online Encyclopedia of Integer Sequences',
'categories': ('lookup', 'web', 'calculate',),
}}
class OEIS(Processor):
usage = u"""oeis (A<OEIS number>|M<EIS number>|N<HIS number>)
oeis <term>[, ...]"""
feature = ('oeis',)
@match(r'^oeis\s+([AMN]\d+|-?\d(?:\d|-|,|\s)*)$')
def oeis (self, event, query):
query = re.sub(r'(,|\s)+', ',', query)
f = urlopen('http://oeis.org/search?n=1&fmt=text&q='
+ query)
for i in range(3):
f.next() # the first lines are uninteresting
results_m = re.search(r'Showing .* of (\d+)', f.next())
if results_m:
f.next()
sequence = Sequence(f)
event.addresponse(u'%(name)s - %(url)s - %(values)s',
{'name': sequence.name,
'url': sequence.url(),
'values': sequence.values})
results = int(results_m.group(1))
if results > 1:
event.addresponse(u'There %(was)s %(count)d more %(results)s. '
u'See %(url)s%(query)s for more.',
{'was': plural(results-1, 'was', 'were'),
'count': results-1,
'results': plural(results-1, 'result', 'results'),
'url': 'http://oeis.org/search?q=',
'query': query})
else:
event.addresponse(u"I couldn't find that sequence.")
class Sequence(object):
def __init__ (self, lines):
cmds = defaultdict(list)
for line in lines:
line = line.lstrip()[:-1]
if not line:
break
line_m = re.match(r'%([A-Z]) (A\d+)(?: (.*))?$', line)
if line_m:
cmd, self.catalog_num, info = line_m.groups()
cmds[cmd].append(info)
else:
cmds[cmd][-1] += line
# %V, %W and %X give signed values if the sequence is signed.
# Otherwise, only %S, %T and %U are given.
self.values = (''.join(cmds['V'] + cmds['W'] + cmds['X']) or
''.join(cmds['S'] + cmds['T'] + cmds['U']))
self.name = ''.join(cmds['N'])
def url (self):
return 'http://oeis.org/' + self.catalog_num
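

# Illustrative parse of OEIS internal-format lines (doctest-style sketch;
# the entry shown is the real Fibonacci sequence, A000045). The trailing
# newlines matter because __init__ strips the last character of each line.
#
#     >>> from StringIO import StringIO
#     >>> f = StringIO("%N A000045 Fibonacci numbers.\n"
#     ...              "%S A000045 0,1,1,2,3,5,8\n"
#     ...              "\n")
#     >>> seq = Sequence(f)
#     >>> seq.url()
#     'http://oeis.org/A000045'
#     >>> seq.values
#     '0,1,1,2,3,5,8'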
# vi: set et sta sw=4 ts=4:
/EpyNN-1.2.11.tar.gz/EpyNN-1.2.11/epynn/convolution/forward.py
import numpy as np
# Local application/library specific imports
from epynn.commons.io import padding
def initialize_forward(layer, A):
"""Forward cache initialization.
:param layer: An instance of convolution layer.
:type layer: :class:`epynn.convolution.models.Convolution`
:param A: Output of forward propagation from previous layer.
:type A: :class:`numpy.ndarray`
    :return: Padded input of forward propagation for current layer.
    :rtype: :class:`numpy.ndarray`
"""
X = layer.fc['X'] = padding(A, layer.d['p'])
return X
def convolution_forward(layer, A):
"""Forward propagate signal to next layer.
"""
# (1) Initialize cache and pad image
X = initialize_forward(layer, A) # (m, h, w, d)
# (2) Slice input w.r.t. filter size (fh, fw) and strides (sh, sw)
Xb = np.array([[X[ :, h:h + layer.d['fh'], w:w + layer.d['fw'], :]
# Inner loop
# (m, h, w, d) ->
# (ow, m, h, fw, d)
for w in range(layer.d['w'] - layer.d['fw'] + 1)
if w % layer.d['sw'] == 0]
# Outer loop
# (ow, m, h, fw, d) ->
# (oh, ow, m, fh, fw, d)
for h in range(layer.d['h'] - layer.d['fh'] + 1)
if h % layer.d['sh'] == 0])
# (3) Bring back m along axis 0
Xb = np.moveaxis(Xb, 2, 0)
# (oh, ow, m, fh, fw, d) ->
# (m, oh, ow, fh, fw, d)
# (4) Add dimension for filter units (u) on axis 6
Xb = layer.fc['Xb'] = np.expand_dims(Xb, axis=6)
# (m, oh, ow, fh, fw, d) ->
# (m, oh, ow, fh, fw, d, 1)
# (5.1) Linear activation Xb -> Zb
Zb = Xb * layer.p['W']
# (m, oh, ow, fh, fw, d, 1) - Xb
# (fh, fw, d, u) - W
# (5.2) Sum block products
Z = np.sum(Zb, axis=(5, 4, 3))
# (m, oh, ow, fh, fw, d, u) - Zb
# (m, oh, ow, fh, fw, u) - np.sum(Zb, axis=(5))
    # (m, oh, ow, fh, u) - np.sum(Zb, axis=(5, 4))
# (m, oh, ow, u) - np.sum(Zb, axis=(5, 4, 3))
# (5.3) Add bias to linear activation product
Z = layer.fc['Z'] = Z + layer.p['b']
# (6) Non-linear activation
A = layer.fc['A'] = layer.activate(Z)
    return A    # To next layer
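

if __name__ == '__main__':
    # Minimal shape-check sketch (not part of EpyNN). The stub below only
    # mimics the handful of Convolution attributes used above, and it
    # assumes padding(A, 0) returns its input unchanged.
    class _LayerStub:
        pass

    layer = _LayerStub()
    layer.d = dict(p=0, fh=3, fw=3, sh=1, sw=1, h=8, w=8)
    layer.p = dict(W=np.random.randn(3, 3, 1, 4),  # (fh, fw, d, u)
                   b=np.zeros(4))                  # one bias per unit
    layer.fc = {}
    layer.activate = lambda z: np.maximum(z, 0)    # ReLU stand-in

    A = np.random.randn(2, 8, 8, 1)                # (m, h, w, d)
    print(convolution_forward(layer, A).shape)     # expected: (2, 6, 6, 4)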
/MPT5.0.1-0.1.tar.gz/MPT5.0.1-0.1/src/MPT5/Res/Allicons.py
from wx.lib.embeddedimage import PyEmbeddedImage
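
# Usage note (sketch): each PyEmbeddedImage below exposes the standard
# wx.lib.embeddedimage accessors, e.g.
#
#     bmp = accept_button.GetBitmap()   # wx.Bitmap
#     icon = accept_button.GetIcon()    # wx.Icon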
accept_button = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAmpJREFUeNqkU01oE1EQ/t52E5PNj01BSqlWyLXeWgvGggpF'
b'RAo96c2TImgRKc1B8OLBQ9GmIFVPemlBwYKgFKRQFLyI+bGaUtHYWCzW1ND8NnbT7J/zdpNN'
b'td4cmH3wZr5vvp2ZxwzDwP+YyD+MMfvi9Dh6VA1juo4BTs0jggC0CJgnvzY7ikQjlxdn5qdO'
b'cOoW7gW8/svnjl1E98FDUJQKivI6fhZXkVx5j9jyEmqKNjMbxtldBAR+0tnWcebu+Wkk16bw'
b'4dsUVJ2ro+rkLkcQFdmHV8lFyDX9PikZtgkGI6zH55Lidy5M4+WnYeQr66ZsUxdryLW8UGxD'
b'LJWHZqD3+YiREHhQUXF78PAQFn88QI4kC9QZ1mI5KEOlQ9aASyEDDlce7QFA1zAOK2zaiWD7'
b'ASznXpiya+RUAXRApbNKDCP91rRcEuD3mMqO21PgoapWxjYhroasxMgbBgfR16hyuL85atFh'
b'TgSN6QvWGIHyr6wluW6jRwxsUuVwqAmeeMsginZbmgQC3eTKGTiJPRJvhq8fbYIjMQb3HgIT'
b'QtMZGqtjEmjbiH7JpKFXW+FxUfI79se2RRIW2OEESgWGzS3qTRVRuwcbHzH22Zd96nHvxb4u'
b'wEskk0kGvb4HEgfT72kKkM0CmYKBLGFsBQuPMVfewMxCuoSVlIBKHnBTNS9120Ndd1KZUo7h'
b'a4ohnTFQ4bmPMGf2b8cqd528gYdSKwY6Agw+ydpA8xepFVw2r8zB8zcRpuvVXW+BLNg9hND+'
b'XlwR3ehrjIqHVRnR73FMLj3Daw7+52OqG9VGJ7n/r5dbJl8j39r5Gn8LMAAV2Qj37wA9/gAA'
b'AABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
add = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAlxJREFUeNqkU0toU1EQPfc2bZKHSRpLaUu0YMGNXbYVEfxh'
b'EZFCV3UnCIqCRUTsIgsXLlyEWhdaFRcKSnZ2pRTclC7EhYa0kErFQrQgjQn5NE3S5vs+nXn5'
b'FVNw4YV53Hfnzjkzc+YKwzDwP8vCHyFE4+DSDIZUDT5dxyhDs0dKoE1igcw7fw9L9btMLsxP'
b'DeDiNJ67DzhvXTlzA8f6B1FWc9jKR5HIbCC0HkIwvIpyRZubn8LlFgAKfuc52Dfx7JofK5G3'
b'WF73o6RydoCV8lQ6BrBTdGExtIJCWXtBmUw2AMYeiyGHTQk+ue7H4o9JpLZjUHXgzqlqf55+'
b'FrARiBTtSKa78HUtBk3H8Ie7xpLkCxUVj8ZGxvHtzyukCjEqGNCbbYHGRv9GWwV2ZRO9bvJr'
b'mGGfrN05d6TnMMLJjyZzmSJKehOA90Uqh0uyK2U4FbO7ZxsqcKIlbYvq7MTNkXSLVN7TTalf'
b'LgtTlbr6siojkNlJQFjkv4WX+8yBJIBUNgrVsYlZYuAyOF3viSqN74swlWin3lgJgGYE9dEx'
b'8bQSAuHoLxhFN2xWqpPM2tFk4T2fsS+bAXJ5Iiki0ABIfodvLRJHMqHD0CUszGZpAvCezzTK'
b'Kh4XiKYNxCnGLL82SMr5+3jj6sbEUY9Ed48ORxfgsvWaAJliDLkUBccEfkYNZOKYW3iIqxSb'
b'3zvK/Rce4LXSidE+t4DDbjTq5I7nClXm7aQZPEXHv1veAq2BwXGcPDSM2xY7jtelYrdaQGAj'
b'iNnV9/jEwfs+ptriMfGQOf8SMEsWIcvvfY27AgwA3EwHS5LUr7EAAAAASUVORK5CYII=')
#------------------------------------------------------------------------------------------------
application_add = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAepJREFUeNqUkjFIW1EUhv9z733mvac2aOJQ0hrUQlEIgpam'
b'dhCHIp1cipisFbp0aWvADjqpdGjExU0hq5B2cbQuQkNBsYOCWUTFwQyCiGI6xPdez30aothG'
b'vXCG+975/vtz/kOPppZyJcd96Xke7nqICIYUC1JQUml4cuA5HNeFuAPsckkhMLa4mhAkk+qc'
b'wZCpkFrah2IFqgJrj+eskO5vhuZq+EkW8NBgGngaroMkba+KACs4XLpfc/quSqzYFzW5orjP'
b'0Zw+yr0cXj6/xRZvt0Dc1d7eAc1pUpX/1dp18P4Dk/9d+POpdEgWkBUB27Z4wjcFiEEhFMdm'
b'QZEBUW6hIM+hBqocv2mZbOsmLoUBywhg/tdbLG5m/Kg5NHSFB9EZyl5xYFm8C9cVOGdIzjbz'
b'8x3WCxkM9AI9kRGsFaaR387i6BizomxJSLZpGNdKw/p8+z2HWCtwVgRioTROzoAnj/0k3gte'
b'yVsjCxhAnF/++OLC4Ye4h2cPR/09oPrUwgrZD3p5pf8xAz3pRiTb4oi1AcUSMP7Kw8QywWbR'
b'jR2O8TSdGObOYDUHxRk5uXvgvI6EgdzeZ548sHMA/DnCd4p++VFl9yVKThCFse6WoWl8DTTg'
b'jW+bLuBsCp/oHtvbzNV05X7Itf9XgAEAdUyeqmc4PvoAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
application_delete = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAeZJREFUeNqUkr9rU1EUx7/n3vte3ksaRK2L2AQXSxxcChIE'
b'oUXRDhIUJ/8GB0EyCoK0OOhQRAXp1kUUsohSaRYdXAQpCLUuClYQ8UehgdLWJvf6PabVFtO0'
b'vXCGl3s+3/vN+R45NDr1arXlT4QQsNMjIoiseWGNDDmFRyrH0fIeZgewZ1ljcO3J60EjFq5J'
b'cH/iUJ2ag6OCdIHVY5MKt88UoFzMJykQsDeJ0N/bAytqr4sAFVos7VdOv90qFQeLCauI3Rzl'
b'9Di/NrzZ2Xe0uL0FYVepdBTKKenW73LZHoQtYJ26CisuaD8dxMCz/gpksymvpBMNsQ4SxUAU'
b'wZh2z0omhwZ/c+vxJ2lCWx14R4h3X8Zu4OfDcRhrkWFa1dIFXPk8ucFBmnIXNisI81bg051R'
b'4OUzlCsVuqAT38S+mbeYd7m7bs0RGx2r8+y+TtxDeegkwsIPBOYvFO3rO4hv8wuXHVdy28ji'
b'OIKcOg8MX/o3pfpj+OkZuNQvvdlzfXKAK/3fDAL/wq9MHu+TGKH2AM1H9+HZZ/io6EDpRAWP'
b'sPLdHDw9OzDSn5rhQt7+2QON8+Mi8KGxXJPizfqWYIs5NxzjqpYPPz997FZvml5s7wLnsrxS'
b'O1efviq72N4C68CG7++sud8CDACp8J9A4CbcpgAAAABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
application_edit = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAkFJREFUeNqUUk1oE0EYfTM7k+xuk2qIF0trtIriQZQatfZg'
b'KkJFjDkpHsSCCIIXqdBT6dGgYPFisQe1GiFFRQRTT/GgEStiLXhKL4o/VK1SamqNtWyy4zcl'
b'0VqTUgfeMszOe9/7vnmsPp4ecopui1IKS12MMcwU3MGAJWNCk8/EtqPouuBLILsEg3N0p54f'
b'0DVFgYhBU6Az/R6CFNgiZO2xQAo9baugeR4qSQIKAVNiwwofDKbtLSJACkVCwPLA0Xs6Ew4p'
b'toZMQgj/s75pN+RXuKXhjY5myWJlC+UTRr0bHgPOtdd4Y0m40zmI8qUa2wdVxb+eOucGhBS4'
b'm7qHfVv2YPzTBKz8tPotYNsWTbiCAOPghqTKFpLJAcSiUSQGrmPbZBNk7jN4+flNy4Rpev+G'
b'ZcH2+WEH/Mh2r8GRiI2rN26huWkHHvI0wn0nw8zbdV/9jO+HWyxQFtSfnqlfZhgwaJs5vRaR'
b'Qw3A2DtkjXb0D/3AxWAUXqloLCXX3BCQUs5BaJTIjzoaibwa+DgB5HJ4kkqg58J5SGcWfirK'
b'pVE9f5lTDWg9WA98GAemviAxXIcTibdz/wR0cund/J03M8yu3UWRhu5AD9JRJp6uv4OdbZSN'
b'yRngWS+SL0M4WncFpvMVHuXAzeceB4LBiG5gHWHZwuq344df1DZuxN7Ny9Hfm8TxS8PhBVem'
b'QmcfvGL0mZd1hlnuRdfYOeRUjRLOd+xuDqHlWF94sGPTSPvKy/C5ebLu/hOySmvrvP1ItUu/'
b'BBgASa3Cu6z82mEAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
application_form = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAa9JREFUeNqck7tKA0EUhv+5bGIheCkEiWgKu0SNNoJgYoiC'
b'MQn4NKltbHwaURAREzWFICgoWPgCFlHxUtiYZNZzZjYma1TU2Z1lmZnvP3P+MyPGNg92Gi1T'
b'avk+AO6/aQJKCHhK7uo3grfW50EifxIgGOXtsxILID7ch63TWx7n98fmB5/yQgzM6iYFzowa'
b'ZEoj6MHFdwrUIgbMamMHBS6ubwiQXSw9slfBZ6+MwdxsAszq9oQX6SNAO4gW8bqry3ML+MaF'
b'dYICqZnZD8GOgPZwWDmEkhKGoNzyCmLjcWTTi6EdVE9qkEp2CQTVU1piNZ+3UTiioW2mksme'
b'FFJTU7i/q6PNaT9whiP7wmYOSFfnx+dnPDw92ZRcDnYWmnbLjB9OgX6lgmShAKhWKx3jLO9M'
b'zWWzX3jgRbC3v28XMbCWL2BsIo7cUiaUQuXomILoTwIECKVQKBbJAxk4bzCdSPR4ME2+tJqN'
b'j7S0cJVBVCvYaongBCkWMriv10MpcI962p1a6trj2hI81B/9dBV+uBfCMcxq8/pSG9w4Tjc4'
b'/O/vkoWZ5Q1PUh/A/9rLuwADAK8loV8tdNaPAAAAAElFTkSuQmCC')
#-----------------------------------------------------------------------------------------------------
application_lightning = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAjFJREFUeNqUU0toE1EUPe/Nm2RmOq0dPwVpbBYqoosEWlAQ'
b'seIi1EqL4MKtC0Hdxy4kKoVCwe+uCi7dqAsLTVDRKPgFxY1GjQ1KtGqLxKY1jQlxJvO8E1uF'
b'2Nb64Czm3rnnnnfufazp+PURXfAeKSWWehhjKDtu3NLVXuEVD/RuRtV1wZdQ7BIUzhEbedrj'
b'9RQOFa7QBKK3xiCIgS1S7Gl0iOF0pA22KyF+EUhYmooNK00ozJM3r+gaPIIqETTrGsquAkdq'
b'EDYFdgQ1QhD/c8quiclADMKdNS+dfk0d/pbAmIAsTkA86KO8qOVVRcEnavrVNaWY+7HBMCHr'
b'9dM3U/xw8s9JN0dr114KcjJRIDucwON0LvabwDB0criegIMJDTOF99BaAvAt70YpE8X0mIOP'
b'toX9Z2+cF3Pj18gYt34ViID7DExNjsIMtePHxCU4FQOFDzkMFLsxOjSe/6NA18lhWVevgK6L'
b'79mHaIisR7U0jUpRRaVs44J5kRoKKfisaq4IwgLzd8t4F0/Ab1lYszWElvBa5B6lkHnxOclV'
b'5d/7134si1Whw2huC4BzH8aHk+h/Gca2o08Ossbo5XvMaNpu04bUe+CN1YZa2/3UxnPYtKcL'
b'r64k0P9lF27mV9+HFez0LrCOsGwxBUMndp+JBKY6vYfQd8c4dO1q8hmFv7UO3n3LgoO3F3g0'
b'HEXoeOPfh7xslCVXKXQcSe2kVObAqZMzcWcL/KRvqaeDEJ4v8VOAAQAI1MRifLBK3QAAAABJ'
b'RU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
application_put = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAixJREFUeNqUUktoE1EUPe8z895MY2tqkApqta0pguAP3ChV'
b'gi0oVEHduLAbN7W7oCCiqKggiL+l6E6sgqCL6MpYIeJCXUsKXQTbhYKhNVajtJPMeN8kk3Qh'
b'tn1wuO93zv0y5/zLTKuSg0EQYDmLMYbZucoLacjXDu1C1ffBl0j2CYJzXMh8HJQVIq7SEmde'
b'TUGSAluEbOKskMLNgfUwXBIIENcWehMxCGZCW0SAFKoEwzFc6ZHavk5N6MRyl+FKv1688fE8'
b'hcfC4tTLRBFJWFJDMkX3nLz75HUOXvU3kr2bYLgyUnO0g4A1y8hpr2QMd3PHMDbxJkzN+Eol'
b'U0j3PW38awjoUIDVfZN3bqFFxUPyveND+PXnK2LOGgw/eYjLB9sb/ySCSMCm9rDapbDhWC4o'
b'+rAzpflJFKZz6Fq9Nzw7bTVHQlB60fhopcPqcibg2ApbLzG4NsLOzHqTYHbNCvq7+yoDdR6u'
b'lW3OjtYKStlQZE1iqW3A/eE0Th4AZrzPgFWz5vzg1Gns3w608jw4j4rOBaSUdFE7fism8Oj9'
b'HXR3HIFP3iN0dxzF6IdbKBbX4ae/GdwS/x7g0ZEiNron8Dr/HGsTO6Bci+xOjOWfoTy9AbeH'
b'pqilNphOP87ZK1b2eVWf+tpsoRCKbBw91lkc3pPFlmQ7Pk3MIPNuAIXqDcq9hHLpy1uTQA+h'
b'7X8T138R12UL+itlZLNXcG7B0w+2xKntIsQJ3wmFhQ9/BRgAwTqsFR2Vlj8AAAAASUVORK5C'
b'YII=')
#-----------------------------------------------------------------------------------------------------
application_get = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAiNJREFUeNqUUk1IVFEYPfe+O/PuPCefk0GLIFtMuW1VtOiP'
b'UILQInARRUUbC4oWRQsJJ6MGAmE2QrgJN61qo2KKjVazCBJJIlCmRUTkokULsVKb99O5b3xp'
b'RakXDt+73HfOPfd8n3A7hwa0slrCMMRGlhACi54/qAz5Tuse+EEAuU5yQFhS4ubARIvySKzX'
b'CtdHP0JRQaxBNj49KnQ3b4fhUiBERifQuCUNSxhrawhQwScMx3BVhWqHGjTRgI0uw1XBcngz'
b'M9O0J6Jw/oiLzhJIKBtSKGblwfcXsLMxCxqAin9L6RRCIf9K2pJJaFWL7mfHMVouomnXEdw4'
b'3L8sLVaC1xTQWv9CSjtIO7XIbNqKwos2+JUiRq4+Zh1DodQWcSTFJcJYIAnbjmHDcRy46Trc'
b'e3oCS94TXGzqwfPpK7jU3IMf3jDuF89QIAMVj4+2dZRu1bpF2zZu9bdiwR/EuYMdGCtfjs5M'
b'Pcv9o4k8ssnPKxlobUdtqVpTBDBP8ql9pzH5KQ+6jVpoMp6czePk3vNwdB9nJw5dWtEgrV7j'
b'b4FX5YcRqXChHVOzvdi9rR3XHvSyK310SVrC+vcAv7kb4mWOw0KBefkOsqZaDWW8g2eLr/mE'
b'73MlNzd8oOIHCMLfWyjZf8jN2J8Cluz3SLrV6vDmuq4phF/LJfOALOH+b+KOdqJL1OCYucA8'
b'OfyGoZHbyPFoTqxzancQ9av2X4gP5uOnAAMA9ZixG4ZCuckAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
brain_trainer = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAApZJREFUeNp0U1tIVFEUXefOmXGcUccZn4iPMbHyUWYiFlKp'
b'CX0EQRBBUJg9fvwJguonP5KgD01Io5/6SMqPCCIhIqiPyqSHD9IMxTA1ynR8zNg0M97n6dzR'
b'K+pMCzb33Mtea++z97qEMQYdhBAYuFlX1GSJtV1Kyd1mZRqDIAiYGvywTC0xLRfvDzfqOQYv'
b'fDBebp0rddxuqJAG33WwFYRYUPwWfuoY6ulgLacKfq/nUayDJIn36q61m802O4Y6T2Dyy0C4'
b'OtNUxBUfQfXJy4AqpbfStus8/arOEdYLOJLTjqVk5ON58wXMfv+KnGw7FBoHmyMezoUXeNZ8'
b'Gjv21UAS5XqDs0EgweXCzFg3MmJnIasU0wsUKU4TP5vxadyGJG2cZ2lwFxZlRBWQY51t6Vuy'
b'QJJ3wuxIg6bIPF+FhTIUZmuY8/F0zQemymucDTP40fu23jNShv3H60AoRU/XU0x9fo+sNAGB'
b'gIy4rdVAyMsnqEUKtJ7d1bT3YGV8amoC5MB8eK2Vhw7ArPiwGKQoripBZm4WIHqx5PUi8goa'
b'GneXF4CJfjDJD3X5D+S/iyivrYHAZPgXPNBCPqhL85jzeEcjBPonfA1PHnSByH4QLoLlpTBB'
b'8c+jprYU+W4Xb9+HmYkJBETtZcQVOl9PPixxJ94Z7h5A4fZM3hHT3QIGAmZUowIX+AlrDBWj'
b'DZGXFTw5LkuqEPRCCAvo82JQNLYiwk1lVSUIJlNi1C2Iitrz6NXo0QQbxdi0/5esME95nrP0'
b'cEkS9wKDJhC4bSrMlFYYHLLpZ8rhkWzo6da4caas/8oeu50Y06IEd/tCON/+kUR0wDG1Gmuw'
b'Etb7eCRUpa6u3sSF/EH1zf86iIY8Hombvvk4T/c1/gkwABOTKeN1Na44AAAAAElFTkSuQmCC')
#-----------------------------------------------------------------------------------------------------
delete = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAllJREFUeNqMU0toU1EQPfd9k4qF+mkg0lREUWqEWtQuWqOl'
b'2rpQ3HXjTl0IdlHowrVLF4ILCy7EjTs3KgrSoLaJdVGVCDVEix9sJNXGxmhsk5f3u9556bPR'
b'CHZguMzMOefN3HmX4S9L9HSMccc9zTkP+DnGmMFk6cahp5nz9ViBAfODZG90wLGs8bbD/Wjt'
b'2getpQVelQNmsYh86gU+TT6CrKqDsal0/A+B5MHoAJPk8ejZcwgEdZSnk7CzH2Dn56G0hqFE'
b'tqGpOwajUkX6+jVw1xmMPUnHfwtMdO/ke0dGIS/kUH54D65jgcky/Ba440CSVTQdOQEntAUv'
b'r1xG3/QsIwEp0bN7LHL0GNRCHuX4bRoYshaApGjCVe+kmPJUJxzhiUcfl1zXPRPq7ILx+D5k'
b'VyhWDbDKMlh5adVFTHmqE47wxCMBRUjrSj4Hs1rBhgcz+J8Vj3eC8MTzOoDEwLPvIUkK1mKE'
b'IzzxagJiNvbjO+TgujUJEI7wxPNG4IyZtmVpyvpmLJ/qB2ybFtzIJIKigHACD+J5AoumfSdf'
b'WhpqC20G03Tv58A/+LRRRhsSIp8XvoJ4K2lEJnqjc/v3dCAQ0Ol2ayINDTAxvwTDqOL5qwz6'
b'ptLtApeVRC07WSgNpzKzMEX7qq5D1TSoqgpFOJ1eLPJUJxzhibeyRuDi6+xNOu3UzNXtW9sR'
b'DofEZQXBeK1HxzAwn/uCdx/nkPj2c9jH+yP41ix8x60Duy5sUpWT4iI0cB/BzEXLvjv07M0l'
b'EbwVXmp4jXUWEb7RW/GqucILftv1z/mXAAMAUKIIFQm47kwAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
edit_button = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RTUzOTM3N0U3NkFC'
b'MTFFMkJEMUFDRjAzNEZEMkMwRTYiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RTUzOTM3'
b'N0Y3NkFCMTFFMkJEMUFDRjAzNEZEMkMwRTYiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDpFNTM5Mzc3Qzc2QUIxMUUyQkQxQUNGMDM0RkQyQzBFNiIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpFNTM5Mzc3RDc2QUIxMUUyQkQxQUNGMDM0RkQy'
b'QzBFNiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/PpLMCisAAAG5SURBVHjanFO7SgNRED37yOZFqnSCYiH4AQpiKxiV'
b'GDC2gn6HJiYGLNL4EVqJKBGDQop8gVgI1kFIYZVCQwzJPq4ze3fNTcDnwOzlLnNmzpmZq60c'
b'N3b7Q/fUcT0IgR9N0wDT0BG3jL1mIXOG5dKd+I8FOJguVWZr9egjvqoonRk6LjATB0KcHmI8'
b'uvM/T3EGEFsko8Dqfg0nV4/QdRkf4nQtTCAkwAucAwyqmowR+KCOVNxC46E1YhYmgMJgrDJF'
b'JC1grVBHzDJgE71mNY+BPS7R5FQOJIjd10xpE1R5o1RHNGJgaLu4reTQ7cseOAoFk09bkUDx'
b'iBN4s0yV6TIg8M1RDq8Eth0JsiclOJ7UzA2yTCB7eE2n7oNrZQlm6n6ThYyHmmDoSerRCHB3'
b'38b64hz6hLgs5fD2Pg5mH7pKD7SAAc85X6lhKp1CdmkeF8UtH8ya1Q31JXsjCZ8JZA8EXjpd'
b'ZBam0RvIqpPrrQfxmjoFzsi0zovb/s9+QFmoAw9McLxQpmBS53hhEtaoGsuB+O4x8ajlCpm9'
b'9lN5p4oK7/YvHqNf2KD9Zhxtin9Pk8/i7/ZMPet8CDAApqgNk/2TrwAAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
information = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAp5JREFUeNpkU01IVFEU/t6972fG1JHGrLBso2WRSAshonIj'
b'FrZQ27QoEQza6K6gXYS0KKhWuijIjbZw0xhRoEagUhRWUIJSWuL4A44OzDjhjOO89zrnzjwZ'
b'68Jh5pzvO+d857x7NfxzSq+Fem3b7XBd1+eSr7FpWkpKrW99oLUzn0schauzr22o0dC04ca6'
b'cpyrKUMw4CMGJwPr8RQmpiIYmVzGtuteWOtvGdlVgJP3B6zhW1dqEEvZeDcTxczKH2xuOQj4'
b'JY4dLER99V6U+CQeDk5hNb6linABwQV06tzdfgqf5hN4Nr6EudUkuluqsPS4Hjap+L2WRN/E'
b'ssKZx3xPuc4zd1ysxNivOL6E4yj060hnHAiRnc5nSewxpfrPuGkKMP+JE+qlUKdwHOd6+YEA'
b'vi4kiGjAZ+jK7r2ex8k7H+E3dVjk8y/jzGM+52XVQ7PC8TT8hlBdaSxkSPf3u6dV17P3P8OU'
b'WhYj33BcMJ/zGBe85XgqA8vSYRoSBhtJHpyMqAJCl9BzccaZx3wt9/1YAQVski1Ud2pAi3Oh'
b'S7VfmLpQJnM7yTa0WYFXwE07tmNaNB/NlS1AoJdg5jp7vhACmQwpoDzlO6no0MpKDAW0bR+T'
b'VUdtRyLPSLdQxRlnHvM5D7mbWlF69cVCU8MJFBVZ2KZP2FQdQHNtcOfKtg/MqmSDiiQSW3jz'
b'dhrrzy8foYsU1gkPZxbfd42OyZ5LDcdRTEVG5xJ4NR1Tl4iao9hv0BgCG5Q8OvYDzOc8TwGf'
b'4pLzN9vkoTM9dbWHcbSyDEUFJrzHlNhM4+dcBJPfFmEvfeiKjT/qp/DGrsfERciqgq1Pb2v+'
b'YDMt0/QA2l/aTUZfRkM3HpA7y8n/vca8U0HGCxB5MYcs6snOf85/BRgAU2QTxLlxCL0AAAAA'
b'SUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
lightning = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAk1JREFUeNqUU11IU2EYfr7zMzZHpbC1jChW3hRZMfuRCq+S'
b'TLEbu+pSCsIgL6RdWNhl1E1EQWk/4GXdBBGxfggbyFaa1bQkmS5jbLNtba7trJ2/r+8sFzp1'
b'0cM55z283/c8PN/7nEMopTBACEHw7tGDuq4OU00FFvog7OI4EF5klUddp4cY7RJPwCLk8vn2'
b'ja5dsO07C12VAE0CJ66DWsgi9KAXT0di51CGJQIC0dvM9i2Q05+Qn70BcCZYt7mRePsYUkGZ'
b'6u4fH6woIJpovdnmhJz6zCyvgVizn5lIIxMcRUvf2Am2JVMuwJVegneaXFW2mqImVcLMuhXi'
b'2h1IjnkQmM1ejaUKE1gBfx1Iv5SOaocTVE4AegFEqELcN4Cf03Hs3mx2T9064DaGSHihONQ/'
b'410kwM7fbnFsgK4lwQmWYm/94R44migoVBCaY5VDxNOPuWh2aGu5A1FEvdleCzX7DbzJChYn'
b'lMQTI19wPF+MLeb7gFg097yxZ6SLdpcJEI4g+uIhc6CBqjI2He+DMu8FRxUokoT4aACTwR8D'
b'xy4FrrDtM8tmUHfav9eo1045GzuanTcJOybPc1CyKqJeH7wT6d7O61+MGCOrxfjOeLS4qi+b'
b'7XY2oQS0vIzI6yEMT85fZOTbbDlV8TsoNgQ0W2p3Qkl9RfjVM5y/F2p95Eu+WYm8TGDmfsNJ'
b'qlFGDiPuf48Lg6E2Rn7JlhSsgiUCskq7TDwwF/B/337mYytrjVciLxMwCTik6TTDyEcWyP+G'
b'kW/p12RoYPee/+H9FmAA+Yf39yJOQikAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
separator = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6Q0ExQUNGRUY1OUI1'
b'MTFFMjkzOTlFMzVFOURFNDVCMTgiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6Q0ExQUNG'
b'RjA1OUI1MTFFMjkzOTlFMzVFOURFNDVCMTgiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDpDQTFBQ0ZFRDU5QjUxMUUyOTM5OUUzNUU5REU0NUIxOCIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpDQTFBQ0ZFRTU5QjUxMUUyOTM5OUUzNUU5REU0'
b'NUIxOCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/PrqlUgUAAAAtSURBVHjaYvz//z8DJYCJgUIwagAVDGBpnLCIonhk'
b'BGJjigwYTUjDwQCAAAMANs0I/5zY9nkAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
table_import = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAfhJREFUeNqkUz1MFEEUfjO7i3eQHGpnhZW5iCEk0NmYIIkS'
b'giYUGi2vR3qFy0EQEwoSKiVQWiGGggvFXWFjZw+ViUZjjMHjctwhs7P7+GZm71ilkbDJy3z7'
b'5v19b94TzEwX+fzrr6oWmDBhFG81VfQgjIliKETnRiTmDgv8BpKop8s78NuRYngY53rp/n9n'
b'7y3uXO4EMLFVlND5OIpEASSmYm2NSlcLVl38vU6lKwWTjeh2hUIk9dvOGiWAgjWcP1glFpIk'
b'Ss3KkBZqb6ze4HlgYzeXOPoaUWI0sqUi0ovjJKY26cfyJIxc4I3qHj25m7cBDH48krd3zp9J'
b'7jcV77cUN1qKvh5DuTJJ155tUK4LTQKLADVe8pz4yZnxTvsgOPWOX1qu4X1ZHNPvaX3iFmnc'
b'6oSaBCeDjUyP9VP2RZnou2L+Bvl8xPxLObFBpzYZ1Hi1usuHOI28ruxyA2ctcjaZ59vs35gr'
b'2yc8Upp46aErC30wVOygQDJJhYFwWMsUBciQAbnZ8qd6acw6v3s0QH907IYJZZsEaQoRnrEw'
b'ctNSEH2LFdvtYx2hmSGFL8f/Gpa3H/bo6Z38GWy+bgSQ7TJ+ztwb7g68M9OWdkhjS8mTroLO'
b'LoS63tScM4PS2QWRXKaw3QU49/iiIf5JOAjxzrGMkbjoOp8IMAChbgwty8w4LQAAAABJRU5E'
b'rkJggg==')
#-----------------------------------------------------------------------------------------------------
table_lightning = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAntJREFUeNqcU0tIVFEY/s65dx45vmYzMdjTRUSiSREtamPp'
b'zCAU0cYekAWRm5bSQjQtXEatoiwQSpoiGoIeSjVaWU4JLcIeDlbKOOZkhTNM4+TMvXNP59x7'
b'nR5ESD/8nI///v93vu/cc4i78158LpsrVTRAYwCBCA5M9CcmhMBCAYdVSsRavU4UH+9l/xNi'
b'TlBSRWMG+dM6IFQPPPOhvXcKeO7V8zd8dxIYqtHbs5oxJsOcb5vtApUkyFyenSrojHfpdR0n'
b'Lug2rBJDR6IbHYKASQYBMxn215aDcqsWXg8Ew9hbu1av3+gfwx7PGmic+Howhn1et17XWJGp'
b'wAw7bxAEQoFMGWzUqEsSP7T4DFT/LjQQC7RRYJwrja62g3XvZHmCQPANJM4gSZRLpQj0hw3Z'
b'FjuGBwewqYSizLeb66V8IxkTN+8gNPqlNU/Q6Kvgv4jvzi34H4RxwGtYuPIois2uJOQlyyA7'
b'65Eea0ZqUkVUceLg6b5zdIFASNZT+CLGqmNKoXx6BceKDcjGeqBmCpCMZNCZqhdjs3kFV7kF'
b'cUlkbsPGD8I/wC0QbsdWjPnxx3DUNEKdSyCTsiDzXcH5wovQzm5n+d942FOhr0LSpYdhHNpm'
b'WLg8NA1VTeP9rduwlTqxfEslXFXl+Bx6jXcjH4OUmLeUmqljxhaEgarzqGz5gKJ1TShZWcZ7'
b'LIgE7qN9pApbW4abFn2VJ67tYCx5hk33eBiLDTLriZC+C3Gf7EvOqaxIyWk/HxMx3pDGQZbY'
b'ccTai+alT5BmBWj4ehRTzIVCkvk201ZXLFqrxX3BPyJyatWLuFYwW33sreeXco7nSywyNvJc'
b'/7cPPwQYAOJyPDBxd4URAAAAAElFTkSuQmCC')
#-----------------------------------------------------------------------------------------------------
table_refresh = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAqBJREFUeNqUk1tIVFEUhr+zzxnH8TZjQSKBVNRDiiFoEYZJ'
b'NIYlUZAvJthD0kP20OUpyvuUSGJCF0wsYeglJNLITDPELhamWYQEUVSYVGBp6jhNczntMzNp'
b'V6jF2Zx19lnrX/9e+19KoqNrwvXVb/MGIKCDgmHSCXtzfvBRgrsmAVERYtKkinjNSP5cuZn/'
b'NVtFly1W6Gheo6xhd3NAmGSZAOWTzVTGF8tNQeXEecoX7JGuhazOVtxeGKzWJS8zui4BCOeX'
b'fjqHUNUgPYsIUDvlRBWRWM0aJ2evIBQNv6LQsvs02dXGeZ7ix4emhxEK7ctQ5b5Jg/beMTLT'
b'vBxsTJAgMlYJdcKeCkPv9lGzs4yq1lSizEgGYYuUgUIGmlVwe57J5By2rIcliaFkyRafP7Se'
b'u6s4kF/EqXanPgdwuWdEVlOxRFjpGM7BngXTHjh7ab5x2Wth+VJIiltDbYuT7sNkzAHsyk0J'
b'MoiWDC72w+h7iAlksd/eyY7M6GBMZoVCenIU1U0DDPg/sL0+YUh8BzCLEP0vfh2PpNlcoNNQ'
b'eJsT3TFkVYU0YfSj/sIsvaU6iVod49PoSuSRa7rbkUdT94hkIIgwWYg2x2NSrTg6FNpKzpDf'
b'WMIh+8zcUR69zWPa1cfwKPPXWLwpJfg2KLU/cHHsuoJz71FuvChh4ypo6ImZ74X8XhxLCEAJ'
b'K1b8oLKyqzGUF2yjf9yBLgNX2GBlcuiWjPjRMVmkD+6USSGZVPGbTJ8c19kgxVKUtxqT9SGv'
b'XsPNez/HvNHaJJiSoSRWdU65fHqs1x8IDpPRB021IMQi1llSyM1eSFffR+57XkqVzsgkn0z3'
b'IQK+adSIOOMAaUaD/zY0W+sYnHDJUSkn45df/qSaW4//ZfDSw+uP9k2AAQBNZtnn2K837QAA'
b'AABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
table_relationship = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAoRJREFUeNqUU01LVFEYfs69586d22gTEgXuCsGCoEKpsPyI'
b'DGa0RUYhBNFPsDY1pqXCGBUEBlKUG1tEBC4blckSJDVEJSQZDSNxEVRoDeTXfL2959yZFqOb'
b'Hjicmfc87/dzhd3STwIKBCEELAPwecwxyzROunZgMVSL4s7o3GoiXZrMMJNcPiAgN8JB5MPf'
b'NlCx0xYwDfHPtppEabxjK9fA+DlgPIC2yBIwelobdRaOTvxMMLUtRYWux8RlPpfQMfAd+BCA'
b'bF95zJkIHiOF9l89aIdbIkEiTTuQgaX9qn0VqOzk4Py2b08QZfslwswXn9eIJCfpG5pDw5kD'
b'KLEBp3UQfqdI1BQcRyINrKdADxsfYWahCaqr7nGg8cRP1FfthvTy0Ew+0shA/VbIEGdHATaz'
b'zm3nm/FqogmcQwc4e/QCTJEAjwmiKzJLBkcweAPJdAbXggdhtYyivuiU4AB0ve4qJr48R3QG'
b'GLlNePp2hbmbnGQDydQmsJwiihPRk+g8/SYXCH2kwD1Q33Qt3X0NqrsPEs2T+m05nePHNN+w'
b'uWwvl+1IH/zZ1VQ6R3CxugTzP4YwFgP6b6LcEnH95uGy1VhNvm2lhJ43MfJahRiev4LYt2Go'
b'zTfUONz/Osam2fkGyjnRlLd1kDbCATyLzkJyy+qkuWXuxcWxO6AXX0EvF0HhYS77gZZa2d7O'
b'GFRX9q2I5uX4ve9i+pY5rR0qrkJ37wg8UknZLbuxC1Pv1/64ihNav8jxDcr+10PLw66OSTiW'
b'IUyRYsd1LIWqlbxpOylLb0tEf0Tq8DJh8XQckVmyhM02V9AKPok4B/GrVRNlSyH3KsNW8ILx'
b'Kc92WCXMJ4ptOvgv/BVgAO5JKpHTWRGVAAAAAElFTkSuQmCC')
#-----------------------------------------------------------------------------------------------------
table_row_delete = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAkZJREFUeNqkU99LFFEU/u6dOzOru07gWuBLD8UKRhBhCanR'
b'Q2w/CaQnISj/gP4DyV1pqaDSCiIM3Uef7KVIkfKhH7a1LSYRBYkZIlQkybbtuts4czt3Zn9I'
b'b9YHh/nmu+ece+6557LmxNRM/rfTYbuAKwEGBSJl9jdnjEHnQNDQ5nWNtwgVnB04js1iS2wy'
b'YpkM3FbbKjyPAi9OAKljiE0sAy+PelbjpE8STx323FXFKlKgHH/hxzC4pkFQeQFuI7E67Ok+'
b'vwtJa3pAR3xtDHHUTiVkOcOZ6A5wChYacG96Hj3RiKePP/qIniMtcEl/29uLbU/HgU+/IHUT'
b'LvnzSiqjDiANQpASavASKeOhEHT6GuTTlHqAtlsjSLeGUay38NMM0RGYn2D27DmEU/fBNIH9'
b'pC14TZdolw4yXaexeP4m2gwTpdsXsef6Hbzu60bAELUETRTcGU/A/v4Vnib9azO2NmMmEcOh'
b'0RHMMQ7NakQpOYRd/Zfw5uplKVC7YehLi+CfF1BJqirQikUvl1n2M4INkK6D9fya30TlpJDt'
b'PIXHySSY0ME482WV4Mkz5A52Y+xdHjsp0KgLorD3AD4MJdA1u7wPgb6HUmGdzCZzyUYz32QF'
b'yfSXKp+ONMr84IB81RqWuJaRwcE01F5eBZpXjn8nem61OnEin63yQsdJpG9cQfv7FQQKWVil'
b'HJjVPyH/aZTjU7BMzkQ9d5Zorrfbjlt7TKz8hjZMnEeouaq/OtUd1JDj6p9Wdm9o8mbgkM0x'
b'KSX+B38EGAC2m+wilWllXQAAAABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
table_row_insert = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAhtJREFUeNqkU01oE0EU/mZ/0nSjpqJgCQiCRSkePGjpqWKg'
b'pRYqHkUwQsFb9SgIapMahUKPRTyoh4BC9SA9mFjFFukiFq1XsfUHrSK2NJRWIpLN7HPm7SZp'
b'vFnfMjPfvN33zffevBVEhMT1p+9KZbnf8wGfAAFtCoTobyyEgG0AsYj5Hppg21CBNmM6ztCc'
b'+mS2F33Ay+PAbB/ShW9q7eXRgPOL6rskgjilRiuIXZmkUvYYhvOfIAwtr1kp/Q1JAa8pSOEg'
b'BQEfnvSR6W+Dc7kAVmAaUX75+PVezC8ewkBvK5ogkerewyNCdcz+nrZaPZjAtoCj1wTuDNzA'
b'56VVxJSv2Xaw1QYPJ7K9ju04tthhvNJj6aXDTmL01BjefBmEygATrsSrDynk3Cn+TPtybjVE'
b'lWC+C/cGZ9RGEfSPCsqezGDu+3kYTUqUyjvVZeLm9BRyZy9i7ddXTRGe6CPu7MaZWyMhmQFL'
b'kaBUKYKiG646tI/0EEuVhQbfLtrXsLceXcBhKcfmzp04jaJ5l50Tsz4OJLoxfPsZc+pDiKpl'
b'W0B765EQ++BG2nn1OSWzoPtve6gzDW6ScfdHrWEeuKs1PD6zUsPRS3niIpYlMJ0hdAwJtCda'
b'mF1661ow43J5Wc2B3/OKat4R1oACBfH05KZbmRU4pr/WknkSr0idl3rI5xWE+t1twHprmwZi'
b'lvhZrftBXVD8u0n+F/7H/ggwAEYOV/C7+oSSAAAAAElFTkSuQmCC')
#-----------------------------------------------------------------------------------------------------
update = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAtBJREFUeNp8k21IU1EYx//nbnfTzU0z05aucgMDCc2KhAiE'
b'4VwUgRRG36IP9aEIDIOkYWA6JAqiD0F9KJJAAulrbXNZoGFi9g4VimZrvrR82ft27+69nXt1'
b'NkM83Hs49zzP+d3zf16IJEkghGCjUe/ySqyKIMkJPpWKtPqcjtGMjcgAR5ePF0VRLYgS6AOG'
b'8tQqBn3OBoVs6/BIl4/XYHI2hJefA1iIpHr72xwnZRsjT96r9tqK0oLwhWNVaKGOLSf2Ii2K'
b'qzeQgQ+H/PidlHDGXglLiaHJ1um5uwqg411gIWr0fQ2iyz2Gtz8WAemfBIYSQqEYXk8sont4'
b'Gg37y2HQqs/Xuzw7FACltddWmJCCCjotC3OhXjmUGd+f3W9Mzkz2MNEIopyAgYkQ6qrKIKTF'
b'm8s3EMUreUY95qI8WC6JHt/HMSYrsFODT90Dt063xoP+HsJx+BnmodXpaARJ0zKAEG2II8hh'
b'NfTqIkbuNZ+i51NZiZDX/vePrzlFLoUcrQZhgQGhOhWA/K+lpIC8XA34RBLhwNiHV66mQ9mp'
b'dHT1qWJB/1QqHkceBSzRgJJMEOXFnyRQnK9FWUk+zbtHxcVCo9kAmu6wrcOdY95SoPhFeAJZ'
b'5YoEeSEiwgFbC40QBPEG1uQBoDWi21ZkCFlLN2E+AfA8T6HgFAAfW/ImYzEQRg1ruQkFhtxL'
b'tuvu23aX15oB0EJDo20PazaboNGokYhGEZ//9UgBBEaed85Pz4JlVfBHgaN11aiuNDfTQ+NZ'
b'GvApKMAfA002EJyZxdCds+0KYNzXPZxYnOud/PINsbSEQILB7l1WWfeaYgKrRYrRwFykh8W0'
b'OUy3p0lWM22va33yQF9YXF9m2YktpmK86R/Ei7bDitHu8kiyDEluFurPMIwMTZP/utFSceTc'
b'QVON/SKbazgg7/c5HRnjvnUaVSLrtDMtMZTS17jyPbpRq/8VYABm7ivbCz/37QAAAABJRU5E'
b'rkJggg==')
#-----------------------------------------------------------------------------------------------------
watch_window = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RDQ2OENGNTYxMDdD'
b'MTFFM0FGOUJCRTk2Mzc1ODMxMjgiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RDQ2OENG'
b'NTcxMDdDMTFFM0FGOUJCRTk2Mzc1ODMxMjgiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDpENDY4Q0Y1NDEwN0MxMUUzQUY5QkJFOTYzNzU4MzEyOCIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDpENDY4Q0Y1NTEwN0MxMUUzQUY5QkJFOTYzNzU4'
b'MzEyOCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/PnZsAM4AAAKZSURBVHjajFNbS1RRFP72OfuMMzqOTk5EIDqZQ0Ez'
b'yNB46SXEsNGgMYp66WdUEoOUhZaO4m+IHnqT8PLSdJGgZtIoG80KHEMjKMHRYCTyXFv7xDEf'
b'urhhs1l7rfWd7/vWPqy6P72hGWalZVnYzWKMQZGlb7LE/CLmorkv0QTDNCH9p9mkLUsSesZn'
b'KiUmExgB6NRY5ea4nP4ETgjsL82Cn04IwydrIHpckAUfAWDB71ZwKOCFzGCj7iC8DSgADFHr'
b'cdlABmVMxkkCBa21btq12O1SqUchBsniOLjjXX5xEZBkmKYBYShjEj4uLaKwtmazEHeBQADx'
b'zlPQyS2Vvv6DKZAc71/OTCM1OACfrwIry8soKyvDemEdsaYWRBubcZTOQqFg1wqAZPE+NCZb'
b'28Z3nTkLhXN4vV7EGpvg8ZTauhVFgYu24nL9NpTY6STBFCY6l5MTY6RARm72NbZUFeEjEQSD'
b'QWQzz7cbReyYa9rCdgDEOzrwLJNFsK4OmqZi8/sm6utDCEciqPD5sEZeGLpu11ZbGzaAYMid'
b'sT1MPwAnBiNDKSrUYJJpnCtobWtDItGFifExzM/NwdJVXHQp0FxuizETXJF+IZyOt2M6m0Wk'
b'/QL2hxrsyX/+MIvixhLu3b2DEi4jcuIcqg9HIZEHX/JzeP+UZJdyViy/kUZ4ZAol9LiCkUZE'
b'D3ixz+/BwYYWLORySC5X4W3uDeoojgbLsddfgtpwjEZOz//rtc7jm73xWP7m+ZimG5mFJ6NY'
b'JXFyBcfC1Cg0Xc+sXE/ExPmO4lWh28cx/3gUqqq9YjW3H2GLHsWl4iS6+4dDV7uvvLBMc4/t'
b'tSStD6SGmnt7kvnevlv1lJt2cqDcYGro2J/+HWGAMx1he+5fuZ8CDAD/Sv46ZbrYXQAAAABJ'
b'RU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
image = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAexJREFUeNqUU8tuEzEUPfY4nsyrSQNqWfR/WNEdK/b0D2CN'
b'1C76CZQlYsOeil/gD9i0QuIhhRaFJnSSTmZsju28JkIIrnRsj33vuefeqxGPT9+/rBrztDYA'
b'BNpmV0vrTklAq+js7bOHRzg8Obe31tpr4uoP+L4F5+f8D0/ecbVQNRlTEs9ESC/SDCIjdgrI'
b'NPe7yLgXub9XWkO/OoNXTFN2obtx6noDyN1diB6d+wVQZEBCsm7qgZhw8jcKU25Zld+n44Oc'
b'JEUgcUoS3mkGdlxwTKmVT4ZNAscmuhJiL4XcS0iQUDqRJiGz4t5J4HPbptVW5cKdAnmfDgMS'
b'DOjs9l7oBWIq0jk9iUayArNNEOSLgSOIIe4xmBAsA9UNzPUFmkmJ5uIS5scd5OUIxfMXDBBr'
b'Ak/iJPe6zEzJegpz9ZHEVXiMDOT+Dp1mED8ny4gFAfVE7jPXRApbfqXUG6pgw9wY55Q8qmFM'
b'BVHOeae8/3IO6xK6Gmb0mSSU2aeaImI5ESwT2hmH3SEkg6QJue1GE/0ov32CmfH9gI27nQIl'
b'FcQdnmvgVwUzvoMdT/0ZLQU2HLIPJUzraW1RGKA3ufonFgRKbnXzH20VNx8P3zw6Pn/SGLMk'
b'/au56UVSop4MXy/7d0Ds4/9tyL/xy28BBgAMXMtNyzRrZwAAAABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
folders = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAA2NJREFUeNrEVztvE0EQnt3bteMYiSBFoiOCBhqgSBpeVUqQ'
b'gIYKgaCBIkhU8BegJBQGIagiAT2BDhDIoolIHIGCEA+FFCgmjiAJ+HyvZfbB2efn3cnIK63s'
b'27ub+Wbmm2/3iBACBjkoDHgQnLxUmPwhfGe7CAK8FJHbhFLwgT0Zn3pxGhe8fgNgOPPS+cHL'
b'D9A82hckCo9ZUCpcOIH/cjg3/weAjIrcc8H5cAPjt4xnOXzIjp0Dr2bD21uHNgghDciaUimX'
b'qAW+sGYxW6fiZouF/wIXBM2iJa6tyUoIHwJ7DcanHmqflOh7lEB7BBwWC2ePJ8lWHYCwMQCu'
b'lwjVXBAY0fornC+Vf7m8vPAJ1lfKbQFQxoFlhuXVUEIAMmIPKEcAlDelWIQBfp1bgpF9Z2DP'
b'yaO4LEzUDc/h+6W7F9vXqCsAaYi4sLLwGdaWy02dYMwjOXcdGIORUSxupYhZeRPxQ7Dkmb3X'
b'QHdSMhIaRXDR+XeYuPoY+eBHu0EXAPzyUwTiQrDxDggblk1qYhcKAAS1VF1gSuDqyH0HvC/T'
b'uhsaUkyQANRiShdoLq/fIWF6JDwMwlP302VAdQ1aDP4g93KGjA0AUA+Wns9B9eeGrn+0QOrV'
b'nft9BMBSAiAGgKiiM0nEuh7I/qecovNfMHFlxpToX2m0c7AsmC+cByszlLIE4IYZoNgJQm0T'
b'dQAERUZF7qNgfbzZVrAC11FX89OHV+MKVlMJZEBIJIuHxAvfwvqrzAuni2A9SixYDQB849AG'
b'IgGIRgJKY+ZRZHo/BateAouY3bmmOECaUXNm1vorWAoAkaliLiLDumblSr5112YUM0OVXvRT'
b'sCQAUdnyiot3Zo6wbBZK9+5DEIjWc4GqAlOK2VOwVmfRgYOC9R5B5+qaIbMhpGDZkS6wJ6+/'
b'voS/o1FdaBl8/vaxZ5qsnQXLsFCD5kOtZRI0opjSYRXnN5zlHpvIDn27u2BFgUCrYCmtscP7'
b'zFjcjH3a6SJYvUd7AAmPkJ0FKw4AosrghAxjibevboIVK4ZAERnSA+gsWLH8Exm7kxZAD8GK'
b'/TXihmeJRAB6C1bMkbHCc0MiAILQLRSsbZ0FK14WKQZS+e0XIWEBR3Du1nqQLvNNTF6T+pPE'
b'EDc5z/Thg0goFmsRHPDH6aA/z/8KMAAV2HjZKbRagAAAAABJRU5ErkJggg==')
#-----------------------------------------------------------------------------------------------------
setting_tools = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAACK1JREFUeNqsVwtQVOcVPnvv7rIg4b0IiCCIyvKoCtSmUbFq'
b'rXHU6sZHaMdBbWo6mWkTjaEq+EzAIRaTMSapnThl8rBRUTCdxrRNwmNRpKCTovJeQYF9L7vs'
b'7t33q+e/uZusiMi0vTNndvfe//7fd17f+ZcHU7y2bd8RmZiQeEIkEhUnJCSIkmckwbRp04Cm'
b'aZDfuwc9Pb2yk394cyUu9R19vWIvRVG7vV5vklgsBo1GAzwePvB6a+0OxwFc14/r/GRf3hSw'
b'6QNlh/4oEAh2STdugPm5OcCjKFAoldDa+i/8VEFfX1/je6dP7cC1hvLjb/YlJSQmLFu+FMRx'
b'ceD3+xGcB06nE/r778HX9Q2gUAxnnnn/vT5Cgv8k8H0Hymry8/IQez0LbGWsUN/YBLLmZujq'
b'7KxkGMtws6ypBde636io7Js3d25CYeES6Lh9F65fbwWMBGvx0+MhNzsLkpOT4IOzf+7B9SI0'
b'52QE6P2lB2vyC/Kkm6QbYXTUAAajERoQvLe3V/9W1YktuEaHZkZj9pbsf35GYhKCL0aCMvB6'
b'PIBRA7PZTDyGru4uaKivZ4NO0xTZP2wyAiTs6Hk+gm+AB0MjYLVZoalJBnK5XIHgW3HNEAk5'
b'mgtNGBERcZSEveN2JwtOUTRbHyaTCV4/emQNiVDQ/sbAF2pi8MM1PywokG56bgPcfzCEm4wB'
b'hXmMjY0BxmL5hgPXoNnQPGghmOvp8XFiUKvVbM4pigd8Pp8tPrwG0e4EGfltJQ/GR4B/8PCx'
b'2vy8hetJzgcG72MIGXC5nODxemHr5k1AU9S6iMhIqDpRKQ16T0CKzWyxAEEkBIj3QqEg2GPt'
b'RKEeH4FQHo/iwAcR3ITV6wCnwwl6/SgW3nXY9JyUbL6OrA1+kRDg03y2rb4F57M18KRGo8YV'
b'XXV+3gJQqtQwNmYGu8MFDocD7IQEtpFGq4Ov6htZMK6Kv7u8HpdqcHAA4rHvBQI+kkDj0+D3'
b'+SYlQOjxy49XXhGL49empsyEjNnpbN7Dw8MR2AkuNAcSIWlAYYGBgUHAZINSoag7dKiMdIIX'
b'LXb3nr11RUVFS6Ojo6FfLmfzLwoJATWKUHt7O9uKLCBbHxRUHi8PIQXM58K+9sVf72QX1Dc0'
b'wSCGnxRP6qw0BHaBx+1inw0NDYNIJGTTMGbQSZ+ifed3lx7a9kbpgTOr1q5fOm9uBvwb+59E'
b'CBWTJZGeng5ZEgn7WyCgQaVUw+W6ugGuDVkCQovF1FRzqXaZBBeGhorYwFgsDPZ7H+h0GvY3'
b'2TQyIgIWzJ8Pt9rbICsnGz4OX7Y55sjnUs8coPN/kAVtHZ1gt9vQ7Kz3MVFR7HtsgaIedPf0'
b'QHd3N+rB13uDU/AUWtrekn2nM+dJCmfPSQc3ei2XD0BvT3d9W1vrJQFfwMMo+NNSZm7ZuGHD'
b'8qi46fDqN27Ys3YltKkYaB9SQYpbBa9m+GBIo4fm6y2KyMjIGKyU0GlhYeiMhQXDom754E9n'
b'9uHXB1wbu3hcK0ahpSCJt2fPzihMTk6Gjo4OOFS2/2m8r+dEhJR0zNbNm4/1Ltq15rfPLoO7'
b'OjuoGQe4/RTcvDcI62g5+O42as+cPftLrjYoHFzhqA1WTi9MHHBAwPw0mV5EEsmNGy3XWyVZ'
b'ObkYrpmMxdRy7VpzNd5XBeQ2dEVxjHXFS1UlaxbDba0dVBY7uH1+8KBFhEeBbNgAjW+X/AzX'
b'3kdTEvIMwyg5DdBw0m0KVsXgJiVkotGS0CI48SCKR+LHi3np3dnp2Xn9Fet/DFflRlBaHCyw'
b'G/10+3mgwDkR5jXBJv2Xn+07fKQI33EEYfgfO3CCtYQLC8OBkzDZyf2Y37yThuDy8vXPsOAq'
b'xo7gwIJ7EFyJ4F79AHy1LRskknmZs+LFmX+vb6idDHiiCEwoVLH7zhcvypxb/fLyhfDFOHAv'
b'gqtxTngMD+BUngBong9+lL8Adv61D9r1PjCZzVd0x9Zt4fIPU5Hih57FlJzbvnx+TnXZswvh'
b'ar+BzXkwuM5shGGtEiS3qv9p0KlBgAKz8cN2GKMi4C/bl8KyeWkb4w7WXRwX6cem4CHw6NfO'
b'Fa9YOL9694psuHDXwFa71/89uN5sAKV6BIwflhXdbv7yaphQMOtvguw0QXwa/H5VDvxDboFf'
b'LEgGtdUr0UlW5dpkFy5NlJKJUsCPLqu7+GJhgXRtbjJc7BxFcOcE4MNgvFD5vO1O002u4BLn'
b'vnPj5ic7noa3GrVgRS2ZFRUGW7Nj4FRDJzR29dfpy6UB6X5sBOjo0tqaPasXS3+amQgXu0ZB'
b'8zjwTyu22O42t+E76kCn8HJWFFzptaZ6qXAghx6H2wMjFg/8alEqaDAS2gkiQT/keWntJQJe'
b'mBEPNei5xjoxuLOrpcJU/8nnHLiT0xK/raX2RmjukmwPX5gaIopk42t3TU4iQIAWl9We3fWT'
b'RUUr0fNHwbEvGQsoVA/A2XntuP7T8o84gXIEeUNIuJkbn7WG5CyRuGlhqjD0ySQCBKbFrnnh'
b'YunqBVhwweB+1nMGDyQjygFw3JFV6M8f/5gDt48rqu8UdTISLyAJrc0n0WevzrM2na8JtKFo'
b'jjgKH+DOmPNvW+178Psj/eA2qGUIfo6TWCsHOP7yBhRUdXLnHkbRI9Nqh8GKwmtFJRg2OeBy'
b'txFKVmXhHwIBe6oKEKDUFhtQOJYIOLFgcM+oSqaq2v4Kl/PHgU9KwoYkbEhCybjgldpbwGOM'
b'TWTABVIQxiv4+ZasmXFRGjsfnLiFGY/hQ4p7qHKqZtXJHXtwzTA3lLxT+DflfyQd/JBUUVg0'
b'shqCEXmnTFNV/DI+Hw3oACYKMhLKLtfyRGEpeKJmj05e7fAXqlO7DnJDyThFcJhgwKUkvvbR'
b'aaE4+RmXbqRFVVX8O27PsQABMutj0WZwk5DiRqaRK7ixyfR8iiQCU9bM1RHrULASCrhzmpBT'
b'SB/XZvb/wvNH1JU7xgs4x+z/g0P/3+s/AgwAdGqB/H5XELMAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
add_package = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODI4RUQzMzU3NkNC'
b'MTFFMkE3QUQ5Q0UxRUYxQTkwNEEiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODI4RUQz'
b'MzY3NkNCMTFFMkE3QUQ5Q0UxRUYxQTkwNEEiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDo4MjhFRDMzMzc2Q0IxMUUyQTdBRDlDRTFFRjFBOTA0QSIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo4MjhFRDMzNDc2Q0IxMUUyQTdBRDlDRTFFRjFB'
b'OTA0QSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/PqOUi98AAAIcSURBVHjajJJNSFRRFMd/9+PNmwwNxZBCbS0kuIgs'
b'wSBoG7nRbbsg2pTYx6YWFSENQou2EW21oHaBqVFE2KKNUaJFHwyWFFI4zVjjvNs5T/u0yc7j'
b'/967557/uef87zETF/aSJAkhqbwD0ySgugXFiLGuz1oroQav5Er5S+g8eIYPz26J0/6Dn9DY'
b'dqB38trZQBRLHocNyfJw9+FBSvlxamo3sbG2rip0X+M0XnmEgJdX7/zzKZLQQlp+YB0LLEm8'
b'8nTljfQSxVnmp25KS5b1NAjSRlN7D8pTM2Pnu0OlXJZfm/ZvTPUEQUpWHSDBRRHWeeODiNh1'
b'6BKv7w0J31dNYL5fQrJM654BHl7pl3i5BS2pXFzApaevbcHIY8Uf4eSRfVvBl0rEiXgEIqIh'
b'fP2E804yujUJlBSbmOHCOGNLMxjpwExcpau+mccNebRmQrmA99HqDJjfTs+YiOuLd5mumWF/'
b'B+zaepzJtzmezOaJP3LZpgRN4KJV+B+IZJ11MaPFaba3QuEztDdcpFCAtpZUzyPpXZikiM8I'
b'OXIC/xMZRyR+FW+nnHysc2VIjsp3x5aTOkcrc+BDCbLZvyivLWRQacZnc9x+muP0vsC5O4YN'
b'kldHwUvY4qPR+7V6G38OoQ0ioOi/u6GZF3N5tjXCg1enxA8v56C0wA1VrCMVex3rGeJEXT19'
b'WraOipJHBug3/L+JjGz+Zf1e8OabAAMA4Yi/RrV3XFkAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
edit_package = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODlBN0YyMTE3NkNC'
b'MTFFMjhFOEJDNDUxRkU5OTMzQTIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODlBN0Yy'
b'MTI3NkNCMTFFMjhFOEJDNDUxRkU5OTMzQTIiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDo4OUE3RjIwRjc2Q0IxMUUyOEU4QkM0NTFGRTk5MzNBMiIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo4OUE3RjIxMDc2Q0IxMUUyOEU4QkM0NTFGRTk5'
b'MzNBMiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/Pi0WmboAAAJDSURBVHjapFNNaBNBFP5mdmJD8RdME5CoYKmIFArR'
b'qq1FwaJWBQvVgKgHD5YeBMGDB6MNie1NpYoeqngQhCKIxB9ENi2ItWADtdKggvHiQUIkKKEx'
b'iW4z49tJU6KkFXSWt8zO7Hvv+773HlNK4X8WK2+Gw9uT9OmpOKqy7GQq1d770lM+EfbLDG5V'
b'm4+dQ/r9Q/LnC/hLrNxw0G0Ww2p36JXOxIbDrfdau/v9mQ9PKD4HY/MjsOkySCxrOICxmwFC'
b'MuYRdOpPfYxDKm+J0V8lkSgkpgiMdGsKjHM4apxIxSOUnWMhDRRRUHIGdRv3QxYLJRFH+tpU'
b'0bJoyzX/+Sjoaqki5EwOvhMD9K+Ea90uJggKWroH8OnFZfIX1QPMOkMWUL83iHc3flAAS19x'
b'G5aV+wqDsttW/QEMCrB+TxiBa8+wtNGL7ytq8TrwQJGIDOpnBoYwCIHxhwZK8wah9O7oxdmr'
b'Jo76u3D36RDcWTea0nldNygrCyEcMAyjwigz3RlMwdvej4lLhxE4Xo/IYxM+XzMm82/RPNiz'
b'ieuMdgDDMWuiZERHcIVVO0OIXdyHls46LJ88jUMN3xB5FEW4p8uGOKE7kckcxCLHXHdrxQm2'
b'a9t5ZKIn0Ra4AJkhuFNnMBobwuDt5xgJdWCuD4TKA07nb/UGiU6scD/ugtdKoKNxMa6Pr8ap'
b'WyaysSukam15Fth0LDq6RDdJRdmUVcCWtWkkkha+JMex5jPHkeAdmH2d4KLGLvl0eRqb7Cr9'
b'wyQTRrz5JcAAGhHev5SFI9MAAAAASUVORK5CYII=')
#-----------------------------------------------------------------------------------------------------
delete_package = PyEmbeddedImage(
b'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAGXRFWHRTb2Z0d2FyZQBBZG9i'
b'ZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tl'
b'dCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1l'
b'dGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUu'
b'MC1jMDYxIDY0LjE0MDk0OSwgMjAxMC8xMi8wNy0xMDo1NzowMSAgICAgICAgIj4gPHJkZjpS'
b'REYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgt'
b'bnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8v'
b'bnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNv'
b'bS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEu'
b'MC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9w'
b'IENTNS4xIFdpbmRvd3MiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6ODYzRjE3Qzk3NkNC'
b'MTFFMjgwQjhGQjU5OTExODg3MjkiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6ODYzRjE3'
b'Q0E3NkNCMTFFMjgwQjhGQjU5OTExODg3MjkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6'
b'aW5zdGFuY2VJRD0ieG1wLmlpZDo4NjNGMTdDNzc2Q0IxMUUyODBCOEZCNTk5MTE4ODcyOSIg'
b'c3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo4NjNGMTdDODc2Q0IxMUUyODBCOEZCNTk5MTE4'
b'ODcyOSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94'
b'cGFja2V0IGVuZD0iciI/Pj9gWJ8AAAIeSURBVHjajFI9aFNRFP7uz0ueDXZS2qWFLoVSlCwF'
b'qUVMESxY0KVxcRbRLYhDB4UMOgmCDg52cZLWgkIXm0hAHfxBOoTSQWtpM5hoW2kaY2Nf7vHc'
b'2wcJpE09cODd++73nfN95wiEkU2PFAHRxYmDg2zOnLv9Lpm7m4glJnO/tb2ev3OKhq5MYn3p'
b'JeNlG7zBsYGLE/P1NDFY2SuVTZ+eHrl2b7Cy8gpe9AgiUZ8zum96kSiC8jf0j11HovPLwNNc'
b'4bkG0UTpax6GeuDaJxwShB1+z7gkHy5rISVX9lHKv4Bw7bf3gFhG14lLsDgbmlFYnJviT+n0'
b'C3EwARE5H9ZXp6A8b4+AjMHw1QdYfXOf8botwV4HAXrP3MT7J6mQgBl3q5tQrvo+EpjQXSvp'
b'miSSCOplGC1CCSRAf7egtOKHqpVAMkFEovK6gNqnH/zcYAspdPR3NDyg3Qq09sIdaCLgT+FJ'
b'bGfXECsYnBw7C3i8OkGAlcUlfL4w9Ei7V5ZAea1LxOTSU6h++I7B4TjoF3dgAgil0dfTjY3N'
b'jRtuE4WpQke8Rtnm9qMsSxDE6DhwPtn4m5kGLeTh9kDTH8D3W023EiLKVQxmHoOePYSpG0g2'
b'VPAYpVKwErY/Zt4etdOgFjz7wx7E2LDlUhV9nSzJjpJJlssBirVg1nYUt0M6bIHnRuO3umN+'
b'kkKRxZ3a7HhmISXw/9HLebzp/JNz7Z8AAwDLYLxgFHNxHwAAAABJRU5ErkJggg==') | PypiClean |
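#-----------------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): how embedded image
# constants such as `folders` above are typically consumed. Assumes wxPython
# is installed; the frame and toolbar here are hypothetical.
import wx

app = wx.App(False)
frame = wx.Frame(None, title="Embedded image demo")
toolbar = frame.CreateToolBar()
# GetBitmap() decodes the base64 PNG payload into a wx.Bitmap at runtime,
# so no separate image files need to ship with the application.
toolbar.AddTool(wx.ID_ANY, "Folders", folders.GetBitmap())
toolbar.Realize()
frame.Show()
app.MainLoop()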
/HolmesIV-2021.9.8a1.tar.gz/HolmesIV-2021.9.8a1/mycroft/skills/fallback_skill.py | import operator
from mycroft.metrics import report_timing, Stopwatch
from mycroft.util.log import LOG
from mycroft.skills.mycroft_skill import MycroftSkill, get_handler_name
class FallbackSkill(MycroftSkill):
"""Fallbacks come into play when no skill matches an Adapt or closely with
a Padatious intent. All Fallback skills work together to give them a
view of the user's utterance. Fallback handlers are called in an order
determined the priority provided when the the handler is registered.
======== ======== ================================================
Priority Who? Purpose
======== ======== ================================================
1-4 RESERVED Unused for now, slot for pre-Padatious if needed
5 MYCROFT Padatious near match (conf > 0.8)
6-88 USER General
89 MYCROFT Padatious loose match (conf > 0.5)
90-99 USER Uncaught intents
100+ MYCROFT Fallback Unknown or other future use
======== ======== ================================================
Handlers with the numerically lowest priority are invoked first.
Multiple fallbacks can exist at the same priority, but no order is
guaranteed.
A Fallback can either observe or consume an utterance. A consumed
utterance will not be seen by any other Fallback handlers.
"""
fallback_handlers = {}
wrapper_map = [] # Map containing (handler, wrapper) tuples
def __init__(self, name=None, bus=None, use_settings=True):
super().__init__(name, bus, use_settings)
# list of fallback handlers registered by this instance
self.instance_fallback_handlers = []
@classmethod
def make_intent_failure_handler(cls, bus):
"""Goes through all fallback handlers until one returns True"""
def handler(message):
start, stop = message.data.get('fallback_range', (0, 101))
# indicate fallback handling start
LOG.debug('Checking fallbacks in range '
'{} - {}'.format(start, stop))
bus.emit(message.forward("mycroft.skill.handler.start",
data={'handler': "fallback"}))
stopwatch = Stopwatch()
handler_name = None
with stopwatch:
sorted_handlers = sorted(cls.fallback_handlers.items(),
key=operator.itemgetter(0))
handlers = [f[1] for f in sorted_handlers
if start <= f[0] < stop]
for handler in handlers:
try:
if handler(message):
# indicate completion
status = True
handler_name = get_handler_name(handler)
bus.emit(message.forward(
'mycroft.skill.handler.complete',
data={'handler': "fallback",
"fallback_handler": handler_name}))
break
except Exception:
LOG.exception('Exception in fallback.')
else:
status = False
# indicate completion with exception
warning = 'No fallback could handle intent.'
bus.emit(message.forward('mycroft.skill.handler.complete',
data={'handler': "fallback",
'exception': warning}))
# return if the utterance was handled to the caller
bus.emit(message.response(data={'handled': status}))
# Send timing metric
if message.context.get('ident'):
ident = message.context['ident']
report_timing(ident, 'fallback_handler', stopwatch,
{'handler': handler_name})
return handler
@classmethod
def _register_fallback(cls, handler, wrapper, priority):
"""Register a function to be called as a general info fallback
Fallback should receive message and return
a boolean (True if succeeded or False if failed)
Lower priority gets run first
0 for high priority 100 for low priority
Args:
handler (callable): original handler, used as a reference when
removing
wrapper (callable): wrapped version of handler
priority (int): fallback priority
"""
while priority in cls.fallback_handlers:
priority += 1
cls.fallback_handlers[priority] = wrapper
cls.wrapper_map.append((handler, wrapper))
def register_fallback(self, handler, priority):
"""Register a fallback with the list of fallback handlers and with the
list of handlers registered by this instance.
"""
def wrapper(*args, **kwargs):
if handler(*args, **kwargs):
self.make_active()
return True
return False
self.instance_fallback_handlers.append(handler)
self._register_fallback(handler, wrapper, priority)
@classmethod
def _remove_registered_handler(cls, wrapper_to_del):
"""Remove a registered wrapper.
Args:
wrapper_to_del (callable): wrapped handler to be removed
Returns:
(bool) True if one or more handlers were removed, otherwise False.
"""
found_handler = False
for priority, handler in list(cls.fallback_handlers.items()):
if handler == wrapper_to_del:
found_handler = True
del cls.fallback_handlers[priority]
if not found_handler:
LOG.warning('No fallback matching {}'.format(wrapper_to_del))
return found_handler
@classmethod
def remove_fallback(cls, handler_to_del):
"""Remove a fallback handler.
Args:
handler_to_del: reference to handler
Returns:
(bool) True if at least one handler was removed, otherwise False
"""
# Find wrapper from handler or wrapper
wrapper_to_del = None
for h, w in cls.wrapper_map:
if handler_to_del in (h, w):
wrapper_to_del = w
break
if wrapper_to_del:
cls.wrapper_map.remove((h, w))
remove_ok = cls._remove_registered_handler(wrapper_to_del)
else:
LOG.warning('Could not find matching fallback handler')
remove_ok = False
return remove_ok
def remove_instance_handlers(self):
"""Remove all fallback handlers registered by the fallback skill."""
self.log.info('Removing all handlers...')
while len(self.instance_fallback_handlers):
handler = self.instance_fallback_handlers.pop()
self.remove_fallback(handler)
def default_shutdown(self):
"""Remove all registered handlers and perform skill shutdown."""
self.remove_instance_handlers()
super(FallbackSkill, self).default_shutdown() | PypiClean |
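# Illustrative usage sketch (not part of mycroft-core): a skill registering a
# general-purpose fallback at priority 50, i.e. the USER "General" band from
# the table in the class docstring. Returning True consumes the utterance so
# lower-priority fallbacks never see it; returning False passes it on.
class EchoFallbackSkill(FallbackSkill):
    def initialize(self):
        self.register_fallback(self.handle_fallback, 50)

    def handle_fallback(self, message):
        utterance = message.data.get('utterance', '')
        if not utterance:
            return False
        self.speak('You said {}'.format(utterance))
        return True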
/FFC-2017.1.0.tar.gz/FFC-2017.1.0/ffc/quadrature/tabulate_basis.py |
# Copyright (C) 2009-2014 Kristian B. Oelgaard
#
# This file is part of FFC.
#
# FFC is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FFC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with FFC. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Anders Logg, 2009, 2015
# Modified by Martin Sandve Alnæs, 2013-2014
"Quadrature representation class."
import numpy
import itertools
# UFL modules
from ufl.cell import num_cell_entities
from ufl.classes import ReferenceGrad, Grad, CellAvg, FacetAvg
from ufl.algorithms import extract_unique_elements, extract_type, extract_elements
from ufl import custom_integral_types
# FFC modules
from ffc.log import error
from ffc.utils import product, insert_nested_dict
from ffc.fiatinterface import create_element
from ffc.fiatinterface import map_facet_points, reference_cell_vertices
from ffc.representationutils import create_quadrature_points_and_weights
from ffc.representationutils import integral_type_to_entity_dim
def _find_element_derivatives(expr, elements, element_replace_map):
"Find the highest derivatives of given elements in expression."
# TODO: This is most likely not the best way to get the highest
# derivative of an element, but it works!
# Initialise dictionary of elements and the number of derivatives.
# (Note that elements are already mapped through the
# element_replace_map)
num_derivatives = dict((e, 0) for e in elements)
# Extract the derivatives from the integral.
derivatives = set(extract_type(expr, Grad)) | set(extract_type(expr,
ReferenceGrad))
# Loop derivatives and extract multiple derivatives.
for d in list(derivatives):
# After UFL has evaluated derivatives, only one element can be
# found inside any single Grad expression
elem, = extract_elements(d.ufl_operands[0])
elem = element_replace_map[elem]
# Set the number of derivatives to the highest value
# encountered so far.
num_derivatives[elem] = max(num_derivatives[elem],
len(extract_type(d, Grad)),
len(extract_type(d, ReferenceGrad)))
return num_derivatives
def _tabulate_empty_psi_table(tdim, deriv_order, element):
"Tabulate psi table when there are no points (custom integrals)."
# All combinations of partial derivatives up to given order
gdim = tdim # hack, consider passing gdim variable here
derivs = [d for d in itertools.product(*(gdim*[list(range(0, deriv_order + 1))]))]
derivs = [d for d in derivs if sum(d) <= deriv_order]
# Return empty table
table = {}
for d in derivs:
value_shape = element.value_shape()
if value_shape == ():
table[d] = [[]]
else:
value_size = product(value_shape)
table[d] = [[[] for c in range(value_size)]]
# Let entity be 0 even for non-cells, this is for
# custom integrals where we don't need tables to
# contain multiple entities
entity = 0
return {entity: table}
def _map_entity_points(cellname, tdim, points, entity_dim, entity):
# Not sure if this is useful anywhere else than in _tabulate_psi_table!
if entity_dim == tdim:
assert entity == 0
return points
elif entity_dim == tdim-1:
return map_facet_points(points, entity)
elif entity_dim == 0:
return (reference_cell_vertices(cellname)[entity],)
def _tabulate_psi_table(integral_type, cellname, tdim,
element, deriv_order, points):
"Tabulate psi table for different integral types."
# Handle case when list of points is empty
if points is None:
return _tabulate_empty_psi_table(tdim, deriv_order, element)
# Otherwise, call FIAT to tabulate
entity_dim = integral_type_to_entity_dim(integral_type, tdim)
num_entities = num_cell_entities[cellname][entity_dim]
psi_table = {}
for entity in range(num_entities):
entity_points = _map_entity_points(cellname, tdim, points, entity_dim, entity)
psi_table[entity] = element.tabulate(deriv_order, entity_points)
return psi_table
# MSA: This function is in serious need of some refactoring and
# splitting up. Or perhaps I should just add a new
# implementation for uflacs, but I'd rather not have two versions
# to maintain.
def tabulate_basis(sorted_integrals, form_data, itg_data):
"Tabulate the basisfunctions and derivatives."
# MER: Note to newbies: this code assumes that each integral in
# the dictionary of sorted_integrals that enters here has a
# unique number of quadrature points ...
# Initialise return values.
quadrature_rules = {}
psi_tables = {}
integrals = {}
avg_elements = {"cell": [], "facet": []}
# Get some useful variables in short form
integral_type = itg_data.integral_type
cell = itg_data.domain.ufl_cell()
cellname = cell.cellname()
tdim = itg_data.domain.topological_dimension()
entity_dim = integral_type_to_entity_dim(integral_type, tdim)
num_entities = num_cell_entities[cellname][entity_dim]
# Create canonical ordering of quadrature rules
rules = sorted(sorted_integrals.keys())
# Loop the quadrature points and tabulate the basis values.
for rule in rules:
scheme, degree = rule
# --------- Creating quadrature rule
# Make quadrature rule and get points and weights.
(points, weights) = create_quadrature_points_and_weights(integral_type,
cell, degree,
scheme)
# The TOTAL number of weights/points
num_points = None if weights is None else len(weights)
# Add points and rules to dictionary
if num_points in quadrature_rules:
error("This number of points is already present in the weight table: " + repr(quadrature_rules))
quadrature_rules[num_points] = (weights, points)
# --------- Store integral
# Add the integral with the number of points as a key to the
# return integrals.
integral = sorted_integrals[rule]
if num_points in integrals:
error("This number of points is already present in the integrals: " + repr(integrals))
integrals[num_points] = integral
# --------- Analyse UFL elements in integral
# Get all unique elements in integral.
ufl_elements = [form_data.element_replace_map[e]
for e in extract_unique_elements(integral)]
# Insert elements for x and J
domain = integral.ufl_domain() # FIXME: For all domains to be sure? Better to rewrite though.
x_element = domain.ufl_coordinate_element()
if x_element not in ufl_elements:
if integral_type in custom_integral_types:
# FIXME: Not yet implemented, in progress
# warning("Vector elements not yet supported in custom integrals so element for coordinate function x will not be generated.")
pass
else:
ufl_elements.append(x_element)
# Find all CellAvg and FacetAvg in integrals and extract
# elements
for avg, AvgType in (("cell", CellAvg), ("facet", FacetAvg)):
expressions = extract_type(integral, AvgType)
avg_elements[avg] = [form_data.element_replace_map[e]
for expr in expressions
for e in extract_unique_elements(expr)]
# Find the highest number of derivatives needed for each element
num_derivatives = _find_element_derivatives(integral.integrand(),
ufl_elements,
form_data.element_replace_map)
# Need at least 1 for the Jacobian
num_derivatives[x_element] = max(num_derivatives.get(x_element, 0), 1)
# --------- Evaluate FIAT elements in quadrature points and
# --------- store in tables
# Add the number of points to the psi tables dictionary
if num_points in psi_tables:
error("This number of points is already present in the psi table: " + repr(psi_tables))
psi_tables[num_points] = {}
# Loop FIAT elements and tabulate basis as usual.
for ufl_element in ufl_elements:
fiat_element = create_element(ufl_element)
# Tabulate table of basis functions and derivatives in
# points
psi_table = _tabulate_psi_table(integral_type, cellname, tdim,
fiat_element,
num_derivatives[ufl_element],
points)
# Insert table into dictionary based on UFL elements
# (None=not averaged)
avg = None
psi_tables[num_points][ufl_element] = { avg: psi_table }
# Loop over elements found in CellAvg and tabulate basis averages
num_points = 1
for avg in ("cell", "facet"):
# Doesn't matter if it's exterior or interior
if avg == "cell":
avg_integral_type = "cell"
elif avg == "facet":
avg_integral_type = "exterior_facet"
for element in avg_elements[avg]:
fiat_element = create_element(element)
# Make quadrature rule and get points and weights.
(points, weights) = create_quadrature_points_and_weights(avg_integral_type, cell, element.degree(), "default")
wsum = sum(weights)
# Tabulate table of basis functions and derivatives in
# points
entity_psi_tables = _tabulate_psi_table(avg_integral_type,
cellname, tdim,
fiat_element, 0, points)
rank = len(element.value_shape())
# Hack, duplicating table with per-cell values for each
# facet in the case of cell_avg(f) in a facet integral
if num_entities > len(entity_psi_tables):
assert len(entity_psi_tables) == 1
assert avg_integral_type == "cell"
assert "facet" in integral_type
v, = sorted(entity_psi_tables.values())
entity_psi_tables = dict((e, v) for e in range(num_entities))
for entity, deriv_table in sorted(entity_psi_tables.items()):
deriv, = sorted(deriv_table.keys()) # Not expecting derivatives of averages
psi_table = deriv_table[deriv]
if rank:
# Compute numeric integral
num_dofs, num_components, num_points = psi_table.shape
if num_points != len(weights):
error("Weights and table shape does not match.")
avg_psi_table = numpy.asarray([[[numpy.dot(psi_table[j, k, :], weights) / wsum]
for k in range(num_components)]
for j in range(num_dofs)])
else:
# Compute numeric integral
num_dofs, num_points = psi_table.shape
if num_points != len(weights):
error("Weights and table shape does not match.")
avg_psi_table = numpy.asarray([[numpy.dot(psi_table[j, :],
weights) / wsum] for j in range(num_dofs)])
# Insert table into dictionary based on UFL elements
insert_nested_dict(psi_tables, (num_points, element, avg,
entity, deriv), avg_psi_table)
return (integrals, psi_tables, quadrature_rules) | PypiClean |
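# Illustrative sketch (FIAT is importable wherever FFC is): the shape of the
# tables that element.tabulate() produces inside _tabulate_psi_table above --
# a dict mapping derivative multi-indices to (num_dofs, num_points) arrays.
import FIAT

ref_cell = FIAT.reference_element.UFCTriangle()
p1 = FIAT.Lagrange(ref_cell, 1)
quad_points = [(1.0 / 3.0, 1.0 / 3.0), (0.5, 0.25)]
table = p1.tabulate(1, quad_points)
for deriv in sorted(table):
    # (0, 0) holds basis values; (1, 0) and (0, 1) the x- and y-derivatives.
    print(deriv, table[deriv].shape)  # e.g. (0, 0) (3, 2)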
/node_managment_application-0.0.1.tar.gz/node_managment_application-0.0.1/nms_api_script.py |
import urllib.parse
import requests
from nms_project_settings.settings import IP_ADD
def post_request(path, app_key, body_json):
headers = {
"Authorization": app_key}
response = requests.post(urllib.parse.urljoin(API_URL, path), params=body_json, headers=headers)
return response
def get_request(path, app_key, json_payload):
# Set the Content-Type header to indicate JSON data
headers = {"Content-Type": "application/json", "Authorization": app_key}
return requests.get(urllib.parse.urljoin(API_URL, path), params=json_payload, headers=headers)
def put_request(path, app_key, body_json):
headers = {
"Authorization": app_key}
response = requests.put(urllib.parse.urljoin(API_URL, path), params=body_json, headers=headers)
return response
def node_id_func(api_key):
response = post_request('get_node_id/', api_key, {})
return response.json().get("Node Id", "")
def api_calls():
app_key = 'AR12532DE@#GH&67GF24GH45532$##FGG'
node_id = node_id_func(app_key)
while True:
print("1. Setup project")
print("2. Start project app")
print("3. Stop project app")
print("4. Restart project app")
print("5. Get node ip")
print("6. Get node location")
print("7. Upgrade node")
print("8. Get system config")
print("9. App health status")
print("10. Join network")
print("11. Leave network")
print("12. Get log")
print("13. Get node id")
print("14. Get health check status")
print("15. Exit")
print('\n')
try:
choice = int(input("Enter your choice : "))
except ValueError:
print("Please enter a number only.")
break
if choice == 1:
response = post_request('setup_project/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 2:
response = get_request('start_project_app/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 3:
response = get_request('stop_project_app/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 4:
response = get_request('restart_project_app/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 5:
response = get_request('get_node_ip/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print("node ip address : " + response.json().get("Node Ip",""))
else:
print("Something went wrong.")
print('\n')
elif choice == 6:
response = get_request('get_node_location/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print("node geo location : " + response.json().get(
"Node Address", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 7:
response = put_request('upgrade_node/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 8:
response = get_request('get_system_config/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("data", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 9:
response = get_request('app_health_status/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("data", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 10:
response = get_request('join_network/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 11:
response = get_request('leave_network/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 110:
network_status = int(input("Enter network_status : "))
open_ports_protocol = input("Enter open_ports_protocol : ")
open_ports_port_number = int(input("Enter open_ports_port_number : "))
close_ports_protocol = input("Enter close_ports_protocol : ")
close_ports_port_number = int(input("Enter close_ports_port_number : "))
response = post_request('set_firewall_request/', app_key,
{"network_status": network_status, "node_id": node_id,
"open_ports_protocol": open_ports_protocol,
"open_ports_port_number": open_ports_port_number,
"close_ports_protocol": close_ports_protocol,
"close_ports_port_number": close_ports_port_number})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
dt = response.json().get("data", "")
if dt:
print('Firewall setup details : \n', dt['data'])
else:
print('Something went wrong.')
else:
print("Something went wrong.")
print('\n')
elif choice == 12:
response = get_request('get_log/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 13:
response = post_request('get_node_id/', app_key, {})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print("Node Id : ", response.json().get("Node Id", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 14:
response = post_request('check_health_status/', app_key, {"node_id": node_id})
print('-------------------------------------------------------')
if response.status_code in [200, 401]:
print(response.json().get("Message", ""))
else:
print("Something went wrong.")
print('\n')
elif choice == 15:
break
else:
print('-------------------------------------------------------')
print("Invalid Choice")
break
API_URL = f"http://{IP_ADD}:9000/nms_app/"
api_calls() | PypiClean |
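# Minimal non-interactive sketch reusing the helpers above. Assumptions: the
# NMS server behind API_URL is reachable and accepts the same hard-coded
# application key; the response fields mirror the menu handlers.
def fetch_node_summary(app_key='AR12532DE@#GH&67GF24GH45532$##FGG'):
    node_id = node_id_func(app_key)
    ip_response = get_request('get_node_ip/', app_key, {"node_id": node_id})
    node_ip = ip_response.json().get("Node Ip", "") if ip_response.ok else None
    return {"node_id": node_id, "node_ip": node_ip}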
/NeuroUnits-0.1.2.tar.gz/NeuroUnits-0.1.2/src/neurounits/visitors/common/ast_replace_node.py |
# -------------------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -------------------------------------------------------------------------------
from neurounits.visitors.bases.base_visitor import ASTVisitorBase
import itertools
from neurounits.ast.astobjects import ASTObject
from neurounits.ast.eqnset import Block
from neurounits.visitors.common.terminal_node_collector import EqnsetVisitorNodeCollector
from neurounits.units_misc import LookUpDict
class ReplaceNode(ASTVisitorBase):
@classmethod
def replace_and_check(cls, srcObj, dstObj, root):
root = ReplaceNode(srcObj, dstObj).visit(root)
if srcObj in EqnsetVisitorNodeCollector(root).all():
from .ast_node_connections import ASTAllConnections
print 'A node has not been completely removed: %s' % srcObj
print 'The following are still connected:'
for node in EqnsetVisitorNodeCollector(root).all():
conns = ASTAllConnections().visit(node)
if srcObj in conns:
print ' node:', node
print 'OK'
# Lets make sure its completely gone:
assert not srcObj in EqnsetVisitorNodeCollector(root).all()
def __init__(self, srcObj, dstObj):
self.srcObj = srcObj
self.dstObj = dstObj
def replace_or_visit(self, o):
assert isinstance(o, (ASTObject,Block)), 'Not replacing with an ASTObject!: [%s] %s' % (o, type(o) )
assert isinstance(self.srcObj, (ASTObject, Block)), 'Not replacing with an existing ASTObject!: [%s] %s' % (self.srcObj, type(self.srcObj) )
if o == self.srcObj:
return self.dstObj
else:
if 'symbol' in o.__dict__ and hasattr(self.srcObj,'symbol'):
assert not o.symbol == self.srcObj.symbol, 'Symbol: %s' % o.symbol
return self.visit(o)
def replace(self, o, ):
if o == self.srcObj:
return self.dstObj
return o
#def visit(self, o, **kwargs):
# return o.accept_visitor(self, **kwargs)
def _replace_within_new_lut(self, lut):
from neurounits.units_misc import LookUpDict
new_lut = LookUpDict()
new_lut.unique_attrs = lut.unique_attrs
new_lut.accepted_obj_types = lut.accepted_obj_types
for o in lut:
new_lut._add_item( self.replace_or_visit(o) )
return new_lut
def VisitEventPortConnection(self, o, **kwargs):
o.dst_port = self.replace_or_visit(o.dst_port)
o.src_port = self.replace_or_visit(o.src_port)
return o
def VisitCompoundPortConnectorWireMapping(self, o, **kwargs):
o.interface_port = self.replace_or_visit(o.interface_port)
o.component_port = self.replace_or_visit(o.component_port)
return o
def VisitInterfaceWireContinuous(self, o, **kwargs):
return o
def VisitInterfaceWireEvent(self, o, **kwargs):
return o
def VisitInterface(self, o, **kwarg):
o.connections = self._replace_within_new_lut(o.connections)
return o
def VisitCompoundPortConnector(self, o, **kwargs):
o.wire_mappings = self._replace_within_new_lut(o.wire_mappings)
o.interface_def = self.replace_or_visit(o.interface_def)
return o
def VisitLibrary(self, o, **kwargs):
o._eqn_assignment = self._replace_within_new_lut(o._eqn_assignment)
o._function_defs = self._replace_within_new_lut(o._function_defs)
o._symbolicconstants = self._replace_within_new_lut(o._symbolicconstants)
return o
def VisitNineMLComponent(self, o, **kwargs):
o._transitions_events = self._replace_within_new_lut(o._transitions_events)
o._transitions_conditiontriggers = self._replace_within_new_lut(o._transitions_conditiontriggers)
o._rt_graphs = self._replace_within_new_lut(o.rt_graphs)
o._eqn_assignment = self._replace_within_new_lut(o._eqn_assignment)
o._eqn_time_derivatives = self._replace_within_new_lut(o._eqn_time_derivatives)
o._function_defs = self._replace_within_new_lut(o._function_defs)
o._symbolicconstants = self._replace_within_new_lut(o._symbolicconstants)
o._interface_connectors = self._replace_within_new_lut(o._interface_connectors)
o._event_port_connections = self._replace_within_new_lut(o._event_port_connections)
o._time_node = self.replace_or_visit(o._time_node)
return o
def VisitRTGraph(self, o, **kwargs):
if o.default_regime:
assert o.default_regime in o.regimes
o.regimes = self._replace_within_new_lut(o.regimes)
if o.default_regime:
o.default_regime = self.replace_or_visit(o.default_regime)
assert o.default_regime in o.regimes
return o
def VisitRegime(self, o, **kwargs):
# This is not a parent, so let's prevent recursion:
o.parent_rt_graph = self.replace(o.parent_rt_graph)
return o
def VisitOnEvent(self, o, **kwargs):
o.parameters = dict([(pName, self.replace_or_visit(p))
for (pName, p) in o.parameters.iteritems()])
o.actions = [self.replace_or_visit(a, **kwargs) for a in
o.actions]
return o
def VisitOnEventStateAssignment(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitSymbolicConstant(self, o, **kwargs):
return o
def VisitIfThenElse(self, o, **kwargs):
o.predicate = self.replace_or_visit(o.predicate, **kwargs)
o.if_true_ast = self.replace_or_visit(o.if_true_ast, **kwargs)
o.if_false_ast = self.replace_or_visit(o.if_false_ast, **kwargs)
return o
def VisitInEquality(self, o, **kwargs):
o.lesser_than = self.replace_or_visit(o.lesser_than)
o.greater_than = self.replace_or_visit(o.greater_than)
return o
def VisitBoolAnd(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs, **kwargs)
o.rhs = self.replace_or_visit(o.rhs, **kwargs)
return o
def VisitBoolOr(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs, **kwargs)
o.rhs = self.replace_or_visit(o.rhs, **kwargs)
return o
def VisitBoolNot(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs, **kwargs)
return o
# Function Definitions:
def VisitFunctionDefUser(self, o, **kwargs):
o.parameters = dict([(pName, self.replace_or_visit(p))
for (pName, p) in o.parameters.iteritems()])
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitFunctionDefBuiltIn(self, o, **kwargs):
return o
def VisitFunctionDefParameter(self, o, **kwargs):
return o
# Random Variables:
def VisitRandomVariable(self, o, **kwargs):
o.parameters = LookUpDict([ self.replace_or_visit(p) for p in o.parameters])
return o
def VisitRandomVariableParameter(self, p, **kwargs):
p.rhs_ast = self.replace_or_visit(p.rhs_ast)
return p
def VisitAutoRegressiveModel(self, p, **kwargs):
return p
# Terminals:
def VisitStateVariable(self, o, **kwargs):
if o.initial_value:
o.initial_value = self.replace_or_visit(o.initial_value)
return o
def VisitParameter(self, o, **kwargs):
return o
def VisitConstant(self, o, **kwargs):
return o
def VisitConstantZero(self, o, **kwargs):
return o
def VisitAssignedVariable(self, o, **kwargs):
return o
def VisitSuppliedValue(self, o, **kwargs):
return o
def VisitTimeVariable(self, o, **kwargs):
return o
def VisitAnalogReducePort(self, o, **kwargs):
o.rhses = [self.replace_or_visit(rhs) for rhs in o.rhses]
return o
# AST Objects:
def VisitTimeDerivativeByRegime(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs_map = self.replace_or_visit(o.rhs_map)
return o
def VisitRegimeDispatchMap(self, o, **kwargs):
o.rhs_map = dict([(self.replace_or_visit(reg), self.replace_or_visit(rhs)) for (reg,rhs) in o.rhs_map.items()])
return o
def VisitEqnAssignmentByRegime(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs_map = self.replace_or_visit(o.rhs_map)
return o
def VisitAddOp(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitSubOp(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitMulOp(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitDivOp(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitExpOp(self, o, **kwargs):
o.lhs = self.replace_or_visit(o.lhs)
return o
def VisitFunctionDefBuiltInInstantiation(self, o, **kwargs):
o.parameters = dict([(pName, self.replace_or_visit(p)) for (pName, p) in o.parameters.iteritems()])
o.function_def = self.replace_or_visit(o.function_def)
return o
def VisitFunctionDefUserInstantiation(self, o, **kwargs):
o.parameters = dict([(pName, self.replace_or_visit(p)) for (pName, p) in o.parameters.iteritems()])
o.function_def = self.replace_or_visit(o.function_def)
return o
def VisitFunctionDefInstantiationParameter(self, o, **kwargs):
o.rhs_ast = self.replace_or_visit(o.rhs_ast)
return o
def VisitOnConditionTriggerTransition(self, o, **kwargs):
o.trigger = self.replace_or_visit(o.trigger)
o.actions = [self.replace_or_visit(a) for a in o.actions]
o.src_regime = self.replace_or_visit(o.src_regime)
o.target_regime = self.replace_or_visit(o.target_regime)
return o
def VisitOnTransitionEvent(self, o, **kwargs):
o.port = self.replace_or_visit(o.port)
o.parameters = self._replace_within_new_lut(o.parameters)
o.actions = [self.replace_or_visit(a) for a in o.actions]
o.src_regime = self.replace_or_visit(o.src_regime)
o.target_regime = self.replace_or_visit(o.target_regime)
return o
def VisitOnEventDefParameter(self, o, **kwargs):
return o
def VisitEmitEvent(self, o, **kwargs):
o.parameters = self._replace_within_new_lut(o.parameters)
o.port = self.replace_or_visit(o.port)
return o
def VisitEmitEventParameter(self, o, **kwargs):
o.port_parameter_obj = self.replace_or_visit(o.port_parameter_obj)
o.rhs = self.replace_or_visit(o.rhs)
return o
def VisitInEventPort(self, o, **kwargs):
o.parameters = self._replace_within_new_lut(o.parameters)
return o
def VisitInEventPortParameter(self, o, **kwargs):
return o
def VisitOutEventPort(self, o, **kwargs):
o.parameters = self._replace_within_new_lut(o.parameters)
return o
def VisitOutEventPortParameter(self, o, **kwargs):
return o | PypiClean |
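# Illustrative sketch: how this visitor is typically driven. `component`,
# `old_node` and `new_node` are hypothetical AST objects built elsewhere with
# the neurounits API; only replace_and_check's signature comes from above.
def swap_node(component, old_node, new_node):
    # Rewrites every reference to old_node across the tree, then asserts that
    # old_node is fully disconnected (see replace_and_check above).
    ReplaceNode.replace_and_check(srcObj=old_node,
                                  dstObj=new_node,
                                  root=component)
    return component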
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_ru-by.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"\u0432\u043e\u0441\u043a\u0440\u0435\u0441\u0435\u043d\u044c\u0435",
"\u043f\u043e\u043d\u0435\u0434\u0435\u043b\u044c\u043d\u0438\u043a",
"\u0432\u0442\u043e\u0440\u043d\u0438\u043a",
"\u0441\u0440\u0435\u0434\u0430",
"\u0447\u0435\u0442\u0432\u0435\u0440\u0433",
"\u043f\u044f\u0442\u043d\u0438\u0446\u0430",
"\u0441\u0443\u0431\u0431\u043e\u0442\u0430"
],
"MONTH": [
"\u044f\u043d\u0432\u0430\u0440\u044f",
"\u0444\u0435\u0432\u0440\u0430\u043b\u044f",
"\u043c\u0430\u0440\u0442\u0430",
"\u0430\u043f\u0440\u0435\u043b\u044f",
"\u043c\u0430\u044f",
"\u0438\u044e\u043d\u044f",
"\u0438\u044e\u043b\u044f",
"\u0430\u0432\u0433\u0443\u0441\u0442\u0430",
"\u0441\u0435\u043d\u0442\u044f\u0431\u0440\u044f",
"\u043e\u043a\u0442\u044f\u0431\u0440\u044f",
"\u043d\u043e\u044f\u0431\u0440\u044f",
"\u0434\u0435\u043a\u0430\u0431\u0440\u044f"
],
"SHORTDAY": [
"\u0432\u0441",
"\u043f\u043d",
"\u0432\u0442",
"\u0441\u0440",
"\u0447\u0442",
"\u043f\u0442",
"\u0441\u0431"
],
"SHORTMONTH": [
"\u044f\u043d\u0432.",
"\u0444\u0435\u0432\u0440.",
"\u043c\u0430\u0440\u0442\u0430",
"\u0430\u043f\u0440.",
"\u043c\u0430\u044f",
"\u0438\u044e\u043d\u044f",
"\u0438\u044e\u043b\u044f",
"\u0430\u0432\u0433.",
"\u0441\u0435\u043d\u0442.",
"\u043e\u043a\u0442.",
"\u043d\u043e\u044f\u0431.",
"\u0434\u0435\u043a."
],
"fullDate": "EEEE, d MMMM y '\u0433'.",
"longDate": "d MMMM y '\u0433'.",
"medium": "d MMM y '\u0433'. H:mm:ss",
"mediumDate": "d MMM y '\u0433'.",
"mediumTime": "H:mm:ss",
"short": "dd.MM.yy H:mm",
"shortDate": "dd.MM.yy",
"shortTime": "H:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "BYR",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "ru-by",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (vf.v == 0 && i % 10 == 1 && i % 100 != 11) { return PLURAL_CATEGORY.ONE; } if (vf.v == 0 && i % 10 >= 2 && i % 10 <= 4 && (i % 100 < 12 || i % 100 > 14)) { return PLURAL_CATEGORY.FEW; } if (vf.v == 0 && i % 10 == 0 || vf.v == 0 && i % 10 >= 5 && i % 10 <= 9 || vf.v == 0 && i % 100 >= 11 && i % 100 <= 14) { return PLURAL_CATEGORY.MANY; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/CTBN-0.0.1-py3-none-any.whl/ctbn/ctbn.py | import numpy as np
from copy import deepcopy
from .parameter_learning_models import LRModel
from .graph_preliminaries import DirectedGraph
from .msa import ChuLiuEdmonds
from .inference import JunctionTreeAlgorithm
PARAMETER_STRATEGIES = {
"lr": LRModel
}
class Utils:
@staticmethod
def get_column_vector(array):
return np.expand_dims(array, axis=1)
@staticmethod
def get_row_vector(array):
return np.expand_dims(array, axis=0)
class CTBN:
def __init__(self, parameter_strategy="lr"):
self.parameter_strategy = parameter_strategy
self.optimal_graph = None
self.root_node = "root" #this is a special node to represent self-loops
def _generate_graph(self, X, Y):
num_nodes = Y.shape[1]
all_nodes = [self.root_node] + [str(i+1) for i in range(num_nodes)]
cpts = {n:{} for n in all_nodes}
graph_dict = {n:{} for n in all_nodes}
for i in range(num_nodes):
for j in range(num_nodes):
dst_node = str(j+1)
if i == j:
src_node = self.root_node
models, edge_weight = LRModel.get_cll(X, Y[:,j])
else:
src_node = str(i+1)
X_temp = np.hstack([
X, Utils.get_column_vector(Y[:,i])
])
models, edge_weight = LRModel.get_cll_with_parents(X_temp, Y[:,j])
cpts[src_node][dst_node] = models
graph_dict[src_node][dst_node] = {"weight":edge_weight}
return cpts, graph_dict
def _remove_root_node(self):
nodes = deepcopy(self.optimal_graph.nodes)
nodes.remove(self.root_node)
edges = [e.copy() for e in self.optimal_graph.edges if e.src != self.root_node]
return DirectedGraph(nodes, edges)
def fit(self, X, Y):
self.cpts, complete_graph_dict = self._generate_graph(X, Y)
graph = DirectedGraph.from_dict(complete_graph_dict)
self.optimal_graph = ChuLiuEdmonds().find_msa(graph)
return
def _get_factors(self, X):
factors = {}
for edge in self.optimal_graph.edges:
models = self.cpts[edge.src][edge.dst]
if edge.src == self.root_node:
sample = Utils.get_row_vector(X)
factors[edge.dst] = models.predict_log_proba(sample).T
else:
factor = np.zeros((2,2))
for val in (0,1):
sample = Utils.get_row_vector(
np.hstack([X, [val]])
)
factor[val] = models[val].predict_log_proba(sample)
factors[edge.dst] = factor
return factors
def predict(self, X):
assert self.optimal_graph, "The model is not fit to the dataset yet. Please run the fit method."
factors = self._get_factors(X)
graph = self._remove_root_node()
max_prob, prediction = JunctionTreeAlgorithm(graph, factors).find_max_prob_assignment()
return max_prob, prediction | PypiClean |
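# Minimal usage sketch (assumes binary label columns in Y): fit the network
# on features X and multi-label targets Y, then decode the jointly most
# probable label assignment for a single feature vector.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.normal(size=(200, 5))                       # 200 samples, 5 features
    Y = (rng.random(size=(200, 3)) > 0.5).astype(int)   # 3 binary labels
    model = CTBN()
    model.fit(X, Y)
    max_log_prob, assignment = model.predict(X[0])      # one sample at a time
    print(max_log_prob, assignment)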
/ESMValCore-2.9.0rc1.tar.gz/ESMValCore-2.9.0rc1/esmvalcore/esgf/_search.py | import itertools
import logging
from functools import lru_cache
import pyesgf.search
import requests.exceptions
from ..config._esgf_pyclient import get_esgf_config
from ..local import (
_get_start_end_date,
_parse_period,
_replace_years_with_timerange,
_truncate_dates,
)
from ._download import ESGFFile
from .facets import DATASET_MAP, FACETS
logger = logging.getLogger(__name__)
def get_esgf_facets(variable):
"""Translate variable to facets for searching on ESGF."""
project = variable.get('project', '')
facets = {'project': project}
for our_name, esgf_name in FACETS[project].items():
if our_name in variable:
values = variable[our_name]
if values == '*':
# Wildcards can be specified on ESGF by omitting the facet
continue
if isinstance(values, (tuple, list)):
values = list(values)
else:
values = [values]
for i, value in enumerate(values):
if our_name == 'dataset':
# Replace dataset name by ESGF name for dataset
values[i] = DATASET_MAP[project].get(value, value)
facets[esgf_name] = ','.join(values)
return facets
def select_latest_versions(files, versions):
"""Select only the latest version of files."""
result = []
def same_file(file):
"""Return a versionless identifier for a file."""
# Dataset without the version number
dataset = file.dataset.rsplit('.', 1)[0]
return (dataset, file.name)
if isinstance(versions, str):
versions = (versions, )
files = sorted(files, key=same_file)
for _, group in itertools.groupby(files, key=same_file):
group = sorted(group, reverse=True)
if versions:
selection = [f for f in group if f.facets['version'] in versions]
if not selection:
raise FileNotFoundError(
f"Requested versions {', '.join(versions)} of file not "
f"found. Available files: {group}")
group = selection
latest_version = group[0]
result.append(latest_version)
if len(group) > 1:
logger.debug("Only using the latest version %s, not %s",
latest_version, group[1:])
return result
FIRST_ONLINE_INDEX_NODE = None
"""Remember the first index node that is online."""
def _search_index_nodes(facets):
"""Search for files on ESGF.
Parameters
----------
facets: :obj:`dict` of :obj:`str`
Facets to constrain the search.
Raises
------
FileNotFoundError
If the function was unable to connect to ESGF.
Returns
-------
pyesgf.search.results.ResultSet
A ResultSet containing :obj:`pyesgf.search.results.FileResult`s.
"""
cfg = get_esgf_config()
search_args = dict(cfg["search_connection"])
urls = search_args.pop("urls")
global FIRST_ONLINE_INDEX_NODE
if FIRST_ONLINE_INDEX_NODE:
urls.insert(0, urls.pop(urls.index(FIRST_ONLINE_INDEX_NODE)))
errors = []
for url in urls:
connection = pyesgf.search.SearchConnection(url=url, **search_args)
context = connection.new_context(
pyesgf.search.context.FileSearchContext,
**facets,
)
logger.debug("Searching %s for datasets using facets=%s", url, facets)
try:
results = context.search(
batch_size=500,
ignore_facet_check=True,
)
FIRST_ONLINE_INDEX_NODE = url
return list(results)
except (
requests.exceptions.ConnectionError,
requests.exceptions.HTTPError,
requests.exceptions.Timeout,
) as error:
logger.debug("Unable to connect to %s due to %s", url, error)
errors.append(error)
raise FileNotFoundError("Failed to search ESGF, unable to connect:\n" +
"\n".join(f"- {e}" for e in errors))
def esgf_search_files(facets):
"""Search for files on ESGF.
Parameters
----------
facets: :obj:`dict` of :obj:`str`
Facets to constrain the search.
Returns
-------
list of :py:class:`~ESGFFile`
The found files.
"""
results = _search_index_nodes(facets)
files = ESGFFile._from_results(results, facets)
msg = 'none' if not files else '\n' + '\n'.join(str(f) for f in files)
logger.debug("Found the following files matching facets %s: %s", facets,
msg)
return files
def select_by_time(files, timerange):
"""Select files containing data between a timerange."""
if '*' in timerange:
# TODO: support * combined with a period
return files
selection = []
for file in files:
start_date, end_date = _parse_period(timerange)
try:
start, end = _get_start_end_date(file.name)
except ValueError:
# If start and end year cannot be read from the filename
# just select everything.
selection.append(file)
else:
start_date, end = _truncate_dates(start_date, end)
end_date, start = _truncate_dates(end_date, start)
if start <= end_date and end >= start_date:
selection.append(file)
return selection
def find_files(*, project, short_name, dataset, **facets):
"""Search for files on ESGF.
Parameters
----------
project : str
Choose from CMIP3, CMIP5, CMIP6, CORDEX, or obs4MIPs.
short_name : str
The name of the variable.
dataset : str
The name of the dataset.
**facets : typing.Union[str, list[str]]
Any other search facets. An ``'*'`` can be used to match
any value. By default, only the latest version of a file will
be returned. To select all versions use ``version='*'`` while other
omitted facets will default to ``'*'``. It is also
possible to specify multiple values for a facet, e.g.
``exp=['historical', 'ssp585']`` will match any file that belongs
to either the historical or ssp585 experiment.
The ``timerange`` facet can be specified in `ISO 8601 format
<https://en.wikipedia.org/wiki/ISO_8601>`__.
Note
----
A value of ``timerange='*'`` is supported, but combining a ``'*'`` with
a time or period :ref:`as supported in the recipe <datasets>` is currently
not supported and will return all found files.
Examples
--------
Examples of how to use this function for all supported projects.
Search for a CMIP3 dataset:
>>> find_files(
... project='CMIP3',
... frequency='mon',
... short_name='tas',
... dataset='cccma_cgcm3_1',
... exp='historical',
... ensemble='run1',
... ) # doctest: +SKIP
[ESGFFile:cmip3/CCCma/cccma_cgcm3_1/historical/mon/atmos/run1/tas/v1/tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc]
Search for a CMIP5 dataset:
>>> find_files(
... project='CMIP5',
... mip='Amon',
... short_name='tas',
... dataset='inmcm4',
... exp='historical',
... ensemble='r1i1p1',
... ) # doctest: +SKIP
[ESGFFile:cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc]
Search for a CMIP6 dataset:
>>> find_files(
... project='CMIP6',
... mip='Amon',
... short_name='tas',
... dataset='CanESM5',
... exp='historical',
... ensemble='r1i1p1f1',
... ) # doctest: +SKIP
[ESGFFile:CMIP6/CMIP/CCCma/CanESM5/historical/r1i1p1f1/Amon/tas/gn/v20190429/tas_Amon_CanESM5_historical_r1i1p1f1_gn_185001-201412.nc]
Search for a CORDEX dataset and limit the search results to files
containing data to the years in the range 1990-2000:
>>> find_files(
... project='CORDEX',
... frequency='mon',
... dataset='COSMO-crCLIM-v1-1',
... short_name='tas',
... exp='historical',
... ensemble='r1i1p1',
... domain='EUR-11',
... driver='MPI-M-MPI-ESM-LR',
... timerange='1990/2000',
... ) # doctest: +SKIP
[ESGFFile:cordex/output/EUR-11/CLMcom-ETH/MPI-M-MPI-ESM-LR/historical/r1i1p1/COSMO-crCLIM-v1-1/v1/mon/tas/v20191219/tas_EUR-11_MPI-M-MPI-ESM-LR_historical_r1i1p1_CLMcom-ETH-COSMO-crCLIM-v1-1_v1_mon_198101-199012.nc,
ESGFFile:cordex/output/EUR-11/CLMcom-ETH/MPI-M-MPI-ESM-LR/historical/r1i1p1/COSMO-crCLIM-v1-1/v1/mon/tas/v20191219/tas_EUR-11_MPI-M-MPI-ESM-LR_historical_r1i1p1_CLMcom-ETH-COSMO-crCLIM-v1-1_v1_mon_199101-200012.nc]
Search for an obs4MIPs dataset:
>>> find_files(
... project='obs4MIPs',
... frequency='mon',
... dataset='CERES-EBAF',
... short_name='rsutcs',
... ) # doctest: +SKIP
[ESGFFile:obs4MIPs/NASA-LaRC/CERES-EBAF/atmos/mon/v20160610/rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc]
Search for any ensemble member:
>>> find_files(
... project='CMIP6',
... mip='Amon',
... short_name='tas',
... dataset='BCC-CSM2-MR',
... exp='historical',
... ensemble='*',
... ) # doctest: +SKIP
[ESGFFile:CMIP6/CMIP/BCC/BCC-CSM2-MR/historical/r1i1p1f1/Amon/tas/gn/v20181126/tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc,
ESGFFile:CMIP6/CMIP/BCC/BCC-CSM2-MR/historical/r2i1p1f1/Amon/tas/gn/v20181115/tas_Amon_BCC-CSM2-MR_historical_r2i1p1f1_gn_185001-201412.nc,
ESGFFile:CMIP6/CMIP/BCC/BCC-CSM2-MR/historical/r3i1p1f1/Amon/tas/gn/v20181119/tas_Amon_BCC-CSM2-MR_historical_r3i1p1f1_gn_185001-201412.nc]
Search for all available versions of a file:
>>> find_files(
... project='CMIP5',
... mip='Amon',
... short_name='tas',
... dataset='CCSM4',
... exp='historical',
... ensemble='r1i1p1',
... version='*',
... ) # doctest: +SKIP
[ESGFFile:cmip5/output1/NCAR/CCSM4/historical/mon/atmos/Amon/r1i1p1/v20121031/tas_Amon_CCSM4_historical_r1i1p1_185001-200512.nc,
ESGFFile:cmip5/output1/NCAR/CCSM4/historical/mon/atmos/Amon/r1i1p1/v20130425/tas_Amon_CCSM4_historical_r1i1p1_185001-200512.nc,
ESGFFile:cmip5/output1/NCAR/CCSM4/historical/mon/atmos/Amon/r1i1p1/v20160829/tas_Amon_CCSM4_historical_r1i1p1_185001-200512.nc]
Search for a specific version of a file:
>>> find_files(
... project='CMIP5',
... mip='Amon',
... short_name='tas',
... dataset='CCSM4',
... exp='historical',
... ensemble='r1i1p1',
... version='v20130425',
... ) # doctest: +SKIP
[ESGFFile:cmip5/output1/NCAR/CCSM4/historical/mon/atmos/Amon/r1i1p1/v20130425/tas_Amon_CCSM4_historical_r1i1p1_185001-200512.nc]
Returns
-------
:obj:`list` of :obj:`ESGFFile`
A list of files that have been found.
""" # pylint: disable=locally-disabled, line-too-long
if project not in FACETS:
raise ValueError(
f"Unable to download from ESGF, because project {project} is not"
" on it or is not supported by the esmvalcore.esgf module.")
# The project is required for the function to work.
facets['project'] = project
# The dataset and short_name facet are not strictly required,
# but without these it seems likely that the user is requesting
# more results than they intended.
facets['dataset'] = dataset
facets['short_name'] = short_name
# Convert lists to tuples to allow caching results
for facet, value in facets.items():
if isinstance(value, list):
facets[facet] = tuple(value)
return cached_search(**facets)
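# Illustrative note (not part of the original module): functools.lru_cache
# hashes its arguments, so every facet value must be hashable -- this is why
# the loop above converts list-valued facets to tuples. A minimal sketch:
#
#     from functools import lru_cache
#
#     @lru_cache(10000)
#     def search(**facets):
#         return facets
#
#     search(ensemble=('r1i1p1f1', 'r2i1p1f1'))  # fine: tuples are hashable
#     search(ensemble=['r1i1p1f1', 'r2i1p1f1'])  # TypeError: unhashable type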
@lru_cache(10000)
def cached_search(**facets):
"""Search for files on ESGF.
A cached search function will speed up recipes that use the same
variable multiple times.
"""
esgf_facets = get_esgf_facets(facets)
files = esgf_search_files(esgf_facets)
if 'version' not in facets or facets['version'] != '*':
files = select_latest_versions(files, facets.get('version'))
_replace_years_with_timerange(facets)
if 'timerange' in facets:
files = select_by_time(files, facets['timerange'])
logger.debug("Selected files:\n%s", '\n'.join(str(f) for f in files))
return files | PypiClean |
/GenIce-1.0.11.tar.gz/GenIce-1.0.11/genice/lattices/Struct57.py | pairs="""
8 79
132 124
126 134
61 149
49 142
48 36
10 159
136 104
110 3
28 18
29 3
41 149
20 140
67 72
28 136
34 68
139 81
152 71
20 12
131 133
104 49
72 14
109 53
152 133
37 73
86 121
13 133
134 145
108 55
106 54
147 127
135 151
32 153
21 45
85 132
8 55
86 137
92 48
41 112
126 14
69 9
159 142
109 47
83 94
86 96
17 58
13 19
102 143
109 159
46 149
28 82
78 50
115 70
108 142
97 120
44 53
24 61
17 158
5 31
115 126
0 105
137 101
60 89
31 53
40 95
48 156
117 76
26 49
35 3
35 73
96 103
84 89
110 11
127 68
121 41
78 36
157 52
144 96
65 148
69 112
20 30
83 46
29 89
16 73
92 102
22 74
100 155
109 148
100 157
63 62
123 47
117 87
108 15
106 21
124 75
1 150
156 143
107 25
22 87
11 0
106 74
52 111
146 111
133 81
25 57
91 140
59 130
125 116
96 87
144 49
26 103
0 129
68 71
2 131
27 34
18 137
148 57
115 153
144 33
67 73
61 39
131 45
125 87
43 45
7 18
23 8
147 24
8 113
62 122
29 127
75 46
141 3
154 25
74 77
1 54
86 139
11 113
34 155
9 104
107 32
22 57
156 50
44 66
60 135
93 101
93 88
6 103
148 77
156 32
100 72
29 130
102 123
10 26
136 78
90 61
107 93
134 32
7 40
26 76
150 56
84 158
154 82
123 129
89 138
125 131
124 40
35 99
27 98
5 159
151 146
55 70
121 39
64 132
134 66
90 98
128 84
52 38
92 4
67 62
94 152
81 39
78 65
93 85
38 79
104 51
120 98
130 24
64 46
80 119
55 51
97 157
88 9
13 95
18 41
7 54
125 139
23 42
74 103
30 79
2 95
27 40
10 77
37 63
12 35
152 43
16 59
33 105
36 77
52 84
147 19
13 68
150 75
114 124
16 94
136 101
7 56
138 111
6 116
145 53
118 70
15 138
155 62
116 76
129 118
16 141
4 47
145 65
9 76
141 71
150 122
48 25
63 75
119 38
97 128
153 51
108 11
64 139
129 151
42 143
141 135
83 81
12 90
27 63
60 59
50 51
110 138
54 82
2 22
132 137
44 146
58 122
17 71
67 140
30 99
4 117
119 14
21 19
144 101
91 79
1 45
107 65
60 118
85 69
23 50
56 149
64 112
106 95
114 154
157 30
142 105
72 99
33 88
58 43
126 113
114 1
114 2
58 128
0 115
4 57
19 39
116 112
59 140
44 123
80 146
98 56
97 34
91 14
117 33
23 145
31 111
15 5
21 6
120 122
128 127
6 121
147 43
66 143
80 158
28 69
47 105
91 70
12 130
90 37
99 113
94 24
31 42
85 154
110 151
37 83
42 38
15 118
20 120
92 10
66 119
135 158
100 80
88 153
17 155
82 36
102 5
"""
waters="""
0.8125 0.80241 0.19017
0.5 0.25631 0.72057
0.8125 0.19759 0.80983
0.0 0.57611 0.125
0.6875 0.97648 0.9375
0.3125 0.82131 0.94195
0.3125 0.17869 0.05805
0.1875 0.24211 0.56695
0.1875 0.75789 0.43305
0.5 0.01882 0.28211
0.3125 0.97648 0.9375
0.0 0.74369 0.22057
0.1875 0.50402 0.3125
0.0 0.33606 0.9033
0.6875 0.67813 0.49612
0.3125 0.75789 0.06695
0.6875 0.50402 0.1875
0.6875 0.49598 0.8125
0.1875 0.17869 0.44195
0.1875 0.32187 0.99612
0.3125 0.53806 0.4375
0.3125 0.24211 0.93305
0.875 0.11065 0.87233
0.1875 0.82131 0.55805
0.375 0.41372 0.15533
0.6875 0.03293 0.6875
0.3125 0.02352 0.0625
0.0 0.37164 0.59468
0.3125 0.10594 0.47228
0.1875 0.53806 0.0625
0.1875 0.60732 0.49879
0.1875 0.77679 0.81517
0.6875 0.89406 0.52773
0.8125 0.96707 0.1875
0.0 0.44091 0.71903
0.0 0.55909 0.28097
0.3125 0.03293 0.6875
0.875 0.41372 0.34468
0.375 0.69233 0.6533
0.125 0.30767 0.1533
0.0 0.27487 0.62678
0.3125 0.22321 0.31517
0.3125 0.77679 0.68483
0.5 0.37164 0.90533
0.8125 0.77679 0.81517
0.5 0.27487 0.87322
0.625 0.30767 0.3467
0.8125 0.89406 0.97228
0.5 0.98118 0.7179
0.1875 0.96707 0.1875
0.3125 0.89406 0.52773
0.375 0.88936 0.37233
0.25 0.63854 0.75
0.0 0.82602 0.783
0.3125 0.19759 0.69017
0.3125 0.80241 0.30983
0.3125 0.32187 0.50388
0.8125 0.03293 0.8125
0.5 0.44091 0.78097
0.5 0.55909 0.21903
0.5 0.62836 0.09468
0.25 0.36147 0.25
0.6875 0.46195 0.5625
0.8125 0.39268 0.50121
0.6875 0.22321 0.31517
0.0 0.95296 0.625
0.6875 0.77679 0.68483
0.6875 0.53806 0.4375
0.0 0.42389 0.875
0.5 0.10124 0.37767
0.5 0.74369 0.27944
0.8125 0.46195 0.9375
0.8125 0.60732 0.49879
0.8125 0.50402 0.3125
0.125 0.11065 0.87233
0.6875 0.32187 0.50388
0.5 0.04705 0.125
0.1875 0.03293 0.8125
0.1875 0.97648 0.5625
0.3125 0.67813 0.49612
0.75 0.63854 0.75
0.875 0.30767 0.1533
0.375 0.11065 0.62767
0.75 0.36147 0.25
0.375 0.58629 0.84468
0.6875 0.10594 0.47228
0.0 0.17399 0.21701
0.8125 0.10594 0.02773
0.6875 0.96707 0.3125
0.3125 0.60732 0.00121
0.125 0.41372 0.34468
0.5 0.66394 0.4033
0.5 0.95296 0.875
0.8125 0.02352 0.4375
0.625 0.41372 0.15533
0.0 0.25631 0.77944
0.0 0.10124 0.12233
0.1875 0.49598 0.6875
0.1875 0.39268 0.50121
0.0 0.62836 0.40533
0.875 0.58629 0.65533
0.0 0.04705 0.375
0.5 0.85198 0.87678
0.1875 0.10594 0.02773
0.3125 0.96707 0.3125
0.875 0.88936 0.12767
0.1875 0.19759 0.80983
0.8125 0.97648 0.5625
0.1875 0.80241 0.19017
0.0 0.89877 0.87767
0.0 0.66394 0.0967
0.125 0.69233 0.8467
0.5 0.17399 0.283
0.0 0.72513 0.37322
0.6875 0.19759 0.69017
0.6875 0.80241 0.30983
0.5 0.14802 0.12322
0.6875 0.02352 0.0625
0.5 0.72513 0.12678
0.625 0.69233 0.6533
0.3125 0.46195 0.5625
0.1875 0.22321 0.18483
0.5 0.42389 0.625
0.6875 0.82131 0.94195
0.8125 0.24211 0.56695
0.6875 0.17869 0.05805
0.8125 0.75789 0.43305
0.1875 0.46195 0.9375
0.3125 0.49598 0.8125
0.6875 0.75789 0.06695
0.3125 0.50402 0.1875
0.6875 0.24211 0.93305
0.8125 0.17869 0.44195
0.8125 0.32187 0.99612
0.8125 0.82131 0.55805
0.6875 0.60732 0.00121
0.1875 0.02352 0.4375
0.0 0.14802 0.37678
0.1875 0.67813 0.00388
0.8125 0.22321 0.18483
0.5 0.57611 0.375
0.8125 0.53806 0.0625
0.125 0.88936 0.12767
0.5 0.82602 0.71701
0.0 0.01882 0.2179
0.0 0.85198 0.62322
0.875 0.69233 0.8467
0.3125 0.39268 0.99879
0.0 0.98118 0.78211
0.375 0.30767 0.3467
0.5 0.33606 0.5967
0.8125 0.67813 0.00388
0.6875 0.39268 0.99879
0.625 0.88936 0.37233
0.625 0.11065 0.62767
0.8125 0.49598 0.6875
0.5 0.89877 0.62233
0.125 0.58629 0.65533
0.625 0.58629 0.84468
0.1875 0.89406 0.97228
"""
coord= "relative"
cages="""
12 0.0 -0.21676 1.01068
14 0.0 0.43193 0.12387
14 -0.25 -0.65222 -0.25
12 0.25 0.09409 0.25
12 0.5 0.21676 0.51068
12 -0.25 -0.09409 -0.25
14 0.25 0.65222 0.25
16 0.5 0.11291 -0.12842
12 0.5 -0.5 1.0
14 0.5 -0.291 0.8787
14 0.25 -0.65222 0.75
14 0.5 0.291 0.1213
16 0.0 0.11291 0.62842
12 0.25 -0.09409 0.75
14 0.5 0.43193 0.37613
12 0.0 0.5 0.5
14 0.0 -0.291 -0.3787
12 0.0 0.21676 -0.01068
16 0.0 -0.11291 -0.62842
12 -0.5 -0.21676 -0.51068
14 -0.25 0.65222 -0.75
12 0.5 0.0 0.5
14 0.0 -0.43193 -0.12387
12 -0.25 0.09409 -0.75
14 0.5 -0.43193 0.62387
16 0.5 -0.11291 1.12842
14 0.0 0.291 0.3787
12 0.0 0.0 0.0
"""
bondlen = 3
cell = """
13.167286191434481 31.492589961461622 18.629903136229707
"""
density = 0.6190653349484135
from genice.cell import cellvectors
cell = cellvectors(a=13.167286191434481,
b=31.492589961461622,
c=18.629903136229707) | PypiClean |
/LightZero-0.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/zoo/board_games/tictactoe/config/tictactoe_muzero_bot_mode_config.py | from easydict import EasyDict
# ==============================================================
# begin of the most frequently changed config specified by the user
# ==============================================================
collector_env_num = 8
n_episode = 8
evaluator_env_num = 5
num_simulations = 25
update_per_collect = 40
batch_size = 256
max_env_step = int(2e6)
reanalyze_ratio = 0.
# ==============================================================
# end of the most frequently changed config specified by the user
# ==============================================================
tictactoe_muzero_config = dict(
exp_name=
f'data_mz_ctree/tictactoe_muzero_bot-mode_ns{num_simulations}_upc{update_per_collect}_rr{reanalyze_ratio}_seed0',
env=dict(
battle_mode='play_with_bot_mode',
collector_env_num=collector_env_num,
evaluator_env_num=evaluator_env_num,
n_evaluator_episode=evaluator_env_num,
manager=dict(shared_memory=False, ),
),
policy=dict(
model=dict(
observation_shape=(3, 3, 3),
action_space_size=9,
image_channel=3,
# We use the small size model for tictactoe.
num_res_blocks=1,
num_channels=16,
fc_reward_layers=[8],
fc_value_layers=[8],
fc_policy_layers=[8],
support_scale=10,
reward_support_size=21,
value_support_size=21,
),
cuda=True,
env_type='board_games',
update_per_collect=update_per_collect,
batch_size=batch_size,
optim_type='AdamW',
lr_piecewise_constant_decay=False,
learning_rate=0.003, # lr for Adam optimizer
grad_clip_value=0.5,
manual_temperature_decay=True,
num_simulations=num_simulations,
reanalyze_ratio=reanalyze_ratio,
game_segment_length=5,
# NOTE: In board_games, we set a large td_steps to make sure the value target is the final outcome.
td_steps=9,
num_unroll_steps=3,
discount_factor=1,
n_episode=n_episode,
eval_freq=int(2e3),
replay_buffer_size=int(3e3),
collector_env_num=collector_env_num,
evaluator_env_num=evaluator_env_num,
),
)
tictactoe_muzero_config = EasyDict(tictactoe_muzero_config)
main_config = tictactoe_muzero_config
tictactoe_muzero_create_config = dict(
env=dict(
type='tictactoe',
import_names=['zoo.board_games.tictactoe.envs.tictactoe_env'],
),
env_manager=dict(type='subprocess'),
policy=dict(
type='muzero',
import_names=['lzero.policy.muzero'],
),
collector=dict(
type='episode_muzero',
import_names=['lzero.worker.muzero_collector'],
)
)
tictactoe_muzero_create_config = EasyDict(tictactoe_muzero_create_config)
create_config = tictactoe_muzero_create_config
if __name__ == "__main__":
from lzero.entry import train_muzero
train_muzero([main_config, create_config], seed=0, max_env_step=max_env_step) | PypiClean |
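# Usage sketch (illustrative, not from the original file): since main_config is
# an EasyDict, individual settings can be overridden before training, e.g.
#
#     main_config.policy.learning_rate = 1e-3
#     train_muzero([main_config, create_config], seed=0, max_env_step=int(1e5))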
/GraphForm-0.1.1.tar.gz/GraphForm-0.1.1/graphform.py | from logging import getLogger, basicConfig, INFO, DEBUG
import sys
from p5 import *
import itertools as it
import sys
import numpy as np
import networkx as nx
from attrdict import AttrDict
from PIL.ImageFont import truetype
__version__ = "0.1.1"
class Interaction:
def __init__(self, forcefunc):
self.func = forcefunc
def force(self, pairs, vertices):
for a, b in pairs:
vertex0 = vertices[a]
vertex1 = vertices[b]
d = vertex0.position - vertex1.position
r = np.linalg.norm(d)
f = self.func(r) * d / r
vertex0.force -= f
vertex1.force += f
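# Usage sketch (illustrative, not part of the original module): a Hooke-type
# spring of rest length 100 acting between two vertices; `Vertex` is defined
# further down in this module.
#
#     vs = {"A": Vertex("A"), "B": Vertex("B")}
#     spring = Interaction(lambda r: 0.1 * (r - 100.0))
#     spring.force([("A", "B")], vs)  # accumulates into vs["A"].force, vs["B"].force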
def debug(func):
def wrapper(*args, **kwargs):
logger = getLogger()
logger.debug(func.__name__)
func(*args, **kwargs)
return wrapper
def Depth(pi, pj, pk):
return pi[2] + pj[2] + pk[2]
def ArrangedColor(a, b, c, decay):
logger = getLogger()
ab = b - a
ac = c - a
# normal vector
n = np.cross(ab, ac)
n /= np.linalg.norm(n)
hue = abs(np.sum(n)) / sqrt(3.0)
#hue = hue + 0.5
if hue > 1.0:
hue -= 1.0
# final opacity is A+B
A = 0.3
B = 0.4
opacity = (1. - 0.99**decay) * A + B
return Color(hue, 0.8, abs(n[2]) * 0.4 + 0.6, alpha=opacity)
def perspective(v, eyepos=None):
if eyepos is None:
return v[:2]
zoom = eyepos / (eyepos - v[2])
return v[:2] * zoom
class Vertex():
"""
A vertex is a point mass with a label.
"""
def __init__(self, label, pos=None):
self.label = str(label)
if pos is None:
self.position = np.random.random(3) * 500
else:
self.position = pos
self.velocity = np.zeros(3)
self.force = np.zeros(3)
def perspective(self, eyepos=None):
return perspective(self.position, eyepos)
def force2vel(self):
self.velocity = self.force + 0  # "+ 0" forces a copy of the force array
def progress(self, deltatime):
self.position += self.velocity * deltatime
# def draw(self):
# ellipse(self.position[0] - 1, self.position[1] - 1, 2, 2)
def resetf(self):
self.force = np.zeros(3)
def drawfaces_(faces, vertices):
logger = getLogger()
stroke(0)
stroke_weight(2)
k = faces.keys()
for face in sorted(k, key=lambda x: -faces[x].depth):
# triangle = PShape()
va, vb, vc = face
a = vertices[va].perspective()
b = vertices[vb].perspective()
c = vertices[vc].perspective()
fill(faces[face].color)
triangle(a, b, c)
class GraphForm():
@debug
def __init__(self, pairs):
self.repulse = 0
self.hold = None
self.keyhold = None
self.showface = True
self.showlabel = True
self.showtetrag = True
self.decay = 0
self.vertices = dict()
self.triangles = dict()
self.tetrahedra = set()
self.tetrag = nx.Graph() # adjacency graph of tetrahedra
K = 2.5
KR = 20
R0 = 200
# If the cube edge is 1, the face diagonal is √2, the body diagonal is √3,
# and the distance between adjacent tetrahedra is √3/3.
# Since the face diagonal is taken as the unit length here:
RT = R0 * (1 / 6)**0.5
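# Derivation note (added for clarity): with the face diagonal as the unit
# length R0, the cube edge is R0/√2, so the centre-to-centre distance of
# adjacent tetrahedra is (√3/3)·(R0/√2) = R0/√6 = R0 * (1/6)**0.5, as above.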
KT = 20
self.attractive = Interaction(lambda r: K * (r - R0))
def repel(r, K, rmin):
"""
force function for replusive pairs
"""
if r < rmin:
return K * (r - rmin)
return 0
self.repulsive = Interaction(lambda r: repel(r, KR, R0 * 1.2))
self.frames = 0
labels = set()
for i, j in pairs:
labels.add(i)
labels.add(j)
labels = list(labels)
self.g = nx.Graph(pairs)
for i in labels:
#position, velocity, force
self.vertices[i] = Vertex(i)
self.reps = [
(i, j) for i, j in it.combinations(
labels, 2) if not self.g.has_edge(
i, j)]
for i, j in self.g.edges():
for v in self.g[j]:
if v in self.g[i]:
s = tuple(sorted([i, j, v]))
self.triangles[s] = AttrDict(
{"depth": None, "color": None})
for i, j, k in self.triangles:
adj = tuple(set(self.g[i]) & set(self.g[j]) & set(self.g[k]))
assert len(adj) <= 2
pair = []
for l in adj:
s = tuple(sorted([i, j, k, l]))
self.tetrahedra.add(s)
pair.append(s)
if len(pair) == 2:
self.tetrag.add_edge(*pair)
self.tetrepul = Interaction(lambda r: repel(r, KT, RT))
# virtual vertices for tetrahedra
self.vtet = dict()
for t in self.tetrag:
self.vtet[t] = Vertex(t)
def drawtetranetwork(self):
tpos = dict()
for tetra in self.tetrag:
com = np.zeros(3)
for v in tetra:
com += self.vertices[v].position
tpos[tetra] = com / 4
for edge in self.tetrag.edges:
t1, t2 = edge
p1 = perspective(tpos[t1])
p2 = perspective(tpos[t2])
stroke(1 / 6, 1, 0.8) # yellow
stroke_weight(4)
line(p1, p2)
def drawfaces(self):
for i, j, k in self.triangles.keys():
self.triangles[(i, j, k)].depth = Depth(
self.vertices[i].position, self.vertices[j].position, self.vertices[k].position)
self.triangles[(i,
j,
k)].color = ArrangedColor(self.vertices[i].position,
self.vertices[j].position,
self.vertices[k].position,
self.decay)
drawfaces_(self.triangles, self.vertices)
def drawedges(self):
for a, b in self.g.edges():
vertex0 = self.vertices[a].perspective()
vertex1 = self.vertices[b].perspective()
line(vertex0, vertex1)
def drawlabels(self):
for v in self.vertices.values():
vv = v.perspective()
fill(0)
no_stroke()
text(v.label, vv[0], vv[1])
def draw(self):
logger = getLogger()
background(1, 0, 1) # hsb
self.frames += 1
if self.frames == 100:
self.repulse = 0
self.decay += 1
self.attractive.force(self.g.edges(), self.vertices)
if self.repulse:
fill(0)
no_stroke()
text("Repulsive", 40, 40)
self.repulsive.force(self.reps, self.vertices)
# Always keep tetrahedra from overlapping one another.
# if self.showtetrag:
for vertex in self.vtet.values():
vertex.resetf()
for tetra, vertex in self.vtet.items():
com = np.zeros(3)
for v in tetra:
com += self.vertices[v].position
vertex.position = com / 4
self.tetrepul.force(it.combinations(self.vtet, 2), self.vtet)
# feedback the forces to its vertices
for tetra, vertex in self.vtet.items():
f = vertex.force
for v in tetra:
self.vertices[v].force += f
for vertex in self.vertices.values():
vertex.force2vel()
vertex.progress(0.05)
vertex.resetf()
stroke(0)
if self.showface:
self.drawfaces()
else:
self.drawedges()
if self.showlabel:
self.drawlabels()
if self.showtetrag:
self.drawtetranetwork()
# Drag a node with the mouse.
if mouse_is_pressed:
self.decay = 0  # reset the face-colour decay while dragging
if self.hold is None:
dmin = 100000.0
for vertex in self.vertices.values():
pixel = vertex.perspective()
dx = mouse_x - pixel[0]
dy = mouse_y - pixel[1]
d = dx**2 + dy**2
if d < dmin:
dmin = d
self.hold = vertex
pixel = self.hold.perspective()
dx = mouse_x - pixel[0]
dy = mouse_y - pixel[1]
self.hold.position[0] += dx / 2
self.hold.position[1] += dy / 2
else:
self.hold = None
if key_is_pressed:
if not self.keyhold:
if key == "s":
save("graphform.png")
print("Saved")
if key == "r":
if not self.repulse:
self.repulse = 1
else:
self.repulse -= 1
if key == "f":
self.showface = not self.showface
if key == "l":
self.showlabel = not self.showlabel
if key == "t":
self.showtetrag = not self.showtetrag
if key == "q":
sys.exit(0)
self.keyhold = True
else:
self.keyhold = None
@debug
def setup(self):
size(512, 512)
color_mode('HSB', 1, 1, 1, 1)
font = truetype("Arial.ttf", size=16)
text_font(font)
if __name__ == "__main__":
# basicConfig(level=INFO, format="%(levelname)s %(message)s")
basicConfig(level=DEBUG, format="%(levelname)s %(message)s")
logger = getLogger()
logger.debug("Debug mode.")
pairs = [("A", "B"), ("B", "C"), ("C", "D"), ("D", "E"), ("E", "F"), ("Z", "N"),
("A", "Z"), ("B", "Z"), ("C", "Z"), ("D", "Z"), ("E", "Z"), ("F", "Z"),
("A", "N"), ("B", "N"), ("C", "N"), ("D", "N"), ("E", "N"), ("F", "N"), ]
gf = GraphForm(pairs)
draw = gf.draw
setup = gf.setup
run() | PypiClean |
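# Key bindings implemented in GraphForm.draw (summary added for clarity):
# s = save screenshot, r = toggle repulsion, f = toggle faces,
# l = toggle labels, t = toggle tetrahedron network, q = quit.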
/NlvWxPython-4.2.0-cp37-cp37m-win_amd64.whl/wx/lib/nvdlg.py | import wx
MARGIN = 4
class SimpleNameValueDialog(wx.Dialog):
def __init__(self, parent, id=-1, title="", pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.DEFAULT_DIALOG_STYLE,
fields=[], initialValues=None,
captionTitle="", captionDescr=""):
wx.Dialog.__init__(self, parent, id, title, pos, size, style)
self._fields = dict()
self.Sizer = wx.BoxSizer(wx.VERTICAL)
self._contentSizer = wx.FlexGridSizer(cols=2, hgap=MARGIN, vgap=MARGIN)
self._contentSizer.AddGrowableCol(1)
if captionTitle:
titleTxt = wx.StaticText(self, -1, captionTitle)
titleTxt.SetFont(wx.FFont(18, wx.FONTFAMILY_SWISS, wx.FONTFLAG_BOLD))
self.Sizer.Add(titleTxt, 0, wx.ALL, MARGIN)
if captionDescr:
descTxt = wx.StaticText(self, -1, captionDescr)
self.Sizer.Add(descTxt, 0, wx.LEFT|wx.RIGHT|wx.BOTTOM, MARGIN)
if captionTitle or captionDescr:
self.Sizer.Add(wx.StaticLine(self), 0, wx.EXPAND|wx.TOP|wx.BOTTOM, MARGIN)
self.createFields(fields)
self.loadValues(initialValues)
self.Sizer.Add(self._contentSizer, 1, wx.EXPAND|wx.ALL, MARGIN)
self.Sizer.Add(wx.StaticLine(self), 0, wx.EXPAND|wx.TOP|wx.BOTTOM, MARGIN)
# TODO: add ability to specify which stock or custom buttons are used
btnSizer = wx.StdDialogButtonSizer()
btnSizer.AddButton(wx.Button(self, wx.ID_OK))
btnSizer.AddButton(wx.Button(self, wx.ID_CANCEL))
btnSizer.Realize()
self.Sizer.Add(btnSizer, 0, wx.EXPAND|wx.ALL, MARGIN)
self.FindWindow(wx.ID_OK).SetDefault()
self.Fit()
def createFields(self, fields):
self.destroyFields()
for name, label, args in fields:
kwargs = dict(validator=_TransferValidator(name))
if args:
kwargs.update(args)
stxt = wx.StaticText(self, -1, label)
txt = wx.TextCtrl(self, **kwargs)
self._contentSizer.Add(stxt, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
self._contentSizer.Add(txt, 0, wx.EXPAND)
self.__dict__[name] = ""
self._fields[name] = (stxt, txt)
def destroyFields(self):
for name, widgets in self._fields.items():
for w in widgets:
w.Destroy()
del self.__dict__[name]
def loadValues(self, values):
self.clearValues()
if values:
for name, value in values.items():
if name in self._fields:
setattr(self, name, value)
def clearValues(self):
for name in self._fields:
setattr(self, name, "")
class _TransferValidator(wx.PyValidator):
"""
This validator is used to transfer values to/from the widgets and
attributes of the dialog.
"""
def __init__(self, name):
wx.PyValidator.__init__(self)
self.name = name
def Clone(self):
return _TransferValidator(self.name)
def Validate(self, win):
return True
def TransferFromWindow(self):
dlg = self.Window.Parent
value = dlg._fields[self.name][1].GetValue()
setattr(dlg, self.name, value)
return True
def TransferToWindow(self):
dlg = self.Window.Parent
value = getattr(dlg, self.name)
dlg._fields[self.name][1].SetValue(value)
return True
if __name__ == '__main__':
from wx.lib.mixins.inspection import InspectableApp
app = InspectableApp(redirect=False)
#app = wx.App(redirect=False)
fields = [ ('username', 'Login ID:', None),
('passwd', 'Password:', dict(size=(150,-1), style=wx.TE_PASSWORD)),
]
dlg = SimpleNameValueDialog(None, title="This is the title",
fields=fields,
initialValues=dict(username='rdunn'),
captionTitle="Login",
captionDescr="Enter your testing credentials")
if dlg.ShowModal() == wx.ID_OK:
print(dlg.username, dlg.passwd)
dlg.Destroy()
app.MainLoop() | PypiClean |
/ADPYNE-207-0.1.4.tar.gz/ADPYNE-207-0.1.4/ADPYNE/elemFunctions.py | import warnings
import numpy as np
from ADPYNE.AutoDiff import AutoDiff
from ADPYNE.Dual import Dual
#-------------------BASE TRIG FUNCTIONS-------------------#
# Sine function
def sin(x):
''' Compute the sine of an AutoDiff object or a Dual number and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(0.5, 2.0, 1.0)
>>> myAutoDiff = sin(x)
>>> myAutoDiff.val
0.479425538604
>>> myAutoDiff.der
1.75516512378
>>> myAutoDiff.jacobian
0.87758256189
'''
try:
new_val = np.sin(x.val)
new_der = np.cos(x.val) * x.der
new_jacobian = np.cos(x.val) * x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.sin(x.Real), x.Dual * np.cos(x.Real))
except AttributeError:
try:
return Dual(sin(x.Real), x.Dual * cos(x.Real))
except AttributeError:
return np.sin(x)
# Cosine function
def cos(x):
''' Compute the cosine of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(0.5, 2.0, 1.0)
>>> myAutoDiff = cos(x)
>>> myAutoDiff.val
0.87758256189
>>> myAutoDiff.der
-0.958851077208
>>> myAutoDiff.jacobian
-0.479425538604
'''
try:
new_val = np.cos(x.val)
new_der = -1.0 * np.sin(x.val) * x.der
new_jacobian = -1.0 * np.sin(x.val) * x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.cos(x.Real), x.Dual * -np.sin(x.Real))
except AttributeError:
try:
return Dual(cos(x.Real), x.Dual * -sin(x.Real))
except AttributeError:
return np.cos(x)
# Tangent function
def tan(x):
''' Compute the tangent of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative. Values at the
asymptotes (odd multiples of pi/2) are returned as NaN with a RuntimeWarning.
'''
try:
# Value and derivative undefined when divisible by pi/2 but not pi
# To make sure the asymptotes are undefined:
if x.val%(np.pi/2)==0 and x.val%np.pi!=0:
new_val = np.nan
new_der = np.nan
new_jacobian = np.nan
warnings.warn('Undefined at value', RuntimeWarning)
else:
new_val = np.tan(x.val)
new_der = x.der / (np.cos(x.val)**2.0)
new_jacobian = x.jacobian / (np.cos(x.val)**2.0)
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
if x.Real%(np.pi/2)==0 and x.Real%np.pi!=0:
ans = Dual(np.nan,np.nan)
warnings.warn('Undefined at value', RuntimeWarning)
return ans
else:
return Dual(np.tan(x.Real), x.Dual / (np.cos(x.Real))**2)
except AttributeError:
try:
if x.Real%(np.pi/2)==0 and x.Real%np.pi!=0:
ans = Dual(np.nan,np.nan)
warnings.warn('Undefined at value', RuntimeWarning)
return ans
else:
# return Dual(tan(x.Real), x.Dual / (cos(x.Real))**2)
return sin(x)/cos(x)
except AttributeError:
if x%(np.pi/2)==0 and x%np.pi!=0:
warnings.warn('Undefined at value', RuntimeWarning)
return np.nan
else:
return np.tan(x)
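# Behaviour at the asymptotes (illustrative example, not from the original
# module): values at odd multiples of pi/2 are flagged instead of returning
# a huge finite float.
#
#     tan(np.pi / 2)  # emits RuntimeWarning('Undefined at value'), returns nan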
#-------------------INVERSE TRIG FUNCTIONS-------------------#
# arc sin
def arcsin(X):
''' Compute the arcsin of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object or constant
RETURNS
=======
A new AutoDiff object or scalar with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2)
>>> arcsinAutoDiff = arcsin(X)
>>> arcsinAutoDiff.val
0.5235987755982988
>>> arcsinAutoDiff.der
2.3094010767585034
>>> arcsinAutoDiff.jacobian
1.1547005383792517
'''
try:
# Is another ADT
new_val = np.arcsin(X.val)
new_der = (1/np.sqrt(1-X.val**2))*X.der
new_jacobian = (1/np.sqrt(1-X.val**2))*X.jacobian
return AutoDiff(new_val, new_der, X.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.arcsin(X.Real), X.Dual/np.sqrt(1-X.Real**2))
except AttributeError:
try:
# return Dual(arcsin(X.Real), X.Dual/sqrt(1-X.Real**2))
return Dual(arcsin(X.Real), (X.Dual*(1-X.Real**2)**-0.5))
except AttributeError:
# Constant
return_val = np.arcsin(X)
return return_val
# arc cosine
def arccos(X):
''' Compute the arccos of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object or constant
RETURNS
=======
A new AutoDiff object or scalar with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2)
>>> arccosAutoDiff = arccos(X)
>>> arccosAutoDiff.val
1.0471975511965976
>>> arccosAutoDiff.der
-2.3094010767585034
>>> arccosAutoDiff.jacobian
-1.1547005383792517
'''
try:
# Is another ADT
new_val = np.arccos(X.val) #if (-1 <= X.val and X.val <= 1) else np.nan
new_der = (-1/np.sqrt(1-X.val**2))*X.der #if (-1 < X.val and X.val < 1) else np.nan
new_jacobian = (-1/np.sqrt(1-X.val**2))*X.jacobian #if (-1 < X.val and X.val < 1) else np.nan
return AutoDiff(new_val, new_der, X.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.arccos(X.Real), -X.Dual/np.sqrt(1-X.Real**2))
except AttributeError:
try:
return Dual(arccos(X.Real), -X.Dual/sqrt(1-X.Real**2))
except AttributeError:
# Constant
return_val = np.arccos(X)
return return_val
# arc tangent
def arctan(X):
''' Compute the arctan of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object or constant
RETURNS
=======
A new AutoDiff object or scalar with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(3, 2)
>>> arctanAutoDiff = arctan(X)
>>> arctanAutoDiff.val
1.2490457723982544
>>> arctanAutoDiff.der
0.2
>>> arctanAutoDiff.jacobian
0.1
'''
try:
# Is another ADT
new_val = np.arctan(X.val)
new_der = (1/(1+X.val**2))*X.der
new_jacobian = (1/(1+X.val**2))*X.jacobian
return AutoDiff(new_val, new_der, X.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.arctan(X.Real), X.Dual/(1+X.Real**2))
except AttributeError:
try:
return Dual(arctan(X.Real), X.Dual/(1+X.Real**2))
except AttributeError:
# Constant
return_val = np.arctan(X)
return return_val
#-------------------HYPERBOLIC TRIG FUNCTIONS-------------------#
# hyperbolic sin
def sinh(X):
''' Compute the sinh of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2, 1)
>>> sinhAutoDiff = sinh(X)
>>> sinhAutoDiff.val
0.5210953054937474
>>> sinhAutoDiff.der
2.2552519304127614
>>> sinhAutoDiff.jacobian
1.1276259652063807
'''
try:
val = np.sinh(X.val)
der = np.cosh(X.val)*X.der
jacobian = np.cosh(X.val)*X.jacobian
return AutoDiff(val, der, X.n, 0, jacobian)
except AttributeError:
try:
return Dual(np.sinh(X.Real), X.Dual*np.cosh(X.Real))
except AttributeError:
try:
return Dual(sinh(X.Real), X.Dual*cosh(X.Real))
except AttributeError:
# Constant
return_val = np.sinh(X)
return return_val
# hyperbolic cos
def cosh(X):
''' Compute the cosh of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2, 1)
>>> coshAutoDiff = cosh(X)
>>> coshAutoDiff.val
1.1276259652063807
>>> coshAutoDiff.der
1.0421906109874948
>>> coshAutoDiff.jacobian
0.5210953054937474
'''
try:
val = np.cosh(X.val)
der = np.sinh(X.val)*X.der
jacobian = np.sinh(X.val)*X.jacobian
return AutoDiff(val, der, X.n, 0, jacobian)
except AttributeError:
try:
return Dual(np.cosh(X.Real), X.Dual*np.sinh(X.Real))
except AttributeError:
try:
return Dual(cosh(X.Real), X.Dual*sinh(X.Real))
except AttributeError:
# Constant
return_val = np.cosh(X)
return return_val
# hyperbolic tan
def tanh(X):
''' Compute the tanh of an AutoDiff object and its derivative.
INPUTS
======
X: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2, 1)
>>> tanhAutoDiff = tanh(X)
>>> tanhAutoDiff.val
0.46211715726000974
>>> tanhAutoDiff.der
1.572895465931855
>>>tanhAutoDiff.jacobian
0.7864477329659275
'''
try:
val = np.tanh(X.val)
der = 1/(np.cosh(X.val)**2)*X.der
jacobian = 1/(np.cosh(X.val)**2)*X.jacobian
return AutoDiff(val, der, X.n, 0, jacobian)
except AttributeError:
try:
return Dual(np.tanh(X.Real), X.Dual/(np.cosh(X.Real)**2))
except AttributeError:
try:
X.Real  # attribute probe: raises AttributeError unless X is Dual-like
return sinh(X)/cosh(X)
except AttributeError:
# Constant
return_val = np.tanh(X)
return return_val
#-------------------ARC HYPERBOLIC TRIG FUNCTIONS-------------------#
# hyperbolic arcsin
def arcsinh(x):
''' Compute the hyperbolic arc sine of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(0.5, 2, 1)
>>> myAutoDiff = arcsinh(x)
>>> myAutoDiff.val
2.3124383412727525
>>> myAutoDiff.der
0.39223227027
>>> myAutoDiff.jacobian
0.19611613513818404
'''
try:
new_val = np.arcsinh(x.val)
new_der = ((1)/np.sqrt(x.val**2 + 1))*x.der
new_jacobian = ((1)/np.sqrt(x.val**2 + 1))*x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.arcsinh(x.Real), x.Dual/np.sqrt((x.Real**2)+1))
except AttributeError:
try:
return Dual(arcsinh(x.Real), (x.Dual*(1+x.Real**2)**-0.5))
except AttributeError:
# Constant
return_val = np.arcsinh(x)
return return_val
# hyperbolic arc cosine
def arccosh(x):
''' Compute the hyperbolic arc cosine of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(1.1, 2)
>>> myAutoDiff = arccosh(x)
>>> myAutoDiff.val
0.4435682543851154
>>> myAutoDiff.der
(2/np.sqrt(1.1**2 - 1))
>>> myAutoDiff.jacobian
(1/np.sqrt(1.1**2 - 1))
'''
try:
new_val = np.arccosh(x.val)
# Derivative of arccosh is only defined when x > 1
new_der = ((1)/np.sqrt(x.val**2 - 1))*x.der # if x.val > 1 else None
new_jacobian = ((1)/np.sqrt(x.val**2 - 1))*x.jacobian # if x.val > 1 else None
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.arccosh(x.Real), x.Dual/np.sqrt((x.Real**2)-1))
except AttributeError:
try:
return Dual(arccosh(x.Real), (x.Dual*((x.Real**2)-1)**-0.5))
except AttributeError:
# Constant
return_val = np.arccosh(x)
return return_val
# hyperbolic arc tangent
def arctanh(x):
''' Compute the hyperbolic arc tangent of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(0.5, 2)
>>> myAutoDiff = arctanh(x)
>>> myAutoDiff.val
0.5493061443340548
>>> myAutoDiff.der
2/(1-(0.5)**2)
>>> myAutoDiff.jacobian
1/(1-(0.5)**2)
'''
try:
new_val = np.arctanh(x.val)
new_der = ((1)/(1-x.val**2))*x.der
new_jacobian = ((1)/(1-x.val**2))*x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
if(np.abs(x.Real)==1):
real = np.inf
dual = np.inf
warnings.warn('Undefined at value', RuntimeWarning)
else:
real = np.arctanh(x.Real)
dual = x.Dual/(1-x.Real**2)
return Dual(real, dual)
except AttributeError:
try:
return Dual(arctanh(x.Real), x.Dual/(1-x.Real**2))
except AttributeError:
# Constant
return_val = np.arctanh(x)
return return_val
#--------------------------EXPONENT FAMILY----------------------------#
# Exponential
def exp(x):
''' Compute the exponential of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(10, 2)
>>> myAutoDiff = exp(x)
>>> myAutoDiff.val
22026.465794806718
>>> myAutoDiff.der
2*22026.465794806718
>>> myAutoDiff.jacobian
22026.465794806718
'''
try:
new_val = np.exp(x.val)
new_der = np.exp(x.val) * x.der
new_jacobian = np.exp(x.val) * x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.exp(x.Real), x.Dual*np.exp(x.Real))
except AttributeError:
try:
return Dual(exp(x.Real), x.Dual*exp(x.Real))
except AttributeError:
# Constant
return_val = np.exp(x)
return return_val
# natural log
def log(x):
''' Compute the natural log of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(4, 2)
>>> myAutoDiff = log(x)
>>> myAutoDiff.val
1.3862943611198906
>>> myAutoDiff.der
0.5
>>> myAutoDiff.jacobian
0.25
'''
try:
new_val = np.log(x.val)
# Derivative not defined when x = 0
new_der = (1/x.val)*x.der # if x.val != 0 else None
new_jacobian = (1/x.val)*x.jacobian # if x.val != 0 else None
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
if(x.Real==0):
real = -np.inf
dual = np.inf
else:
real = np.log(x.Real)
dual = x.Dual/x.Real
return Dual(real, dual)
except AttributeError:
try:
return Dual(log(x.Real), x.Dual/x.Real)
except AttributeError:
# Constant
return_val = np.log(x)
return return_val
# log base 10
def log10(x):
''' Compute the natural log of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> X = AutoDiff(0.5, 2, 1)
>>> myAutoDiff = log(X)
>>> myAutoDiff.val
-0.3010299956639812
>>> myAutoDiff.der
1.737177927613007
>>>myAutoDiff.jacobian
0.8685889638065035
'''
try:
new_val = np.log10(x.val)
# Derivative not defined when x = 0
new_der = (1/(x.val*np.log(10)))*x.der
new_jacobian = (1/(x.val*np.log(10)))*x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
real = np.log10(x.Real)
dual = x.Dual/(x.Real*np.log(10))
return Dual(real, dual)
except AttributeError:
try:
return Dual(log(x.Real)/np.log(10), x.Dual/(x.Real*(np.log(10))))
except AttributeError:
# Constant
return_val = np.log10(x)
return return_val
# Square Root
def sqrt(x):
''' Compute the square root an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(np.array([[5]]).T, np.array([[1]]), 1, 1)
>>> myAutoDiff = sqrt(x)
>>> myAutoDiff.val
2.2360679775
>>> myAutoDiff.der
0.2236068
'''
try:
new_val = np.sqrt(x.val)
new_der = 0.5 * x.val ** (-0.5) * x.der
new_jacobian = 0.5 * x.val ** (-0.5) * x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
if x.Real < 0.0:
warnings.warn('Undefined at value', RuntimeWarning)
dual=np.nan
elif(x.Real==0):
warnings.warn('Undefined at value', RuntimeWarning)
dual = np.inf
else:
dual = 0.5 * x.Real ** (-0.5) * x.Dual
real = np.sqrt(x.Real)
return Dual(real, dual)
except AttributeError:
if x < 0.0:
warnings.warn('Undefined at value', RuntimeWarning)
return np.nan
else:
return np.sqrt(x)
# log base
def logbase(x,base):
''' Compute any log base of an AutoDiff object and its derivative.
INPUTS
======
x: an AutoDiff object
RETURNS
=======
A new AutoDiff object with calculated value and derivative.
EXAMPLES
========
>>> x = AutoDiff(2, 2)
>>> myAutoDiff = logbase(x,7)
>>> myAutoDiff.val
0.35620719
>>> myAutoDiff.der
0.51389834
>>> myAutoDiff.jacobian
0.25694917
'''
try:
new_val = np.log(x.val)/np.log(base)
# Derivative not defined when x = 0
new_der = (1/(x.val*np.log(base)))*x.der
new_jacobian = (1/(x.val*np.log(base)))*x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
return Dual(np.log(x.Real)/np.log(base), x.Dual/(x.Real*np.log(base)))
except AttributeError:
try:
return Dual(log(x.Real)/np.log(base), x.Dual/(x.Real*(np.log(base))))
except AttributeError:
# Constant
return_val = np.log(x)/np.log(base)
return return_val
def logistic(x):
''' Compute logistic function for AutoDiff or Dual object.
INPUTS
======
x: an AutoDiff object or Dual object
RETURNS
=======
A new AutoDiff or Dual object with calculated value and derivative.
'''
try:
f_l = (1/(1+np.exp(-x.val)))
new_val = f_l
new_der = (1 - f_l)*f_l*x.der
new_jacobian = (1 - f_l)*f_l*x.jacobian
return AutoDiff(new_val, new_der, x.n, 0, new_jacobian)
except AttributeError:
try:
f_l = (1/(1 + np.exp(-x.Real)))
return Dual(f_l, (1 - f_l)*f_l*x.Dual)
except AttributeError:
try:
return Dual(logistic(x.Real), (1 - logistic(x.Real))*logistic(x.Real)*x.Dual)
except AttributeError:
# Constant
return_val = (1/(1+np.exp(-x)))
return return_val | PypiClean |
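# Usage sketch (illustrative, not part of the original module): forward-mode
# differentiation with a Dual number, using the positional Dual(Real, Dual)
# constructor seen throughout this file.
#
#     x = Dual(0.5, 1.0)      # value 0.5, seed derivative 1.0
#     y = sin(x)
#     y.Real, y.Dual          # (sin(0.5), cos(0.5)) == (0.4794..., 0.8775...)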
/DjangoDjangoAppCenter-0.0.11-py3-none-any.whl/DjangoAppCenter/simpleui/static/admin/simpleui-x/elementui/locale/lang/lt.js | 'use strict';
exports.__esModule = true;
exports.default = {
el: {
colorpicker: {
confirm: 'OK',
clear: 'Valyti'
},
datepicker: {
now: 'Dabar',
today: 'Šiandien',
cancel: 'Atšaukti',
clear: 'Valyti',
confirm: 'OK',
selectDate: 'Pasirink datą',
selectTime: 'Pasirink laiką',
startDate: 'Data nuo',
startTime: 'Laikas nuo',
endDate: 'Data iki',
endTime: 'Laikas iki',
prevYear: 'Metai atgal',
nextYear: 'Metai į priekį',
prevMonth: 'Mėn. atgal',
nextMonth: 'Mėn. į priekį',
year: '',
month1: 'Sausis',
month2: 'Vasaris',
month3: 'Kovas',
month4: 'Balandis',
month5: 'Gegužė',
month6: 'Birželis',
month7: 'Liepa',
month8: 'Rugpjūtis',
month9: 'Rugsėjis',
month10: 'Spalis',
month11: 'Lapkritis',
month12: 'Gruodis',
// week: 'savaitė',
weeks: {
sun: 'S.',
mon: 'Pr.',
tue: 'A.',
wed: 'T.',
thu: 'K.',
fri: 'Pn.',
sat: 'Š.'
},
months: {
jan: 'Sau',
feb: 'Vas',
mar: 'Kov',
apr: 'Bal',
may: 'Geg',
jun: 'Bir',
jul: 'Lie',
aug: 'Rugp',
sep: 'Rugs',
oct: 'Spa',
nov: 'Lap',
dec: 'Gruo'
}
},
select: {
loading: 'Kraunasi',
noMatch: 'Duomenų nerasta',
noData: 'Nėra duomenų',
placeholder: 'Pasirink'
},
cascader: {
noMatch: 'Duomenų nerasta',
loading: 'Kraunasi',
placeholder: 'Pasirink',
noData: 'Nėra duomenų'
},
pagination: {
goto: 'Eiti į',
pagesize: '/p',
total: 'Viso {total}',
pageClassifier: ''
},
messagebox: {
title: 'Žinutė',
confirm: 'OK',
cancel: 'Atšaukti',
error: 'Klaida įvestuose duomenyse'
},
upload: {
deleteTip: 'spauskite "Trinti" norėdami pašalinti',
delete: 'Trinti',
preview: 'Peržiūrėti',
continue: 'Toliau'
},
table: {
emptyText: 'Duomenų nerasta',
confirmFilter: 'Patvirtinti',
resetFilter: 'Atstatyti',
clearFilter: 'Išvalyti',
sumText: 'Suma'
},
tree: {
emptyText: 'Nėra duomenų'
},
transfer: {
noMatch: 'Duomenų nerasta',
noData: 'Nėra duomenų',
titles: ['Sąrašas 1', 'Sąrašas 2'],
filterPlaceholder: 'Įvesk raktažodį',
noCheckedFormat: 'Viso: {total}',
hasCheckedFormat: 'Pažymėta {checked} iš {total}'
},
image: {
error: 'FAILED' // to be translated
},
pageHeader: {
title: 'Back' // to be translated
}
}
}; | PypiClean |
/AuthKit-0.4.5.tar.gz/AuthKit-0.4.5/authkit/authenticate/multi.py | from paste.auth import multi
import logging
log = logging.getLogger('authkit.authenticate.multi')
class MultiHandler(multi.MultiHandler):
def __init__(self, application):
multi.MultiHandler.__init__(self, application)
self.checker = []
def add_checker(self, name, checker):
self.checker.append((checker,self.binding[name]))
def __call__(self, environ, start_response):
status_ = []
headers_ = []
exc_info_ = []
result_ = []
def app(environ, start_response):
def find(status, headers, exc_info=None):
status_.append(status)
headers_.append(headers)
exc_info_.append(exc_info)
log.debug("Status: %r, Headers: %r", status, headers)
# XXX Needs to return the writable
if not status_:
raise Exception('WSGI start_response was not called before a result'
' was returned')
result = check()
result_.append(result)
if result is None:
log.debug("Multi: No binding was found for the check")
# XXX Shouldn't this be returning the writable to the application?
writable = start_response(
status_[-1],
headers_ and headers_[-1] or [],
exc_info_[-1]
)
return writable
else:
# This application response isn't actually going to be used because
# another application was found to handle the response instead
# so it is this other application's call to start_response()
# which should actually return the writable.
class NotWritableShouldntBeUsed: pass
return NotWritableShouldntBeUsed()
return self.default(environ, find)
def logging_start_response(status, headers, exc_info=None):
log.debug("Matched binding returns status: %r, headers: %r, "
"exc_info: %r", status, headers, exc_info)
return start_response(status, headers, exc_info)
def check():
for (checker, binding) in self.predicate:
if checker(environ):
log.debug(
"MultMiddleware self.predicate check() returning %r",
binding)
environ['authkit.multi'] = True
return binding(environ, logging_start_response)
for (checker, binding) in self.checker:
if not len(status_):
raise Exception('No status was returned by the application')
if not len(headers_):
raise Exception('No headers were returned by the '
'application')
if checker(environ, status_[-1], headers_ and headers_[-1] or []):
log.debug(
"MultiMiddleware self.checker check() returning %r",
binding
)
environ['authkit.multi'] = True
environ['pylons.error_call'] = 'authkit'
environ['pylons.status_code_redirect'] = 'authkit'
return binding(environ, logging_start_response)
return None
app_iter = app(environ, start_response)
if result_ and result_[-1]:
app_iter = result_[-1]
# if not result_:
# raise Exception('Invalid WSGI response (%r), did the application return an iterable?'%result_)
# if result_[-1] is None:
# # The check failed and the initial app should be used.
# return app_iter
# else:
# # Close the unused app which we don't want
# if hasattr(app_iter, 'close'):
# app_iter.close()
# return result_[-1]
# Actually this could cause problems too.
# for data in app_iter:
# yield data
# if hasattr(app_iter, 'close'):
# app_iter.close()
# Instead, just return the result
return app_iter
#return result_[-1]
def status_checker(environ, status, headers):
"""
Used by AuthKit to intercept statuses specified in the config file
option ``authkit.intercept``.
"""
log.debug(
"Status checker recieved status %r, headers %r, intecept %r",
status,
headers,
environ['authkit.intercept']
)
if str(status[:3]) in environ['authkit.intercept']:
log.debug("Status checker returns True")
return True
log.debug("Status checker returns False")
return False
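# Illustrative example (not part of the original module): with the intercept
# list below, a 401 from the downstream app is intercepted, while a 200 is
# passed through untouched.
#
#     environ = {'authkit.intercept': ['401']}
#     status_checker(environ, '401 Unauthorized', [])  # -> True
#     status_checker(environ, '200 OK', [])            # -> False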
class AuthSwitcher:
def __init__(self):
pass
def __call__(self, environ, status, headers):
if status_checker(environ, status, headers):
return self.switch(environ, status, headers)
return False
def switch(self, environ, status, headers):
return False | PypiClean |
/DjAIoT-23.6.21.1.tar.gz/DjAIoT-23.6.21.1/src/djaiot/device_data/forms/forms.py | from django.contrib.admin import site
from django.contrib.admin.widgets import RelatedFieldWidgetWrapper
from django.forms import ModelChoiceField, ModelMultipleChoiceField
from django_admin_hstore_widget.widgets import HStoreFormWidget
from django_json_widget.widgets import JSONEditorWidget
from dal.autocomplete import ModelSelect2, ModelSelect2Multiple
from dal.forms import FutureModelForm
from .autocompletes import \
PhysicalDataTypeAutoComplete, \
MeasurementUnitAutoComplete, \
MachineClassAutoComplete, \
MachineComponentAutoComplete, \
MachineDataStreamAutoComplete, \
MachineFamilyAutoComplete, \
MachineSKUAutoComplete, \
LocationAutoComplete, \
MachineFamilyComponentAutoComplete
from .models import \
EnvironmentVariable, \
PhysicalDataType, \
MeasurementUnit, \
MachineClass, \
MachineComponent, \
MachineDataStream, \
MachineFamily, \
MachineSKU, \
MachineFamilyComponent, \
Location, \
Machine, \
MachineFamilyData, \
MachineFamilyDataStreamsCheck, \
MachineFamilyDataStreamProfile, \
MachineFamilyDataStreamPairCorr, \
MachineFamilyDataStreamAgg, \
MachineData, \
MachineDataStreamAgg
from .queries import \
PHYSICAL_DATA_TYPE_STR_QUERY_SET, PHYSICAL_DATA_TYPE_STR_UNORDERED_QUERY_SET, \
MEASUREMENT_UNIT_STR_UNORDERED_QUERY_SET, \
MACHINE_CLASS_STR_UNORDERED_QUERY_SET, \
MACHINE_COMPONENT_STR_SUBSET_ORDERED_QUERY_SET, MACHINE_COMPONENT_STR_UNORDERED_QUERY_SET, \
MACHINE_DATA_STREAM_STR_SUBSET_ORDERED_QUERY_SET, \
MACHINE_FAMILY_STR_SUBSET_ORDERED_QUERY_SET, MACHINE_FAMILY_STR_UNORDERED_QUERY_SET, \
MACHINE_SKU_STR_SUBSET_ORDERED_QUERY_SET, MACHINE_SKU_STR_UNORDERED_QUERY_SET, \
LOCATION_STR_UNORDERED_QUERY_SET, \
MACHINE_FAMILY_COMPONENT_STR_SUBSET_ORDERED_QUERY_SET
MACHINE_CLASS_MODEL_CHOICE_FIELD = \
ModelChoiceField(
queryset=MACHINE_CLASS_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=MachineClassAutoComplete.name),
# RelatedFieldWidgetWrapper: very hard to use
# RelatedFieldWidgetWrapper(
# widget=ModelSelect2(
# url=MachineClassAutoComplete.name),
# rel=???, # MUST BE SPECIFIC TO 1 RELATION
# admin_site=site,
# can_add_related=True,
# can_change_related=True,
# can_delete_related=True,
# can_view_related=True)
required=True)
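# Note (added for clarity): sharing these module-level field instances across
# several forms below is safe because Django deep-copies a form's declared
# fields into `self.fields` for every form instance, so per-form state never
# leaks between forms.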
MACHINE_COMPONENTS_MULTIPLE_CHOICE_FIELD = \
ModelMultipleChoiceField(
queryset=MACHINE_COMPONENT_STR_SUBSET_ORDERED_QUERY_SET,
widget=ModelSelect2Multiple(
url=MachineComponentAutoComplete.name,
attrs={# https://django-autocomplete-light.readthedocs.io/en/master/tutorial.html#passing-options-to-select2
'data-minimum-input-length': MachineComponentAutoComplete.data_min_input_len}),
required=False)
MACHINE_DATA_STREAMS_MULTIPLE_CHOICE_FIELD = \
ModelMultipleChoiceField(
queryset=MACHINE_DATA_STREAM_STR_SUBSET_ORDERED_QUERY_SET,
widget=ModelSelect2Multiple(
url=MachineDataStreamAutoComplete.name,
attrs={'data-minimum-input-length': MachineDataStreamAutoComplete.data_min_input_len}),
required=False)
MACHINE_FAMILIES_MODEL_MULTIPLE_CHOICE_FIELD = \
ModelMultipleChoiceField(
queryset=MACHINE_FAMILY_STR_SUBSET_ORDERED_QUERY_SET,
widget=ModelSelect2Multiple(
url=MachineFamilyAutoComplete.name,
attrs={'data-minimum-input-length': MachineFamilyAutoComplete.data_min_input_len}),
required=False)
MACHINE_SKUS_MODEL_MULTIPLE_CHOICE_FIELD = \
ModelMultipleChoiceField(
queryset=MACHINE_SKU_STR_SUBSET_ORDERED_QUERY_SET,
widget=ModelSelect2Multiple(
url=MachineSKUAutoComplete.name,
attrs={'data-minimum-input-length': MachineSKUAutoComplete.data_min_input_len}),
required=False)
class EnvironmentVariableForm(FutureModelForm):
class Meta:
model = EnvironmentVariable
fields = \
'key', \
'value'
widgets = \
dict(value=JSONEditorWidget)
class PhysicalDataTypeForm(FutureModelForm):
same = \
ModelMultipleChoiceField(
queryset=PHYSICAL_DATA_TYPE_STR_QUERY_SET,
widget=ModelSelect2Multiple(
url=PhysicalDataTypeAutoComplete.name),
required=False)
class Meta:
model = PhysicalDataType
fields = \
'unique_name', \
'min', \
'max', \
'same'
class MeasurementUnitForm(FutureModelForm):
class Meta:
model = MeasurementUnit
fields = \
'unique_name', \
'descriptions'
widgets = \
dict(descriptions=JSONEditorWidget)
class MachineClassForm(FutureModelForm):
class Meta:
model = MachineClass
fields = \
'unique_name', \
'descriptions'
widgets = \
dict(descriptions=JSONEditorWidget)
class MachineComponentForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
# BELOW FIELDS ARE READ-ONLY IN ADMIN
# machine_families = MACHINE_FAMILIES_MODEL_MULTIPLE_CHOICE_FIELD
# directly_interacting_components = MACHINE_COMPONENTS_MULTIPLE_CHOICE_FIELD
# sub_components = MACHINE_COMPONENTS_MULTIPLE_CHOICE_FIELD
# machine_data_streams = MACHINE_DATA_STREAMS_MULTIPLE_CHOICE_FIELD
machine_skus = MACHINE_SKUS_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = MachineComponent
fields = \
'machine_class', \
'name', \
'descriptions', \
'directly_interacting_components', \
'sub_components', \
'machine_data_streams', \
'machine_skus'
widgets = \
dict(descriptions=JSONEditorWidget)
class MachineDataStreamForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
# BELOW FIELDS ARE READ-ONLY IN ADMIN
# machine_components = MACHINE_COMPONENTS_MULTIPLE_CHOICE_FIELD
physical_data_type = \
ModelChoiceField(
queryset=PHYSICAL_DATA_TYPE_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=PhysicalDataTypeAutoComplete.name),
required=False)
measurement_unit = \
ModelChoiceField(
queryset=MEASUREMENT_UNIT_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=MeasurementUnitAutoComplete.name),
required=False)
machine_skus = MACHINE_SKUS_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = MachineDataStream
fields = \
'machine_class', \
'name', \
'descriptions', \
'machine_data_stream_type', \
'logical_data_type', \
'physical_data_type', \
'measurement_unit', \
'neg_invalid', \
'pos_invalid', \
'default', \
'range', \
'machine_components', \
'machine_skus'
widgets = \
dict(descriptions=JSONEditorWidget)
class MachineFamilyForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
filtered_from_machine_family = \
ModelChoiceField(
queryset=MACHINE_FAMILY_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=MachineFamilyAutoComplete.name),
required=False)
machine_skus = MACHINE_SKUS_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = MachineFamily
fields = \
'machine_class', \
'unique_name', \
'descriptions', \
'filtered_from_machine_family', \
'machine_data_filter_condition', \
'machine_skus', \
'machine_data_streams'
widgets = \
dict(descriptions=JSONEditorWidget)
class MachineSKUForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
machine_components = MACHINE_COMPONENTS_MULTIPLE_CHOICE_FIELD
machine_data_streams = MACHINE_DATA_STREAMS_MULTIPLE_CHOICE_FIELD
machine_families = MACHINE_FAMILIES_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = MachineSKU
fields = \
'machine_class', \
'unique_name', \
'descriptions', \
'machine_components', \
'machine_data_streams', \
'machine_families'
widgets = \
dict(descriptions=JSONEditorWidget)
class LocationForm(FutureModelForm):
class Meta:
model = Location
fields = \
'unique_name', \
'descriptions', \
'info'
widgets = \
dict(descriptions=JSONEditorWidget,
info=JSONEditorWidget)
MACHINE_SKU_MODEL_CHOICE_FIELD = \
ModelChoiceField(
queryset=MACHINE_SKU_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=MachineSKUAutoComplete.name,
attrs={'data-minimum-input-length': MachineSKUAutoComplete.data_min_input_len}),
required=False)
LOCATION_MODEL_CHOICE_FIELD = \
ModelChoiceField(
queryset=LOCATION_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=LocationAutoComplete.name,
attrs={'data-minimum-input-length': LocationAutoComplete.data_min_input_len}),
required=False)
class MachineForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
machine_sku = MACHINE_SKU_MODEL_CHOICE_FIELD
location = LOCATION_MODEL_CHOICE_FIELD
machine_families = MACHINE_FAMILIES_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = Machine
fields = \
'machine_class', \
'machine_sku', \
'unique_id', \
'info', \
'machine_families', \
'location'
widgets = \
dict(info=JSONEditorWidget)
class LocationMachineInLineForm(FutureModelForm):
machine_class = MACHINE_CLASS_MODEL_CHOICE_FIELD
machine_sku = MACHINE_SKU_MODEL_CHOICE_FIELD
machine_families = MACHINE_FAMILIES_MODEL_MULTIPLE_CHOICE_FIELD
class Meta:
model = Machine
fields = \
'machine_class', \
'machine_sku', \
'unique_id', \
'info', \
'machine_families'
widgets = \
dict(info=JSONEditorWidget)
MACHINE_FAMILY_COMPONENTS_MULTIPLE_CHOICE_FIELD = \
ModelMultipleChoiceField(
queryset=MACHINE_FAMILY_COMPONENT_STR_SUBSET_ORDERED_QUERY_SET,
widget=ModelSelect2Multiple(
url=MachineFamilyComponentAutoComplete.name,
attrs={'data-minimum-input-length': MachineFamilyComponentAutoComplete.data_min_input_len}),
required=False)
class MachineFamilyComponentInLineForm(FutureModelForm):
machine_component = \
ModelChoiceField(
queryset=MACHINE_COMPONENT_STR_UNORDERED_QUERY_SET,
widget=ModelSelect2(
url=MachineComponentAutoComplete.name,
attrs={'data-minimum-input-length': MachineComponentAutoComplete.data_min_input_len}),
required=True)
directly_interacting_components = MACHINE_FAMILY_COMPONENTS_MULTIPLE_CHOICE_FIELD
sub_components = MACHINE_FAMILY_COMPONENTS_MULTIPLE_CHOICE_FIELD
machine_data_streams = MACHINE_DATA_STREAMS_MULTIPLE_CHOICE_FIELD
class Meta:
model = MachineFamilyComponent
fields = \
'machine_component', \
'directly_interacting_components', \
'sub_components', \
'machine_data_streams'
class MachineFamilyDataForm(FutureModelForm):
class Meta:
model = MachineFamilyData
fields = \
'machine_family', \
'date', \
'url', \
'n_cols', \
'n_rows', \
'schema'
widgets = \
dict(schema=HStoreFormWidget)
class MachineFamilyDataStreamsCheckForm(FutureModelForm):
class Meta:
model = MachineFamilyDataStreamsCheck
fields = \
'machine_family_data', \
'machine_data_stream_names_not_in_db', \
'machine_data_stream_names_not_on_disk'
class MachineFamilyDataStreamProfileForm(FutureModelForm):
class Meta:
model = MachineFamilyDataStreamProfile
fields = \
'machine_family_data', \
'data_to_date', \
'machine_data_stream', \
'n_samples', \
'valid_fraction', \
'n_distinct_values', \
'distinct_value_proportions', \
'strictly_outlier_robust_proportion', \
'min', \
'robust_min', \
'quartile', \
'median', \
'_3rd_quartile', \
'robust_max', \
'max'
class MachineFamilyDataStreamPairCorrForm(FutureModelForm):
class Meta:
model = MachineFamilyDataStreamPairCorr
fields = \
'machine_family_data', \
'data_to_date', \
'machine_data_stream', \
'other_machine_data_stream', \
'corr', \
'n_samples', \
'machine_data_stream_range', \
'other_machine_data_stream_range'
class MachineFamilyDataStreamAggForm(FutureModelForm):
class Meta:
model = MachineFamilyDataStreamAgg
fields = \
'machine_family_data', \
'machine_data_stream', \
'count_incl_invalid', \
'counts_incl_invalid', \
'distinct_value_counts_incl_invalid', \
'distinct_value_proportions_incl_invalid', \
'count_excl_invalid', \
'counts_excl_invalid', \
'distinct_value_proportions_excl_invalid', \
'weighted_average_min', \
'weighted_average_robust_min', \
'weighted_average_quartile', \
'weighted_average_median', \
'weighted_average_mean', \
'weighted_average_3rd_quartile', \
'weighted_average_robust_max', \
'weighted_average_max'
class MachineDataForm(FutureModelForm):
class Meta:
model = MachineData
fields = \
'machine', \
'date', \
'url', \
'n_cols', \
'n_rows', \
'schema'
widgets = \
dict(schema=HStoreFormWidget)
class MachineDataStreamAggForm(FutureModelForm):
class Meta:
model = MachineDataStreamAgg
fields = \
'machine', \
'machine_family_data_stream_agg', \
'count_incl_invalid', \
'distinct_value_counts_incl_invalid', \
'distinct_value_proportions_incl_invalid', \
'count_excl_invalid', \
'distinct_value_proportions_excl_invalid', \
'min', \
'robust_min', \
'quartile', \
'median', \
'mean', \
'_3rd_quartile', \
'robust_max', \
'max' | PypiClean |
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/reading_import_profile_response_py3.py |
from msrest.serialization import Model
class ReadingImportProfileResponse(Model):
"""ReadingImportProfileResponse.
:param profile_id: The profile Identifier
:type profile_id: int
:param profile_code: The profile code
:type profile_code: str
:param created_by:
:type created_by: ~energycap.sdk.models.UserChild
:param meter_import_id_column_number: The number of the column that holds
the Meter import identifier
:type meter_import_id_column_number: int
:param channel_import_id_column_number: The number of the column that
holds the Channel import identifier
:type channel_import_id_column_number: int
:param number_of_columns: The total number of columns in the import sheet
:type number_of_columns: int
:param channel_interval_in_seconds: The interval of the readings
:type channel_interval_in_seconds: int
:param delimiter: The string that represents how the file contents are
delimited. Valid options are "\\t" for tab, " " for space and "," for
comma.
:type delimiter: str
:param number_of_header_rows: Number of header rows before the data begins
:type number_of_header_rows: int
:param timestamp_column_number: The number of the column that holds the
timestamp
:type timestamp_column_number: int
:param timestamp_format: The format for the timestamp of the readings. An
example is MM/dd/yyyy mm:hh:ss:zzz
:type timestamp_format: str
:param date_column_number: The number of the column that holds the date
:type date_column_number: int
:param time_column_number: The number of the column that holds the time
:type time_column_number: int
:param date_format: The format for the date of the readings. An example is
MM/dd/yyyy
:type date_format: str
:param time_format: The format for the time of the readings. An example is
mm:hh:ss:zzz
:type time_format: str
:param time_zone:
:type time_zone: ~energycap.sdk.models.TimeZoneChild
:param data_mapping: A list of columns from the import sheet with their
observation type and unit
:type data_mapping: list[~energycap.sdk.models.ReadingImportProfileColumn]
:param estimated:
:type estimated: ~energycap.sdk.models.Estimated
:param note_column_number: Column number that holds a note to be stored
with the reading
:type note_column_number: int
"""
_attribute_map = {
'profile_id': {'key': 'profileId', 'type': 'int'},
'profile_code': {'key': 'profileCode', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'UserChild'},
'meter_import_id_column_number': {'key': 'meterImportIdColumnNumber', 'type': 'int'},
'channel_import_id_column_number': {'key': 'channelImportIdColumnNumber', 'type': 'int'},
'number_of_columns': {'key': 'numberOfColumns', 'type': 'int'},
'channel_interval_in_seconds': {'key': 'channelIntervalInSeconds', 'type': 'int'},
'delimiter': {'key': 'delimiter', 'type': 'str'},
'number_of_header_rows': {'key': 'numberOfHeaderRows', 'type': 'int'},
'timestamp_column_number': {'key': 'timestampColumnNumber', 'type': 'int'},
'timestamp_format': {'key': 'timestampFormat', 'type': 'str'},
'date_column_number': {'key': 'dateColumnNumber', 'type': 'int'},
'time_column_number': {'key': 'timeColumnNumber', 'type': 'int'},
'date_format': {'key': 'dateFormat', 'type': 'str'},
'time_format': {'key': 'timeFormat', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'TimeZoneChild'},
'data_mapping': {'key': 'dataMapping', 'type': '[ReadingImportProfileColumn]'},
'estimated': {'key': 'estimated', 'type': 'Estimated'},
'note_column_number': {'key': 'noteColumnNumber', 'type': 'int'},
}
    def __init__(self, *, profile_id: int=None, profile_code: str=None, created_by=None,
                 meter_import_id_column_number: int=None, channel_import_id_column_number: int=None,
                 number_of_columns: int=None, channel_interval_in_seconds: int=None,
                 delimiter: str=None, number_of_header_rows: int=None,
                 timestamp_column_number: int=None, timestamp_format: str=None,
                 date_column_number: int=None, time_column_number: int=None,
                 date_format: str=None, time_format: str=None, time_zone=None,
                 data_mapping=None, estimated=None, note_column_number: int=None,
                 **kwargs) -> None:
super(ReadingImportProfileResponse, self).__init__(**kwargs)
self.profile_id = profile_id
self.profile_code = profile_code
self.created_by = created_by
self.meter_import_id_column_number = meter_import_id_column_number
self.channel_import_id_column_number = channel_import_id_column_number
self.number_of_columns = number_of_columns
self.channel_interval_in_seconds = channel_interval_in_seconds
self.delimiter = delimiter
self.number_of_header_rows = number_of_header_rows
self.timestamp_column_number = timestamp_column_number
self.timestamp_format = timestamp_format
self.date_column_number = date_column_number
self.time_column_number = time_column_number
self.date_format = date_format
self.time_format = time_format
self.time_zone = time_zone
self.data_mapping = data_mapping
self.estimated = estimated
self.note_column_number = note_column_number | PypiClean |
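# --- Hedged usage sketch (added for illustration; not part of the original
# file). Instances are normally produced by the SDK's deserializer from API
# JSON; constructing one by hand, with illustrative values, looks like this: ---
def _example_reading_import_profile():
    profile = ReadingImportProfileResponse(
        profile_id=42,
        profile_code='INTERVAL-CSV',
        delimiter=',',
        number_of_header_rows=1,
        timestamp_column_number=1,
        timestamp_format='MM/dd/yyyy mm:hh:ss:zzz',
        channel_interval_in_seconds=900,   # 15-minute readings
    )
    # ``as_dict()`` is inherited from msrest's Model base class
    return profile.as_dict()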
/NESTML-5.3.0-py3-none-any.whl/pynestml/transformers/synapse_post_neuron_transformer.py |
from __future__ import annotations
from typing import Any, Sequence, Mapping, Optional, Union
from pynestml.frontend.frontend_configuration import FrontendConfiguration
from pynestml.meta_model.ast_equations_block import ASTEquationsBlock
from pynestml.meta_model.ast_inline_expression import ASTInlineExpression
from pynestml.meta_model.ast_neuron_or_synapse import ASTNeuronOrSynapse
from pynestml.meta_model.ast_node import ASTNode
from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression
from pynestml.meta_model.ast_variable import ASTVariable
from pynestml.symbols.symbol import SymbolKind
from pynestml.symbols.variable_symbol import BlockType
from pynestml.transformers.transformer import Transformer
from pynestml.utils.ast_utils import ASTUtils
from pynestml.utils.logger import Logger
from pynestml.utils.logger import LoggingLevel
from pynestml.utils.string_utils import removesuffix
from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor
from pynestml.visitors.ast_higher_order_visitor import ASTHigherOrderVisitor
from pynestml.visitors.ast_visitor import ASTVisitor
class SynapsePostNeuronTransformer(Transformer):
r"""In a (pre neuron, synapse, post neuron) tuple, process (synapse, post_neuron) to move all variables that are only triggered by postsynaptic events to the postsynaptic neuron."""
_default_options = {
"neuron_synapse_pairs": []
}
def __init__(self, options: Optional[Mapping[str, Any]] = None):
super(Transformer, self).__init__(options)
def is_special_port(self, special_type: str, port_name: str, neuron_name: str, synapse_name: str) -> bool:
"""
        Check if the port with the given name is specified as a special port: "post" for ports connected
        to the postsynaptic neuron, "vt" for ports connected to a volume transmitter. Only makes sense
        for synapses.
"""
assert special_type in ["post", "vt"]
if not "neuron_synapse_pairs" in self._options.keys():
return False
for neuron_synapse_pair in self._options["neuron_synapse_pairs"]:
if not (neuron_name in [neuron_synapse_pair["neuron"], neuron_synapse_pair["neuron"] + FrontendConfiguration.suffix]
and synapse_name in [neuron_synapse_pair["synapse"], neuron_synapse_pair["synapse"] + FrontendConfiguration.suffix]):
continue
            if special_type + "_ports" not in neuron_synapse_pair.keys():
return False
post_ports = neuron_synapse_pair[special_type + "_ports"]
if not isinstance(post_ports, list):
# only one port name given, not a list
return port_name == post_ports
for post_port in post_ports:
if type(post_port) is not str and len(post_port) == 2: # (syn_port_name, neuron_port_name) tuple
post_port = post_port[0]
if type(post_port) is not str and len(post_port) == 1: # (syn_port_name)
return post_port[0] == port_name
if port_name == post_port:
return True
return False
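    # Illustrative (hypothetical) options shape consumed by is_special_port():
    #
    #     {"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp",
    #                                "synapse": "stdp",
    #                                "post_ports": ["post_spikes"],
    #                                "vt_ports": ["dopa_spikes"]}]}
    #
    # Each port entry may also be a (synapse_port_name, neuron_port_name)
    # tuple, which is unpacked above.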
def is_continuous_port(self, port_name: str, parent_node: ASTNeuronOrSynapse):
for input_block in parent_node.get_input_blocks():
for port in input_block.get_input_ports():
if port.is_continuous() and port_name == port.get_name():
return True
return False
def is_post_port(self, port_name: str, neuron_name: str, synapse_name: str) -> bool:
return self.is_special_port("post", port_name, neuron_name, synapse_name)
def is_vt_port(self, port_name: str, neuron_name: str, synapse_name: str) -> bool:
return self.is_special_port("vt", port_name, neuron_name, synapse_name)
def get_spiking_post_port_names(self, synapse, neuron_name: str, synapse_name: str):
post_port_names = []
for input_block in synapse.get_input_blocks():
for port in input_block.get_input_ports():
if self.is_post_port(port.name, neuron_name, synapse_name) and port.is_spike():
post_port_names.append(port.get_name())
return post_port_names
def get_post_port_names(self, synapse, neuron_name: str, synapse_name: str):
post_port_names = []
for input_block in synapse.get_input_blocks():
for port in input_block.get_input_ports():
if self.is_post_port(port.name, neuron_name, synapse_name):
post_port_names.append(port.get_name())
return post_port_names
def get_vt_port_names(self, synapse, neuron_name: str, synapse_name: str):
post_port_names = []
for input_block in synapse.get_input_blocks():
for port in input_block.get_input_ports():
if self.is_vt_port(port.name, neuron_name, synapse_name):
post_port_names.append(port.get_name())
return post_port_names
def get_neuron_var_name_from_syn_port_name(self, port_name: str, neuron_name: str, synapse_name: str) -> Optional[str]:
"""
        For a synapse port that is specified as connecting to the postsynaptic neuron, return the name
        of the corresponding variable in the postsynaptic neuron, or ``None`` if no mapping is given.
        Only makes sense for synapses.
        """
        if "neuron_synapse_pairs" not in self._options.keys():
            return None
for neuron_synapse_pair in self._options["neuron_synapse_pairs"]:
if not (neuron_name in [neuron_synapse_pair["neuron"], neuron_synapse_pair["neuron"] + FrontendConfiguration.suffix]
and synapse_name in [neuron_synapse_pair["synapse"], neuron_synapse_pair["synapse"] + FrontendConfiguration.suffix]):
continue
if not "post_ports" in neuron_synapse_pair.keys():
return None
post_ports = neuron_synapse_pair["post_ports"]
for post_port in post_ports:
if type(post_port) is not str and len(post_port) == 2: # (syn_port_name, neuron_var_name) tuple
if port_name == post_port[0]:
return post_port[1]
return None
return None
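    # Illustrative example (hypothetical names): with
    #     post_ports=[["I_post_dend", "I_dend"]]
    # in the matching pair, get_neuron_var_name_from_syn_port_name("I_post_dend", ...)
    # returns "I_dend", i.e. the postsynaptic neuron variable backing that synapse port.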
def get_convolve_with_not_post_vars(self, nodes: Union[ASTEquationsBlock, Sequence[ASTEquationsBlock]], neuron_name: str, synapse_name: str, parent_node: ASTNode):
class ASTVariablesUsedInConvolutionVisitor(ASTVisitor):
_variables = []
def __init__(self, node: ASTNode, parent_node: ASTNode, codegen_class):
                super(ASTVariablesUsedInConvolutionVisitor, self).__init__()
                # reset per instance so results do not accumulate across visitors
                self._variables = []
self.node = node
self.parent_node = parent_node
self.codegen_class = codegen_class
def visit_function_call(self, node):
func_name = node.get_name()
if func_name == "convolve":
symbol_buffer = node.get_scope().resolve_to_symbol(str(node.get_args()[1]),
SymbolKind.VARIABLE)
input_port = ASTUtils.get_input_port_by_name(
self.parent_node.get_input_blocks(), symbol_buffer.name)
if input_port and not self.codegen_class.is_post_port(input_port.name, neuron_name, synapse_name):
kernel_name = node.get_args()[0].get_variable().name
self._variables.append(kernel_name)
found_parent_assignment = False
node_ = node
while not found_parent_assignment:
node_ = self.parent_node.get_parent(node_)
# XXX TODO also needs to accept normal ASTExpression, ASTAssignment?
if isinstance(node_, ASTInlineExpression):
found_parent_assignment = True
var_name = node_.get_variable_name()
self._variables.append(var_name)
if not nodes:
return []
if isinstance(nodes, ASTNode):
nodes = [nodes]
variables = []
for node in nodes:
visitor = ASTVariablesUsedInConvolutionVisitor(node, parent_node, self)
node.accept(visitor)
variables.extend(visitor._variables)
return variables
def get_all_variables_assigned_to(self, node):
class ASTAssignedToVariablesFinderVisitor(ASTVisitor):
_variables = []
def __init__(self, synapse):
                super(ASTAssignedToVariablesFinderVisitor, self).__init__()
                # reset per instance so results do not accumulate across visitors
                self._variables = []
self.synapse = synapse
def visit_assignment(self, node):
symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_complete_name(), SymbolKind.VARIABLE)
assert symbol is not None # should have been checked in a CoCo before
self._variables.append(symbol)
if node is None:
return []
visitor = ASTAssignedToVariablesFinderVisitor(node)
node.accept(visitor)
return [v.name for v in visitor._variables]
def transform_neuron_synapse_pair_(self, neuron, synapse):
r"""
"Co-generation" or in-tandem generation of neuron and synapse code.
Does not modify existing neurons or synapses, but returns lists with additional elements representing new pair neuron and synapse
"""
new_neuron = neuron.clone()
new_synapse = synapse.clone()
assert len(new_neuron.get_equations_blocks()) <= 1, "Only one equations block per neuron supported for now."
assert len(new_synapse.get_equations_blocks()) <= 1, "Only one equations block per synapse supported for now."
assert len(new_neuron.get_state_blocks()) <= 1, "Only one state block supported per neuron for now."
assert len(new_synapse.get_state_blocks()) <= 1, "Only one state block supported per synapse for now."
assert len(new_neuron.get_update_blocks()) <= 1, "Only one update block supported per neuron for now."
assert len(new_synapse.get_update_blocks()) <= 1, "Only one update block supported per synapse for now."
#
# suffix for variables that will be transferred to neuron
#
var_name_suffix = "__for_" + synapse.get_name()
#
# determine which variables and dynamics in synapse can be transferred to neuron
#
all_state_vars = ASTUtils.all_variables_defined_in_block(synapse.get_state_blocks()[0])
all_state_vars = [var.get_complete_name() for var in all_state_vars]
# add names of convolutions
all_state_vars += ASTUtils.get_all_variables_used_in_convolutions(synapse.get_equations_blocks(), synapse)
# add names of kernels
kernel_buffers = ASTUtils.generate_kernel_buffers_(synapse, synapse.get_equations_blocks())
all_state_vars += [var.name for k in kernel_buffers for var in k[0].variables]
        # any variable that is assigned to in a block not connected to a postsynaptic port has to stay in the synapse
strictly_synaptic_vars = []
for input_block in new_synapse.get_input_blocks():
for port in input_block.get_input_ports():
if not self.is_post_port(port.name, neuron.name, synapse.name):
strictly_synaptic_vars += self.get_all_variables_assigned_to(
synapse.get_on_receive_block(port.name))
for update_block in synapse.get_update_blocks():
strictly_synaptic_vars += self.get_all_variables_assigned_to(update_block)
convolve_with_not_post_vars = self.get_convolve_with_not_post_vars(
synapse.get_equations_blocks(), neuron.name, synapse.name, synapse)
syn_to_neuron_state_vars = list(set(all_state_vars) - (set(strictly_synaptic_vars) | set(convolve_with_not_post_vars)))
Logger.log_message(None, -1, "State variables that will be moved from synapse to neuron: " + str(syn_to_neuron_state_vars),
None, LoggingLevel.INFO)
#
# collect all the variable/parameter/kernel/function/etc. names used in defining expressions of `syn_to_neuron_state_vars`
#
recursive_vars_used = ASTUtils.recursive_dependent_variables_search(syn_to_neuron_state_vars, synapse)
new_neuron.recursive_vars_used = recursive_vars_used
new_neuron._transferred_variables = [neuron_state_var + var_name_suffix
for neuron_state_var in syn_to_neuron_state_vars
if new_synapse.get_kernel_by_name(neuron_state_var) is None]
#
# collect all the parameters
#
all_declared_params = [s.get_variables() for s in new_synapse.get_parameters_blocks()[0].get_declarations()]
all_declared_params = sum(all_declared_params, [])
all_declared_params = [var.name for var in all_declared_params]
syn_to_neuron_params = [v for v in recursive_vars_used if v in all_declared_params]
# parameters used in the declarations of the state variables
vars_used = []
for var in syn_to_neuron_state_vars:
decls = ASTUtils.get_declarations_from_block(var, neuron.get_state_blocks()[0])
for decl in decls:
if decl.has_expression():
vars_used.extend(ASTUtils.collect_variable_names_in_expression(decl.get_expression()))
# parameters used in equations
for equations_block in neuron.get_equations_blocks():
vars_used.extend(ASTUtils.collects_vars_used_in_equation(var, equations_block))
syn_to_neuron_params.extend([var for var in vars_used if var in all_declared_params])
Logger.log_message(None, -1, "Parameters that will be copied from synapse to neuron: " + str(syn_to_neuron_params),
None, LoggingLevel.INFO)
#
# collect all the internal parameters
#
# XXX: TODO
#
# move state variable declarations from synapse to neuron
#
for state_var in syn_to_neuron_state_vars:
decls = ASTUtils.move_decls(state_var,
neuron.get_state_blocks()[0],
synapse.get_state_blocks()[0],
var_name_suffix,
block_type=BlockType.STATE)
ASTUtils.add_suffix_to_variable_names(decls, var_name_suffix)
#
# move defining equations for variables from synapse to neuron
#
if not new_synapse.get_equations_blocks():
ASTUtils.create_equations_block(new_synapse)
if not new_neuron.get_equations_blocks():
ASTUtils.create_equations_block(new_neuron)
for state_var in syn_to_neuron_state_vars:
Logger.log_message(None, -1, "Moving state var defining equation(s) " + str(state_var),
None, LoggingLevel.INFO)
decls = ASTUtils.equations_from_block_to_block(state_var,
new_synapse.get_equations_blocks()[0],
new_neuron.get_equations_blocks()[0],
var_name_suffix,
mode="move")
ASTUtils.add_suffix_to_variable_names(decls, var_name_suffix)
#
# move initial values for equations
#
for state_var in syn_to_neuron_state_vars:
Logger.log_message(None, -1, "Moving state variables for equation(s) " + str(state_var),
None, LoggingLevel.INFO)
ASTUtils.move_decls(var_name=state_var,
from_block=new_synapse.get_state_blocks()[0],
to_block=new_neuron.get_state_blocks()[0],
var_name_suffix=var_name_suffix,
block_type=BlockType.STATE,
mode="move")
#
# mark variables in the neuron pertaining to synapse postsynaptic ports
#
# convolutions with them ultimately yield variable updates when post neuron calls emit_spike()
#
def mark_post_ports(neuron, synapse, mark_node):
post_ports = []
def mark_post_port(_expr=None):
var = None
if isinstance(_expr, ASTSimpleExpression) and _expr.is_variable():
var = _expr.get_variable()
elif isinstance(_expr, ASTVariable):
var = _expr
if var:
var_base_name = var.name[:-len(var_name_suffix)] # prune the suffix
if self.is_post_port(var_base_name, neuron.name, synapse.name):
post_ports.append(var)
var._is_post_port = True
mark_node.accept(ASTHigherOrderVisitor(lambda x: mark_post_port(x)))
return post_ports
mark_post_ports(new_neuron, new_synapse, new_neuron)
#
# move statements in post receive block from synapse to ``new_neuron.moved_spike_updates``
#
vars_used = []
new_neuron.moved_spike_updates = []
spiking_post_port_names = self.get_spiking_post_port_names(synapse, neuron.name, synapse.name)
assert len(spiking_post_port_names) <= 1, "Can only handle one spiking \"post\" port"
if len(spiking_post_port_names) > 0:
post_port_name = spiking_post_port_names[0]
post_receive_block = new_synapse.get_on_receive_block(post_port_name)
assert post_receive_block is not None
for state_var in syn_to_neuron_state_vars:
Logger.log_message(None, -1, "Moving onPost updates for " + str(state_var), None, LoggingLevel.INFO)
stmts = ASTUtils.get_statements_from_block(state_var, post_receive_block)
if stmts:
Logger.log_message(None, -1, "Moving state var updates for " + state_var
+ " from synapse to neuron", None, LoggingLevel.INFO)
for stmt in stmts:
vars_used.extend(ASTUtils.collect_variable_names_in_expression(stmt))
post_receive_block.block.stmts.remove(stmt)
ASTUtils.add_suffix_to_decl_lhs(stmt, suffix=var_name_suffix)
ASTUtils.add_suffix_to_variable_names(stmt, var_name_suffix)
stmt.update_scope(new_neuron.get_update_blocks()[0].get_scope())
stmt.accept(ASTSymbolTableVisitor())
new_neuron.moved_spike_updates.append(stmt)
vars_used = list(set([v.name for v in vars_used]))
syn_to_neuron_params.extend([v for v in vars_used if v in [p + var_name_suffix for p in all_declared_params]])
#
# replace ``continuous`` type input ports that are connected to postsynaptic neuron with suffixed external variable references
#
Logger.log_message(
None, -1, "In synapse: replacing ``continuous`` type input ports that are connected to postsynaptic neuron with suffixed external variable references", None, LoggingLevel.INFO)
post_connected_continuous_input_ports = []
post_variable_names = []
for input_block in synapse.get_input_blocks():
for port in input_block.get_input_ports():
if self.is_post_port(port.get_name(), neuron.name, synapse.name) and self.is_continuous_port(port.get_name(), synapse):
post_connected_continuous_input_ports.append(port.get_name())
post_variable_names.append(self.get_neuron_var_name_from_syn_port_name(
port.get_name(), neuron.name, synapse.name))
for state_var, alternate_name in zip(post_connected_continuous_input_ports, post_variable_names):
Logger.log_message(None, -1, "\t• Replacing variable " + str(state_var), None, LoggingLevel.INFO)
ASTUtils.replace_with_external_variable(state_var, new_synapse, "",
new_synapse.get_equations_blocks()[0], alternate_name)
#
# copy parameters
#
Logger.log_message(None, -1, "Copying parameters from synapse to neuron...", None, LoggingLevel.INFO)
for param_var in syn_to_neuron_params:
Logger.log_message(None, -1, "\tCopying parameter with name " + str(param_var)
+ " from synapse to neuron", None, LoggingLevel.INFO)
decls = ASTUtils.move_decls(param_var,
new_synapse.get_parameters_blocks()[0],
new_neuron.get_parameters_blocks()[0],
var_name_suffix,
block_type=BlockType.PARAMETERS,
mode="copy")
#
# add suffix to variables in spike updates
#
Logger.log_message(
None, -1, "Adding suffix to variables in spike updates", None, LoggingLevel.INFO)
for stmt in new_neuron.moved_spike_updates:
for param_var in syn_to_neuron_params:
param_var = str(param_var)
ASTUtils.add_suffix_to_variable_name(param_var, stmt, var_name_suffix, scope=new_neuron.get_update_blocks()[0].get_scope())
#
# replace occurrences of the variables in expressions in the original synapse with calls to the corresponding neuron getters
#
Logger.log_message(
None, -1, "In synapse: replacing variables with suffixed external variable references", None, LoggingLevel.INFO)
for state_var in syn_to_neuron_state_vars:
Logger.log_message(None, -1, "\t• Replacing variable " + str(state_var), None, LoggingLevel.INFO)
ASTUtils.replace_with_external_variable(
state_var, new_synapse, var_name_suffix, new_neuron.get_equations_blocks()[0])
#
# rename neuron
#
name_separator_str = "__with_"
new_neuron_name = neuron.get_name() + name_separator_str + synapse.get_name()
new_neuron.set_name(new_neuron_name)
new_neuron.paired_synapse = new_synapse
#
# rename synapse
#
new_synapse_name = synapse.get_name() + name_separator_str + neuron.get_name()
new_synapse.set_name(new_synapse_name)
new_synapse.paired_neuron = new_neuron
new_neuron.paired_synapse = new_synapse
base_neuron_name = removesuffix(neuron.get_name(), FrontendConfiguration.suffix)
base_synapse_name = removesuffix(synapse.get_name(), FrontendConfiguration.suffix)
new_synapse.post_port_names = self.get_post_port_names(synapse, base_neuron_name, base_synapse_name)
new_synapse.spiking_post_port_names = self.get_spiking_post_port_names(synapse, base_neuron_name, base_synapse_name)
new_synapse.vt_port_names = self.get_vt_port_names(synapse, base_neuron_name, base_synapse_name)
#
# add modified versions of neuron and synapse to list
#
new_neuron.accept(ASTSymbolTableVisitor())
new_synapse.accept(ASTSymbolTableVisitor())
ASTUtils.update_blocktype_for_common_parameters(new_synapse)
Logger.log_message(None, -1, "Successfully constructed neuron-synapse pair "
+ new_neuron.name + ", " + new_synapse.name, None, LoggingLevel.INFO)
return new_neuron, new_synapse
    def transform(self, models: Union[ASTNode, Sequence[ASTNode]]) -> Union[ASTNode, Sequence[ASTNode]]:
        r"""For each configured (neuron, synapse) pair, build the co-generated pair models and splice them into ``models``."""
for neuron_synapse_pair in self.get_option("neuron_synapse_pairs"):
neuron_name = neuron_synapse_pair["neuron"]
synapse_name = neuron_synapse_pair["synapse"]
neuron = ASTUtils.find_model_by_name(neuron_name + FrontendConfiguration.suffix, models)
if neuron is None:
raise Exception("Neuron used in pair (\"" + neuron_name + "\") not found") # XXX: log error
synapse = ASTUtils.find_model_by_name(synapse_name + FrontendConfiguration.suffix, models)
if synapse is None:
raise Exception("Synapse used in pair (\"" + synapse_name + "\") not found") # XXX: log error
new_neuron, new_synapse = self.transform_neuron_synapse_pair_(neuron, synapse)
# Replace the original synapse model with the co-generated one
model_idx = models.index(synapse)
models[model_idx] = new_synapse
models.append(new_neuron)
return models | PypiClean |
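# --- Hedged usage sketch for the transformer above (added for illustration;
# not part of the original file). The transformer is normally driven by
# NESTML's code-generation pipeline; model and port names are illustrative. ---
def _example_transform(models):
    xform = SynapsePostNeuronTransformer(
        options={"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp",
                                           "synapse": "stdp",
                                           "post_ports": ["post_spikes"]}]})
    # ``models`` is a list of parsed neuron/synapse ASTs; the co-generated pair
    # is appended and the original synapse entry is replaced
    return xform.transform(models)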
/EnergySystemModels-0.1.17.post63-py3-none-any.whl/NodeEditor/nodeeditor/node_graphics_node.py | from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
class QDMGraphicsNode(QGraphicsItem):
"""Class describing Graphics representation of :class:`~nodeeditor.node_node.Node`"""
def __init__(self, node:'Node', parent:QWidget=None):
"""
:param node: reference to :class:`~nodeeditor.node_node.Node`
:type node: :class:`~nodeeditor.node_node.Node`
:param parent: parent widget
:type parent: QWidget
:Instance Attributes:
- **node** - reference to :class:`~nodeeditor.node_node.Node`
"""
super().__init__(parent)
self.node = node
# init our flags
self.hovered = False
self._was_moved = False
self._last_selected_state = False
self.initSizes()
self.initAssets()
self.initUI()
@property
def content(self):
"""Reference to `Node Content`"""
return self.node.content if self.node else None
@property
def title(self):
"""title of this `Node`
:getter: current Graphics Node title
:setter: stores and make visible the new title
:type: str
"""
return self._title
@title.setter
def title(self, value):
self._title = value
self.title_item.setPlainText(self._title)
def initUI(self):
"""Set up this ``QGraphicsItem``"""
self.setFlag(QGraphicsItem.ItemIsSelectable)
self.setFlag(QGraphicsItem.ItemIsMovable)
self.setAcceptHoverEvents(True)
# init title
self.initTitle()
self.title = self.node.title
self.initContent()
def initSizes(self):
"""Set up internal attributes like `width`, `height`, etc."""
self.width = 180
self.height = 140
self.edge_roundness = 10.0
self.edge_padding = 10.0
self.title_height = 24.0
self.title_horizontal_padding = 4.0
self.title_vertical_padding = 4.0
def initAssets(self):
"""Initialize ``QObjects`` like ``QColor``, ``QPen`` and ``QBrush``"""
self._title_color = Qt.white
self._title_font = QFont("Ubuntu", 10)
self._color = QColor("#7F000000")
self._color_selected = QColor("#FFFFA637")
self._color_hovered = QColor("#FF37A6FF")
self._pen_default = QPen(self._color)
self._pen_default.setWidthF(2.0)
self._pen_selected = QPen(self._color_selected)
self._pen_selected.setWidthF(2.0)
self._pen_hovered = QPen(self._color_hovered)
self._pen_hovered.setWidthF(3.0)
self._brush_title = QBrush(QColor("#FF313131"))
self._brush_background = QBrush(QColor("#E3212121"))
def onSelected(self):
"""Our event handling when the node was selected"""
self.node.scene.grScene.itemSelected.emit()
def doSelect(self, new_state=True):
"""Safe version of selecting the `Graphics Node`. Takes care about the selection state flag used internally
:param new_state: ``True`` to select, ``False`` to deselect
:type new_state: ``bool``
"""
self.setSelected(new_state)
self._last_selected_state = new_state
if new_state: self.onSelected()
def mouseMoveEvent(self, event):
"""Overriden event to detect that we moved with this `Node`"""
super().mouseMoveEvent(event)
# optimize me! just update the selected nodes
for node in self.scene().scene.nodes:
if node.grNode.isSelected():
node.updateConnectedEdges()
self._was_moved = True
def mouseReleaseEvent(self, event):
"""Overriden event to handle when we moved, selected or deselected this `Node`"""
super().mouseReleaseEvent(event)
# handle when grNode moved
if self._was_moved:
self._was_moved = False
self.node.scene.history.storeHistory("Node moved", setModified=True)
self.node.scene.resetLastSelectedStates()
self.doSelect() # also trigger itemSelected when node was moved
# we need to store the last selected state, because moving does also select the nodes
self.node.scene._last_selected_items = self.node.scene.getSelectedItems()
# now we want to skip storing selection
return
# handle when grNode was clicked on
if self._last_selected_state != self.isSelected() or self.node.scene._last_selected_items != self.node.scene.getSelectedItems():
self.node.scene.resetLastSelectedStates()
self._last_selected_state = self.isSelected()
self.onSelected()
def mouseDoubleClickEvent(self, event):
"""Overriden event for doubleclick. Resend to `Node::onDoubleClicked`"""
self.node.onDoubleClicked(event)
def hoverEnterEvent(self, event: 'QGraphicsSceneHoverEvent') -> None:
"""Handle hover effect"""
self.hovered = True
self.update()
def hoverLeaveEvent(self, event: 'QGraphicsSceneHoverEvent') -> None:
"""Handle hover effect"""
self.hovered = False
self.update()
def boundingRect(self) -> QRectF:
"""Defining Qt' bounding rectangle"""
return QRectF(
0,
0,
self.width,
self.height
).normalized()
def initTitle(self):
"""Set up the title Graphics representation: font, color, position, etc."""
self.title_item = QGraphicsTextItem(self)
self.title_item.node = self.node
self.title_item.setDefaultTextColor(self._title_color)
self.title_item.setFont(self._title_font)
self.title_item.setPos(self.title_horizontal_padding, 0)
self.title_item.setTextWidth(
self.width
- 2 * self.title_horizontal_padding
)
def initContent(self):
"""Set up the `grContent` - ``QGraphicsProxyWidget`` to have a container for `Graphics Content`"""
if self.content is not None:
self.content.setGeometry(self.edge_padding, self.title_height + self.edge_padding,
self.width - 2 * self.edge_padding, self.height - 2 * self.edge_padding - self.title_height)
# get the QGraphicsProxyWidget when inserted into the grScene
self.grContent = self.node.scene.grScene.addWidget(self.content)
self.grContent.setParentItem(self)
def paint(self, painter, QStyleOptionGraphicsItem, widget=None):
"""Painting the rounded rectanglar `Node`"""
# title
path_title = QPainterPath()
path_title.setFillRule(Qt.WindingFill)
path_title.addRoundedRect(0, 0, self.width, self.title_height, self.edge_roundness, self.edge_roundness)
path_title.addRect(0, self.title_height - self.edge_roundness, self.edge_roundness, self.edge_roundness)
path_title.addRect(self.width - self.edge_roundness, self.title_height - self.edge_roundness, self.edge_roundness, self.edge_roundness)
painter.setPen(Qt.NoPen)
painter.setBrush(self._brush_title)
painter.drawPath(path_title.simplified())
# content
path_content = QPainterPath()
path_content.setFillRule(Qt.WindingFill)
path_content.addRoundedRect(0, self.title_height, self.width, self.height - self.title_height, self.edge_roundness, self.edge_roundness)
path_content.addRect(0, self.title_height, self.edge_roundness, self.edge_roundness)
path_content.addRect(self.width - self.edge_roundness, self.title_height, self.edge_roundness, self.edge_roundness)
painter.setPen(Qt.NoPen)
painter.setBrush(self._brush_background)
painter.drawPath(path_content.simplified())
# outline
path_outline = QPainterPath()
path_outline.addRoundedRect(-1, -1, self.width+2, self.height+2, self.edge_roundness, self.edge_roundness)
painter.setBrush(Qt.NoBrush)
if self.hovered:
painter.setPen(self._pen_hovered)
painter.drawPath(path_outline.simplified())
painter.setPen(self._pen_default)
painter.drawPath(path_outline.simplified())
else:
painter.setPen(self._pen_default if not self.isSelected() else self._pen_selected)
painter.drawPath(path_outline.simplified()) | PypiClean |
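# --- Hedged subclassing sketch (added for illustration; not part of the
# original file). QDMGraphicsNode is normally instantiated by the editor's
# Node class; a common customization is overriding initSizes()/initAssets(),
# using only the attributes those methods define above: ---
class _ExampleGraphicsNode(QDMGraphicsNode):
    def initSizes(self):
        super().initSizes()
        self.width = 240            # wider body (default is 180)
        self.title_height = 30.0    # taller title bar

    def initAssets(self):
        super().initAssets()
        self._brush_title = QBrush(QColor("#FF225577"))   # custom title color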