ext | sha | content |
---|---|---|
py | 1a45ef0600a967b03e4cc0b3c9a1226a49587a8c | import csv
import re
from lxml.html import fromstring
class CsvCallback:
def __init__(self):
self.writer = csv.writer(open('../data/countries_or_districts.csv', 'w'))
self.fields = ('area', 'population', 'iso', 'country_or_district', 'capital',
'continent', 'tld', 'currency_code', 'currency_name',
'phone', 'postal_code_format', 'postal_code_regex',
'languages', 'neighbours')
self.writer.writerow(self.fields)
def __call__(self, url, html):
if re.search('/view/', url):
tree = fromstring(html)
all_rows = [
tree.xpath('//tr[@id="places_%s__row"]/td[@class="w2p_fw"]' % field)[0].text_content()
for field in self.fields]
self.writer.writerow(all_rows)
|
py | 1a45ef8601b86b0df75845603184660a9f9e1d49 | print "***************************************"
print " test1.py "
print "***************************************"
from MainWindow import *
from Globals import *
import SampleWidget
import time
class Widget(QWidget):
def __init__(self):
QWidget.__init__(self, None, 'comeback')
self.button1 = QPushButton(self, 'hideme')
self.button1.setGeometry(0,0,100,20)
self.button2 = QPushButton(self, 'closeme')
self.button2.setGeometry(0,20,100,20)
self.child = QWidget()
self.shown = False
QObject.connect(self.button1, SIGNAL("clicked()"),
self.slot)
QObject.connect(self.button2, SIGNAL("clicked()"),
self.close)
def slot(self):
if self.shown:
self.child.hide()
self.shown = False
else:
self.child.show()
self.shown = True
##~ PKAudio = Globals.getPKAudio().start_server()
##~ s = SampleWidget.SampleControl()
##~ s.load('/home/ajole/wav/Patrick Kidd - Birdman.wav')
##~ #s.sample.play()
##~ s.slotStart()
##~ s.slotSetZone(0,1)
##~
##~ time.sleep(100)
a = QApplication([])
PKAudio.start_server()
w = Widget()
a.setMainWidget(w)
w.show()
s = PKAudio.Sample('/home/ajole/wav/Patrick Kidd - Birdman.wav')
d = PKAudio.Driver()
d.getMixer().connect(s.outputPort())
s.play()
w.show()
a.exec_loop()
|
py | 1a45f070b8b05030346ef8bb1129ab45cf2d5ce4 | # """
# This MobileNetV2 implementation is modified from the following repository:
# https://github.com/tonylins/pytorch-mobilenet-v2
# """
# import torch.nn as nn
# import math
# from .utils import load_url
# from mit_semseg.lib.nn import SynchronizedBatchNorm2d
# BatchNorm2d = SynchronizedBatchNorm2d
# __all__ = ['mobilenetv2']
# model_urls = {
# 'mobilenetv2': 'http://sceneparsing.csail.mit.edu/model/pretrained_resnet/mobilenet_v2.pth.tar',
# }
# def conv_bn(inp, oup, stride):
# return nn.Sequential(
# nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
# BatchNorm2d(oup),
# nn.ReLU6(inplace=True)
# )
# def conv_1x1_bn(inp, oup):
# return nn.Sequential(
# nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
# BatchNorm2d(oup),
# nn.ReLU6(inplace=True)
# )
# class InvertedResidual(nn.Module):
# def __init__(self, inp, oup, stride, expand_ratio):
# super(InvertedResidual, self).__init__()
# self.stride = stride
# assert stride in [1, 2]
# hidden_dim = round(inp * expand_ratio)
# self.use_res_connect = self.stride == 1 and inp == oup
# if expand_ratio == 1:
# self.conv = nn.Sequential(
# # dw
# nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
# BatchNorm2d(hidden_dim),
# nn.ReLU6(inplace=True),
# # pw-linear
# nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
# BatchNorm2d(oup),
# )
# else:
# self.conv = nn.Sequential(
# # pw
# nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
# BatchNorm2d(hidden_dim),
# nn.ReLU6(inplace=True),
# # dw
# nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),
# BatchNorm2d(hidden_dim),
# nn.ReLU6(inplace=True),
# # pw-linear
# nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
# BatchNorm2d(oup),
# )
# def forward(self, x):
# if self.use_res_connect:
# return x + self.conv(x)
# else:
# return self.conv(x)
# class MobileNetV2(nn.Module):
# def __init__(self, n_class=1000, input_size=224, width_mult=1.):
# super(MobileNetV2, self).__init__()
# block = InvertedResidual
# input_channel = 32
# last_channel = 1280
# interverted_residual_setting = [
# # t, c, n, s
# [1, 16, 1, 1],
# [6, 24, 2, 2],
# [6, 32, 3, 2],
# [6, 64, 4, 2],
# [6, 96, 3, 1],
# [6, 160, 3, 2],
# [6, 320, 1, 1],
# ]
# # building first layer
# assert input_size % 32 == 0
# input_channel = int(input_channel * width_mult)
# self.last_channel = int(last_channel * width_mult) if width_mult > 1.0 else last_channel
# self.features = [conv_bn(3, input_channel, 2)]
# # building inverted residual blocks
# for t, c, n, s in interverted_residual_setting:
# output_channel = int(c * width_mult)
# for i in range(n):
# if i == 0:
# self.features.append(block(input_channel, output_channel, s, expand_ratio=t))
# else:
# self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))
# input_channel = output_channel
# # building last several layers
# self.features.append(conv_1x1_bn(input_channel, self.last_channel))
# # make it nn.Sequential
# self.features = nn.Sequential(*self.features)
# # building classifier
# self.classifier = nn.Sequential(
# nn.Dropout(0.2),
# nn.Linear(self.last_channel, n_class),
# )
# self._initialize_weights()
# def forward(self, x):
# x = self.features(x)
# x = x.mean(3).mean(2)
# x = self.classifier(x)
# return x
# def _initialize_weights(self):
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# if m.bias is not None:
# m.bias.data.zero_()
# elif isinstance(m, BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
# elif isinstance(m, nn.Linear):
# n = m.weight.size(1)
# m.weight.data.normal_(0, 0.01)
# m.bias.data.zero_()
# def mobilenetv2(pretrained=False, **kwargs):
# """Constructs a MobileNet_V2 model.
# Args:
# pretrained (bool): If True, returns a model pre-trained on ImageNet
# """
# model = MobileNetV2(n_class=1000, **kwargs)
# if pretrained:
# model.load_state_dict(load_url(model_urls['mobilenetv2']), strict=False)
# return model
|
py | 1a45f0bccc4a78e28a2f11339d96818bfcf3d924 | from .core import *
from .plotting import *
from .benchmarks import *
from .examples import *
import warnings
warnings.filterwarnings("ignore")
|
py | 1a45f231cba07d82a2b8bd4027585d494bcaf0c5 | import os
import requests
import asyncio
import logging
from bot.services.request_service import RequestService
from bot.cache.token_cache import TokenCache
from bot.util.log import setup_logging_queue
from bot.exceptions.exceptions import RequestException, UserException
LOG = logging.getLogger('simple')
class UserService(RequestService):
def __init__(self, token_cache: TokenCache):
super().__init__(token_cache)
self.CACHED_USER_ENDPOINT = os.environ['CACHED_USER_ENDPOINT']
async def get_user_id(self, discord_id: int):
params = {
'discord_id': discord_id
}
user_request = self._construct_awaitable(requests.get,
self.CACHED_USER_ENDPOINT,
params=params)
try:
user_data = await self._call(user_request)
return user_data['id']
except RequestException as err:
LOG.error(f"UserService error while fetching userID: {str(err)}")
raise UserException
|
bzl | 1a45f245bd58b0bdefe3d885f79a8d3ea2dd09aa | # Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load("@io_bazel_rules_rust//rust:private/utils.bzl", "relative_path")
load("@io_bazel_rules_rust//rust:private/legacy_cc_starlark_api_shim.bzl", "get_libs_for_static_executable")
load(
"@bazel_tools//tools/build_defs/cc:action_names.bzl",
"CPP_LINK_EXECUTABLE_ACTION_NAME",
)
load(
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
"find_cpp_toolchain",
)
load("@bazel_skylib//lib:versions.bzl", "versions")
load("@bazel_version//:def.bzl", "BAZEL_VERSION")
CrateInfo = provider(
fields = {
"name": "str: The name of this crate.",
"type": "str: The type of this crate. eg. lib or bin",
"root": "File: The source File entrypoint to this crate, eg. lib.rs",
"srcs": "List[File]: All source Files that are part of the crate.",
"deps": "List[Provider]: This crate's (rust or cc) dependencies' providers.",
"proc_macro_deps": "List[CrateInfo]: This crate's rust proc_macro dependencies' providers.",
"aliases": "Dict[Label, String]: Renamed and aliased crates",
"output": "File: The output File that will be produced, depends on crate type.",
"edition": "str: The edition of this crate.",
"rustc_env": """Dict[String, String]: Additional `"key": "value"` environment variables to set for rustc.""",
},
)
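# A minimal sketch (not taken from this file) of how a rule implementation
# might populate CrateInfo; the ctx attributes and the declared output name
# below are illustrative assumptions, not the actual rules_rust attribute set:
#
#   crate_info = CrateInfo(
#       name = ctx.label.name.replace("-", "_"),
#       type = "lib",
#       root = ctx.file.crate_root,
#       srcs = ctx.files.srcs,
#       deps = ctx.attr.deps,
#       proc_macro_deps = ctx.attr.proc_macro_deps,
#       aliases = {},
#       output = ctx.actions.declare_file(ctx.label.name + ".rlib"),
#       edition = "2018",
#       rustc_env = {},
#   )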
BuildInfo = provider(
fields = {
"flags": """File: file containing additional flags to pass to rustc""",
"out_dir": """File: directory containing the result of a build script""",
"rustc_env": """File: file containing additional environment variables to set for rustc.""",
"dep_env": """File: extra build script environment varibles to be set to direct dependencies.""",
"link_flags": """File: file containing flags to pass to the linker""",
},
)
AliasableDep = provider(
fields = {
"name": "str",
"dep": "CrateInfo",
},
)
DepInfo = provider(
fields = {
"direct_crates": "depset[CrateInfo]",
"transitive_crates": "depset[CrateInfo]",
"transitive_dylibs": "depset[File]",
"transitive_staticlibs": "depset[File]",
"transitive_libs": "List[File]: All transitive dependencies, not filtered by type.",
"transitive_build_infos": "depset[BuildInfo]",
"dep_env": """File: File with environment variables direct dependencies build scripts rely upon.""",
},
)
def _get_rustc_env(ctx, toolchain):
version = ctx.attr.version if hasattr(ctx.attr, "version") else "0.0.0"
major, minor, patch = version.split(".", 2)
if "-" in patch:
patch, pre = patch.split("-", 1)
else:
pre = ""
return {
"CARGO_PKG_VERSION": version,
"CARGO_PKG_VERSION_MAJOR": major,
"CARGO_PKG_VERSION_MINOR": minor,
"CARGO_PKG_VERSION_PATCH": patch,
"CARGO_PKG_VERSION_PRE": pre,
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_NAME": ctx.label.name,
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_CFG_TARGET_OS": toolchain.os,
"CARGO_CFG_TARGET_ARCH": toolchain.target_arch,
}
def get_compilation_mode_opts(ctx, toolchain):
comp_mode = ctx.var["COMPILATION_MODE"]
if not comp_mode in toolchain.compilation_mode_opts:
fail("Unrecognized compilation mode {} for toolchain.".format(comp_mode))
return toolchain.compilation_mode_opts[comp_mode]
def get_lib_name(lib):
"""Returns the name of a library artifact, eg. libabc.a -> abc"""
libname, ext = lib.basename.split(".", 2)
if libname.startswith("lib"):
return libname[3:]
else:
return libname
def collect_deps(label, deps, proc_macro_deps, aliases, toolchain):
"""
Walks through dependencies and collects the transitive dependencies.
Args:
label: str: Label of the current target.
deps: List[Label]: The deps from ctx.attr.deps.
proc_macro_deps: List[Label]: The proc_macro deps from ctx.attr.proc_macro_deps.
aliases: Dict[Label, String]: Alias names for crate dependencies, from ctx.attr.aliases.
toolchain: The rust toolchain in use for this target.
Returns:
Returns a tuple of (DepInfo, BuildInfo) for the current target.
"""
for dep in deps:
if CrateInfo in dep:
if dep[CrateInfo].type == "proc-macro":
fail(
"{} listed {} in its deps, but it is a proc-macro. It should instead be in proc-macro-deps.".format(
label,
dep.label,
)
)
for dep in proc_macro_deps:
type = dep[CrateInfo].type
if type != "proc-macro":
fail(
"{} listed {} in its proc_macro_deps, but it is not proc-macro, it is a {}. It should probably instead be listed in deps.".format(
label,
dep.label,
type,
)
)
# TODO: Fix depset union (https://docs.bazel.build/versions/master/skylark/depsets.html)
direct_crates = []
transitive_crates = depset()
transitive_dylibs = depset(order = "topological") # dylib link flag ordering matters.
transitive_staticlibs = depset()
transitive_build_infos = depset()
build_info = None
aliases = {k.label: v for k,v in aliases.items()}
for dep in deps + proc_macro_deps:
if CrateInfo in dep:
# This dependency is a rust_library
direct_dep = dep[CrateInfo]
aliasable_dep = AliasableDep(
name = aliases.get(dep.label, direct_dep.name),
dep = direct_dep,
)
direct_crates += [aliasable_dep]
transitive_crates = depset([dep[CrateInfo]], transitive = [transitive_crates])
transitive_crates = depset(transitive = [transitive_crates, dep[DepInfo].transitive_crates])
transitive_dylibs = depset(transitive = [transitive_dylibs, dep[DepInfo].transitive_dylibs])
transitive_staticlibs = depset(transitive = [transitive_staticlibs, dep[DepInfo].transitive_staticlibs])
transitive_build_infos = depset(transitive = [transitive_build_infos, dep[DepInfo].transitive_build_infos])
elif CcInfo in dep:
# This dependency is a cc_library
# TODO: We could let the user choose how to link, instead of always preferring to link static libraries.
libs = get_libs_for_static_executable(dep)
dylibs = [l for l in libs.to_list() if l.basename.endswith(toolchain.dylib_ext)]
staticlibs = [l for l in libs.to_list() if l.basename.endswith(toolchain.staticlib_ext)]
transitive_dylibs = depset(transitive = [transitive_dylibs, depset(dylibs)])
transitive_staticlibs = depset(transitive = [transitive_staticlibs, depset(staticlibs)])
elif BuildInfo in dep:
if build_info:
fail("Several deps are providing build information, only one is allowed in the dependencies", "deps")
build_info = dep[BuildInfo]
transitive_build_infos = depset([build_info], transitive = [transitive_build_infos])
else:
fail("rust targets can only depend on rust_library, rust_*_library or cc_library targets." + str(dep), "deps")
transitive_libs = depset(
[c.output for c in transitive_crates.to_list()],
transitive = [transitive_staticlibs, transitive_dylibs],
)
return (
DepInfo(
direct_crates = depset(direct_crates),
transitive_crates = transitive_crates,
transitive_dylibs = transitive_dylibs,
transitive_staticlibs = transitive_staticlibs,
transitive_libs = transitive_libs.to_list(),
transitive_build_infos = transitive_build_infos,
dep_env = build_info.dep_env if build_info else None,
),
build_info,
)
def get_linker_and_args(ctx, rpaths):
if (len(BAZEL_VERSION) == 0 or
versions.is_at_least("0.18.0", BAZEL_VERSION)):
user_link_flags = ctx.fragments.cpp.linkopts
else:
user_link_flags = depset(ctx.fragments.cpp.linkopts)
cc_toolchain = find_cpp_toolchain(ctx)
kwargs = {
"ctx": ctx,
} if len(BAZEL_VERSION) == 0 or versions.is_at_least(
"0.25.0",
BAZEL_VERSION,
) else {}
feature_configuration = cc_common.configure_features(
cc_toolchain = cc_toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
**kwargs
)
link_variables = cc_common.create_link_variables(
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
is_linking_dynamic_library = False,
runtime_library_search_directories = rpaths,
user_link_flags = user_link_flags,
)
link_args = cc_common.get_memory_inefficient_command_line(
feature_configuration = feature_configuration,
action_name = CPP_LINK_EXECUTABLE_ACTION_NAME,
variables = link_variables,
)
link_env = cc_common.get_environment_variables(
feature_configuration = feature_configuration,
action_name = CPP_LINK_EXECUTABLE_ACTION_NAME,
variables = link_variables,
)
ld = cc_common.get_tool_for_action(
feature_configuration = feature_configuration,
action_name = CPP_LINK_EXECUTABLE_ACTION_NAME,
)
return ld, link_args, link_env
def _process_build_scripts(
ctx,
file,
crate_info,
build_info,
dep_info,
compile_inputs):
extra_inputs, prep_commands, dynamic_env, dynamic_build_flags = _create_out_dir_action(ctx, file, build_info, dep_info)
if extra_inputs:
compile_inputs = depset(extra_inputs, transitive = [compile_inputs])
return compile_inputs, prep_commands, dynamic_env, dynamic_build_flags
def collect_inputs(
ctx,
file,
files,
toolchain,
crate_info,
dep_info,
build_info):
linker_script = getattr(file, "linker_script") if hasattr(file, "linker_script") else None
if (len(BAZEL_VERSION) == 0 or
versions.is_at_least("0.25.0", BAZEL_VERSION)):
linker_depset = find_cpp_toolchain(ctx).all_files
else:
linker_depset = depset(files._cc_toolchain)
compile_inputs = depset(
crate_info.srcs +
getattr(files, "data", []) +
dep_info.transitive_libs +
[toolchain.rustc] +
toolchain.crosstool_files +
([build_info.rustc_env, build_info.flags] if build_info else []) +
([] if linker_script == None else [linker_script]),
transitive = [
toolchain.rustc_lib.files,
toolchain.rust_lib.files,
linker_depset,
],
)
return _process_build_scripts(ctx, file, crate_info, build_info, dep_info, compile_inputs)
def construct_arguments(
ctx,
file,
toolchain,
crate_info,
dep_info,
output_hash,
rust_flags,
dynamic_env):
output_dir = getattr(crate_info.output, "dirname") if hasattr(crate_info.output, "dirname") else None
linker_script = getattr(file, "linker_script") if hasattr(file, "linker_script") else None
env = _get_rustc_env(ctx, toolchain)
args = ctx.actions.args()
args.add(crate_info.root)
args.add("--crate-name=" + crate_info.name)
args.add("--crate-type=" + crate_info.type)
# Mangle symbols to disambiguate crates with the same name
extra_filename = "-" + output_hash if output_hash else ""
args.add("--codegen=metadata=" + extra_filename)
if output_dir:
args.add("--out-dir=" + output_dir)
args.add("--codegen=extra-filename=" + extra_filename)
compilation_mode = get_compilation_mode_opts(ctx, toolchain)
args.add("--codegen=opt-level=" + compilation_mode.opt_level)
args.add("--codegen=debuginfo=" + compilation_mode.debug_info)
args.add("--emit=dep-info,link")
args.add("--color=always")
args.add("--target=" + toolchain.target_triple)
if hasattr(ctx.attr, "crate_features"):
args.add_all(getattr(ctx.attr, "crate_features"), before_each = "--cfg", format_each = 'feature="%s"')
if linker_script:
args.add(linker_script.path, format = "--codegen=link-arg=-T%s")
# Gets the paths to the folders containing the standard library (or libcore)
rust_lib_paths = depset([file.dirname for file in toolchain.rust_lib.files.to_list()]).to_list()
# Tell Rustc where to find the standard library
args.add_all(rust_lib_paths, before_each = "-L", format_each = "%s")
args.add_all(rust_flags)
args.add_all(getattr(ctx.attr, "rustc_flags", []))
add_edition_flags(args, crate_info)
# Link!
# Rust's built-in linker can handle linking wasm files. We don't want to attempt to use the cc
# linker since it won't understand.
if toolchain.target_arch != "wasm32":
rpaths = _compute_rpaths(toolchain, output_dir, dep_info)
ld, link_args, link_env = get_linker_and_args(ctx, rpaths)
env.update(link_env)
args.add("--codegen=linker=" + ld)
args.add_joined("--codegen", link_args, join_with = " ", format_joined = "link-args=%s")
add_native_link_flags(args, dep_info)
add_crate_link_flags(args, dep_info)
# Make bin crate data deps available to tests.
for data in getattr(ctx.attr, "data", []):
if CrateInfo in data:
dep_crate_info = data[CrateInfo]
if dep_crate_info.type == "bin":
env["CARGO_BIN_EXE_" + dep_crate_info.output.basename] = dep_crate_info.output.short_path
# Update environment with user provided variables.
env.update(crate_info.rustc_env)
# This empty value satisfies Clippy, which otherwise complains about the
# sysroot being undefined.
env["SYSROOT"] = ""
# Certain rust build processes expect to find files from the environment variable
# `$CARGO_MANIFEST_DIR`. Examples of this include pest, tera, asakuma.
#
# The compiler and by extension proc-macros see the current working directory as the Bazel exec
# root. Therefore, in order to fix this without an upstream code change, we have to set
# `$CARGO_MANIFEST_DIR`.
#
# As such we attempt to infer `$CARGO_MANIFEST_DIR`.
# Inference cannot be derived from `attr.crate_root`, as this points at a source file which may or
# may not follow the `src/lib.rs` convention. As such we use `ctx.build_file_path` mapped into the
# `exec_root`. Since we cannot (seemingly) get the `exec_root` from skylark, we cheat a little
# and use `$(pwd)` which resolves the `exec_root` at action execution time.
package_dir = ctx.build_file_path[:ctx.build_file_path.rfind("/")]
dynamic_env["CARGO_MANIFEST_DIR"] = "${{EXEC_ROOT}}/{}".format(package_dir)
return args, env, dynamic_env
def construct_compile_command(
ctx,
command,
toolchain,
crate_info,
build_info,
dep_info,
prep_commands,
dynamic_env,
dynamic_build_flags):
# Handle that the binary name and crate name may be different.
#
# If a target name contains a - then cargo (and rules_rust) will generate a
# crate name with _ instead. Accordingly, rustc will generate an output
# file (executable, or rlib, or whatever) with _ not -. But when cargo
# puts a binary in the target/${config} directory, and sets environment
# variables like `CARGO_BIN_EXE_${binary_name}` it will use the - version
# not the _ version. So we rename the rustc-generated file (with _s) to
# have -s if needed.
maybe_rename = ""
if crate_info.type == "bin" and crate_info.output != None:
generated_file = crate_info.name
if toolchain.target_arch == "wasm32":
generated_file = generated_file + ".wasm"
src = "/".join([crate_info.output.dirname, generated_file])
dst = crate_info.output.path
if src != dst:
maybe_rename = " && /bin/mv {src} {dst}".format(src=src, dst=dst)
# Set ${EXEC_ROOT} so that actions which chdir still work.
# See https://github.com/google/cargo-raze/issues/71#issuecomment-433225853 for the rationale as
# to why.
return 'export EXEC_ROOT=$(pwd) && {} && {} "$@" --remap-path-prefix="$(pwd)"=__bazel_redacted_pwd {}{}'.format(
" && ".join(["export {}={}".format(key, value) for key, value in dynamic_env.items()] + prep_commands),
command,
" ".join(dynamic_build_flags),
maybe_rename,
)
def rustc_compile_action(
ctx,
toolchain,
crate_info,
output_hash = None,
rust_flags = []):
"""
Constructs the rustc command used to build the current target.
Returns:
List[Provider]: A list of the following providers:
- CrateInfo: info for the crate we just built; same as `crate_info` parameter.
- DepInfo: The transitive dependencies of this crate.
- DefaultInfo: The output file for this crate, and its runfiles.
"""
dep_info, build_info = collect_deps(
ctx.label,
crate_info.deps,
crate_info.proc_macro_deps,
crate_info.aliases,
toolchain,
)
compile_inputs, prep_commands, dynamic_env, dynamic_build_flags = collect_inputs(
ctx,
ctx.file,
ctx.files,
toolchain,
crate_info,
dep_info,
build_info,
)
args, env, dynamic_env = construct_arguments(
ctx,
ctx.file,
toolchain,
crate_info,
dep_info,
output_hash,
rust_flags,
dynamic_env,
)
command = construct_compile_command(
ctx,
toolchain.rustc.path,
toolchain,
crate_info,
build_info,
dep_info,
prep_commands,
dynamic_env,
dynamic_build_flags,
)
if hasattr(ctx.attr, "version") and ctx.attr.version != "0.0.0":
formatted_version = " v{}".format(ctx.attr.version)
else:
formatted_version = ""
ctx.actions.run_shell(
command = command,
inputs = compile_inputs,
outputs = [crate_info.output],
env = env,
arguments = [args],
mnemonic = "Rustc",
progress_message = "Compiling Rust {} {}{} ({} files)".format(
crate_info.type,
ctx.label.name,
formatted_version,
len(crate_info.srcs),
),
)
runfiles = ctx.runfiles(
files = dep_info.transitive_dylibs.to_list() + getattr(ctx.files, "data", []),
collect_data = True,
)
out_binary = False
if hasattr(ctx.attr, "out_binary"):
out_binary = getattr(ctx.attr, "out_binary")
return [
crate_info,
dep_info,
DefaultInfo(
# nb. This field is required for cc_library to depend on our output.
files = depset([crate_info.output]),
runfiles = runfiles,
executable = crate_info.output if crate_info.type == "bin" or out_binary else None,
),
]
def add_edition_flags(args, crate):
if crate.edition != "2015":
args.add("--edition={}".format(crate.edition))
def _create_out_dir_action(ctx, file, build_info, dep_info):
tar_file_attr = getattr(file, "out_dir_tar", None)
if build_info and tar_file_attr:
fail("Target {} has both a build_script dependency and an out_dir_tar - this is not allowed.".format(ctx.label))
prep_commands = []
input_files = []
# Env vars and build flags which need to be set in the action's command line, rather than on the action's env,
# because they rely on other env vars or commands.
dynamic_env = {}
dynamic_build_flags = []
# TODO: Remove system tar usage
if build_info:
prep_commands.append("export $(cat %s)" % build_info.rustc_env.path)
# out_dir will be added as input by the transitive_build_infos loop below.
dynamic_env["OUT_DIR"] = "${{EXEC_ROOT}}/{}".format(build_info.out_dir.path)
dynamic_build_flags.append("$(cat '%s')" % build_info.flags.path)
elif tar_file_attr:
out_dir = ".out-dir"
prep_commands.append("mkdir -p $OUT_DIR")
prep_commands.append("tar -xzf {tar} -C $OUT_DIR".format(tar=tar_file_attr.path))
input_files.append(tar_file_attr)
dynamic_env["OUT_DIR"] = "${{EXEC_ROOT}}/{}".format(out_dir)
# This should probably only actually be exposed to actions which link.
for dep_build_info in dep_info.transitive_build_infos.to_list():
input_files.append(dep_build_info.out_dir)
dynamic_build_flags.append("$(cat '{}' | sed -e \"s#\${{EXEC_ROOT}}#${{EXEC_ROOT}}#g\")".format(dep_build_info.link_flags.path))
input_files.append(dep_build_info.link_flags)
return input_files, prep_commands, dynamic_env, dynamic_build_flags
def _compute_rpaths(toolchain, output_dir, dep_info):
"""
Determine the artifact's rpaths relative to the bazel root
for runtime linking of shared libraries.
"""
if not dep_info.transitive_dylibs:
return depset([])
if toolchain.os != "linux":
fail("Runtime linking is not supported on {}, but found {}".format(
toolchain.os,
dep_info.transitive_dylibs,
))
# Multiple dylibs can be present in the same directory, so deduplicate them.
return depset([
relative_path(output_dir, lib_dir)
for lib_dir in _get_dir_names(dep_info.transitive_dylibs.to_list())
])
def _get_dir_names(files):
dirs = {}
for f in files:
dirs[f.dirname] = None
return dirs.keys()
def add_crate_link_flags(args, dep_info):
# nb. Crates are linked via --extern regardless of their crate_type
args.add_all(dep_info.direct_crates, map_each = _crate_to_link_flag)
args.add_all(
dep_info.transitive_crates,
map_each = _get_crate_dirname,
uniquify = True,
format_each = "-Ldependency=%s",
)
def _crate_to_link_flag(crate_info):
return ["--extern", "{}={}".format(crate_info.name, crate_info.dep.output.path)]
def _get_crate_dirname(crate):
return crate.output.dirname
def add_native_link_flags(args, dep_info):
native_libs = depset(transitive = [dep_info.transitive_dylibs, dep_info.transitive_staticlibs])
args.add_all(native_libs, map_each = _get_dirname, uniquify = True, format_each = "-Lnative=%s")
args.add_all(dep_info.transitive_dylibs, map_each = get_lib_name, format_each = "-ldylib=%s")
args.add_all(dep_info.transitive_staticlibs, map_each = get_lib_name, format_each = "-lstatic=%s")
def _get_dirname(file):
return file.dirname
|
py | 1a45f268868462f9e23daed56984437e68e2504a | from panda3d.core import TextNode
from direct.gui.DirectGui import DirectFrame
from direct.gui.DirectGui import DirectButton
from direct.gui.DirectGui import DirectLabel
from direct.gui import DirectGuiGlobals
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
class JellybeanRewardGui(DirectFrame):
notify = directNotify.newCategory('JellybeanRewardGui')
PreCountdownDelay = 1.0
CountDownRate = 0.2
JarLabelTextColor = (0.95, 0.95, 0.0, 1.0)
JarLabelMaxedTextColor = (1.0, 0.0, 0.0, 1.0)
def __init__(self, doneEvent):
self.doneEvent = doneEvent
DirectFrame.__init__(self)
self.reparentTo(aspect2d)
self.setPos(0.0, 0.0, 0.16)
self.stash()
publicPartyGui = loader.loadModel('phase_4/models/parties/publicPartyGUI')
self.frame = DirectFrame(parent=self, geom=publicPartyGui.find('**/activities_background'), geom_pos=(-0.8, 0.0, 0.2), geom_scale=2.0, relief=None)
self.earnedLabel = DirectLabel(parent=self, relief=None, text=str(0), text_align=TextNode.ACenter, text_pos=(0.0, -0.07), text_scale=0.2, text_fg=(0.95, 0.95, 0.0, 1.0), text_font=ToontownGlobals.getSignFont(), textMayChange=True, image=DirectGuiGlobals.getDefaultDialogGeom(), image_scale=(0.33, 1.0, 0.33), pos=(-0.3, 0.0, 0.2), scale=0.9)
purchaseModels = loader.loadModel('phase_4/models/gui/purchase_gui')
jarImage = purchaseModels.find('**/Jar')
self.jarLabel = DirectLabel(parent=self, relief=None, text=str(0), text_align=TextNode.ACenter, text_pos=(0.0, -0.07), text_scale=0.2, text_fg=JellybeanRewardGui.JarLabelTextColor, text_font=ToontownGlobals.getSignFont(), textMayChange=True, image=jarImage, scale=0.7, pos=(0.3, 0.0, 0.17))
purchaseModels.removeNode()
del purchaseModels
jarImage.removeNode()
del jarImage
self.messageLabel = DirectLabel(parent=self, relief=None, text='', text_align=TextNode.ALeft, text_wordwrap=16.0, text_scale=0.07, pos=(-0.52, 0.0, -0.1), textMayChange=True)
self.doubledJellybeanLabel = DirectLabel(parent=self, relief=None, text=TTLocalizer.PartyRewardDoubledJellybean, text_align=TextNode.ACenter, text_wordwrap=12.0, text_scale=0.09, text_fg=(1.0, 0.125, 0.125, 1.0), pos=(0.0, 0.0, -0.465), textMayChange=False)
self.doubledJellybeanLabel.hide()
self.closeButton = DirectButton(parent=self, relief=None, text=TTLocalizer.PartyJellybeanRewardOK, text_align=TextNode.ACenter, text_scale=0.065, text_pos=(0.0, -0.625), geom=(publicPartyGui.find('**/startButton_up'),
publicPartyGui.find('**/startButton_down'),
publicPartyGui.find('**/startButton_rollover'),
publicPartyGui.find('**/startButton_inactive')), geom_pos=(-0.39, 0.0, 0.125), command=self._close)
publicPartyGui.removeNode()
del publicPartyGui
self.countSound = base.loader.loadSfx('phase_13/audio/sfx/tick_counter_short.ogg')
self.overMaxSound = base.loader.loadSfx('phase_13/audio/sfx/tick_counter_overflow.ogg')
return
def showReward(self, earnedAmount, jarAmount, message):
JellybeanRewardGui.notify.debug('showReward( earnedAmount=%d, jarAmount=%d, ...)' % (earnedAmount, jarAmount))
self.earnedCount = earnedAmount
self.earnedLabel['text'] = str(self.earnedCount)
self.jarCount = jarAmount
self.jarMax = base.localAvatar.getMaxMoney()
self.jarLabel['text'] = str(self.jarCount)
self.jarLabel['text_fg'] = JellybeanRewardGui.JarLabelTextColor
self.messageLabel['text'] = message
if base.cr.newsManager.isHolidayRunning(ToontownGlobals.JELLYBEAN_DAY) or base.cr.newsManager.isHolidayRunning(ToontownGlobals.JELLYBEAN_PARTIES_HOLIDAY) or base.cr.newsManager.isHolidayRunning(ToontownGlobals.JELLYBEAN_PARTIES_HOLIDAY_MONTH):
self.doubledJellybeanLabel.show()
else:
self.doubledJellybeanLabel.hide()
self.unstash()
taskMgr.doMethodLater(JellybeanRewardGui.PreCountdownDelay, self.transferOneJellybean, 'JellybeanRewardGuiTransferOneJellybean', extraArgs=[])
def transferOneJellybean(self):
if self.earnedCount == 0:
return
self.earnedCount -= 1
self.earnedLabel['text'] = str(self.earnedCount)
self.jarCount += 1
if self.jarCount <= self.jarMax:
self.jarLabel['text'] = str(self.jarCount)
elif self.jarCount > self.jarMax:
self.jarLabel['text_fg'] = JellybeanRewardGui.JarLabelMaxedTextColor
if self.jarCount <= self.jarMax:
base.playSfx(self.countSound)
else:
base.playSfx(self.overMaxSound)
taskMgr.doMethodLater(JellybeanRewardGui.CountDownRate, self.transferOneJellybean, 'JellybeanRewardGuiTransferOneJellybean', extraArgs=[])
def _close(self):
taskMgr.remove('JellybeanRewardGuiTransferOneJellybean')
self.stash()
messenger.send(self.doneEvent)
def destroy(self):
taskMgr.remove('JellybeanRewardGuiTransferOneJellybean')
del self.countSound
del self.overMaxSound
self.frame.destroy()
self.earnedLabel.destroy()
self.jarLabel.destroy()
self.messageLabel.destroy()
self.closeButton.destroy()
DirectFrame.destroy(self)
|
py | 1a45f3bbdfbb0b8a97c17804b027f08ee8cec56b | from random import randint
elements = ["rock", "paper", "scissors"]
print("1) Rock")
print("2) Paper")
print("3) Scissors")
print()
player_choice = int(input("Your choice: "))
computer_choice = randint(1, 3)  # inclusive of 3, so "scissors" can actually be chosen
print("Player's choice: ", end="")
print(elements[player_choice - 1])
print()
print("Computer's choice: ", end="")
print(elements[computer_choice - 1])
print()
if (player_choice == computer_choice):
print("This game is draw.")
elif (player_choice == 1):
if (computer_choice == 2):
print("You lose.")
else:
print("You win.")
elif (player_choice == 2):
if (computer_choice == 3):
print("You lose.")
else:
print("You win.")
elif (player_choice == 3):
if (computer_choice == 1):
print("You lose.")
else:
print("You win.") |
py | 1a45f40063baf1b7ab41ef36be5c34f1a3f32305 | #!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__rcs_id__ = '$Id: dbapi20.py,v 1.11 2005/01/02 02:41:01 zenzen Exp $'
__version__ = '$Revision: 1.12 $'[11:-2]
__author__ = 'Stuart Bishop <[email protected]>'
import unittest
import time
import sys
# Revision 1.12 2009/02/06 03:35:11 kf7xm
# Tested okay with Python 3.0, includes last minute patches from Mark H.
#
# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole
# Include latest changes from main branch
# Updates for py3k
#
# Revision 1.11 2005/01/02 02:41:01 zenzen
# Update author email address
#
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing)
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
def str2bytes(sval):
if sys.version_info < (3,0) and isinstance(sval, str):
sval = sval.decode("latin1")
return sval.encode("latin1")
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
self.drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
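# A minimal sketch of the subclassing pattern described in the docstring
# above, assuming a hypothetical DB-API module named `mydriver`; the connect
# arguments are placeholders, not part of this suite:
#
#   import dbapi20
#   import mydriver
#
#   class MyDriverTest(dbapi20.DatabaseAPI20Test):
#       driver = mydriver
#       connect_args = ()
#       connect_kw_args = {'database': ':memory:'}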
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
''' self.drivers should override this method to perform required setup
if any is necessary, such as creating the database.
'''
pass
def tearDown(self):
''' self.drivers should override this method to perform required cleanup
if any is necessary, such as deleting the test database.
The default drops the tables that may be created.
'''
con = self._connect()
try:
cur = con.cursor()
for ddl in (self.xddl1,self.xddl2):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
# Assume table didn't exist. Other tests will check if
# execute is busted.
pass
finally:
con.close()
def _connect(self):
try:
return self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
# Must exist
threadsafety = self.driver.threadsafety
# Must be a valid value
self.assertTrue(threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
self.assertTrue(paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined hierarchy.
if sys.version[0] == '3': #under Python 3 StandardError no longer exists
self.assertTrue(issubclass(self.driver.Warning,Exception))
self.assertTrue(issubclass(self.driver.Error,Exception))
else:
self.assertTrue(issubclass(self.driver.Warning,Exception))
self.assertTrue(issubclass(self.driver.Error,Exception))
self.assertTrue(
issubclass(self.driver.InterfaceError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.DatabaseError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.OperationalError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.IntegrityError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.InternalError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.ProgrammingError,self.driver.Error)
)
self.assertTrue(
issubclass(self.driver.NotSupportedError,self.driver.Error)
)
def test_ExceptionsAsConnectionAttributes(self):
# OPTIONAL EXTENSION
# Test for the optional DB API 2.0 extension, where the exceptions
# are exposed as attributes on the Connection object
# I figure this optional extension will be implemented by any
# driver author who is using this test suite, so it is enabled
# by default.
con = self._connect()
drv = self.driver
self.assertTrue(con.Warning is drv.Warning)
self.assertTrue(con.Error is drv.Error)
self.assertTrue(con.InterfaceError is drv.InterfaceError)
self.assertTrue(con.DatabaseError is drv.DatabaseError)
self.assertTrue(con.OperationalError is drv.OperationalError)
self.assertTrue(con.IntegrityError is drv.IntegrityError)
self.assertTrue(con.InternalError is drv.InternalError)
self.assertTrue(con.ProgrammingError is drv.ProgrammingError)
self.assertTrue(con.NotSupportedError is drv.NotSupportedError)
def test_commit(self):
con = self._connect()
try:
# Commit must work, even if it doesn't do anything
con.commit()
finally:
con.close()
def test_rollback(self):
con = self._connect()
# If rollback is defined, it should either work or throw
# the documented exception
if hasattr(con,'rollback'):
try:
con.rollback()
except self.driver.NotSupportedError:
pass
def test_cursor(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
def test_cursor_isolation(self):
con = self._connect()
try:
# Make sure cursors created from the same connection have
# the documented transaction isolation level
cur1 = con.cursor()
cur2 = con.cursor()
self.executeDDL1(cur1)
cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
cur2.execute("select name from %sbooze" % self.table_prefix)
booze = cur2.fetchall()
self.assertEqual(len(booze),1)
self.assertEqual(len(booze[0]),1)
self.assertEqual(booze[0][0],'Victoria Bitter')
finally:
con.close()
def test_description(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.description,None,
'cursor.description should be none after executing a '
'statement that can return no rows (such as DDL)'
)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(len(cur.description),1,
'cursor.description describes too many columns'
)
self.assertEqual(len(cur.description[0]),7,
'cursor.description[x] tuples must have 7 elements'
)
self.assertEqual(cur.description[0][0].lower(),'name',
'cursor.description[x][0] must return column name'
)
self.assertEqual(cur.description[0][1],self.driver.STRING,
'cursor.description[x][1] must return column type. Got %r'
% cur.description[0][1]
)
# Make sure self.description gets reset
self.executeDDL2(cur)
self.assertEqual(cur.description,None,
'cursor.description not being set to None when executing '
'no-result statements (eg. DDL)'
)
finally:
con.close()
def test_rowcount(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount should be -1 after executing no-result '
'statements'
)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertTrue(cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows inserted, or '
'set to -1 after executing an insert statement'
)
cur.execute("select name from %sbooze" % self.table_prefix)
self.assertTrue(cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows returned, or '
'set to -1 after executing a select statement'
)
self.executeDDL2(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount not being reset to -1 after executing '
'no-result statements'
)
finally:
con.close()
lower_func = 'lower'
def test_callproc(self):
con = self._connect()
try:
cur = con.cursor()
if self.lower_func and hasattr(cur,'callproc'):
r = cur.callproc(self.lower_func,('FOO',))
self.assertEqual(len(r),1)
self.assertEqual(r[0],'FOO')
r = cur.fetchall()
self.assertEqual(len(r),1,'callproc produced no result set')
self.assertEqual(len(r[0]),1,
'callproc produced invalid result set'
)
self.assertEqual(r[0][0],'foo',
'callproc produced invalid results'
)
finally:
con.close()
def test_close(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
# cursor.execute should raise an Error if called after connection
# closed
self.assertRaises(self.driver.Error,self.executeDDL1,cur)
# connection.commit should raise an Error if called after connection
# is closed.
self.assertRaises(self.driver.Error,con.commit)
# connection.close should raise an Error if called more than once
# Issue discussed on DB-SIG: consensus seem that close() should not
# raised if called on closed objects. Issue reported back to Stuart.
# self.assertRaises(self.driver.Error,con.close)
def test_execute(self):
con = self._connect()
try:
cur = con.cursor()
self._paraminsert(cur)
finally:
con.close()
def _paraminsert(self,cur):
self.executeDDL1(cur)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertTrue(cur.rowcount in (-1,1))
if self.driver.paramstyle == 'qmark':
cur.execute(
'insert into %sbooze values (?)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'numeric':
cur.execute(
'insert into %sbooze values (:1)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'named':
cur.execute(
'insert into %sbooze values (:beer)' % self.table_prefix,
{'beer':"Cooper's"}
)
elif self.driver.paramstyle == 'format':
cur.execute(
'insert into %sbooze values (%%s)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'pyformat':
cur.execute(
'insert into %sbooze values (%%(beer)s)' % self.table_prefix,
{'beer':"Cooper's"}
)
else:
self.fail('Invalid paramstyle')
self.assertTrue(cur.rowcount in (-1,1))
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Cooper's",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
self.assertEqual(beers[1],"Victoria Bitter",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
def test_executemany(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
largs = [ ("Cooper's",) , ("Boag's",) ]
margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
if self.driver.paramstyle == 'qmark':
cur.executemany(
'insert into %sbooze values (?)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'numeric':
cur.executemany(
'insert into %sbooze values (:1)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'named':
cur.executemany(
'insert into %sbooze values (:beer)' % self.table_prefix,
margs
)
elif self.driver.paramstyle == 'format':
cur.executemany(
'insert into %sbooze values (%%s)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'pyformat':
cur.executemany(
'insert into %sbooze values (%%(beer)s)' % (
self.table_prefix
),
margs
)
else:
self.fail('Unknown paramstyle')
self.assertTrue(cur.rowcount in (-1,2),
'insert using cursor.executemany set cursor.rowcount to '
'incorrect value %r' % cur.rowcount
)
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,
'cursor.fetchall retrieved incorrect number of rows'
)
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
finally:
con.close()
def test_fetchone(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchone should raise an Error if called before
# executing a select-type query
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if a query retrieves '
'no rows'
)
self.assertTrue(cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchone()
self.assertEqual(len(r),1,
'cursor.fetchone should have retrieved a single row'
)
self.assertEqual(r[0],'Victoria Bitter',
'cursor.fetchone retrieved incorrect data'
)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if no more rows available'
)
self.assertTrue(cur.rowcount in (-1,1))
finally:
con.close()
samples = [
'Carlton Cold',
'Carlton Draft',
'Mountain Goat',
'Redback',
'Victoria Bitter',
'XXXX'
]
def _populate(self):
''' Return a list of sql commands to setup the DB for the fetch
tests.
'''
populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s)
for s in self.samples
]
return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
#issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
self.assertTrue(cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
self.assertTrue(cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
self.assertTrue(cur.rowcount in (-1,6))
self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
self.assertTrue(cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
self.assertTrue(cur.rowcount in (-1,0))
finally:
con.close()
def test_fetchall(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchall should raise an Error if called
# without executing a query that may return rows (such
# as a select)
self.assertRaises(self.driver.Error, cur.fetchall)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
# cursor.fetchall should raise an Error if called
# after executing a statement that cannot return rows
self.assertRaises(self.driver.Error,cur.fetchall)
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchall()
self.assertTrue(cur.rowcount in (-1,len(self.samples)))
self.assertEqual(len(rows),len(self.samples),
'cursor.fetchall did not retrieve all rows'
)
rows = [r[0] for r in rows]
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'cursor.fetchall retrieved incorrect rows'
)
rows = cur.fetchall()
self.assertEqual(
len(rows),0,
'cursor.fetchall should return an empty list if called '
'after the whole result set has been fetched'
)
self.assertTrue(cur.rowcount in (-1,len(self.samples)))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
rows = cur.fetchall()
self.assertTrue(cur.rowcount in (-1,0))
self.assertEqual(len(rows),0,
'cursor.fetchall should return an empty list if '
'a select query returns no rows'
)
finally:
con.close()
def test_mixedfetch(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
rows1 = cur.fetchone()
rows23 = cur.fetchmany(2)
rows4 = cur.fetchone()
rows56 = cur.fetchall()
self.assertTrue(cur.rowcount in (-1,6))
self.assertEqual(len(rows23),2,
'fetchmany returned incorrect number of rows'
)
self.assertEqual(len(rows56),2,
'fetchall returned incorrect number of rows'
)
rows = [rows1[0]]
rows.extend([rows23[0][0],rows23[1][0]])
rows.append(rows4[0])
rows.extend([rows56[0][0],rows56[1][0]])
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved or inserted'
)
finally:
con.close()
def help_nextset_setUp(self,cur):
''' Should create a procedure called deleteme
that returns two result sets, first the
number of rows in booze then "name from booze"
'''
raise NotImplementedError('Helper not implemented')
#sql="""
# create procedure deleteme as
# begin
# select count(*) from booze
# select name from booze
# end
#"""
#cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
raise NotImplementedError('Helper not implemented')
#cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
sql=self._populate()
for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
def test_nextset(self):
raise NotImplementedError('Drivers need to override this test')
def test_arraysize(self):
# Not much here - rest of the tests for this are in test_fetchmany
con = self._connect()
try:
cur = con.cursor()
self.assertTrue(hasattr(cur,'arraysize'),
'cursor.arraysize must be defined'
)
finally:
con.close()
def test_setinputsizes(self):
con = self._connect()
try:
cur = con.cursor()
cur.setinputsizes( (25,) )
self._paraminsert(cur) # Make sure cursor still works
finally:
con.close()
def test_setoutputsize_basic(self):
# Basic test is to make sure setoutputsize doesn't blow up
con = self._connect()
try:
cur = con.cursor()
cur.setoutputsize(1000)
cur.setoutputsize(2000,0)
self._paraminsert(cur) # Make sure the cursor still works
finally:
con.close()
def test_setoutputsize(self):
# Real test for setoutputsize is driver dependent
raise NotImplementedError('Driver needed to override this test')
def test_None(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchall()
self.assertEqual(len(r),1)
self.assertEqual(len(r[0]),1)
self.assertEqual(r[0][0],None,'NULL value not returned as None')
finally:
con.close()
def test_Date(self):
d1 = self.driver.Date(2002,12,25)
d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(d1),str(d2))
def test_Time(self):
t1 = self.driver.Time(13,45,30)
t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Timestamp(self):
t1 = self.driver.Timestamp(2002,12,25,13,45,30)
t2 = self.driver.TimestampFromTicks(
time.mktime((2002,12,25,13,45,30,0,0,0))
)
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Binary(self):
b = self.driver.Binary(str2bytes('Something'))
b = self.driver.Binary(str2bytes(''))
def test_STRING(self):
self.assertTrue(hasattr(self.driver,'STRING'),
'module.STRING must be defined'
)
def test_BINARY(self):
self.assertTrue(hasattr(self.driver,'BINARY'),
'module.BINARY must be defined.'
)
def test_NUMBER(self):
self.assertTrue(hasattr(self.driver,'NUMBER'),
'module.NUMBER must be defined.'
)
def test_DATETIME(self):
self.assertTrue(hasattr(self.driver,'DATETIME'),
'module.DATETIME must be defined.'
)
def test_ROWID(self):
self.assertTrue(hasattr(self.driver,'ROWID'),
'module.ROWID must be defined.'
)
|
py | 1a45f427f7f9d367cf83f7b7adfe90918355dce4 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def print_labels():
print(
"LABELS INFO:\n\n",
"prob_min : Minimum probability in a particular decile\n",
"prob_max : Minimum probability in a particular decile\n",
"prob_avg : Average probability in a particular decile\n",
"cnt_events : Count of events in a particular decile\n",
"cnt_resp : Count of responders in a particular decile\n",
"cnt_non_resp : Count of non-responders in a particular decile\n",
"cnt_resp_rndm : Count of responders if events assigned randomly in a particular decile\n",
"cnt_resp_wiz : Count of best possible responders in a particular decile\n",
"resp_rate : Response Rate in a particular decile [(cnt_resp/cnt_cust)*100]\n",
"cum_events : Cumulative sum of events decile-wise \n",
"cum_resp : Cumulative sum of responders decile-wise \n",
"cum_resp_wiz : Cumulative sum of best possible responders decile-wise \n",
"cum_non_resp : Cumulative sum of non-responders decile-wise \n",
"cum_events_pct : Cumulative sum of percentages of events decile-wise \n",
"cum_resp_pct : Cumulative sum of percentages of responders decile-wise \n",
"cum_resp_pct_wiz : Cumulative sum of percentages of best possible responders decile-wise \n",
"cum_non_resp_pct : Cumulative sum of percentages of non-responders decile-wise \n",
"KS : KS Statistic decile-wise \n",
"lift : Cumuative Lift Value decile-wise",
)
def decile_table(y_true, y_prob, change_deciles=10, labels=True, round_decimal=3):
"""Generates the Decile Table from labels and probabilities
The Decile Table is created by first sorting the customers by their predicted
probabilities, in decreasing order from highest (closest to one) to
lowest (closest to zero). Splitting the customers into equally sized segments,
we create groups containing the same numbers of customers, for example, 10 decile
groups each containing 10% of the customer base.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct/actual) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier/algorithm.
change_deciles (int, optional): The number of partitions for creating the table
can be changed. Defaults to '10' for deciles.
labels (bool, optional): If True, prints a legend for the abbreviations of
decile table column names. Defaults to True.
round_decimal (int, optional): The decimal precision till which the result is
needed. Defaults to '3'.
Returns:
dt: The dataframe dt (decile-table) with the deciles and related information.
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.decile_table(y_test, y_prob[:,1])
"""
y_true = np.array(y_true)
y_prob = np.array(y_prob)
df = pd.DataFrame()
df['y_true'] = y_true
df['y_prob'] = y_prob
# df['decile']=pd.qcut(df['y_prob'], 10, labels=list(np.arange(10,0,-1)))
# ValueError: Bin edges must be unique
df.sort_values('y_prob', ascending=False, inplace=True)
df['decile'] = np.linspace(1, change_deciles+1, len(df), False, dtype=int)
# dt abbreviation for decile_table
dt = df.groupby('decile').apply(lambda x: pd.Series([
np.min(x['y_prob']),
np.max(x['y_prob']),
np.mean(x['y_prob']),
np.size(x['y_prob']),
np.sum(x['y_true']),
np.size(x['y_true'][x['y_true'] == 0]),
],
index=(["prob_min", "prob_max", "prob_avg",
"cnt_cust", "cnt_resp", "cnt_non_resp"])
)).reset_index()
dt['prob_min']=dt['prob_min'].round(round_decimal)
dt['prob_max']=dt['prob_max'].round(round_decimal)
dt['prob_avg']=round(dt['prob_avg'],round_decimal)
# dt=dt.sort_values(by='decile',ascending=False).reset_index(drop=True)
tmp = df[['y_true']].sort_values('y_true', ascending=False)
tmp['decile'] = np.linspace(1, change_deciles+1, len(tmp), False, dtype=int)
dt['cnt_resp_rndm'] = np.sum(df['y_true']) / change_deciles
dt['cnt_resp_wiz'] = tmp.groupby('decile', as_index=False)['y_true'].sum()['y_true']
dt['resp_rate'] = round(dt['cnt_resp'] * 100 / dt['cnt_cust'], round_decimal)
dt['cum_cust'] = np.cumsum(dt['cnt_cust'])
dt['cum_resp'] = np.cumsum(dt['cnt_resp'])
dt['cum_resp_wiz'] = np.cumsum(dt['cnt_resp_wiz'])
dt['cum_non_resp'] = np.cumsum(dt['cnt_non_resp'])
dt['cum_cust_pct'] = round(dt['cum_cust'] * 100 / np.sum(dt['cnt_cust']), round_decimal)
dt['cum_resp_pct'] = round(dt['cum_resp'] * 100 / np.sum(dt['cnt_resp']), round_decimal)
dt['cum_resp_pct_wiz'] = round(dt['cum_resp_wiz'] * 100 / np.sum(dt['cnt_resp_wiz']), round_decimal)
dt['cum_non_resp_pct'] = round(
dt['cum_non_resp'] * 100 / np.sum(dt['cnt_non_resp']), round_decimal)
dt['KS'] = round(dt['cum_resp_pct'] - dt['cum_non_resp_pct'], round_decimal)
dt['lift'] = round(dt['cum_resp_pct'] / dt['cum_cust_pct'], round_decimal)
if labels is True:
print_labels()
return dt
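# --- Illustrative usage sketch (not part of the library API) -----------------
# A minimal way to see the table in action on synthetic scores; the sample size
# and random seed below are arbitrary, assumed values chosen for illustration.
#
#   rng = np.random.default_rng(0)
#   y_true = rng.integers(0, 2, size=1000)                        # binary labels
#   y_prob = np.clip(0.3 * y_true + 0.7 * rng.random(1000), 0, 1)  # noisy scores
#   dt = decile_table(y_true, y_prob, labels=False)
#   print(dt[['decile', 'cnt_cust', 'cnt_resp', 'resp_rate', 'lift']])
#
# Because rows are sorted by y_prob before being cut into equal groups, decile 1
# contains the highest-scored customers and should show the highest resp_rate.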
def plot_lift(y_true, y_prob, title='Lift Plot', title_fontsize=14,
text_fontsize=10, figsize=None):
"""Generates the Decile based cumulative Lift Plot from labels and probabilities
The lift curve is used to determine the effectiveness of a
binary classifier. A detailed explanation can be found at
http://www2.cs.uregina.ca/~dbd/cs831/notes/lift_chart/lift_chart.html
The implementation here works only for binary classification.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
title (string, optional): Title of the generated plot. Defaults to
"Lift Plot".
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 14.
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 10.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
Returns:
None
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.plot_lift(y_test, y_prob[:,1])
"""
# Cumulative Lift Plot
# plt.subplot(2, 2, 1)
pl = decile_table(y_true,y_prob,labels=False)
plt.plot(pl.decile.values, pl.lift.values, marker='o', label='Model')
# plt.plot(list(np.arange(1,11)), np.ones(10), 'k--',marker='o')
plt.plot([1, 10], [1, 1], 'k--', marker='o', label='Random')
plt.title(title, fontsize=title_fontsize)
plt.xlabel('Deciles', fontsize=text_fontsize)
plt.ylabel('Lift', fontsize=text_fontsize)
plt.legend()
plt.grid(True)
# plt.show()
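# A quick worked example of the lift value plotted above (illustrative figures):
# if the first decile holds 10% of all customers (cum_cust_pct = 10) but captures
# 35% of all responders (cum_resp_pct = 35), then lift = 35 / 10 = 3.5, i.e. the
# model finds responders 3.5x faster than random targeting at that depth.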
def plot_lift_decile_wise(y_true, y_prob, title='Decile-wise Lift Plot',
title_fontsize=14, text_fontsize=10, figsize=None):
"""Generates the Decile-wise Lift Plot from labels and probabilities
The lift curve is used to determine the effectiveness of a
binary classifier. A detailed explanation can be found at
http://www2.cs.uregina.ca/~dbd/cs831/notes/lift_chart/lift_chart.html
The implementation here works only for binary classification.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
title (string, optional): Title of the generated plot. Defaults to
"Decile-wise Lift Plot".
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 14.
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 10.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
Returns:
None
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.plot_lift_decile_wise(y_test, y_prob[:,1])
"""
# Decile-wise Lift Plot
# plt.subplot(2, 2, 2)
pldw = decile_table(y_true,y_prob,labels=False)
plt.plot(pldw.decile.values, pldw.cnt_resp.values / pldw.cnt_resp_rndm.values, marker='o', label='Model')
# plt.plot(list(np.arange(1,11)), np.ones(10), 'k--',marker='o')
plt.plot([1, 10], [1, 1], 'k--', marker='o', label='Random')
plt.title(title, fontsize=title_fontsize)
plt.xlabel('Deciles', fontsize=text_fontsize)
plt.ylabel('Lift @ Decile', fontsize=text_fontsize)
plt.legend()
plt.grid(True)
# plt.show()
def plot_cumulative_gain(y_true, y_prob, title='Cumulative Gain Plot',
title_fontsize=14, text_fontsize=10, figsize=None):
"""Generates the cumulative Gain Plot from labels and probabilities
The cumulative gains chart is used to determine the effectiveness of a
binary classifier. A detailed explanation can be found at
http://www2.cs.uregina.ca/~dbd/cs831/notes/lift_chart/lift_chart.html
The implementation here works only for binary classification.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
title (string, optional): Title of the generated plot. Defaults to
"Decile-wise Lift Plot".
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 14.
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 10.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
Returns:
None
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.plot_cumulative_gain(y_test, y_prob[:,1])
"""
# Cumulative Gains Plot
# plt.subplot(2, 2, 3)
pcg = decile_table(y_true,y_prob,labels=False)
plt.plot(np.append(0, pcg.decile.values), np.append(0, pcg.cum_resp_pct.values), marker='o', label='Model')
plt.plot(np.append(0, pcg.decile.values), np.append(0, pcg.cum_resp_pct_wiz.values), 'c--', label='Wizard')
# plt.plot(list(np.arange(1,11)), np.ones(10), 'k--',marker='o')
plt.plot([0, 10], [0, 100], 'k--', marker='o', label='Random')
plt.title(title, fontsize=title_fontsize)
plt.xlabel('Deciles', fontsize=text_fontsize)
plt.ylabel('% Responders', fontsize=text_fontsize)
plt.legend()
plt.grid(True)
# plt.show()
def plot_ks_statistic(y_true, y_prob, title='KS Statistic Plot',
title_fontsize=14, text_fontsize=10, figsize=None):
"""Generates the KS Statistic Plot from labels and probabilities
Kolmogorov-Smirnov (KS) statistic is used to measure how well the
binary classifier model separates the Responder class (Yes) from
Non-Responder class (No). The range of K-S statistic is between 0 and 1.
Higher the KS statistic value better the model in separating the
Responder class from Non-Responder class.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
title (string, optional): Title of the generated plot. Defaults to
"KS Statistic Plot".
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 14.
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 10.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
Returns:
None
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.plot_ks_statistic(y_test, y_prob[:,1])
"""
# KS Statistic Plot
# plt.subplot(2, 2, 4)
pks = decile_table(y_true, y_prob, labels=False)
plt.plot(np.append(0, pks.decile.values), np.append(0, pks.cum_resp_pct.values),
marker='o', label='Responders')
plt.plot(np.append(0, pks.decile.values), np.append(0, pks.cum_non_resp_pct.values),
marker='o', label='Non-Responders')
# plt.plot(list(np.arange(1,11)), np.ones(10), 'k--',marker='o')
ksmx = pks.KS.max()
ksdcl = pks[pks.KS == ksmx].decile.values
plt.plot([ksdcl, ksdcl],
[pks[pks.KS == ksmx].cum_resp_pct.values,
pks[pks.KS == ksmx].cum_non_resp_pct.values],
'g--', marker='o', label='KS Statistic: ' + str(ksmx) + ' at decile ' + str(list(ksdcl)[0]))
plt.title(title, fontsize=title_fontsize)
plt.xlabel('Deciles', fontsize=text_fontsize)
plt.ylabel('% Responders', fontsize=text_fontsize)
plt.legend()
plt.grid(True)
# plt.show()
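# The KS value plotted above can also be read straight off the decile table;
# a minimal sketch using the same inputs as the function above:
#
#   dt = decile_table(y_true, y_prob, labels=False)
#   ks = (dt['cum_resp_pct'] - dt['cum_non_resp_pct']).max()   # equals dt['KS'].max()
#
# i.e. KS is the largest gap between the cumulative responder and non-responder
# percentages across deciles.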
def report(y_true, y_prob, labels=True, plot_style = None, round_decimal=3,
title_fontsize=14, text_fontsize=10, figsize=(16, 10)):
"""Generates decile table and 4 plots (Lift, Lift@Decile, Gain and KS)
from labels and probabilities
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_prob (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
labels (bool, optional): If True, prints a legend for the abbreviations of
decile table column names. Defaults to True.
plot_style(string, optional): Check available styles "plt.style.available".
few examples: ['ggplot', 'seaborn', 'bmh', 'classic', 'dark_background',
'fivethirtyeight', 'grayscale', 'seaborn-bright', 'seaborn-colorblind',
'seaborn-dark', 'seaborn-dark-palette', 'tableau-colorblind10','fast']
Defaults to ``None``.
round_decimal (int, optional): The decimal precision till which the result is
needed. Defaults to '3'.
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 14.
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values (8, 10, 12, etc.)
Defaults to 10.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
Returns:
dc: The dataframe dc (decile-table) with the deciles and related information.
Example:
>>> import kds
>>> from sklearn.datasets import load_iris
>>> from sklearn.model_selection import train_test_split
>>> from sklearn import tree
>>> X, y = load_iris(return_X_y=True)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33,random_state=3)
>>> clf = tree.DecisionTreeClassifier(max_depth=1,random_state=3)
>>> clf = clf.fit(X_train, y_train)
>>> y_prob = clf.predict_proba(X_test)
>>> kds.metrics.report(y_test, y_prob[:,1])
"""
dc = decile_table(y_true,y_prob,labels=labels,round_decimal=round_decimal)
if plot_style is not None:
    plt.style.use(plot_style)
fig = plt.figure(figsize=figsize)
# Cumulative Lift Plot
plt.subplot(2, 2, 1)
plot_lift(y_true,y_prob)
# Decile-wise Lift Plot
plt.subplot(2, 2, 2)
plot_lift_decile_wise(y_true,y_prob)
# Cumulative Gains Plot
plt.subplot(2, 2, 3)
plot_cumulative_gain(y_true,y_prob)
# KS Statistic Plot
plt.subplot(2, 2, 4)
plot_ks_statistic(y_true,y_prob)
return (dc) |
py | 1a45f48dbb7c84568d6c841de7696db90e433929 | {%- set python_versions = cookiecutter.python_versions.split(",") -%}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import distutils
import subprocess
from os.path import dirname, join
from setuptools import setup, find_packages
{%- if cookiecutter.use_i18n == "Yes" %}
from setuptools.command.sdist import sdist
from wheel.bdist_wheel import bdist_wheel
{%- endif %}
def read(*args):
return open(join(dirname(__file__), *args)).read()
{%- if cookiecutter.use_tox == "Yes" %}
class ToxTestCommand(distutils.cmd.Command):
"""Distutils command to run tests via tox with 'python setup.py test'.
Please note that in our standard configuration tox uses the dependencies in
`requirements/dev.txt`, the list of dependencies in `tests_require` in
`setup.py` is ignored!
See https://docs.python.org/3/distutils/apiref.html#creating-a-new-distutils-command
for more documentation on custom distutils commands.
"""
description = "Run tests via 'tox'."
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
self.announce("Running tests with 'tox'...", level=distutils.log.INFO)
return subprocess.call(['tox'])
{%- endif %}
{%- if cookiecutter.use_i18n == "Yes" %}
def compile_translations(self):
"""
Wrapper around the `run` method of distutils or setuptools commands.
The method creates the compiled translation files before the `run` method of the superclass is run.
"""
self.announce("Compiling translations", level=distutils.log.INFO)
self.run_command('compile_catalog')
super(self.__class__, self).run()
def command_factory(name, base_class, wrapper_method):
"""Factory method to create a distutils or setuptools command with a patched `run` method."""
return type(str(name), (base_class, object), {'run': wrapper_method})
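# For reference, command_factory('SDistCommand', sdist, compile_translations) is
# roughly equivalent to the hand-written subclass below (sketch only, not used here):
#
#   class SDistCommand(sdist, object):
#       run = compile_translations   # compile the catalogs, then call the normal sdist run()
#
# Building the class dynamically just lets the same wrapper serve both sdist and
# bdist_wheel without repeating the boilerplate.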
{%- endif %}
exec(open('{{ cookiecutter.package_name }}/version.py').read())
{%- set license_classifiers = {
'MIT license': 'License :: OSI Approved :: MIT License',
'BSD license': 'License :: OSI Approved :: BSD License',
'ISC license': 'License :: OSI Approved :: ISC License (ISCL)',
'Apache Software License 2.0': 'License :: OSI Approved :: Apache Software License',
'GNU General Public License v3': 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)'
} %}
install_requires = [
]
tests_require = [
'coverage',
'flake8',
'pydocstyle',
'pylint',
{% if cookiecutter.use_pytest == "Yes" -%}
'pytest-pep8',
'pytest-cov',
# for pytest-runner to work, it is important that pytest comes last in
# this list: https://github.com/pytest-dev/pytest-runner/issues/11
'pytest'{%- endif %}
]
exec(read('{{ cookiecutter.package_name }}', 'version.py'))
setup(name='{{ cookiecutter.project_name }}',
version=__version__, # noqa
description='{{ cookiecutter.description }}',
long_description=read('README.rst'),
author='{{ cookiecutter.author_name }}',
author_email='{{ cookiecutter.email }}',
url='https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
{%- if cookiecutter.open_source_license in license_classifiers %}
'{{ license_classifiers[cookiecutter.open_source_license] }}',
{%- endif %}
'Natural Language :: English',
'Programming Language :: Python',
{% for version in python_versions -%}
'Programming Language :: Python :: {{ version|trim }}',
{% endfor -%}
'Topic :: Internet'
],
include_package_data=True,
install_requires=install_requires,
packages=find_packages(include=['{{ cookiecutter.package_name }}*']),
test_suite='tests',
setup_requires=['pytest-runner'],
tests_require=tests_require,
{# This breaks for me, so commenting it out.
cmdclass={
{%- if cookiecutter.use_tox == "Yes" %}
'test': ToxTestCommand,
{%- endif %}
{%- if cookiecutter.use_i18n == "Yes" %}
'sdist': command_factory('SDistCommand', sdist, compile_translations),
'bdist_wheel': command_factory('BDistWheelCommand', bdist_wheel, compile_translations),
{%- endif %}
}#}
)
|
py | 1a45f4e0c84056fd27a299e24a1377e37223d18c | # Copyright (c) Open-MMLab. All rights reserved.
from .colorspace import (bgr2gray, bgr2hls, bgr2hsv, bgr2rgb, bgr2ycbcr,
gray2bgr, gray2rgb, hls2bgr, hsv2bgr, imconvert,
rgb2bgr, rgb2gray, rgb2ycbcr, ycbcr2bgr, ycbcr2rgb)
from .geometric import (cutout, imcrop, imflip, imflip_, impad,
impad_to_multiple, imrescale, imresize, imresize_like,
imresize_to_multiple, imrotate, imshear, imtranslate,
rescale_size)
from .io import imfrombytes, imread, imwrite, supported_backends, use_backend
from .misc import tensor2imgs
from .photometric import (adjust_brightness, adjust_color, adjust_contrast,
adjust_lighting, adjust_sharpness, auto_contrast,
clahe, imdenormalize, imequalize, iminvert,
imnormalize, imnormalize_, lut_transform, posterize,
solarize)
__all__ = [
'bgr2gray', 'bgr2hls', 'bgr2hsv', 'bgr2rgb', 'gray2bgr', 'gray2rgb',
'hls2bgr', 'hsv2bgr', 'imconvert', 'rgb2bgr', 'rgb2gray', 'imrescale',
'imresize', 'imresize_like', 'imresize_to_multiple', 'rescale_size',
'imcrop', 'imflip', 'imflip_', 'impad', 'impad_to_multiple', 'imrotate',
'imfrombytes', 'imread', 'imwrite', 'supported_backends', 'use_backend',
'imdenormalize', 'imnormalize', 'imnormalize_', 'iminvert', 'posterize',
'solarize', 'rgb2ycbcr', 'bgr2ycbcr', 'ycbcr2rgb', 'ycbcr2bgr',
'tensor2imgs', 'imshear', 'imtranslate', 'adjust_color', 'imequalize',
'adjust_brightness', 'adjust_contrast', 'lut_transform', 'clahe',
'adjust_sharpness', 'auto_contrast', 'cutout', 'adjust_lighting'
]
|
py | 1a45f5c7f38394d0b5b8e2d741cb04a192be96fb | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#author:fugui
from typing import Counter, Text
import urllib.request
import ssl
import json
import os
import sys
import datetime
# 11:00 cutoff, used to decide whether the ServerChan push should be sent
global d_time0,d_time1,d_time2,n_time
d_time0 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '11:00', '%Y-%m-%d%H:%M')
n_time = datetime.datetime.now()
# The windows for grabbing large red packets run from d_time3 to d_time4 and from d_time5 to d_time6. d_time4 stops 11 minutes early because the afternoon
# pool can close 10 minutes ahead of schedule, and in some regions packets of 10 yuan and above are still in stock at the last moment, which would make the script error out;
# at the last moment the script stops monitoring and falls back to the face value of whatever guaranteed-win voucher it holds.
### By default the large-packet (15 yuan and above) windows are 17:00-20:49 in the afternoon and 21:00-23:59 at night; changing them is not recommended.
## The midday round also tries for a large packet by default, provided the prop inventory holds a guaranteed-win voucher of 10 yuan or more!!!
global d_time3,d_time4,d_time5,d_time6
d_time3 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '17:00', '%Y-%m-%d%H:%M')
d_time4 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '20:49', '%Y-%m-%d%H:%M')
d_time5 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '21:00', '%Y-%m-%d%H:%M')
d_time6 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '23:59', '%Y-%m-%d%H:%M')
# d_time7 is the time of day before which guaranteed-win vouchers are NOT used. If it is set to 17:00, draws before 17:00 will not consume a voucher;
# this takes priority over the custom large-packet windows, so valid vouchers in the prop inventory are saved for later.
## Setting d_time7 to 11:00 removes the restriction entirely. Do not delete d_time7; if you do not need the limit, just change its time to 11:00 (use the ASCII colon).
global d_time7
d_time7 = datetime.datetime.strptime(str(datetime.datetime.now().date()) + '11:00', '%Y-%m-%d%H:%M')
### How many red-packet beans to accumulate before the voucher-exchange step runs, to avoid repeatedly exchanging for the small 5-yuan voucher
setexchangedou = int(sys.argv[12])
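# For reference, the window checks later in main() boil down to plain datetime
# comparisons; the names below are only illustrations of the variables above:
#   in_afternoon_window = d_time3 <= n_time <= d_time4   # 17:00-20:49
#   in_evening_window   = d_time5 <= n_time <= d_time6   # 21:00-23:59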
# Disable SSL verification so requests can be captured and debugged with a proxy
ssl._create_default_https_context = ssl._create_unverified_context
# Constants that have not changed in the short term (six months or more)
parActivityId="Gh1tkq-wvFU2xEP_ZPzHPQ"
wm_ctype="mtandroid"
# The meaning of portraitId is unknown; it is used by the request that grants 30 beans after browsing the daily coupon page for 30s
portraitId=498
# Minimal common request headers
head={"Host": "i.waimai.meituan.com","User-Agent":"MeituanGroup/11.9.208","x-requested-with": "XMLHttpRequest","content-type":"application/x-www-form-urlencoded"}
# Meituan Waimai server base URL
baseurl=r"https://i.waimai.meituan.com"
# pushPlus webhook address, used for WeChat Work and similar channels; empty by default, fill it in manually if you use WeChat Work
global webhook
webhook = sys.argv[1]
# Define and initialise globals; do not change the values assigned below!!!!
global propIdforuse,token,batchId,propId
showPriceNumber = "1"
propIdforuse =2
batchId = "haha"
wm_latitude =sys.argv[2]
wm_longitude=sys.argv[3]
token =sys.argv[4]
propId=sys.argv[5]
exchangeCoinNumber=sys.argv[6]
serverkey=sys.argv[7]
pushPlusToken =sys.argv[8]
yesornot = sys.argv[9]
yesornot2 = sys.argv[10]
leftdou=0
counttime = 0
cwd = os.path.dirname(os.path.realpath(__file__))
##############################################################################
## Mark these four packet denominations as not sold out; used to poll the pool for leftovers and grab a large packet while a 10-yuan-plus voucher is held. If you do not want this behaviour, change the 1 on the next line to 0.
eight = ten = fifteen = thirty =fifty=sys.argv[11]
##############################################################################
# eight_left= 10
################################################################################
# If, during your custom large-packet window, a 10-yuan-plus voucher never manages to grab any packet, increase the values below (e.g. to around 10)
ten_left=0
fifteen_left=0
thirty_left=0
fifty_left=0
#将print内容同步写到output.txt文件
class Logger(object):
def __init__(self, fileN='Default.log'):
self.terminal = sys.stdout
self.log = open(fileN, 'w+',encoding='utf-8')
def write(self, message):
'''print effectively goes through sys.stdout.write'''
self.terminal.write(message)
self.log.write(message)
def flush(self):
pass
###获取serverkey
# def getserverkey():
# global yesornot
# global serverkey
# if os.path.exists(str(cwd)+r"/serverkey.txt"):
# # file1 = open(r"./token.txt", mode='r',encoding="UTF-8")
# # token = file1.readline()
# # file1.close
# if os.path.getsize(str(cwd)+r"/serverkey.txt")!=0:
# yesornot = "y"
# else:
# yesornot = "n"
# return -1
# else:
# while True:
# try:
# print("请选择是否开启server酱推送!\n")
# yesornot=input("是否开启server酱推送(y/n):\n")
# if type(yesornot)==str and yesornot=='y':
# print("获取serverkey请访问:https://sct.ftqq.com/\n")
# serverkey=input("请输入serverkey:\n")
# except:
# pass
# if type(yesornot)==str and (yesornot =="n" or yesornot=='y'):
# break
# file =open(str(cwd)+r"/serverkey.txt", mode='w+',encoding="UTF-8")
# file.write(serverkey)
# file.close
###获取pushPlusToken
# def getpushPlusToken():
# global yesornot2
# global pushPlusToken
# if os.path.exists(str(cwd)+r"/pushPlusToken.txt"):
# # file1 = open(r"./token.txt", mode='r',encoding="UTF-8")
# # token = file1.readline()
# # file1.close
# if os.path.getsize(str(cwd)+r"/pushPlusToken.txt")!=0:
# yesornot2 = "y"
# else:
# yesornot2 = "n"
# return -1
# else:
# while True:
# try:
# print("请选择是否开启pushPlus推送\n")
# yesornot2=input("是否开启pushPlus推送(y/n):\n")
# if type(yesornot2)==str and yesornot2=='y':
# print("获取pushPlusToken请访问:https://www.pushplus.plus/\n")
# pushPlusToken=input("请输入pushPlusToken:\n")
# except:
# pass
# if type(yesornot2)==str and (yesornot2 =="n" or yesornot2=='y'):
# break
# file =open(str(cwd)+r"/pushPlusToken.txt", mode='w+',encoding="UTF-8")
# file.write(pushPlusToken)
# file.close
#获取token
# def gettoken():
# if os.path.exists(str(cwd)+r"/token.txt"):
# file1 = open(str(cwd)+r"/token.txt", mode='r',encoding="UTF-8")
# token = file1.readline()
# file1.close
# return token
# else:
# while True:
# try:
# print("获取token方法参考readme.md!\n")
# token=input("请输入token:\n")
# except:
# pass
# if type(token)==str and token !="":
# break
# file =open(str(cwd)+r"/token.txt", mode='w+',encoding="UTF-8")
# file.write(token)
# file.close
# return token
#获取经纬度函数并存入当前目录文本(美团活动为随机地点固定半年以上,各地大额红包概率可能不同,若长期小额,可尝试换地址或换号)
# def getlatlongitude():
# if os.path.exists(str(cwd)+r"/wm_latitudewm_longitude.txt"):
# return -1
# else:
# while True:
# try:
# print("若您不知道🙏限时抢红包开放城市,可试各地省会,如成都(30657401,104065827)\n")
# wm_latitude=eval(input("请输入去除小数点的纬度(如30657401):\n"))
# wm_longitude=eval(input("请输入去除小数点的经度(如104065827):\n"))
# except:
# pass
# if type(wm_latitude)==int and type(wm_longitude)==int :
# break
# file =open(str(cwd)+r"/wm_latitudewm_longitude.txt", mode='w+',encoding="UTF-8")
# file.write(str(wm_latitude)+"\n"+str(wm_longitude))
# file.close
#定义一个云端查询必中符库中所有的propId 和needNumber 的函数,并传给getpropId_Coninnumber()函数作为用户输入参考提示
# def myredbean():
# wm_latitude = 1
# wm_longitude = 1
# print("开始执行从美团接口查询propid 和 needNumber参数脚本:\n")
# datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+str(token)+"&userPortraitId="+str(portraitId)
# url_drawlottery = baseurl+r"/cfeplay/playcenter/batchgrabred/myRedBean"
# request =urllib.request.Request(url_drawlottery,headers=head,data=datas.encode("utf-8"),method="POST")
# try:
# response = urllib.request.urlopen(request,timeout=10)
# result = response.read().decode("utf-8")
# result2 = json.loads(result)
# cent = 1
# if(result2["code"]==0 and result2["subcode"]==0 and len(result2["data"]["propExchangeRuleInfos"])):
# for k in result2["data"]["propExchangeRuleInfos"]:
# print("第%d类必中符 所需设置propId参数为%d\t所需红包豆数量为:%d\t总量为%d\n"%(cent,k["propId"],k["needNumber"],k["amount"]))
# cent=cent+1
# print("一般这几类必中符金额依次为5元 8元 15元,大概率使用后兑换到20-5,25-8,40-15的红包,建议选择面值最大的一类,即propId填5,所需豆子数量填1800即可\n脚本会自动从设定的面值去尝试兑换,逐级尝试面值,直到兑换成功,所以推荐设置默认兑换15面值的必中符\n注意填写的propId和所需豆子数之间是上方的一一对应关系,错误对应将导致兑换失败!\n")
# elif (result2["code"]==1 and result2["subcode"]==-1):
# print("%s,原因:输入token失效或错误 请继续运行程序并输入,脚本将在运行一遍后自动删除异常配置文件!!\n"%(result2["msg"]))
# else:
# print("请求接口失效或参数异常,建议🙏重置参数!\n")
# sys.exit(0)
# except urllib.error.URLError as e:
# if hasattr(e,"code"):
# print("脚本执行失败,错误代码如下:\n")
# print(e.code)
# if hasattr(e,"reason"):
# print(e,"reason")
#定义获得需要兑换的必中符道具类型和兑换所需的豆子
# def getpropId_Coinnumber(token):
# if os.path.exists(str(cwd)+r"/propId_Coinnumbe.txt"):
# return -1
# else:
# while True:
# myredbean(token)
# try:
# propId=eval(input("请输入所需要兑换道具的propId(推荐填写5):\n"))
# exchangeCoinNumber=eval(input("请输入propId对应某类必中符所需的豆子数量(推荐填写1800):\n"))
# except:
# pass
# if type(propId)==int and type(exchangeCoinNumber)==int :
# if propId == 2 or propId == 4 or propId == 5:
# if exchangeCoinNumber ==500 or exchangeCoinNumber ==1000 or exchangeCoinNumber ==1800 :
# break
# file =open(str(cwd)+r"/propId_Coinnumbe.txt", mode='w+',encoding="UTF-8")
# file.write(str(propId)+"\n"+str(exchangeCoinNumber))
# file.close
#定义从文本文件中获取存入变量的函数,第二次运行时不用输入,若需改变经纬度和token,则直接删除文件即可
# def getVar():
# if not os.path.exists(str(cwd)+r"/wm_latitudewm_longitude.txt"):
# print("程序运行中配置文件异常,文件或者权限异常,已自动为您删除脚本目录下所有已生成的txt文档并停止程序!\n")
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# os.remove(str(cwd)+r"/pushPlusToken.txt")
# sys.exit(0)
# file1 = open(str(cwd)+r"/wm_latitudewm_longitude.txt", mode='r',encoding="UTF-8")
# wm_latitude = int(file1.readline())
# wm_longitude = int(file1.readline())
# file1.close()
# file2 = open(str(cwd)+r"/token.txt", mode='r',encoding="UTF-8")
# if not os.path.exists(str(cwd)+r"/token.txt"):
# print("程序运行中配置文件异常,文件或者权限异常,已自动为您删除脚本目录下所有已生成的txt文档并停止程序!\n")
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# os.remove(str(cwd)+r"/pushPlusToken.txt")
# sys.exit(0)
# token = file2.readline()
# file2.close()
# if not os.path.exists(str(cwd)+r"/propId_Coinnumbe.txt"):
# print("程序运行中配置文件异常,文件或者权限异常,已自动为您删除脚本目录下所有已生成的txt文档并停止程序!\n")
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# os.remove(str(cwd)+r"/pushPlusToken.txt")
# sys.exit(0)
# file3 = open(str(cwd)+r"/propId_Coinnumbe.txt", mode='r',encoding="UTF-8")
# propId = int(file3.readline())
# exchangeCoinNumber = int(file3.readline())
# file3.close()
# return wm_latitude,wm_longitude,token,propId,exchangeCoinNumber
##获得pushPlusToken
# def pushPlusTokenvar():
# global pushPlusToken
# if not os.path.exists(str(cwd)+r"/pushPlusToken.txt"):
# print("程序运行中配置文件异常,文件或者权限异常,已自动为您删除脚本目录下所有已生成的txt文档并停止程序!\n")
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# os.remove(str(cwd)+r"/pushPlusToken.txt")
# sys.exit(0)
# file = open(str(cwd)+r"/pushPlusToken.txt", mode='r',encoding="UTF-8")
# pushPlusToken = file.readline()
# file.close()
# return pushPlusToken
##获得serverkey
# def serverkeyvar():
# global serverkey
# if not os.path.exists(str(cwd)+r"/serverkey.txt"):
# print("程序运行中配置文件异常,文件或者权限异常,已自动为您删除脚本目录下所有已生成的txt文档并停止程序!\n")
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# os.remove(str(cwd)+r"/pushPlusToken.txt")
# sys.exit(0)
# file = open(str(cwd)+r"/serverkey.txt", mode='r',encoding="UTF-8")
# serverkey = file.readline()
# file.close()
# return serverkey
# Fetch the batchId of the current time-limited red-packet round
def getbatchId():
global wm_latitude,wm_longitude
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行获取batchId脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_ctype="+wm_ctype+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token
url_getbatchId = baseurl+r"/cfeplay/playcenter/batchgrabred/corepage"
request =urllib.request.Request(url_getbatchId,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0):
if "batchId" in result2["data"]:
print("batchId:%s\n"%(result2["data"]["batchId"]))
return result2["data"]["batchId"]
else:
print("获取batchId失败👀,当前非限时抢红包时间段,无法进行下一步,但已为您签到完毕🙏!\n")
os._exit(1)
elif (result2["code"]==1):
print("%s,接口需提交的token参数已改变👀,请重新运行一遍脚本!\n"%(result2["msg"]))
# os.remove(str(cwd)+r"/wm_latitudewm_longitude.txt")
# os.remove(str(cwd)+r"/token.txt")
# os.remove(str(cwd)+r"/propId_Coinnumbe.txt")
# os.remove(str(cwd)+r"/serverkey.txt")
# sys.exit(0)
os._exit(1)
else:
print("获取batchId错误👀,请检查网络,否则为接口失效!\n")
os._exit(1)
except urllib.error.URLError as e:
if hasattr(e,"code"):
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Daily bean check-in (claimable up to 7 times a day, 30 minutes apart); requires the token
def signForBeans():
global token
print("**开始执行签到领豆脚本:** \n")
datas = "token="+token
url_signforbeans = baseurl+r"/cfeplay/playcenter/batchgrabred/drawPoints/v2"
request =urllib.request.Request(url_signforbeans,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0):
print("👴%s\n"%(result2["msg"]))
elif (result2["code"]==1):
print("👴未到领取时间或已经领取完了(每天可领7次,每次间隔需半小时\n)!")
elif (result2["code"]==7):
print("token已失效,请检查是否已自动删除所有配置文件,若未自动删除,请手动🙏删除所有配置文件并重新运行脚本,最后温馨提示:建议接入server酱通知!\n")
else:
print("请求接口失效或网络不佳,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Draw a time-limited red packet (uses a guaranteed-win prop when available)
def drawlottery(batchId):
global wm_latitude,wm_longitude,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行限时抢天天神券脚本🧧:**\n")
print(batchId)
datas = "parActivityId="+parActivityId+"&wm_latitude="+wm_latitude+"&wm_longitude="+wm_longitude+"&token="+token+"&batchId="+batchId+"&isShareLink=true"+"&propType=1"+"&propId="+str(propIdforuse)
url_drawlottery = baseurl+r"/cfeplay/playcenter/batchgrabred/drawlottery"
request =urllib.request.Request(url_drawlottery,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0):
print("领取成功!\n提示信息:%s\n红包属性:%s\n使用限制:%s\n红包价值:%s\n红包立即生效时间:%s\n红包剩余有效期:%s分钟\n"%(result2["msg"],result2["data"]["name"],result2["data"]["priceLimitdesc"],result2["data"]["showTitle"],result2["data"]["endTimeDesc"],str(float(result2["data"]["leftTime"])/60000)))
global showPriceNumber
showPriceNumber = result2["data"]["showPriceNumber"]
if int(showPriceNumber)<500:
print("**当前红包面值为%d元,小于5元,👴将自动执行小额红包转红包豆脚本!!**\n"%(int(showPriceNumber)/100))
else:
print("**当前红包面值为%d元,大于等于5元,👴将不会执行小额红包转红包豆脚本!!**\n"%(int(showPriceNumber)/100))
elif (result2["code"]==1 and result2["subcode"]==3):
print("%s😅\n"%(result2["msg"]))
elif(result2["code"]==1 and result2["subcode"]==-1):
print("token错误或已失效,%s\n"%(result2["msg"]))
elif (result2["code"]==7):
print("token已失效,请手动🙏删除所有自动生成的配置文件,并建议接入server酱通知!\n")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Accept the red packet into the coupon wallet. Skipped for packets under 5 yuan, in which case redtobean() converts them to beans; if neither runs, the packet is credited automatically about 5 minutes after a successful draw.
def acceptRed(batchId):
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
global wm_latitude,wm_longitude,token
print("**开始执行发放天天神券🧧到红包库脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token+"&batchId="+batchId
url_acceptRed = baseurl+r"/cfeplay/playcenter/batchgrabred/acceptRed"
request =urllib.request.Request(url_acceptRed,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0):
print("*👴抢到的红包已经领取成功啦,快去使用吧!*\n")
elif (result2["code"]==1):
print("%s\n"%(result2["msg"]))
elif (result2["code"]==7):
print("token已失效,请手动🙏删除所有自动生成的配置文件,并建议接入server酱通知!\n")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Convert red packets worth less than 5 yuan into red-packet beans
def redtobean(batchId):
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
global wm_latitude,wm_longitude
print("**默认尝试执行面值小于5元🧧自动转红包豆脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token+"&batchId="+batchId
url_drawlottery = baseurl+r"/cfeplay/playcenter/batchgrabred/redToBean"
request =urllib.request.Request(url_drawlottery,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0):
print("👴小额红包转红包豆成功!\n")
elif (result2["code"]==1 and result2["subcode"]==12):
# print("%s😅\n"%(result2["msg"]))
print("没有待转换的红包😅\n")
elif (result2["code"]==7):
print("token已失效,请手动🙏删除所有自动生成的配置文件,并建议接入server酱通知!\n")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# List the coupons that have already been claimed
def querymyreward():
global token
print("**开始执行查询已领天天神券🧧脚本:**\n")
datas = "parActivityId="+parActivityId+"&token="+token
url_querymyreward = baseurl+r"/cfeplay/playcenter/batchgrabred/myreward"
request =urllib.request.Request(url_querymyreward,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
# print(result2)
# print(result2["code"])
if(result2["code"]==0 and len(result2["data"]["myawardInfos"])):
print("👴开始遍历红包库:\n")
print("红包库详细信息:\n")
print("红包库中共有%d个红包\n"%(len(result2["data"]["myawardInfos"])))
cent=0
count = 0
isover15=0
for k in result2["data"]["myawardInfos"]:
if not k["status"]:
print("**第%d个红包有效!!!!**\n红包属性:%s\n使用限制:%s\n红包价值:%s元\n红包剩余有效期%s分钟\n"%(cent+1,k["name"],k["priceLimitdesc"],k["showPriceNumberYuan"],str(float(k["leftTime"])/60000)))
if(int(k["showPriceNumberYuan"])>15):
isover15 =1
print("\n")
else:
count=count+1
if cent == 0:
print("**过期红包详情:**\n")
cent=cent+1
if(propIdforuse!=5):
print("总计已领取%d个红包,其中已过期%d个😅,有效%d个\n"%(cent,count,cent-count))
else:
if isover15==1:
print("恭喜你领取大额限时红包,具体价值如上所示!!总计已领取%d个红包,其中已过期%d个😅,有效%d个\n"%(cent,count,cent-count))
print("\n")
elif (result2["code"]==1):
print("%s\n"%(result2["msg"]))
elif (result2["code"]==7):
print("token已失效,请手动🙏删除所有自动生成的配置文件,并建议接入server酱通知!\n")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Claim the daily 30-bean reward for browsing the coupon page
def sendTaskRedBean():
global wm_latitude,wm_longitude,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行领取每日30豆的脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token+"&portraitId="+str(portraitId)
url_sendTaskRedBean = baseurl+r"/cfeplay/playcenter/batchgrabred/sendTaskRedBean"
request =urllib.request.Request(url_sendTaskRedBean,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["status"]==0):
print("%s\n今天领取成功%d个红包豆,请明日再来!\n"%(result2["msg"],result2["sendBeanCount"]))
elif (result2["status"]==1):
print("您今日已领取过😅,%s\n"%(result2["msg"]))
elif (result2["status"]==-1):
print("portraitId已失效,%s\n"%(result2["msg"]))
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Daily sign-in that awards guaranteed-win props
def doAction():
global wm_latitude,wm_longitude,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行每日签到领必中符🧧的脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token+"&action=SiginInGetProp"
url_doaction = baseurl+r"/cfeplay/playcenter/batchgrabred/doAction"
request =urllib.request.Request(url_doaction,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==0 and result2["data"]["signDays"]!=0):
print("签到%s\n,截止今日这周已签到%d天"%(result2["msg"],result2["data"]["signDays"]))
elif (result2["code"]==0 and result2["data"]["signDays"]==0):
print("您今日已签到,请明天再来!")
elif (result2["code"]==7):
print("参数异常或接口已失效")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Inspect the guaranteed-win props held in the prop inventory
def querymyProps():
global propIdforuse
global wm_latitude,wm_longitude,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行查询道具库中必中符🧧详情的脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token
url_querymyprops = baseurl+r"/cfeplay/playcenter/batchgrabred/myProps"
request =urllib.request.Request(url_querymyprops,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==0 and len(result2["data"])):
print("👴开始遍历道具库:\n")
print("道具库详细信息:\n")
print("红包库中共有%d个必中符道具\n"%(len(result2["data"])))
cent=0
count = 0
for k in result2["data"]:
if k["status"]==1:
print("第%d个必中符道具有效!!!!\n必中符道具id号:%s\n必中符道具属性:%s\n过期时间:%s\n"%(cent+1,k["recordNo"],k["propName"],k["expireTime"]))
if cent==0:
propIdforuse = k["propId"]
print("\n")
else:
count=count+1
cent=cent+1
if (count!=0):
print("总计%d个必中符道具,已过期%d个😅,有效%d个\n"%(cent,count,cent-count))
if ((cent-count)!=0):
print("**注意:每天中午抢红包🧧时将自动为您使用道具库中第一个道具!!** ")
else:
print(" **注意:道具库无有效道具,无法使用必中符,下次抢红包将使用默认参数抢红包(拼手气😅)!!** ")
print("\n")
elif (result2["code"]==7):
print("参数异常或接口已失效,请手动🙏删除所有自动生成的配置文件,并建议接入server酱通知!")
else:
print("必中符道具库为空,👴未帮您领取过道具!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
#已废弃,直接发送兑换请求即可,不在兑换时间段 subcode 为13
#定义运行时是否能兑换豆子成必中符,目前一直为14点至16点,故不定义此函数,采取每天14点至16点运行此程序时直接尝试兑换
#若需自行获取当前时间段是否可换豆子为道具,则post以下请求即可
# POST /cfeplay/playcenter/batchgrabred/canExchangeCheck HTTP/1.1
# Host: i.waimai.meituan.com
# Content-Length: 82
# User-Agent:MeituanGroup/11.9.208
# x-requested-with: XMLHttpRequest
# content-type: application/x-www-form-urlencoded
# parActivityId=Gh1tkq-wvFU2xEP_ZPzHPQ&wm_latitude=30657401&wm_longitude=104065827
# Exchange accumulated beans for a guaranteed-win prop:
def exchange():
global propId,wm_latitude,wm_longitude,token
# wm_latitude = getVar()[0]
# wm_longitude = getVar()[1]
wm_actual_latitude = str(wm_latitude)
wm_actual_longitude =str(wm_longitude)
print("**开始执行每日豆子兑换必中符脚本**:\n")
while(1):
datas = "wm_actual_longitude="+wm_actual_longitude+"&wm_actual_latitude="+wm_actual_latitude+"&exchangeRuleId=&propId="+str(propId)+"&exchangeCoinNumber="+str(exchangeCoinNumber)+"&parActivityId="+parActivityId+"&wm_ctype="+wm_ctype+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+token
url_exchange = baseurl+r"/cfeplay/playcenter/batchgrabred/exchange"
request =urllib.request.Request(url_exchange,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==0 and result2["subcode"]==0):
print("%s,您设置的红包豆兑换指定额度的必中符成功!!!请查看下方道具库详情!😄\n"%(result2["msg"]))
break
elif (result2["code"]==1 and result2["subcode"]==13):
print("%s\n"%(result2["msg"]))
break
elif (result2["code"]==1 and result2["subcode"]==-1):
print("%s,您现在的红包豆不足以兑换此类必中符或者此类必中符已被抢完!\n正尝试兑换*次一等级*必中符\n"%(result2["msg"]))
if(propId ==5):
propId =4
break
elif (result2["code"]==7):
print("参数异常或接口已失效\n")
else:
print("请求接口失效或参数异常,请稍后再试!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,准备退出程序,错误代码为:%s\n"%(e.code))
if hasattr(e,"reason"):
print("脚本执行失败👀,准备退出程序,错误代码为:%s\n"%(e.reason))
### Query the bean balance and recent bean transactions
def myRedBeanRecords():
global wm_latitude,wm_longitude,leftdou,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行查询豆子变化详情参数脚本**:\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+str(token)+"&userPortraitId="+str(portraitId)+"&pageNum=1"
url_myredbeanRecords = baseurl+r"/cfeplay/playcenter/batchgrabred/myRedBeanRecords"
request =urllib.request.Request(url_myredbeanRecords,headers=head,data=datas.encode("utf-8"),method="POST")
try:
response = urllib.request.urlopen(request,timeout=10)
result = response.read().decode("utf-8")
result2 = json.loads(result)
cent=1
if(result2["code"]==0 and result2["subcode"]==0 and len(result2["data"]["redBeanRecordInfos"])):
leftdou= result2["data"]["totalObtainAmount"]-result2["data"]["usedAmount"]-result2["data"]["expiredAmount"]
print("**总获得红包豆:%d,已使用红包豆:%d,已过期红包豆:%d,剩余可用红包豆:%d**\n"%(result2["data"]["totalObtainAmount"],result2["data"]["usedAmount"],result2["data"]["expiredAmount"],leftdou))
for k in result2["data"]["redBeanRecordInfos"]:
print("exchangeTime:%s\texchangeMessage:%s\texchangeNumber:%s\n"%(k["exchangeTime"],k["exchangeMessage"],k["exchangeNumber"]))
cent=cent+1
if(cent>10):
break
print("*只显示最近十条红包豆的变化* \n")
elif (result2["code"]==1 and result2["subcode"]==-1):
print("%s\n"%(result2["msg"]))
else:
print("请求接口失效或参数异常,建议🙏重置参数!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Query the red-packet pool (remaining stock per denomination)
def queryredpool():
global wm_latitude,wm_longitude,token
# wm_latitude = $wm_latitude
# wm_longitude=$wm_longitude
print("**开始执行查询红包池详情脚本:**\n")
datas = "parActivityId="+parActivityId+"&wm_latitude="+str(wm_latitude)+"&wm_longitude="+str(wm_longitude)+"&token="+str(token)+"&wm_ctype="+wm_ctype
url_myredbeanRecords = baseurl+r"/cfeplay/playcenter/batchgrabred/corepage"
request =urllib.request.Request(url_myredbeanRecords,headers=head,data=datas.encode("utf-8"),method="POST")
try:
global eight,ten,fifteen,thirty,fifty,eight_left,ten_left,fifteen_left,thirty_left,fifty_left,counttime
response = urllib.request.urlopen(request)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==0 and result2["subcode"]==0 and len(result2["data"]["awardInfos"])):
for k in result2["data"]["awardInfos"]:
if"leftStock" not in k:
print("该地区没有红包池,脚本异常退出!")
# if (round(float(k["showPriceNumberYuan"]))==8 and k["leftStock"]==eight_left):
# eight = 0
if (round(float(k["showPriceNumberYuan"]))==10 and k["leftStock"]==ten_left):
ten = 0
if (round(float(k["showPriceNumberYuan"]))==15 and k["leftStock"]==fifteen_left):
fifteen = 0
if (round(float(k["showPriceNumberYuan"]))==30 and k["leftStock"]==thirty_left):
thirty = 0
if (round(float(k["showPriceNumberYuan"]))==50 and k["leftStock"]==fifty_left):
fifty = 0
if counttime<3:
sprint("*红包池中%s元总量:%d张,已被领取:%d张,剩余%d张*\n"%(k["showPriceNumberYuan"],k["totalStock"],k["sendStock"],k["leftStock"]))
counttime =counttime +1
elif (result2["code"]==1 and result2["subcode"]==-1):
print("token失效,导致获取活动信息失败!%s\n"%(result2["msg"]))
else:
print("红包池未开放,等待中!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Push the run log via pushPlus
def pushPlus():
global pushPlusToken
global webhook
# pushPlusToken = $pushPlusToken
if not os.path.exists(str(cwd)+r"/output.txt"):
print("output.txt文件异常,推送退出!🙌")
return -1
file4= open(str(cwd)+r"/output.txt", mode='r',encoding="UTF-8")
message = str(file4.read())
file4.close
pushurl="https://www.pushplus.plus/send"
head_server ={"Host": "www.pushplus.plus","User-Agent":"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Mobile Safari/537.36","content-type":"application/x-www-form-urlencoded"}
print("**开始执行pushPlus推送脚本:**\n")
datas=bytes(urllib.parse.urlencode({"title":"天天神券推送","content":message,"token":pushPlusToken,"template":"markdown","channel":"wechat","webhook":webhook,"callbackUrl":""}),encoding="UTF-8")
request =urllib.request.Request(pushurl,headers=head_server,data=datas,method="POST")
try:
response = urllib.request.urlopen(request,timeout=30)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==200) :
print("pushPlus消息推送成功!\n\n")
else:
print("请求接口失效或参数异常,建议重置参数!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
# Push the run log via ServerChan (server酱)
def serverjiang():
# serverkey = $serverkey
if not os.path.exists(str(cwd)+r"/output.txt"):
print("output.txt文件异常,推送退出!🙌")
return -1
file4= open(str(cwd)+r"/output.txt", mode='r',encoding="UTF-8")
message = str(file4.read())
file4.close
pushurl="https://sctapi.ftqq.com/"
head_server ={"Host": "sctapi.ftqq.com","User-Agent":"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Mobile Safari/537.36","content-type":"application/x-www-form-urlencoded"}
url_serverkey = pushurl+serverkey+".send"
print("**开始执行server酱推送脚本:**\n")
datas=bytes(urllib.parse.urlencode({"title":"天天神券推送","desp":message,"channel":""}),encoding="UTF-8")
request =urllib.request.Request(url_serverkey,headers=head_server,data=datas,method="POST")
try:
response = urllib.request.urlopen(request,timeout=30)
result = response.read().decode("utf-8")
result2 = json.loads(result)
if(result2["code"]==0) :
pushid = result2["data"]["pushid"]
readkey = result2["data"]["readkey"]
url_checkurl = pushurl+"push?id="+pushid+"&readkey="+readkey
request2 = urllib.request.Request(url_checkurl,headers=head_server,data=datas)
try:
response2 = urllib.request.urlopen(request2,timeout=30)
text=json.loads(response2.read().decode("utf-8"))
if(text["data"]["title"] =="天天神券推送"):
print("server酱推送成功😄!请在移动设备端查看\n")
else:
print("server酱推送失败👀,请检查serverkey是否正确!\n")
except urllib.error.URLError as e2:
if hasattr(e2,"code"):
print("脚本执行失败👀,错误代码如下:\n")
print(e2.code)
if hasattr(e2,"reason"):
print(e2,"reason")
else:
print("请求接口失效或参数异常,建议重置参数!\n")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print("脚本执行失败,错误代码如下:\n")
print(e.code)
if hasattr(e,"reason"):
print(e,"reason")
def main():
global propIdforuse,token
temp = sys.stdout
print("本脚本提供pushPlus、serverkey这两种推送方式,可以二选一或者全选,首次运行脚本请依次选择是否开启对应推送!\n由于server酱每日免费限额5条,若需开启推送,请首选pushPlus!\n")
# getpushPlusToken()
# getserverkey()
# token = gettoken()
# getlatlongitude()
# getpropId_Coinnumber(token)
sys.stdout = Logger(str(cwd)+r'/output.txt')
batchId = getbatchId()
##先去保持每天签到 以获得必中符或者豆子
doAction()
myRedBeanRecords()
if leftdou >=setexchangedou:
exchange()
else:
print("您当前红包豆为%d未满预设的%d数量,不会执行红包豆兑换必中符脚本,多攒几天豆子再来吧!\n"%(leftdou,setexchangedou))
querymyProps()
#定义bool类型变量判断当前时间段是不是自定义的大额抢红包时间段
istimeforbig1= (n_time <=d_time4) and(n_time>=d_time3)
istimeforbig2= (n_time <=d_time6) and(n_time>=d_time5)
if n_time > d_time7:
if istimeforbig1:
if propIdforuse ==5:
print("**当前符合抢30元以上大额红包的条件**\n")
print("**正使用15元必中符为您尝试抢30元以上的红包**\n")
##拥有15块以上的必中符,先等待着试图抢30,要是15没了,就直接去抢30的红包,或许有可能抢到50
while fifteen ==1 :
if not istimeforbig1:
print("*👴尽力了,等到红包池要关闭了都未等到15元以上大额红包被抢完,开始保底15元,注意查收!*\n")
break
if(thirty ==1 and fifty ==1):
print("*15有剩余,30元已被抢完,50元已被抢完,跳出监测,正在为您抢保底15元红包!*\n")
break
queryredpool()
if istimeforbig2 :
if propIdforuse ==5:
print("**当前符合抢30元以上大额红包的条件**\n")
print("**正使用15元必中符为您尝试抢30元以上的红包**\n")
##拥有15块以上的必中符,先等待着试图抢30,要是15没了,就直接去抢30的红包,或许有可能抢到50
while fifteen ==1 :
if not istimeforbig2 :
print("*👴尽力了,等到红包池要关闭了都未等到15元以上大额红包被抢完,开始保底15元,注意查收!*\n")
break
if(thirty ==1 and fifty ==1):
print("*15有剩余,30元已被抢完,50元已被抢完,跳出监测,正在为您抢保底15元红包!*\n")
break
queryredpool()
if istimeforbig1:
if propIdforuse ==3:
print("**当前符合抢30元以上大额红包的条件**\n")
print("**正使用10元必中符为您尝试抢30元以上的红包**\n")
## Holding a 10-yuan-plus voucher: keep monitoring and try for a 30-yuan packet; once the 10s and 15s are gone, go straight for a 30 (and possibly a 50)
br = 0
while fifteen ==1 :
if(thirty ==1 and fifty ==1 ):
print("&15有剩余,30元已被抢完,50元已被抢完,跳出监测,正在为您抢保底15元红包!*\n")
break
if(br ==1):
break
if not istimeforbig1:
print("*👴尽力了,等到红包池要关闭了都未等到15元以上大额红包被抢完,开始保底15元,注意查收!*\n")
break
if ten ==0 :
queryredpool()
while ten ==1:
if not istimeforbig1:
br = 1
print("*👴尽力了,等到红包池要关闭了都未等到任意大额红包被抢完,开始保底10元,注意查收!*\n")
queryredpool()
if istimeforbig2:
if propIdforuse ==3:
print("**当前符合抢30元以上大额红包的条件**\n")
print("**正使用10元必中符为您尝试抢30元以上的红包**\n")
## Holding a 10-yuan-plus voucher: keep monitoring and try for a 30-yuan packet; once the 10s and 15s are gone, go straight for a 30 (and possibly a 50)
br = 0
while fifteen ==1 :
if(thirty ==1 and fifty ==1 ):
print("&15有剩余,30元已被抢完,50元已被抢完,跳出监测,正在为您抢保底15元红包!*\n")
break
if(br ==1):
break
if not istimeforbig2:
print("*👴尽力了,等到红包池要关闭了都未等到15元以上大额红包被抢完,开始保底15元,注意查收!*\n")
break
if ten ==0 :
queryredpool()
while ten ==1:
if not istimeforbig2:
br = 1
print("*👴尽力了,等到红包池要关闭了都未等到任意大额红包被抢完,开始保底10元,注意查收!*\n")
queryredpool()
if n_time < d_time7 :
propIdforuse =1
drawlottery(batchId)
if(int(showPriceNumber)<500):
redtobean(batchId)
else:
acceptRed(batchId)
querymyreward()
sendTaskRedBean()
querymyProps()
myRedBeanRecords()
sys.stdout = temp
if(yesornot2 == "y"):
pushPlus()
else:
print("您已默认关闭pushPlus推送!若需开启,请将pushPlusToken 填入本脚本目录下的pushPlusToken.txt文本中!\n")
if(yesornot == "y"):
if n_time>d_time0:
serverjiang()
else:
print("当前时间段非抢红包时间,默认关闭server酱推送以节约server酱每日5条推送的限额!")
else:
print("您已默认关闭server酱推送!若需开启,请将serverkey 填入本脚本目录下的serverkey.txt文本中!\n")
if __name__ == "__main__":
main() |
py | 1a45f7575cf90baed19c89f69f3cca760277af94 | import twitter
from searchtweets import ResultStream, gen_rule_payload, load_credentials, collect_results
import json
import os.path
user_list = []
followers_list = []
# api = twitter.Api(consumer_key='C0Q2slgd38EQUV82soOig68Uo',
# consumer_secret='JKJ0tVC8vnlDmVbvPT4BF67nx7r5VqnJTSPHMiGqJLo43bba3m',
# access_token_key='479643521-Q7wHBkOomWOSa7j2jqiKrh5i8VSCbnZewOy0lUJv',
# access_token_secret='HSdLbWQiLXtwpZKKI3W2iW98oDk3QJbrGBEGYmAHhlwU4')
# api = twitter.Api(consumer_key='Wa5xi8yfBZ4LihhpZp2KqzlOq',
# consumer_secret='JHZn4GSi08P6e2S71eRAOT2cDWBk0VrYbMwOg0XhzssOALbsDE',
# access_token_key='86863810-NA4wtMzKrQ62EMIvFUyIaTlXuIWGjd5QwlZkJBL4P',
# access_token_secret='DuhCa5Kg3zjHJykC3W30wPxteEwz5QGEQZvoDAqiVwM5o')
premium_search_args = load_credentials(filename="./twitter_keys.yaml",
yaml_key="search_tweets_30_day_dev",
env_overwrite=False)
rule = gen_rule_payload("bcp point_radius:[-77.0304221 -12.1217806 20mi]", results_per_call=100)
bcp = collect_results(rule,
max_results=100,
result_stream_args=premium_search_args)
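# Note on the rule above: point_radius:[lon lat radius] takes longitude first, then
# latitude, so -77.0304221 -12.1217806 centres the 20-mile search on Lima, Peru.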
[print(tweet.all_text) for tweet in bcp[0:10]]
# %%
# Load File
if os.path.isfile('Miner/bcp.json'):
with open('Miner/bcp.json') as json_file:
past_res_json = json.load(json_file)
past_res = [twitter.Status.NewFromJsonDict(x) for x in past_res_json.get('statuses', '')]
else:
past_res = None
# results = api.GetSearch(raw_query="q=banco bcp", geocode='-12.04902,-77.03360,10km', return_json=True)
# with open('bcp.json', 'w') as f:
# json.dump(results, f)
# %%
# Get credentials and search
rawurl = 'https://api.twitter.com/1.1/search/tweets.json?q=from%3Atwitterdev&result_type=mixed&count=2'
results_json = api.GetSearch(term='bcp')
results = [twitter.Status.NewFromJsonDict(x) for x in results_json.get('statuses', '')]
# %%
if past_res:
total_result = past_res + results
else:
total_result = results
# %%
with open('bcp.json', 'w') as f:
json.dump(total_result, f)
results = [twitter.Status.NewFromJsonDict(x) for x in results_json.get('statuses', '')]
# %%
for tweet in results:
tmptweet = tweet.AsDict()
# print(tmptweet)
user_list.append(tmptweet['user']['id'])
print(tmptweet['user']['screen_name'])
# %%
for tweet in results:
tmptweet = tweet.AsDict()
|
py | 1a45f791eab64ccfcf87145358eea23b98b839a9 | #%%
import numpy as np
import numpy.linalg as lin
import scipy.stats as sts
import scipy.integrate as intgr
import scipy.optimize as opt
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
from mpl_toolkits.mplot3d import Axes3D
#%%
incomes = np.array([[100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600, 1700 , 1800, 1900, 2000]])
incomes = incomes * 10000
# URL: https://www.mf-realty.jp/tebiki/taxbusiness/capter06_05.html
effective_tax = np.array([0.156, 0.164, 0.172, 0.21, 0.238, 0.258, 0.272, 0.286, 0.297, 0.316, 0.331, 0.344, 0.355, 0.364, 0.373, 0.38, 0.386, 0.392, 0.40, 0.48])
#%%
### GS Tax Function
# URL: https://www.jstor.org/stable/pdf/41789070.pdf
def tax_func(I, phi0, phi1, phi2):
#txrates = ((phi0 * (I - ((I ** -phi1) + phi2) ** (-1 / phi1))) / I)
txrates = phi0 - phi0 * (phi1 * I ** phi2 + 1)**(-1 / phi2)
return txrates
def tax_func2(I, phi0, phi1, phi2):
tau = (phi0 * (I - ((I ** -phi1) + phi2) ** (-1 / phi1)))
return tau
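# Note (added): both helpers implement Gouveia-Strauss (1994) style tax functions.
# The form actually fitted below is the effective (average) tax rate
#   tau(I) = phi0 * (1 - (phi1 * I**phi2 + 1)**(-1/phi2)),
# which starts at 0 for I = 0 and rises toward the asymptote phi0 as income grows
# (assuming phi1, phi2 > 0).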
#%%
def model_moments(I_array, phi0, phi1, phi2):
return tax_func(I_array, phi0, phi1, phi2)
def data_moments():
effective_tax = np.array([0.156, 0.164, 0.172, 0.21, 0.238, 0.258, 0.272, 0.286, 0.297, 0.316, 0.331, 0.344, 0.355, 0.364, 0.373, 0.38, 0.386, 0.392, 0.40, 0.48])
return effective_tax
def err_vec(income, phi0, phi1, phi2, simple):
data_mms = data_moments()
model_mms = model_moments(income, phi0, phi1, phi2)
if simple:
err_vec = model_mms - data_mms
else:
err_vec = (model_mms - data_mms) / data_mms
return err_vec
def criterion(params, *args):
phi0, phi1, phi2 = params
income, W = args
err = err_vec(income, phi0, phi1, phi2, simple = False).squeeze()
crit_val = err.T @ W @ err
return crit_val
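# Note (added): criterion() is the standard GMM objective e(b)' W e(b), where the
# moment errors are percentage deviations of the model ETRs from the observed
# effective rates; W is set to the identity matrix below, so this is one-step GMM.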
#%%
### Optimization Problem:
# Initial guess of parameters
phi0 = 0.479
phi1 = 0.022
phi2 = 0.817
params_init = np.array([phi0, phi1, phi2])
# Weighting matrix
W_hat = np.eye(20)
incomes = np.array([[100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600, 1700 , 1800, 1900, 2000]])
incomes = incomes * 10000
incomes = incomes * 10 ** (-6)
#gmm_args = (I_array, I_array_2, W_hat)
gmm_args = (incomes, W_hat)
# Optimization
results_GMM = opt.minimize(criterion, params_init, args = (gmm_args), method = 'L-BFGS-B')
print(results_GMM)
phi0_GMM, phi1_GMM, phi2_GMM = results_GMM.x
#%%
### Plots
I = np.linspace(1,20,20)
tax_rate = tax_func(I, phi0_GMM, phi1_GMM, phi2_GMM)
plt.xlabel('Income (Millions of Yen)')
plt.ylim(0, 0.5)
plt.ylabel(r'Effective Tax Rate $\tau_{s,t}^{ETR}$')
plt.plot(I, tax_rate, color = 'r', label = r'Estimated Tax Rates')
plt.legend(loc='upper left')
tax_rate_data = np.array(effective_tax)
plt.scatter(I, tax_rate_data, label = r'Calculated Tax Rates')
plt.legend(loc='upper left')
plt.grid(b=True, which='major', color='0.65', linestyle='-')
plt.tight_layout(rect=(0, 0.03, 1, 1))
plt.savefig("effective_tax_gs.png")
plt.close()
#%%
def marg_tax(I, phi0, phi1, phi2):
margrates = phi0 * phi1 * I ** (phi2 - 1) * (phi1 * I ** phi2 + 1) ** ( (- 1 - phi2) / phi2)
return margrates
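# Note (added): the expression above is the derivative of the fitted ETR schedule with
# respect to income, d tau / d I = phi0*phi1*I**(phi2-1)*(phi1*I**phi2 + 1)**(-(1+phi2)/phi2),
# evaluated at the GMM estimates and plotted below as the marginal rate.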
marg = marg_tax(I, phi0_GMM, phi1_GMM, phi2_GMM)
I = np.linspace(1,20,20)
plt.xlabel('Income (Millions of Yen)')
plt.ylabel(r'Marginal Tax Rate $\tau_{s,t}^{MTR}$')
plt.plot(I, marg, color = 'r', label = r'Estimated Tax Rates')
plt.legend(loc='upper right')
plt.grid(b=True, which='major', color='0.65', linestyle='-')
plt.tight_layout(rect=(0, 0.03, 1, 1))
plt.savefig("marginal_tax_gs.png")
plt.close()
|
py | 1a45f7c340a9b7eb23766104449f89784943a062 | import json
import shutil
import logging
from flask import Blueprint, request
from tempfile import mkdtemp
from werkzeug.exceptions import BadRequest
from normality import safe_filename, stringify
from servicelayer.archive.util import ensure_path
from aleph.core import db, archive
from aleph.model import Document, Entity, Events
from aleph.queues import ingest_entity
from aleph.index.entities import index_proxy
from aleph.logic.notifications import publish, channel_tag
from aleph.views.util import get_db_collection, get_flag
from aleph.views.util import jsonify, validate_data, get_session_id
from aleph.views.forms import DocumentCreateSchema
log = logging.getLogger(__name__)
blueprint = Blueprint('ingest_api', __name__)
def _load_parent(collection, meta):
"""Determine the parent document for the document that is to be
ingested."""
parent_id = meta.get('parent_id')
if parent_id is None:
return
parent = Document.by_id(parent_id, collection_id=collection.id)
if parent is None:
raise BadRequest(response=jsonify({
'status': 'error',
'message': 'Cannot load parent document'
}, status=400))
return parent
def _load_metadata():
"""Unpack the common, pre-defined metadata for all the uploaded files."""
try:
meta = json.loads(request.form.get('meta', '{}'))
except Exception as ex:
raise BadRequest(str(ex))
validate_data(meta, DocumentCreateSchema)
foreign_id = stringify(meta.get('foreign_id'))
if not len(request.files) and foreign_id is None:
raise BadRequest(response=jsonify({
'status': 'error',
'message': 'Directories need to have a foreign_id'
}, status=400))
return meta, foreign_id
def _notify(collection, document_id):
if not collection.casefile:
return
channels = [
channel_tag(document_id, Entity),
channel_tag(collection),
]
params = {
'collection': collection,
'document': document_id
}
publish(Events.INGEST_DOCUMENT,
params=params,
channels=channels,
actor_id=request.authz.id)
db.session.commit()
@blueprint.route('/api/2/collections/<int:collection_id>/ingest',
methods=['POST', 'PUT'])
def ingest_upload(collection_id):
collection = get_db_collection(collection_id, request.authz.WRITE)
job_id = get_session_id()
sync = get_flag('sync', default=False)
meta, foreign_id = _load_metadata()
parent = _load_parent(collection, meta)
upload_dir = ensure_path(mkdtemp(prefix='aleph.upload.'))
try:
content_hash = None
for storage in request.files.values():
path = safe_filename(storage.filename, default='upload')
path = upload_dir.joinpath(path)
storage.save(str(path))
content_hash = archive.archive_file(path)
document = Document.save(collection=collection,
parent=parent,
foreign_id=foreign_id,
content_hash=content_hash,
meta=meta,
uploader_id=request.authz.id)
collection.touch()
db.session.commit()
proxy = document.to_proxy()
if proxy.schema.is_a(Document.SCHEMA_FOLDER) and sync:
index_proxy(collection, proxy, sync=sync)
ingest_entity(collection, proxy, job_id=job_id, sync=sync)
document_id = collection.ns.sign(document.id)
_notify(collection, document_id)
finally:
shutil.rmtree(upload_dir)
return jsonify({
'status': 'ok',
'id': document_id
}, status=201)
|
py | 1a45f7c7a039bb2055dbdb341785ba8416a7f78c | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class IotHubResourceOperations(object):
"""IotHubResourceOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.iothub.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
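# Usage sketch (added, illustrative only): these operations are normally reached
# through the generated management client rather than instantiated directly, e.g.
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.iothub import IotHubClient
#   client = IotHubClient(DefaultAzureCredential(), subscription_id)
#   hub = client.iot_hub_resource.get(resource_group_name, resource_name)
# Exact client and attribute names depend on the installed package version.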
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.IotHubDescription"
"""Get the non-security related metadata of an IoT hub.
Get the non-security related metadata of an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IotHubDescription, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.IotHubDescription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
resource_name, # type: str
iot_hub_description, # type: "_models.IotHubDescription"
if_match=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.IotHubDescription"
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(iot_hub_description, 'IotHubDescription')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
resource_name, # type: str
iot_hub_description, # type: "_models.IotHubDescription"
if_match=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.IotHubDescription"]
"""Create or update the metadata of an IoT hub.
Create or update the metadata of an Iot hub. The usual pattern to modify a property is to
retrieve the IoT hub metadata and security metadata, and then combine them with the modified
values in a new body to update the IoT hub. If certain properties are missing in the JSON,
updating IoT Hub may cause these values to fall back to default, which may lead to unexpected
behavior.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param iot_hub_description: The IoT hub metadata and security metadata.
:type iot_hub_description: ~azure.mgmt.iothub.v2020_03_01.models.IotHubDescription
:param if_match: ETag of the IoT Hub. Do not specify for creating a brand new IoT Hub. Required
to update an existing IoT Hub.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either IotHubDescription or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iothub.v2020_03_01.models.IotHubDescription]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
iot_hub_description=iot_hub_description,
if_match=if_match,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
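# Usage sketch (added, illustrative only): begin_create_or_update returns an LROPoller,
# so callers typically block on the long-running operation, e.g.
#   poller = client.iot_hub_resource.begin_create_or_update(rg_name, hub_name, hub_description)
#   hub = poller.result()  # waits for provisioning to complete
# where client, rg_name, hub_name and hub_description are placeholder names.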
def _update_initial(
self,
resource_group_name, # type: str
resource_name, # type: str
iot_hub_tags, # type: "_models.TagsResource"
**kwargs # type: Any
):
# type: (...) -> "_models.IotHubDescription"
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(iot_hub_tags, 'TagsResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def begin_update(
self,
resource_group_name, # type: str
resource_name, # type: str
iot_hub_tags, # type: "_models.TagsResource"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.IotHubDescription"]
"""Update an existing IoT Hubs tags.
Update an existing IoT Hub tags. to update other fields use the CreateOrUpdate method.
:param resource_group_name: Resource group identifier.
:type resource_group_name: str
:param resource_name: Name of iot hub to update.
:type resource_name: str
:param iot_hub_tags: Updated tag information to set into the iot hub instance.
:type iot_hub_tags: ~azure.mgmt.iothub.v2020_03_01.models.TagsResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either IotHubDescription or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iothub.v2020_03_01.models.IotHubDescription]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescription"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
iot_hub_tags=iot_hub_tags,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional[Union["_models.IotHubDescription", "_models.ErrorDetails"]]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[Union["_models.IotHubDescription", "_models.ErrorDetails"]]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if response.status_code == 404:
deserialized = self._deserialize('ErrorDetails', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[Union["_models.IotHubDescription", "_models.ErrorDetails"]]
"""Delete an IoT hub.
Delete an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either IotHubDescription or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iothub.v2020_03_01.models.IotHubDescription]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[Union["_models.IotHubDescription", "_models.ErrorDetails"]]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('IotHubDescription', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}'} # type: ignore
def list_by_subscription(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IotHubDescriptionListResult"]
"""Get all the IoT hubs in a subscription.
Get all the IoT hubs in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IotHubDescriptionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.IotHubDescriptionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescriptionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IotHubDescriptionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/IotHubs'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IotHubDescriptionListResult"]
"""Get all the IoT hubs in a resource group.
Get all the IoT hubs in a resource group.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IotHubDescriptionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.IotHubDescriptionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubDescriptionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IotHubDescriptionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs'} # type: ignore
def get_stats(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.RegistryStatistics"
"""Get the statistics from an IoT hub.
Get the statistics from an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryStatistics, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.RegistryStatistics
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryStatistics"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_stats.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('RegistryStatistics', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubStats'} # type: ignore
def get_valid_skus(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IotHubSkuDescriptionListResult"]
"""Get the list of valid SKUs for an IoT hub.
Get the list of valid SKUs for an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IotHubSkuDescriptionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.IotHubSkuDescriptionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubSkuDescriptionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_valid_skus.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IotHubSkuDescriptionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
get_valid_skus.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/skus'} # type: ignore
def list_event_hub_consumer_groups(
self,
resource_group_name, # type: str
resource_name, # type: str
event_hub_endpoint_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.EventHubConsumerGroupsListResult"]
"""Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an IoT hub.
Get a list of the consumer groups in the Event Hub-compatible device-to-cloud endpoint in an
IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint.
:type event_hub_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either EventHubConsumerGroupsListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.EventHubConsumerGroupsListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupsListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_event_hub_consumer_groups.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'eventHubEndpointName': self._serialize.url("event_hub_endpoint_name", event_hub_endpoint_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('EventHubConsumerGroupsListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_event_hub_consumer_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups'} # type: ignore
def get_event_hub_consumer_group(
self,
resource_group_name, # type: str
resource_name, # type: str
event_hub_endpoint_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.EventHubConsumerGroupInfo"
"""Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub.
Get a consumer group from the Event Hub-compatible device-to-cloud endpoint for an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
:type event_hub_endpoint_name: str
:param name: The name of the consumer group to retrieve.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EventHubConsumerGroupInfo, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.EventHubConsumerGroupInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_event_hub_consumer_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'eventHubEndpointName': self._serialize.url("event_hub_endpoint_name", event_hub_endpoint_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('EventHubConsumerGroupInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'} # type: ignore
def create_event_hub_consumer_group(
self,
resource_group_name, # type: str
resource_name, # type: str
event_hub_endpoint_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.EventHubConsumerGroupInfo"
"""Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.
Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
:type event_hub_endpoint_name: str
:param name: The name of the consumer group to add.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EventHubConsumerGroupInfo, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.EventHubConsumerGroupInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EventHubConsumerGroupInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.create_event_hub_consumer_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'eventHubEndpointName': self._serialize.url("event_hub_endpoint_name", event_hub_endpoint_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('EventHubConsumerGroupInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'} # type: ignore
def delete_event_hub_consumer_group(
self,
resource_group_name, # type: str
resource_name, # type: str
event_hub_endpoint_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.
Delete a consumer group from an Event Hub-compatible endpoint in an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param event_hub_endpoint_name: The name of the Event Hub-compatible endpoint in the IoT hub.
:type event_hub_endpoint_name: str
:param name: The name of the consumer group to delete.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.delete_event_hub_consumer_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'eventHubEndpointName': self._serialize.url("event_hub_endpoint_name", event_hub_endpoint_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_event_hub_consumer_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/eventHubEndpoints/{eventHubEndpointName}/ConsumerGroups/{name}'} # type: ignore
def list_jobs(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.JobResponseListResult"]
"""Get a list of all the jobs in an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
Get a list of all the jobs in an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either JobResponseListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.JobResponseListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponseListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_jobs.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('JobResponseListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_jobs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs'} # type: ignore
def get_job(
self,
resource_group_name, # type: str
resource_name, # type: str
job_id, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.JobResponse"
"""Get the details of a job from an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
Get the details of a job from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param job_id: The job identifier.
:type job_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobResponse, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.JobResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_job.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'jobId': self._serialize.url("job_id", job_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('JobResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/jobs/{jobId}'} # type: ignore
def get_quota_metrics(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.IotHubQuotaMetricInfoListResult"]
"""Get the quota metrics for an IoT hub.
Get the quota metrics for an IoT hub.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either IotHubQuotaMetricInfoListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.IotHubQuotaMetricInfoListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubQuotaMetricInfoListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_quota_metrics.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IotHubQuotaMetricInfoListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
get_quota_metrics.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/quotaMetrics'} # type: ignore
def get_endpoint_health(
self,
resource_group_name, # type: str
iot_hub_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.EndpointHealthDataListResult"]
"""Get the health for routing endpoints.
Get the health for routing endpoints.
        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param iot_hub_name: The name of the IoT hub.
        :type iot_hub_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either EndpointHealthDataListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.EndpointHealthDataListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointHealthDataListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_endpoint_health.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'iotHubName': self._serialize.url("iot_hub_name", iot_hub_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('EndpointHealthDataListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
get_endpoint_health.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routingEndpointsHealth'} # type: ignore
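    # Illustrative only: a sketch of checking routing endpoint health,
    # assuming the same hypothetical ``client`` as above and that custom
    # routing endpoints exist on the hub.
    #
    #   for item in client.iot_hub_resource.get_endpoint_health("my-rg", "my-hub"):
    #       print(item.endpoint_id, item.health_status)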
def check_name_availability(
self,
operation_inputs, # type: "_models.OperationInputs"
**kwargs # type: Any
):
# type: (...) -> "_models.IotHubNameAvailabilityInfo"
"""Check if an IoT hub name is available.
Check if an IoT hub name is available.
:param operation_inputs: Set the name parameter in the OperationInputs structure to the name of
the IoT hub to check.
:type operation_inputs: ~azure.mgmt.iothub.v2020_03_01.models.OperationInputs
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IotHubNameAvailabilityInfo, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.IotHubNameAvailabilityInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IotHubNameAvailabilityInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.check_name_availability.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(operation_inputs, 'OperationInputs')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('IotHubNameAvailabilityInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Devices/checkNameAvailability'} # type: ignore
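    # Illustrative only: a hedged sketch of a name-availability check. The
    # ``models`` alias for azure.mgmt.iothub.v2020_03_01.models and the
    # ``client`` object are assumptions.
    #
    #   result = client.iot_hub_resource.check_name_availability(
    #       models.OperationInputs(name="my-candidate-hub"))
    #   print(result.name_available, result.reason, result.message)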
def test_all_routes(
self,
iot_hub_name, # type: str
resource_group_name, # type: str
input, # type: "_models.TestAllRoutesInput"
**kwargs # type: Any
):
# type: (...) -> "_models.TestAllRoutesResult"
"""Test all routes.
Test all routes configured in this Iot Hub.
:param iot_hub_name: IotHub to be tested.
:type iot_hub_name: str
:param resource_group_name: resource group which Iot Hub belongs to.
:type resource_group_name: str
:param input: Input for testing all routes.
:type input: ~azure.mgmt.iothub.v2020_03_01.models.TestAllRoutesInput
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TestAllRoutesResult, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.TestAllRoutesResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.TestAllRoutesResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.test_all_routes.metadata['url'] # type: ignore
path_format_arguments = {
'iotHubName': self._serialize.url("iot_hub_name", iot_hub_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(input, 'TestAllRoutesInput')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('TestAllRoutesResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
test_all_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routing/routes/$testall'} # type: ignore
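    # Illustrative only: evaluating every configured route against a sample
    # message; field names follow TestAllRoutesInput/RoutingMessage in this
    # API version, and ``client``/``models`` remain assumed helpers.
    #
    #   result = client.iot_hub_resource.test_all_routes(
    #       "my-hub", "my-rg",
    #       models.TestAllRoutesInput(
    #           routing_source="DeviceMessages",
    #           message=models.RoutingMessage(body="{}", app_properties={})))
    #   for matched in result.routes or []:
    #       print(matched.properties.name)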
def test_route(
self,
iot_hub_name, # type: str
resource_group_name, # type: str
input, # type: "_models.TestRouteInput"
**kwargs # type: Any
):
# type: (...) -> "_models.TestRouteResult"
"""Test the new route.
Test the new route for this Iot Hub.
:param iot_hub_name: IotHub to be tested.
:type iot_hub_name: str
:param resource_group_name: resource group which Iot Hub belongs to.
:type resource_group_name: str
:param input: Route that needs to be tested.
:type input: ~azure.mgmt.iothub.v2020_03_01.models.TestRouteInput
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TestRouteResult, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.TestRouteResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.TestRouteResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.test_route.metadata['url'] # type: ignore
path_format_arguments = {
'iotHubName': self._serialize.url("iot_hub_name", iot_hub_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(input, 'TestRouteInput')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('TestRouteResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
test_route.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routing/routes/$testnew'} # type: ignore
def list_keys(
self,
resource_group_name, # type: str
resource_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SharedAccessSignatureAuthorizationRuleListResult"]
"""Get the security metadata for an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
Get the security metadata for an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SharedAccessSignatureAuthorizationRuleListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2020_03_01.models.SharedAccessSignatureAuthorizationRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SharedAccessSignatureAuthorizationRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_keys.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SharedAccessSignatureAuthorizationRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/listkeys'} # type: ignore
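    # Illustrative only: enumerating shared access policies for a hub. Note
    # that the first page is requested with POST while subsequent pages use
    # GET on ``next_link`` (see ``prepare_request`` above).
    #
    #   for rule in client.iot_hub_resource.list_keys("my-rg", "my-hub"):
    #       print(rule.key_name, rule.rights)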
def get_keys_for_key_name(
self,
resource_group_name, # type: str
resource_name, # type: str
key_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.SharedAccessSignatureAuthorizationRule"
"""Get a shared access policy by name from an IoT hub. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
Get a shared access policy by name from an IoT hub. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-security.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param key_name: The name of the shared access policy.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SharedAccessSignatureAuthorizationRule, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.SharedAccessSignatureAuthorizationRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SharedAccessSignatureAuthorizationRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_keys_for_key_name.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'keyName': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('SharedAccessSignatureAuthorizationRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_keys_for_key_name.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/IotHubKeys/{keyName}/listkeys'} # type: ignore
def export_devices(
self,
resource_group_name, # type: str
resource_name, # type: str
export_devices_parameters, # type: "_models.ExportDevicesRequest"
**kwargs # type: Any
):
# type: (...) -> "_models.JobResponse"
"""Exports all the device identities in the IoT hub identity registry to an Azure Storage blob container. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
Exports all the device identities in the IoT hub identity registry to an Azure Storage blob
container. For more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param export_devices_parameters: The parameters that specify the export devices operation.
:type export_devices_parameters: ~azure.mgmt.iothub.v2020_03_01.models.ExportDevicesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobResponse, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.JobResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.export_devices.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(export_devices_parameters, 'ExportDevicesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('JobResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
export_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/exportDevices'} # type: ignore
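    # Illustrative only: starting a device-identity export job. The SAS URI
    # is a placeholder and ``client``/``models`` are assumed as above.
    #
    #   job = client.iot_hub_resource.export_devices(
    #       "my-rg", "my-hub",
    #       models.ExportDevicesRequest(
    #           export_blob_container_uri="https://acct.blob.core.windows.net/export?sv=...",
    #           exclude_keys=True))
    #   print(job.job_id, job.status)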
def import_devices(
self,
resource_group_name, # type: str
resource_name, # type: str
import_devices_parameters, # type: "_models.ImportDevicesRequest"
**kwargs # type: Any
):
# type: (...) -> "_models.JobResponse"
"""Import, update, or delete device identities in the IoT hub identity registry from a blob. For more information, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
Import, update, or delete device identities in the IoT hub identity registry from a blob. For
more information, see:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-identity-registry#import-and-export-device-identities.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param import_devices_parameters: The parameters that specify the import devices operation.
:type import_devices_parameters: ~azure.mgmt.iothub.v2020_03_01.models.ImportDevicesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobResponse, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.JobResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.import_devices.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(import_devices_parameters, 'ImportDevicesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('JobResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
import_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{resourceName}/importDevices'} # type: ignore
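    # Illustrative only: importing device identities from a previously
    # exported blob; ImportDevicesRequest takes both an input and an output
    # container URI in this API version. URIs below are placeholders.
    #
    #   job = client.iot_hub_resource.import_devices(
    #       "my-rg", "my-hub",
    #       models.ImportDevicesRequest(
    #           input_blob_container_uri="https://acct.blob.core.windows.net/in?sv=...",
    #           output_blob_container_uri="https://acct.blob.core.windows.net/out?sv=..."))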
|
py | 1a45f89e31359e12f4dd4358ef8a8dff873fc3c2 | from PyQt5 import QtCore, QtGui
from PyQt5.QtWidgets import *
class Ui_MainWindow(object):
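    """UI definition for the main window of the logging software.

    ``setupUi`` builds the widget tree onto the ``QMainWindow`` passed in and
    calls ``retranslateUi`` to set all user-visible strings; the class itself
    holds no application logic.
    """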
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(831, 682)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("logo.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setAutoFillBackground(False)
self.default_palette = QApplication.palette()
self.default_style = QApplication.style()
self.centralwidget = QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_4 = QGridLayout(self.centralwidget)
self.gridLayout_4.setObjectName("gridLayout_4")
self.mainlayout = QGridLayout()
self.mainlayout.setObjectName("mainlayout")
spacerItem = QSpacerItem(0, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.mainlayout.addItem(spacerItem, 7, 0, 1, 1)
self.horizontalLayout_2 = QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.verticalLayout = QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
font = QtGui.QFont()
font.setFamily("Arial Black")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.title_welcome = QLabel(self.centralwidget)
self.title_welcome.setBaseSize(QtCore.QSize(800, 25))
self.title_welcome.setFont(font)
self.title_welcome.setAlignment(QtCore.Qt.AlignCenter)
self.title_welcome.setObjectName("title_welcome")
font = QtGui.QFont()
font.setFamily("Arial Black")
font.setPointSize(14)
font.setBold(True)
font.setWeight(75)
self.titlename = QLabel(self.centralwidget)
self.titlename.setBaseSize(QtCore.QSize(800, 30))
self.titlename.setFont(font)
self.titlename.setAlignment(QtCore.Qt.AlignCenter)
self.titlename.setObjectName("titlename")
self.software_info1 = QLabel(self.centralwidget)
self.software_info1.setBaseSize(QtCore.QSize(800, 15))
self.software_info1.setObjectName("software_info1")
self.software_info2 = QLabel(self.centralwidget)
self.software_info2.setBaseSize(QtCore.QSize(800, 15))
self.software_info2.setObjectName("software_info2")
self.software_info3 = QLabel(self.centralwidget)
self.software_info3.setBaseSize(QtCore.QSize(800, 17))
self.software_info3.setObjectName("software_info3")
self.mainlayout.addWidget(self.title_welcome, 0, 1, 1, 1)
self.mainlayout.addWidget(self.titlename, 1, 1, 1, 1)
self.mainlayout.addWidget(self.software_info1, 2, 1, 1, 1)
self.mainlayout.addWidget(self.software_info2, 3, 1, 1, 1)
self.mainlayout.addWidget(self.software_info3, 4, 1, 1, 1)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(8)
self.gratinglayout = QGridLayout()
self.gratinglayout.setObjectName("gratinglayout")
self.groove_label = QLabel(self.centralwidget)
self.groove_label.setFont(font)
self.groove_label.setObjectName("groove_label")
self.groove_form = QComboBox(self.centralwidget)
self.groove_form.setObjectName("groove_form")
self.groove_form.addItem("")
self.groove_form.addItem("")
self.groove_form.addItem("")
self.micro_setting_label = QLabel(self.centralwidget)
self.micro_setting_label.setFont(font)
self.micro_setting_label.setObjectName("micro_setting_label")
self.micro_setting_form = QDoubleSpinBox(self.centralwidget, value=9.00, maximum=22.00, minimum=0.00, singleStep=0.01)
self.micro_setting_form.setObjectName("micro_setting_form")
self.feature_label = QLabel(self.centralwidget)
self.feature_label.setFont(font)
self.feature_label.setObjectName("feature_label")
self.feature_form = QLineEdit(self.centralwidget)
self.feature_form.setObjectName("feature_form")
self.gratinglayout.addWidget(self.groove_label, 0, 0, 1, 1)
self.gratinglayout.addWidget(self.groove_form, 0, 1, 1, 1)
self.gratinglayout.addWidget(self.micro_setting_label, 0, 2, 1, 1)
self.gratinglayout.addWidget(self.micro_setting_form, 0, 3, 1, 1)
self.gratinglayout.addWidget(self.feature_label, 0, 4, 1, 1)
self.gratinglayout.addWidget(self.feature_form, 0, 5, 1, 1)
self.verticalLayout.addLayout(self.gratinglayout)
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.observer_label = QLabel(self.centralwidget)
self.observer_label.setFont(font)
self.observer_label.setObjectName("observer_label")
self.observer_form = QTextEdit(self.centralwidget)
self.observer_form.setObjectName("observer_form")
        self.observer_form.lineWrapMode()  # getter only; the QTextEdit keeps its default wrap mode
self.lamplayout = QGridLayout()
self.lamplayout.setObjectName("lamplayout")
self.lamp_label = QLabel(self.centralwidget)
self.lamp_label.setFont(font)
self.lamp_label.setObjectName("lamp_label")
self.lamp_form = QComboBox(self.centralwidget)
self.lamp_form.setObjectName("lamp_form")
self.lamp_form.addItem("")
self.lamp_form.addItem("")
spacerItem1 = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
spacerItem2 = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.horizontalLayout.addWidget(self.observer_label)
self.horizontalLayout.addWidget(self.observer_form)
self.lamplayout.addWidget(self.lamp_label, 2, 1, 1, 1)
self.lamplayout.addWidget(self.lamp_form, 3, 1, 1, 1)
self.lamplayout.addItem(spacerItem1, 0, 1, 1, 1)
self.lamplayout.addItem(spacerItem2, 4, 1, 1, 1)
self.horizontalLayout.addLayout(self.lamplayout)
self.verticalLayout.addLayout(self.horizontalLayout)
self.gridLayout_9 = QGridLayout()
self.gridLayout_9.setObjectName("gridLayout_9")
self.datatype_label = QLabel(self.centralwidget)
self.datatype_label.setAlignment(QtCore.Qt.AlignCenter)
self.datatype_label.setObjectName("datatype_label")
self.datatype_form = QComboBox(self.centralwidget)
self.datatype_form.setObjectName("datatype_form")
self.datatype_form.addItem("")
self.datatype_form.addItem("")
self.datatype_form.addItem("")
self.datatype_form.addItem("")
self.datatype_form.addItem("")
self.datatype_form.setEnabled(False)
self.objname_label = QLabel(self.centralwidget)
self.objname_label.setAlignment(QtCore.Qt.AlignCenter)
self.objname_label.setObjectName("objname_label")
self.objname_form = QLineEdit(self.centralwidget)
self.objname_form.setText("")
self.objname_form.setObjectName("objname_form")
self.objname_form.setEnabled(False)
self.filename_label = QLabel(self.centralwidget)
self.filename_label.setAlignment(QtCore.Qt.AlignCenter)
self.filename_label.setObjectName("filename_label")
self.filename_form = QLineEdit(self.centralwidget)
self.filename_form.setObjectName("filename_form")
self.filename_form.setEnabled(False)
self.bin_label = QLabel(self.centralwidget)
self.bin_label.setAlignment(QtCore.Qt.AlignCenter)
self.bin_label.setObjectName("bin_label")
self.bin_form = QComboBox(self.centralwidget)
self.bin_form.setObjectName("bin_form")
self.bin_form.addItem("")
self.bin_form.addItem("")
self.bin_form.addItem("")
self.bin_form.addItem("")
self.bin_form.setEnabled(False)
self.exptime_label = QLabel(self.centralwidget)
self.exptime_label.setAlignment(QtCore.Qt.AlignCenter)
self.exptime_label.setObjectName("exptime_label")
self.exptime_form = QDoubleSpinBox(self.centralwidget, value=0., maximum=10000.00, minimum=0.00, singleStep=0.01)
self.exptime_form.setObjectName("exptime_form")
self.exptime_form.setEnabled(False)
self.nexp_label = QLabel(self.centralwidget)
self.nexp_label.setAlignment(QtCore.Qt.AlignCenter)
self.nexp_label.setObjectName("nexp_label")
        self.nexp_form = QSpinBox(self.centralwidget, value=1, maximum=50, minimum=1, singleStep=1)
self.nexp_form.setObjectName("nexp_form")
self.nexp_form.setEnabled(False)
self.gridLayout_9.addWidget(self.datatype_label, 0, 0, 1, 1)
self.gridLayout_9.addWidget(self.datatype_form, 1, 0, 1, 1)
self.gridLayout_9.addWidget(self.objname_label, 0, 1, 1, 1)
self.gridLayout_9.addWidget(self.objname_form, 1, 1, 1, 1)
self.gridLayout_9.addWidget(self.filename_label, 0, 2, 1, 1)
self.gridLayout_9.addWidget(self.filename_form, 1, 2, 1, 1)
self.gridLayout_9.addWidget(self.bin_label, 0, 3, 1, 1)
self.gridLayout_9.addWidget(self.bin_form, 1, 3, 1, 1)
self.gridLayout_9.addWidget(self.exptime_label, 0, 4, 1, 1)
self.gridLayout_9.addWidget(self.exptime_form, 1, 4, 1, 1)
self.gridLayout_9.addWidget(self.nexp_label, 0, 5, 1, 1)
self.gridLayout_9.addWidget(self.nexp_form, 1, 5, 1, 1)
self.gridLayout_9.setColumnMinimumWidth(1, 1)
self.gridLayout_9.setColumnMinimumWidth(2, 2)
self.gridLayout_9.setColumnMinimumWidth(4, 1)
self.gridLayout_9.setColumnStretch(1, 1)
self.gridLayout_9.setColumnStretch(2, 2)
self.gridLayout_9.setColumnStretch(4, 1)
self.verticalLayout.addLayout(self.gridLayout_9)
self.verticalLayout.setStretch(0, 1)
self.verticalLayout.setStretch(1, 2)
self.verticalLayout.setStretch(2, 1)
self.horizontalLayout_2.addLayout(self.verticalLayout)
self.line_2 = QFrame(self.centralwidget)
self.line_2.setFrameShape(QFrame.VLine)
self.line_2.setObjectName("line_2")
self.horizontalLayout_2.addWidget(self.line_2)
self.gridLayout = QGridLayout()
self.gridLayout.setSizeConstraint(QLayout.SetMinimumSize)
self.gridLayout.setObjectName("gridLayout")
self.ccdtemp_label = QLabel(self.centralwidget)
self.ccdtemp_label.setFont(font)
self.ccdtemp_label.setObjectName("ccdtemp_label")
self.ccdtemp_form = QDoubleSpinBox(self.centralwidget, maximum=30.00, minimum=-20.00, singleStep=0.01)
self.ccdtemp_form.setObjectName("ccdtemp_form")
self.ccdtemp_form.setValue(-10.)
self.ambtemp_label = QLabel(self.centralwidget)
self.ambtemp_label.setFont(font)
self.ambtemp_label.setObjectName("ambtemp_label")
self.ambtemp_form = QDoubleSpinBox(self.centralwidget, value=20.00, maximum=50.00, minimum=0.00, singleStep=0.01)
self.ambtemp_form.setObjectName("ambtemp_form")
self.ambhum_label = QLabel(self.centralwidget)
self.ambhum_label.setFont(font)
self.ambhum_label.setObjectName("ambhum_label")
self.ambhum_form = QDoubleSpinBox(self.centralwidget, value=50.00, maximum=100.00, minimum=0.00, singleStep=0.01)
self.ambhum_form.setObjectName("ambhum_form")
self.skycond_label = QLabel(self.centralwidget)
self.skycond_label.setFont(font)
self.skycond_label.setObjectName("skycond_label")
self.skycond_form = QComboBox(self.centralwidget)
self.skycond_form.setObjectName("skycond_form")
self.skycond_form.addItem("")
self.skycond_form.addItem("")
self.skycond_form.addItem("")
self.skycond_form.addItem("")
self.skycond_form.addItem("")
self.comment_label = QLabel(self.centralwidget)
self.comment_label.setFont(font)
self.comment_label.setObjectName("comment_label")
self.comment_form = QLineEdit(self.centralwidget)
self.comment_form.setObjectName("comment_form")
self.gridLayout.addWidget(self.ccdtemp_label, 0, 0, 1, 2)
self.gridLayout.addWidget(self.ccdtemp_form, 0, 2, 1, 1)
self.gridLayout.addWidget(self.ambtemp_label, 1, 0, 1, 2)
self.gridLayout.addWidget(self.ambtemp_form, 1, 2, 1, 1)
self.gridLayout.addWidget(self.ambhum_label, 2, 0, 1, 2)
self.gridLayout.addWidget(self.ambhum_form, 2, 2, 1, 1)
self.gridLayout.addWidget(self.skycond_label, 3, 0, 1, 1)
self.gridLayout.addWidget(self.skycond_form, 3, 1, 1, 2)
self.gridLayout.addWidget(self.comment_label, 4, 0, 1, 1)
self.gridLayout.addWidget(self.comment_form, 4, 1, 1, 2)
self.gridLayout.setRowMinimumHeight(0, 1)
self.gridLayout.setRowMinimumHeight(1, 1)
self.gridLayout.setRowMinimumHeight(2, 1)
self.gridLayout.setRowMinimumHeight(3, 1)
self.gridLayout.setRowMinimumHeight(4, 1)
self.horizontalLayout_2.addLayout(self.gridLayout)
self.horizontalLayout_2.setStretch(0, 5)
self.horizontalLayout_2.setStretch(2, 2)
self.mainlayout.addLayout(self.horizontalLayout_2, 6, 1, 1, 1)
self.tableview = QTableView(self.centralwidget)
self.tableview.setEnabled(True)
self.tableview.setMinimumSize(QtCore.QSize(800, 280))
self.tableview.setBaseSize(QtCore.QSize(800, 280))
self.tableview.setObjectName("tableview")
self.tableview.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
self.mainlayout.addWidget(self.tableview, 7, 1, 1, 1)
self.confirmlayout = QGridLayout()
self.confirmlayout.setObjectName("confirmlayout")
self.nightmode_check = QCheckBox(self.centralwidget)
self.nightmode_check.setObjectName("nightmode_check")
self.startlog_button = QPushButton(self.centralwidget)
self.startlog_button.setObjectName("startlog_button")
self.updatelog_button = QPushButton(self.centralwidget)
self.updatelog_button.setEnabled(False)
self.updatelog_button.setObjectName("updatelog_button")
self.savelog_button = QPushButton(self.centralwidget)
self.savelog_button.setEnabled(False)
self.savelog_button.setObjectName("savelog_button")
self.resetlog_button = QPushButton(self.centralwidget)
self.resetlog_button.setEnabled(False)
self.resetlog_button.setObjectName("resetlog_button")
self.closewindow_button = QPushButton(self.centralwidget)
self.closewindow_button.setObjectName("closewindow_button")
spacerItem3 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
self.confirmlayout.addWidget(self.nightmode_check, 0, 0, 1, 1)
self.confirmlayout.addItem(spacerItem3, 0, 1, 1, 1)
self.confirmlayout.addWidget(self.startlog_button, 0, 2, 1, 1)
self.confirmlayout.addWidget(self.updatelog_button, 0, 3, 1, 1)
self.confirmlayout.addWidget(self.savelog_button, 0, 4, 1, 1)
self.confirmlayout.addWidget(self.resetlog_button, 0, 5, 1, 1)
self.confirmlayout.addWidget(self.closewindow_button, 0, 6, 1, 1)
self.mainlayout.addLayout(self.confirmlayout, 8, 1, 1, 1)
self.line = QFrame(self.centralwidget)
self.line.setFrameShape(QFrame.HLine)
self.line.setObjectName("line")
self.mainlayout.addWidget(self.line, 5, 1, 1, 1)
self.mainlayout.setRowStretch(6, 1)
self.mainlayout.setRowStretch(7, 3)
self.gridLayout_4.addLayout(self.mainlayout, 0, 0, 1, 1)
spacerItem4 = QSpacerItem(40, 0, QSizePolicy.Expanding, QSizePolicy.Minimum)
self.gridLayout_4.addItem(spacerItem4, 1, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.groove_label.setBuddy(self.groove_form)
self.micro_setting_label.setBuddy(self.micro_setting_form)
self.feature_label.setBuddy(self.feature_form)
self.observer_label.setBuddy(self.observer_form)
self.lamp_label.setBuddy(self.lamp_form)
self.ccdtemp_label.setBuddy(self.ccdtemp_form)
self.ambtemp_label.setBuddy(self.ambtemp_form)
self.skycond_label.setBuddy(self.skycond_form)
self.ambhum_label.setBuddy(self.ambhum_form)
self.comment_label.setBuddy(self.comment_form)
self.datatype_label.setBuddy(self.datatype_form)
self.objname_label.setBuddy(self.objname_form)
self.filename_label.setBuddy(self.filename_form)
self.bin_label.setBuddy(self.bin_form)
self.exptime_label.setBuddy(self.exptime_form)
self.nexp_label.setBuddy(self.nexp_form)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.nightmode_check, self.startlog_button)
MainWindow.setTabOrder(self.startlog_button, self.updatelog_button)
MainWindow.setTabOrder(self.updatelog_button, self.savelog_button)
MainWindow.setTabOrder(self.savelog_button, self.resetlog_button)
MainWindow.setTabOrder(self.resetlog_button, self.closewindow_button)
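    # Typical usage of this Designer-style class (a minimal sketch; the real
    # application entry point may differ):
    #
    #   import sys
    #   app = QApplication(sys.argv)
    #   window = QMainWindow()
    #   ui = Ui_MainWindow()
    #   ui.setupUi(window)
    #   window.show()
    #   sys.exit(app.exec_())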
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "LHIRES Telescope Data Logging Software"))
self.title_welcome.setText(_translate("MainWindow", "Welcome to"))
self.titlename.setText(_translate("MainWindow", "LHIRES Telescope Data Logging Software"))
self.software_info2.setText(_translate("MainWindow", "version: 2.0"))
self.software_info1.setText(_translate("MainWindow", "by: Irfan Imaduddin"))
self.software_info3.setText(_translate("MainWindow", "contact: irfanimaduddin[at]gmail[dot]com"))
self.groove_label.setText(_translate("MainWindow", "Grating"))
self.groove_form.setItemText(0, _translate("MainWindow", "600 g/mm"))
self.groove_form.setItemText(1, _translate("MainWindow", "1200 g/mm"))
self.groove_form.setItemText(2, _translate("MainWindow", "2400 g/mm"))
self.micro_setting_label.setText(_translate("MainWindow", "Micrometer Setting"))
self.feature_label.setText(_translate("MainWindow", "Spectral Feature"))
self.observer_label.setText(_translate("MainWindow", "Observer(s)"))
self.lamp_label.setText(_translate("MainWindow", "Comparison Lamp"))
self.lamp_form.setItemText(0, _translate("MainWindow", "NeAr"))
self.lamp_form.setItemText(1, _translate("MainWindow", "FeNeAr"))
self.datatype_label.setText(_translate("MainWindow", "Datatype"))
self.datatype_form.setItemText(0, _translate("MainWindow", "Object"))
self.datatype_form.setItemText(1, _translate("MainWindow", "Comparison"))
self.datatype_form.setItemText(2, _translate("MainWindow", "Bias"))
self.datatype_form.setItemText(3, _translate("MainWindow", "Dark"))
self.datatype_form.setItemText(4, _translate("MainWindow", "Flat"))
self.objname_label.setText(_translate("MainWindow", "Object Name"))
self.filename_label.setText(_translate("MainWindow", "File Name"))
self.bin_label.setText(_translate("MainWindow", "Bin"))
self.bin_form.setItemText(0, _translate("MainWindow", "1"))
self.bin_form.setItemText(1, _translate("MainWindow", "2"))
self.bin_form.setItemText(2, _translate("MainWindow", "3"))
self.bin_form.setItemText(3, _translate("MainWindow", "4"))
self.exptime_label.setText(_translate("MainWindow", "Exptime"))
self.nexp_label.setText(_translate("MainWindow", "N"))
self.ccdtemp_label.setText(_translate("MainWindow", "CCD Temperature (C)"))
self.ambtemp_label.setText(_translate("MainWindow", "Ambient Temperature (C)"))
self.ambhum_label.setText(_translate("MainWindow", "Ambient Humidity (%)"))
self.skycond_label.setText(_translate("MainWindow", "Sky Condition"))
self.skycond_form.setItemText(0, _translate("MainWindow", "Good"))
self.skycond_form.setItemText(1, _translate("MainWindow", "Variable"))
self.skycond_form.setItemText(2, _translate("MainWindow", "Hazy"))
self.skycond_form.setItemText(3, _translate("MainWindow", "Cloudy"))
self.skycond_form.setItemText(4, _translate("MainWindow", "Rain"))
self.comment_label.setText(_translate("MainWindow", "Comment(s)"))
self.nightmode_check.setText(_translate("MainWindow", "Night Mode"))
self.startlog_button.setText(_translate("MainWindow", "Start"))
self.updatelog_button.setText(_translate("MainWindow", "Update"))
self.savelog_button.setText(_translate("MainWindow", "End && Save"))
self.resetlog_button.setText(_translate("MainWindow", "Reset"))
self.closewindow_button.setText(_translate("MainWindow", "Exit")) |
py | 1a45fa474f344703b56ff5e7cefcd26aff98d8af | # Hyphens are not valid module names.
|
py | 1a45fa8175e4777a1a06c41128e8193f91e1ef16 | import os
import json
import pytest
import mro
import mro.foreign_keys
import connection as con
@pytest.fixture(scope="module")
def connection(request):
connection = con.connect()
if request:
request.addfinalizer(mro.disconnect)
cursor = connection.cursor()
con.drop_tables()
cursor.execute("""create table table1 (
id serial,
name varchar(20) not null,
primary key (id)
);""")
cursor.execute("""create table table2 (
id serial,
name varchar(20) not null,
table1_id integer,
primary key (id),
foreign key (table1_id) references table1(id)
);""")
cursor.execute("""create table table3 (
id serial,
name varchar(20) not null,
table4s varchar(20),
primary key (id)
);""")
cursor.execute("""create table table4 (
id serial,
name varchar(20) not null,
table3_id integer,
primary key (id),
foreign key (table3_id) references table3(id)
);""")
connection.commit()
create_test_data(connection)
connection.close()
mro.load_database(lambda: con.connect())
return connection
def create_test_data(connection):
cursor = connection.cursor()
num_table1 = 2
for i in range(1,num_table1+1):
cursor.execute("insert into table1 (name) values (%s)", ('table1_{}'.format(i),))
for j in range(1,4):
cursor.execute("insert into table2 (name, table1_id) values (%s,%s)", ('table2_{}_{}'.format(i, j), i))
# edge cases
cursor.execute("insert into table2 (name, table1_id) values (%s,%s)", ('table2_None', None))
cursor.execute("insert into table1 (name) values (%s)", ('table1_None',))
connection.commit()
class TestForeignKeys(object):
def test_read_foreign_key(self, connection):
table = mro.table2.select_one('table1_id is not null')
assert isinstance(table.table1_id.value, int)
assert table.table1_id.value != None
assert isinstance(table.table1_id.object, mro.table1)
assert table.table1_id.object != None
# check the _i matches up for both tables
assert table.name.startswith(table.table1_id.object.name.replace("table1", "table2"))
def test_null_foreign_key(self, connection):
table = mro.table2.select_one('table1_id is null')
assert table.table1_id.value == None
assert table.table1_id.object == None
def test_read_foreign_keys_reverse(self, connection):
name = None
table2s = mro.table2.select()
table1_refs = [str(x.table1_id.value) for x in table2s if x.table1_id.value is not None]
table = mro.table1.select_one('id in (' + ','.join(table1_refs) + ')')
assert table.name != None
assert table.table2s != None
assert len(table.table2s) > 1
assert table.table2s[0].name == 'table2_1_1'
num_table2s = len(table.table2s)
mro.table2(name = 'table2_added', table1_id = table.id)
assert len(table.table2s) == num_table2s
table.table2s() # updates the reference list
assert len(table.table2s) == num_table2s + 1
num_table2s = len(table.table2s)
table2 = mro.table2(name = 'table2_added2', table1_id = None)
assert len(table.table2s) == num_table2s
with pytest.raises(PermissionError) as excinfo:
table.table2s[0] = table2
assert excinfo.value.args[0] == "Cannot set specific value on foreign key reference list."
table.table2s.append(table2)
assert len(table.table2s) == num_table2s + 1
# make sure the change is reflected in the database
table.table2s() # updates the reference list
assert len(table.table2s) == num_table2s + 1
def test_read_foreign_keys_reverse_no_data(self, connection):
table2s = mro.table2.select()
table1_refs = [str(x.table1_id.value) for x in table2s if x.table1_id.value is not None]
table = mro.table1.select_one('id not in (' + ','.join(table1_refs) + ')')
assert table.name != None
table2s = table.table2s
assert not table2s
def test_insert_class_that_has_foreign_references(self, connection):
mro.table1(name = 'Bob')
table = mro.table3(name = 'Bob2')
# test that it's a varchar not a foreign key reference
table.table4s = 'test string'
def test_write_foreign_keys(self, connection):
table1 = mro.table1.select_one()
table2sCount = len(table1.table2s)
table2 = mro.table2(name = 'table2_added2', table1_id = None)
table3 = mro.table2(name='table2_added3', table1_id=None)
table2.table1_id = table1
table3.table1_id = table1.id
assert table2.table1_id.value == table1.id
assert table2sCount == len(table1.table2s)
table1.table2s()
assert table2sCount + 2 == len(table1.table2s)
def test_foreign_keys_shortcuts(self, connection):
table1 = mro.table1.select_one()
table2sCount = len(table1.table2s)
table2 = mro.table2(name = 'table2_added2', table1_id = None)
table2.table1_id = table1
table3 = mro.table2(name='table2_added3', table1_id=table1.id)
assert table2.table1_id == table1.id
assert (table2.table1_id != table1.id) == False
assert table2.table1_id == table3.table1_id
assert table2sCount == len(table1.table2s)
table1.table2s()
assert table2sCount + 2 == len(table1.table2s)
def test_foreign_keys_to_json(self, connection):
table1 = mro.table1.select_one()
table2 = mro.table2(name='table2_added2', table1_id=table1.id)
table3 = mro.table2(name='table2_added3', table1_id=None)
serialised = json.dumps({"foreign_key": table2.table1_id, "foreign_key2": table3.table1_id})
assert serialised == '{"foreign_key": 1, "foreign_key2": null}' or serialised == '{"foreign_key2": null, "foreign_key": 1}'
if __name__ == '__main__':
#pytest.main([__file__])
#pytest.main([__file__ + '::TestForeignKeys::test_foreign_keys_shortcuts'])
t = TestForeignKeys()
t.test_foreign_keys_to_json(connection(None)) |
py | 1a45fb46cdddb13dcb20ed7b1c6baa0f7e023d41 | r"""
Elements of bounded height in number fields
Sage functions to list all elements of a given number field with height less
than a specified bound.
AUTHORS:
- John Doyle (2013): initial version
- David Krumm (2013): initial version
- TJ Combs (2018): added Doyle-Krumm algorithm - 4
- Raghukul Raman (2018): added Doyle-Krumm algorithm - 4
REFERENCES:
- [DK2013]
"""
# ****************************************************************************
# Copyright (C) 2013 John Doyle and David Krumm
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from __future__ import print_function, division
from six.moves import range
from copy import copy
from itertools import product
from sage.rings.real_mpfr import RealField
from sage.rings.number_field.unit_group import UnitGroup
from sage.modules.free_module_element import vector
from sage.matrix.constructor import column_matrix
from sage.rings.rational_field import QQ
from sage.rings.all import RR, Infinity
from sage.geometry.polyhedron.constructor import Polyhedron
def bdd_norm_pr_gens_iq(K, norm_list):
r"""
Compute generators for all principal ideals in an imaginary quadratic field
`K` whose norms are in ``norm_list``.
The only keys for the output dictionary are integers n appearing in
``norm_list``.
The function will only be called with `K` an imaginary quadratic field.
The function will return a dictionary for other number fields, but it may be
incorrect.
INPUT:
- `K` -- an imaginary quadratic number field
- ``norm_list`` -- a list of positive integers
OUTPUT:
- a dictionary of number field elements, keyed by norm
EXAMPLES:
In `QQ(i)`, there is one principal ideal of norm 4, two principal ideals of
norm 5, but no principal ideals of norm 7::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_gens_iq
sage: K.<g> = NumberField(x^2 + 1)
sage: L = range(10)
sage: bdd_pr_ideals = bdd_norm_pr_gens_iq(K, L)
sage: bdd_pr_ideals[4]
[2]
sage: bdd_pr_ideals[5]
[-g - 2, -g + 2]
sage: bdd_pr_ideals[7]
[]
There are no ideals in the ring of integers with negative norm::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_gens_iq
sage: K.<g> = NumberField(x^2 + 10)
sage: L = range(-5,-1)
sage: bdd_pr_ideals = bdd_norm_pr_gens_iq(K,L)
sage: bdd_pr_ideals
{-5: [], -4: [], -3: [], -2: []}
Calling a key that is not in the input ``norm_list`` raises a KeyError::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_gens_iq
sage: K.<g> = NumberField(x^2 + 20)
sage: L = range(100)
sage: bdd_pr_ideals = bdd_norm_pr_gens_iq(K, L)
sage: bdd_pr_ideals[100]
Traceback (most recent call last):
...
KeyError: 100
"""
return {n: K.elements_of_norm(n) for n in norm_list}
def bdd_height_iq(K, height_bound):
r"""
Compute all elements in the imaginary quadratic field `K` which have
relative multiplicative height at most ``height_bound``.
The function will only be called with `K` an imaginary quadratic field.
If called with `K` not an imaginary quadratic, the function will likely
yield incorrect output.
ALGORITHM:
This is an implementation of Algorithm 5 in [DK2013]_.
INPUT:
- `K` -- an imaginary quadratic number field
- ``height_bound`` -- a real number
OUTPUT:
- an iterator of number field elements
EXAMPLES::
sage: from sage.rings.number_field.bdd_height import bdd_height_iq
sage: K.<a> = NumberField(x^2 + 191)
sage: for t in bdd_height_iq(K,8):
....: print(exp(2*t.global_height()))
1.00000000000000
1.00000000000000
1.00000000000000
4.00000000000000
4.00000000000000
4.00000000000000
4.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
8.00000000000000
There are 175 elements of height at most 10 in `QQ(\sqrt(-3))`::
sage: from sage.rings.number_field.bdd_height import bdd_height_iq
sage: K.<a> = NumberField(x^2 + 3)
sage: len(list(bdd_height_iq(K,10)))
175
The only elements of multiplicative height 1 in a number field are 0 and
the roots of unity::
sage: from sage.rings.number_field.bdd_height import bdd_height_iq
sage: K.<a> = NumberField(x^2 + x + 1)
sage: list(bdd_height_iq(K,1))
[0, a + 1, a, -1, -a - 1, -a, 1]
A number field has no elements of multiplicative height less than 1::
sage: from sage.rings.number_field.bdd_height import bdd_height_iq
sage: K.<a> = NumberField(x^2 + 5)
sage: list(bdd_height_iq(K,0.9))
[]
"""
if height_bound < 1:
return
yield K(0)
roots_of_unity = K.roots_of_unity()
for zeta in roots_of_unity:
yield zeta
# Get a complete set of ideal class representatives
class_group_reps = []
class_group_rep_norms = []
for c in K.class_group():
a = c.ideal()
class_group_reps.append(a)
class_group_rep_norms.append(a.norm())
class_number = len(class_group_reps)
# Find principal ideals of bounded norm
possible_norm_set = set([])
for n in range(class_number):
for m in range(1, int(height_bound + 1)):
possible_norm_set.add(m*class_group_rep_norms[n])
bdd_ideals = bdd_norm_pr_gens_iq(K, possible_norm_set)
# Distribute the principal ideals
generator_lists = []
for n in range(class_number):
this_ideal = class_group_reps[n]
this_ideal_norm = class_group_rep_norms[n]
gens = []
for i in range(1, int(height_bound + 1)):
for g in bdd_ideals[i*this_ideal_norm]:
if g in this_ideal:
gens.append(g)
generator_lists.append(gens)
# Build all the output numbers
for n in range(class_number):
gens = generator_lists[n]
s = len(gens)
for i in range(s):
for j in range(i + 1, s):
if K.ideal(gens[i], gens[j]) == class_group_reps[n]:
new_number = gens[i]/gens[j]
for zeta in roots_of_unity:
yield zeta * new_number
yield zeta / new_number
def bdd_norm_pr_ideal_gens(K, norm_list):
r"""
Compute generators for all principal ideals in a number field `K` whose
norms are in ``norm_list``.
INPUT:
- `K` -- a number field
- ``norm_list`` -- a list of positive integers
OUTPUT:
- a dictionary of number field elements, keyed by norm
EXAMPLES:
There is only one principal ideal of norm 1, and it is generated by the
element 1::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_ideal_gens
sage: K.<g> = QuadraticField(101)
sage: bdd_norm_pr_ideal_gens(K, [1])
{1: [1]}
::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_ideal_gens
sage: K.<g> = QuadraticField(123)
sage: bdd_norm_pr_ideal_gens(K, range(5))
{0: [0], 1: [1], 2: [-g - 11], 3: [], 4: [2]}
::
sage: from sage.rings.number_field.bdd_height import bdd_norm_pr_ideal_gens
sage: K.<g> = NumberField(x^5 - x + 19)
sage: b = bdd_norm_pr_ideal_gens(K, range(30))
sage: key = ZZ(28)
sage: b[key]
[157*g^4 - 139*g^3 - 369*g^2 + 848*g + 158, g^4 + g^3 - g - 7]
"""
negative_norm_units = K.elements_of_norm(-1)
gens = {}
if not negative_norm_units:
for n in norm_list:
if not n:
gens[n] = [K.zero()]
else:
gens[n] = K.elements_of_norm(n) + K.elements_of_norm(-n)
else:
for n in norm_list:
gens[n] = K.elements_of_norm(n)
return gens
def integer_points_in_polytope(matrix, interval_radius):
r"""
Return the set of integer points in the polytope obtained by acting on a
cube by a linear transformation.
Given an r-by-r matrix ``matrix`` and a real number ``interval_radius``,
this function finds all integer lattice points in the polytope obtained by
transforming the cube [-interval_radius,interval_radius]^r via the linear
map induced by ``matrix``.
INPUT:
- ``matrix`` -- a square matrix of real numbers
- ``interval_radius`` -- a real number
OUTPUT:
- a list of tuples of integers
EXAMPLES:
Stretch the interval [-1,1] by a factor of 2 and find the integers in the
resulting interval::
sage: from sage.rings.number_field.bdd_height import integer_points_in_polytope
sage: m = matrix([2])
sage: r = 1
sage: integer_points_in_polytope(m,r)
[(-2), (-1), (0), (1), (2)]
Integer points inside a parallelogram::
sage: from sage.rings.number_field.bdd_height import integer_points_in_polytope
sage: m = matrix([[1, 2],[3, 4]])
sage: r = RealField()(1.3)
sage: integer_points_in_polytope(m,r)
[(-3, -7), (-2, -5), (-2, -4), (-1, -3), (-1, -2), (-1, -1), (0, -1), (0, 0), (0, 1), (1, 1), (1, 2), (1, 3), (2, 4), (2, 5), (3, 7)]
Integer points inside a parallelepiped::
sage: from sage.rings.number_field.bdd_height import integer_points_in_polytope
sage: m = matrix([[1.2,3.7,0.2],[-5.3,-.43,3],[1.2,4.7,-2.1]])
sage: r = 2.2
sage: L = integer_points_in_polytope(m,r)
sage: len(L)
4143
If ``interval_radius`` is 0, the output should include only the zero tuple::
sage: from sage.rings.number_field.bdd_height import integer_points_in_polytope
sage: m = matrix([[1,2,3,7],[4,5,6,2],[7,8,9,3],[0,3,4,5]])
sage: integer_points_in_polytope(m,0)
[(0, 0, 0, 0)]
"""
T = matrix
d = interval_radius
r = T.nrows()
# Find the vertices of the given box
box_vertices = [vector(x) for x in product([-d, d], repeat=r)]
# Transform the vertices
T_trans = T.transpose()
transformed_vertices = [v * T_trans for v in box_vertices]
# Create polyhedron from transformed vertices and find integer points inside
return list(Polyhedron(transformed_vertices, base_ring=QQ).integral_points())
def bdd_height(K, height_bound, tolerance=1e-2, precision=53):
r"""
Compute all elements in the number field `K` which have relative
multiplicative height at most ``height_bound``.
The function can only be called for number fields `K` with positive unit
rank. An error will occur if `K` is `QQ` or an imaginary quadratic field.
This algorithm computes 2 lists: L containing elements x in `K` such that
H_k(x) <= B, and a list L' containing elements x in `K` that, due to
floating point issues,
may be slightly larger than the bound. This can be controlled
by lowering the tolerance.
In the current implementation both lists (L, L') are merged and returned in
the form of an iterator.
ALGORITHM:
This is an implementation of the revised algorithm (Algorithm 4) in
[DK2013]_.
INPUT:
- ``height_bound`` -- real number
- ``tolerance`` -- (default: 0.01) a rational number in (0,1]
- ``precision`` -- (default: 53) positive integer
OUTPUT:
- an iterator of number field elements
EXAMPLES:
There are no elements of negative height::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<g> = NumberField(x^5 - x + 7)
sage: list(bdd_height(K,-3))
[]
The only nonzero elements of height 1 are the roots of unity::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<g> = QuadraticField(3)
sage: list(bdd_height(K,1))
[0, -1, 1]
::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<g> = QuadraticField(36865)
sage: len(list(bdd_height(K,101))) # long time (4 s)
131
::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<g> = NumberField(x^6 + 2)
sage: len(list(bdd_height(K,60))) # long time (5 s)
1899
::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<g> = NumberField(x^4 - x^3 - 3*x^2 + x + 1)
sage: len(list(bdd_height(K,10)))
99
TESTS:
Check that :trac:`22771` is fixed::
sage: from sage.rings.number_field.bdd_height import bdd_height
sage: K.<v> = NumberField(x^3 + x + 1)
sage: len(list(bdd_height(K,3)))
23
"""
# global values, used in internal function
B = height_bound
theta = tolerance
if B < 1:
return
embeddings = K.places(prec=precision)
O_K = K.ring_of_integers()
r1, r2 = K.signature()
r = r1 + r2 - 1
RF = RealField(precision)
lambda_gens_approx = {}
class_group_rep_norm_log_approx = []
unit_log_dict = {}
def rational_in(x, y):
r"""
Compute a rational number q, such that x<q<y using Archimedes' axiom
"""
z = y - x
if z == 0:
n = 1
else:
n = RR(1/z).ceil() + 1
if RR(n*y).ceil() == n*y: # n*y is already an integer, so step just below it
m = n*y - 1
else:
m = RR(n*y).floor()
return m / n
def delta_approximation(x, delta):
r"""
Compute a rational number in range (x-delta, x+delta)
"""
return rational_in(x - delta, x + delta)
def vector_delta_approximation(v, delta):
r"""
Compute a rational vector w=(w1, ..., wn)
such that |vi-wi|<delta for all i in [1, n]
"""
return [delta_approximation(vi, delta) for vi in v]
def log_map(number):
r"""
Compute the image of an element of `K` under the logarithmic map.
"""
x = number
x_logs = []
for i in range(r1):
sigma = embeddings[i] # real embeddings
x_logs.append(sigma(x).abs().log())
for i in range(r1, r + 1):
tau = embeddings[i] # Complex embeddings
x_logs.append(2 * tau(x).abs().log())
return vector(x_logs)
def log_height_for_generators_approx(alpha, beta, Lambda):
r"""
Compute a rational approximation of the logarithmic height function.
Return a Lambda-approximation of h_K(alpha/beta).
"""
delta = Lambda / (r + 2)
norm_log = delta_approximation(RR(O_K.ideal(alpha, beta).norm()).log(), delta)
log_ga = vector_delta_approximation(log_map(alpha), delta)
log_gb = vector_delta_approximation(log_map(beta), delta)
arch_sum = sum([max(log_ga[k], log_gb[k]) for k in range(r + 1)])
return (arch_sum - norm_log)
def packet_height(n, pair, u):
r"""
Compute the height of the element of `K` encoded by a given packet.
"""
gens = generator_lists[n]
i = pair[0]
j = pair[1]
Log_gi = lambda_gens_approx[gens[i]]
Log_gj = lambda_gens_approx[gens[j]]
Log_u_gi = vector(Log_gi) + unit_log_dict[u]
arch_sum = sum([max(Log_u_gi[k], Log_gj[k]) for k in range(r + 1)])
return (arch_sum - class_group_rep_norm_log_approx[n])
# Step 1
# Computes ideal class representatives and rational approximations of their norms
t = theta / (3*B)
delta_1 = t / (6*r+12)
class_group_reps = []
class_group_rep_norms = []
for c in K.class_group():
a = c.ideal()
a_norm = a.norm()
log_norm = RF(a_norm).log()
log_norm_approx = delta_approximation(log_norm, delta_1)
class_group_reps.append(a)
class_group_rep_norms.append(a_norm)
class_group_rep_norm_log_approx.append(log_norm_approx)
class_number = len(class_group_reps)
# Step 2
# Find generators for principal ideals of bounded norm
possible_norm_set = set([])
for n in range(class_number):
for m in range(1, (B + 1).ceil()):
possible_norm_set.add(m * class_group_rep_norms[n])
bdd_ideals = bdd_norm_pr_ideal_gens(K, possible_norm_set)
# Stores the results in a dictionary, giving lambda(g)_approx for each key g
for norm in possible_norm_set:
gens = bdd_ideals[norm]
for g in gens:
lambda_g_approx = vector_delta_approximation(log_map(g), delta_1)
lambda_gens_approx[g] = lambda_g_approx
# Step 3
# Find a list of all generators corresponding to each ideal a_l
generator_lists = []
for l in range(class_number):
this_ideal = class_group_reps[l]
this_ideal_norm = class_group_rep_norms[l]
gens = []
for i in range(1, (B + 1).ceil()):
for g in bdd_ideals[i * this_ideal_norm]:
if g in this_ideal:
gens.append(g)
generator_lists.append(gens)
# Step 4
# Finds all relevant pairs and their heights
gen_height_approx_dictionary = {}
relevant_pair_lists = []
for n in range(class_number):
relevant_pairs = []
gens = generator_lists[n]
l = len(gens)
for i in range(l):
for j in range(i+1, l):
if K.ideal(gens[i], gens[j]) == class_group_reps[n]:
relevant_pairs.append([i, j])
gen_height_approx_dictionary[(n, i, j)] = log_height_for_generators_approx(gens[i], gens[j], t/6)
relevant_pair_lists.append(relevant_pairs)
# Step 5
b = rational_in(t/12 + RR(B).log(), t/4 + RR(B).log())
maximum = 0
for n in range(class_number):
for p in relevant_pair_lists[n]:
maximum = max(maximum, gen_height_approx_dictionary[(n, p[0], p[1])])
d_tilde = b + t/6 + maximum
# Step 6
# computes fundamental units and their values under the log map
fund_units = UnitGroup(K).fundamental_units()
fund_unit_logs = [log_map(fund_units[i]) for i in range(r)]
S = column_matrix(fund_unit_logs).delete_rows([r])
S_inverse = S.inverse()
S_norm = S.norm(Infinity)
S_inverse_norm = S_inverse.norm(Infinity)
upper_bound = (r**2) * max(S_norm, S_inverse_norm)
m = RR(upper_bound).ceil() + 1
# Step 7
# Variables needed for rational approximation
lambda_tilde = (t/12) / (d_tilde*r*(1+m))
delta_tilde = min(lambda_tilde/((r**2)*((m**2)+m*lambda_tilde)), 1/(r**2))
M = d_tilde * (upper_bound+lambda_tilde*RR(r).sqrt())
M = RR(M).ceil()
d_tilde = RR(d_tilde)
delta_2 = min(delta_tilde, (t/6)/(r*(r+1)*M))
# Step 8, 9
# Computes relevant points in polytope
fund_unit_log_approx = [vector_delta_approximation(fund_unit_logs[i], delta_2) for i in range(r)]
S_tilde = column_matrix(fund_unit_log_approx).delete_rows([r])
S_tilde_inverse = S_tilde.inverse()
U = integer_points_in_polytope(S_tilde_inverse, d_tilde)
# Step 10
# tilde-suffixed lists are used for computing the second list (L_primed)
yield K(0)
U0 = []
U0_tilde = []
L0 = []
L0_tilde = []
# Step 11
# Computes unit height
unit_height_dict = {}
U_copy = copy(U)
inter_bound = b - (5*t)/12
for u in U:
u_log = sum([u[j]*vector(fund_unit_log_approx[j]) for j in range(r)])
unit_log_dict[u] = u_log
u_height = sum([max(u_log[k], 0) for k in range(r + 1)])
unit_height_dict[u] = u_height
if u_height < inter_bound:
U0.append(u)
if inter_bound <= u_height and u_height < b - (t/12):
U0_tilde.append(u)
if u_height > t/12 + d_tilde:
U_copy.remove(u)
U = U_copy
relevant_tuples = set(U0 + U0_tilde)
# Step 12
# check for relevant packets
for n in range(class_number):
for pair in relevant_pair_lists[n]:
i = pair[0]
j = pair[1]
u_height_bound = b + gen_height_approx_dictionary[(n, i, j)] + t/4
for u in U:
if unit_height_dict[u] < u_height_bound:
candidate_height = packet_height(n, pair, u)
if candidate_height <= b - 7*t/12:
L0.append([n, pair, u])
relevant_tuples.add(u)
elif candidate_height < b + t/4:
L0_tilde.append([n, pair, u])
relevant_tuples.add(u)
# Step 13
# forms a dictionary of all relevant unit tuples and their values
tuple_to_unit_dict = {}
for u in relevant_tuples:
unit = K.one()
for k in range(r):
unit *= fund_units[k]**u[k]
tuple_to_unit_dict[u] = unit
# Step 14
# Build all output numbers
roots_of_unity = K.roots_of_unity()
for u in U0 + U0_tilde:
for zeta in roots_of_unity:
yield zeta * tuple_to_unit_dict[u]
# Step 15
for p in L0 + L0_tilde:
gens = generator_lists[p[0]]
i = p[1][0]
j = p[1][1]
u = p[2]
c_p = tuple_to_unit_dict[u] * (gens[i] / gens[j])
for zeta in roots_of_unity:
yield zeta * c_p
yield zeta / c_p
|
py | 1a45fc068a2c695dff52ed3c81439c19840fb5ec | from math import sqrt, floor
def frpart(x):
return x - floor(x)
def constrain(val, lo, hi):
if val < lo:
return lo
if val > hi:
return hi
return val
def sq(x):
return x * x
def saw(x):
t = x - floor(x)
if t > 0.5:
return 2.0 * (1.0 - t)
else:
return 2.0 * t
def lerp(lo, hi, step):
return lo + (hi - lo) * step
def dist(x1, y1, x2, y2):
dx = x2 - x1
dy = y2 - y1
return sqrt(dx * dx + dy * dy)
def ccir601(rgb):
return 0.299 * rgb[0] + 0.587 * rgb[1] + 0.114 * rgb[2]
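# Illustrative usage of the helpers above (added as a hedged sketch, not part
# of the original module); the values follow directly from the definitions:
# >>> saw(0.25), saw(0.75)        # triangle wave, symmetric around 0.5
# (0.5, 0.5)
# >>> lerp(0.0, 10.0, 0.3)
# 3.0
# >>> ccir601((0.0, 1.0, 0.0))    # luma weight of pure green under CCIR 601
# 0.587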
|
py | 1a45fc4b4b6676ceb36ffe825f768ac79e7f2ba3 | # -*- coding: utf-8 -*-
""" HTTP API for triggering Earthstar events and
a simple web based controller that connects to the API.
Events are published to a ZeroMQ socket where they
are consumed by the EffectBox (and potentially other subscribers such
as an event logger).
"""
import click
from flask import Flask
from flask_bootstrap import Bootstrap
import zmq
from .blueprints import root
from .blueprints import effect_api
from .blueprints import controller
def create_effect_socket(effect_addr):
""" Create effect socket. """
context = zmq.Context()
effect_socket = context.socket(zmq.PUB)
effect_socket.bind(effect_addr)
return effect_socket
def create_webapp(effect_socket):
""" Create the Earthstar web application. """
app = Flask(__name__)
app.effect_socket = effect_socket
app.register_blueprint(root.root_bp)
app.register_blueprint(effect_api.effect_api_bp)
app.register_blueprint(controller.controller_bp)
Bootstrap(app)
return app
@click.command(context_settings={"auto_envvar_prefix": "ESC"})
@click.option(
'--host', default='localhost',
help='IP address to listen on.')
@click.option(
'--port', default=8080,
help='Port to listen on.')
@click.option(
'--effect-addr', default='tcp://127.0.0.1:5555',
help='ZeroMQ address to publish events to.')
@click.option(
'--debug/--no-debug', default=False,
help='Run with debug on or off.')
def main(host, port, effect_addr, debug):
""" Run the Earthstar effect API and web interface. """
effect_socket = create_effect_socket(effect_addr)
app = create_webapp(effect_socket)
app.run(host=host, port=port, debug=debug)
|
py | 1a45fe0b26775eea6d5e1a5168ee781803701bfb | from netCDF4 import Dataset
import numpy as np
import tables as tb
from glob import glob
import sys
MISSING_PBL = -1
def read_nc_data(in_file):
rootgrp = Dataset(in_file, "r", format="NETCDF4")
time_axis = rootgrp.variables['time']
height_axis = rootgrp.variables['range']
beta_raw = np.array(rootgrp.variables['beta_raw'])
result = {'time': np.array(time_axis),
'height': np.array(height_axis),
'beta_raw': np.array(beta_raw)}
# data_float = data_np.astype(float)
# data_float[data_np== MISSING_PBL ] = np.NaN
return result
def create_hdf_file(data_tables, out_file):
outfile = tb.open_file(out_file, 'w')
signal_group = outfile.create_group("/", 'raw_signal', 'Raw signal')
for name, data in data_tables.items():
if not data.dtype.fields:
desc = np.dtype([(name, 'f8')])
else:
desc = data.dtype
table = tb.Array(signal_group, name, data)
# table.append(data)
outfile.close()
# def append_to_hdf_file( rec_data, out_file):
#
# outfile = tb.open_file(out_file, 'a')
#
# table = outfile.get_node("/pbl/PBL")
#
# table.append(rec_data)
#
# outfile.close()
in_files = sorted(glob('*_leipzig_CHM080079_000.nc'))
for in_file in in_files:
out_file = '.'.join(in_file.split('.')[:-1]) + ".h5"
create_hdf_file(read_nc_data(in_file), out_file)
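# Sketch of reading one of the generated files back (not part of the original
# script; the group and array names follow create_hdf_file above):
# >>> import tables as tb
# >>> with tb.open_file(out_file, 'r') as f:
# ...     beta = f.get_node('/raw_signal/beta_raw').read()
# ...     time_axis = f.get_node('/raw_signal/time').read()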
|
py | 1a45fe841c4268e2ae89599412b0cc2a3d3ca1d4 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# *****************************************************************************/
# * Authors: Joseph Tarango
# *****************************************************************************/
from __future__ import absolute_import, division, print_function, unicode_literals # , nested_scopes, generators, generator_stop, with_statement, annotations
import re, sys, os, datetime, inspect, traceback, pprint
##############################################
# Python generic info
##############################################
"""
Requires
https://graphviz.gitlab.io/_pages/Download/windows/graphviz-2.38.msi
install path https://graphviz.gitlab.io/_pages/Download/windows/graphviz-2.38.msi
Add to path C:\\Program Files (x86)\\Graphviz2.38\\bin\\dot.exe
"""
# .exe extension patch for the compiled version of this script
if not re.search(pattern='\.PY$|\.PYC$|\.EXE$', string=os.path.split(sys.argv[0])[1].upper()):
sys.argv[0] = os.path.join(os.path.split(sys.argv[0])[0], os.path.split(sys.argv[0])[1] + '.exe')
##############################################
# Libraries
##############################################
def whoami(annotate=True):
frame = inspect.currentframe().f_back
fileName = inspect.getframeinfo(frame).filename
functionName = inspect.getframeinfo(frame).function
lineNumber = inspect.getframeinfo(frame).lineno
traceContext = pprint.pformat(traceback.format_exc(limit=None, chain=True))
if annotate:
fileName = ''.join(["File=", fileName])
functionName = ''.join(["Function=", functionName])
lineNumber = ''.join(["Line=", str(lineNumber)])
return fileName, functionName, lineNumber, traceContext
def devGraphAll(options, args):
##############################################
# Debug Graphing
##############################################
# import necessary paths
importPath = os.path.abspath(os.getcwd())
importPathNext = os.path.abspath(os.path.join(importPath, "pycallgraph"))
print("Importing Paths: ", str(importPath), str(importPathNext))
sys.path.insert(1, importPath)
sys.path.insert(1, importPathNext)
importPathNext = os.path.abspath(os.path.join(importPath, "pycallgraph", "output"))
print("Importing Paths: ", str(importPath), str(importPathNext))
sys.path.insert(1, importPath)
sys.path.insert(1, importPathNext)
try:
importPathNext = os.path.abspath('C:\\Program Files (x86)\\Graphviz2.38\\bin\\dot.exe')
print("Importing Paths: ", str(importPath), str(importPathNext))
sys.path.insert(1, importPath)
sys.path.insert(1, importPathNext)
except Exception as ErrorContext:
print(ErrorContext)
pass
status = 0
##############################################
# Library
##############################################
try:
from pycallgraph2 import PyCallGraph
from pycallgraph2.output import GraphvizOutput
from pycallgraph2 import Config
##############################################
# Configuration
##############################################
graphviz = GraphvizOutput()
graphviz.output_type = 'svg'
graphviz.output_file = 'pycallgraph.svg'
configList = Config()
configList.output = None
configList.verbose = True
configList.debug = False
configList.groups = True
configList.threaded = False
configList.max_depth = 2 ** 31
print(options, args)
with PyCallGraph(output=graphviz, config=configList):
callReturn = 1
print("PyCallGraphReturn", callReturn)
# status = testDrive(driveNumber) # Debug code goes here
except Exception:
pass
return status
def main():
##############################################
# Main function, Options
##############################################
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--example", action='store_true', dest='example', default=False,
help='Show command execution example.')
parser.add_option("--debug", action='store_true', dest='debug', default=False, help='Debug mode.')
parser.add_option("--verbose", action='store_true', dest='verbose', default=False,
help='Verbose printing for debug use.')
(options, args) = parser.parse_args()
devGraphAll(options=options, args=args)
return 0
# Main Execute
if __name__ == '__main__':
p = datetime.datetime.now()
main()
q = datetime.datetime.now()
print("Execution time: " + str(q - p))
|
py | 1a45ffe7f10578aa5887712afd351721cb41ee6b | import numpy as np
import os
import cv2
from .colors import get_color
class BoundBox:
def __init__(self, xmin, ymin, xmax, ymax, c = None, classes = None):
self.xmin = xmin
self.ymin = ymin
self.xmax = xmax
self.ymax = ymax
self.c = c
self.classes = classes
self.label = -1
self.score = -1
def get_label(self):
if self.label == -1:
self.label = np.argmax(self.classes)
return self.label
def get_score(self):
if self.score == -1:
self.score = self.classes[self.get_label()]
return self.score
def _interval_overlap(interval_a, interval_b):
x1, x2 = interval_a
x3, x4 = interval_b
if x3 < x1:
if x4 < x1:
return 0
else:
return min(x2,x4) - x1
else:
if x2 < x3:
return 0
else:
return min(x2,x4) - x3
def bbox_iou(box1, box2):
intersect_w = _interval_overlap([box1.xmin, box1.xmax], [box2.xmin, box2.xmax])
intersect_h = _interval_overlap([box1.ymin, box1.ymax], [box2.ymin, box2.ymax])
intersect = intersect_w * intersect_h
w1, h1 = box1.xmax-box1.xmin, box1.ymax-box1.ymin
w2, h2 = box2.xmax-box2.xmin, box2.ymax-box2.ymin
union = w1*h1 + w2*h2 - intersect
return float(intersect) / union
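# Illustrative check of bbox_iou (a hedged sketch, not part of the original
# module): two 10x10 boxes overlapping in a 5x5 region share 25 of 175 units.
# >>> a = BoundBox(0, 0, 10, 10)
# >>> b = BoundBox(5, 5, 15, 15)
# >>> round(bbox_iou(a, b), 3)
# 0.143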
def draw_boxes(image, boxes, labels, obj_thresh, quiet=True):
flag = False
label_strs = ''
for box in boxes:
label_str = ''
label = -1
for i in range(len(labels)):
print("box.class[]:"+str(i)+" :"+ str(box.classes[i]))
if box.classes[i] > obj_thresh:
if label_str != '': label_str += ', '
label_str += (labels[i] + ' ' + str(round(box.get_score()*100, 2)) + '%')
label = i
if not quiet: print(label_str)
label_strs += label_str
if label >= 0:
text_size = cv2.getTextSize(label_str, cv2.FONT_HERSHEY_SIMPLEX, 1.1e-3 * image.shape[0], 5)
width, height = text_size[0][0], text_size[0][1]
region = np.array([[box.xmin-3, box.ymin],
[box.xmin-3, box.ymin-height-26],
[box.xmin+width+13, box.ymin-height-26],
[box.xmin+width+13, box.ymin]], dtype='int32')
cv2.rectangle(img=image, pt1=(box.xmin,box.ymin), pt2=(box.xmax,box.ymax), color=get_color(label), thickness=5)
cv2.fillPoly(img=image, pts=[region], color=get_color(label))
cv2.putText(img=image,
text=label_str,
org=(box.xmin+13, box.ymin - 13),
fontFace=cv2.FONT_HERSHEY_SIMPLEX,
fontScale=1e-3 * image.shape[0],
color=(0,0,0),
thickness=2)
flag = True
return image, flag, label_strs
|
py | 1a460017f86d62cb9abbee154c1fbc143681988c | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-10 09:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('games', '0003_auto_20160910_0756'),
]
operations = [
migrations.AlterField(
model_name='gameseed',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL),
),
]
|
py | 1a4601d002d77ea98d7b85819035488e48f9f492 | import unittest
from igraph import *
class DirectedUndirectedTests(unittest.TestCase):
def testToUndirected(self):
graph = Graph([(0,1), (0,2), (1,0)], directed=True)
graph2 = graph.copy()
graph2.to_undirected(mode=False)
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0,1), (0,1), (0,2)])
graph2 = graph.copy()
graph2.to_undirected()
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0,1), (0,2)])
graph2 = graph.copy()
graph2.es["weight"] = [1,2,3]
graph2.to_undirected(mode="collapse", combine_edges="sum")
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0,1), (0,2)])
self.assertTrue(graph2.es["weight"] == [4,2])
graph = Graph([(0,1),(1,0),(0,1),(1,0),(2,1),(1,2)], directed=True)
graph2 = graph.copy()
graph2.es["weight"] = [1,2,3,4,5,6]
graph2.to_undirected(mode="mutual", combine_edges="sum")
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0,1), (0,1), (1,2)])
self.assertTrue(graph2.es["weight"] == [7,3,11] or graph2.es["weight"] == [3,7,11])
def testToDirected(self):
graph = Graph([(0,1), (0,2), (2,3), (2,4)], directed=False)
graph.to_directed()
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 5)
self.assertTrue(sorted(graph.get_edgelist()) == \
[(0,1), (0,2), (1,0), (2,0), (2,3), (2,4), (3,2), (4,2)]
)
class GraphRepresentationTests(unittest.TestCase):
def testGetAdjacency(self):
# Undirected case
g = Graph.Tree(6, 3)
g.es["weight"] = range(5)
self.assertTrue(g.get_adjacency() == Matrix([
[0, 1, 1, 1, 0, 0],
[1, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0]
]))
self.assertTrue(g.get_adjacency(attribute="weight") == Matrix([
[0, 0, 1, 2, 0, 0],
[0, 0, 0, 0, 3, 4],
[1, 0, 0, 0, 0, 0],
[2, 0, 0, 0, 0, 0],
[0, 3, 0, 0, 0, 0],
[0, 4, 0, 0, 0, 0]
]))
self.assertTrue(g.get_adjacency(eids=True) == Matrix([
[0, 1, 2, 3, 0, 0],
[1, 0, 0, 0, 4, 5],
[2, 0, 0, 0, 0, 0],
[3, 0, 0, 0, 0, 0],
[0, 4, 0, 0, 0, 0],
[0, 5, 0, 0, 0, 0]
])-1)
# Directed case
g = Graph.Tree(6, 3, "tree_out")
g.add_edges([(0,1), (1,0)])
self.assertTrue(g.get_adjacency() == Matrix([
[0, 2, 1, 1, 0, 0],
[1, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]
]))
def suite():
direction_suite = unittest.makeSuite(DirectedUndirectedTests)
representation_suite = unittest.makeSuite(GraphRepresentationTests)
return unittest.TestSuite([direction_suite,
representation_suite])
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
|
py | 1a460201f7ed0e27e7356e121b5bca663310c400 | import pytest
import sdk_cmd
import sdk_install
import sdk_plan
from tests import config
@pytest.fixture(scope='module', autouse=True)
def configure_package(configure_security):
try:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
options = {
"service": {
"spec_file": "examples/discovery.yml"
}
}
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, 1, additional_options=options)
yield # let the test session execute
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
@pytest.mark.sanity
def test_task_dns_prefix_points_to_all_tasks():
pod_info = sdk_cmd.service_request('GET', config.SERVICE_NAME, '/v1/pod/hello-0/info').json()
# Assert that DiscoveryInfo is correctly set on tasks.
assert(all(p["info"]["discovery"]["name"] == "hello-0" for p in pod_info))
# Assert that the hello-0.hello-world.mesos DNS entry points to the right IP.
sdk_plan.wait_for_completed_deployment(config.SERVICE_NAME)
|
py | 1a46028d21480caca3b922e8e4118514cd8035d2 | import sys
for line in sys.stdin:
line = line.strip()
words = line.split(' ')
i=0
for word in words:
print '%s\t%s' % (i,word)
i=i+1
#cat /home/wing/113CS0147/senti/proj/input | python /home/wing/113CS0147/senti/proj/mapper_senti.py | python /home/wing/113CS0147/senti/proj/reducer_senti.py
#hadoop jar /usr/local/hadoop/share/hadoop/tools/lib/hadoop-streaming-2.7.3.jar -file /home/wing/113CS0147/senti/proj/mapper_senti.py -mapper "python /home/wing/113CS0147/senti/proj/mapper_senti.py" -file /home/wing/113CS0147/senti/proj/reducer_senti.py -reducer "python /home/wing/113CS0147/senti/proj/reducer_senti.py" -input /senti/input -output /senti/output |
gyp | 1a460306a75c50916bc4da5f4cf1e088fa0a9c7b | {
"targets": [
{
"target_name": "rgb_node",
"sources": [ "swig_wrap.cxx" ],
"libraries": [
'-L<(module_root_dir)/../../rust-lib/target/debug/',
'-lrgb',
],
'include_dirs': [
'../../rust-lib',
],
"ldflags": [
'-Wl,-rpath,../../rust-lib/target/debug/'
],
"cflags!": ["-std=c++11"],
}
]
}
|
py | 1a460307e985656aee437ea5ad433af4c8d96b90 | from hypothesis import given
from hypothesis.strategies import integers, lists
from les_iterables import retain_if
from helpers.predicates import is_odd
@given(items=lists(integers()))
def test_retain_if_is_odd_retains_only_odd_integers(items):
result = retain_if(is_odd, items)
assert all(is_odd(x) for x in result)
|
py | 1a4604466b4a1d8200f12131b8a4785f157a0c10 | # -*- coding: utf-8 -*-
'''
Module for returning various status data about a minion.
These data can be useful for compiling into stats later,
or for problem solving if your minion is having problems.
.. versionadded:: 0.12.0
:depends: - wmi
'''
# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import ctypes
import datetime
import logging
import subprocess
log = logging.getLogger(__name__)
# Import Salt Libs
import salt.utils.event
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.win_pdh
from salt.utils.network import host_to_ips as _host_to_ips
from salt.utils.functools import namespaced_function as _namespaced_function
# Import 3rd party Libs
from salt.ext import six
# These imports needed for namespaced functions
# pylint: disable=W0611
from salt.modules.status import ping_master, time_
import copy
# pylint: enable=W0611
# Import 3rd Party Libs
try:
if salt.utils.platform.is_windows():
import wmi
import salt.utils.winapi
HAS_WMI = True
else:
HAS_WMI = False
except ImportError:
HAS_WMI = False
HAS_PSUTIL = False
if salt.utils.platform.is_windows():
import psutil
HAS_PSUTIL = True
__opts__ = {}
__virtualname__ = 'status'
# Taken from https://www.geoffchappell.com/studies/windows/km/ntoskrnl/api/ex/sysinfo/performance.htm
class SYSTEM_PERFORMANCE_INFORMATION(ctypes.Structure):
_fields_ = [('IdleProcessTime', ctypes.c_int64),
('IoReadTransferCount', ctypes.c_int64),
('IoWriteTransferCount', ctypes.c_int64),
('IoOtherTransferCount', ctypes.c_int64),
('IoReadOperationCount', ctypes.c_ulong),
('IoWriteOperationCount', ctypes.c_ulong),
('IoOtherOperationCount', ctypes.c_ulong),
('AvailablePages', ctypes.c_ulong),
('CommittedPages', ctypes.c_ulong),
('CommitLimit', ctypes.c_ulong),
('PeakCommitment', ctypes.c_ulong),
('PageFaultCount', ctypes.c_ulong),
('CopyOnWriteCount', ctypes.c_ulong),
('TransitionCount', ctypes.c_ulong),
('CacheTransitionCount', ctypes.c_ulong),
('DemandZeroCount', ctypes.c_ulong),
('PageReadCount', ctypes.c_ulong),
('PageReadIoCount', ctypes.c_ulong),
('CacheReadCount', ctypes.c_ulong), # Was c_ulong ** 2
('CacheIoCount', ctypes.c_ulong),
('DirtyPagesWriteCount', ctypes.c_ulong),
('DirtyWriteIoCount', ctypes.c_ulong),
('MappedPagesWriteCount', ctypes.c_ulong),
('MappedWriteIoCount', ctypes.c_ulong),
('PagedPoolPages', ctypes.c_ulong),
('NonPagedPoolPages', ctypes.c_ulong),
('PagedPoolAllocs', ctypes.c_ulong),
('PagedPoolFrees', ctypes.c_ulong),
('NonPagedPoolAllocs', ctypes.c_ulong),
('NonPagedPoolFrees', ctypes.c_ulong),
('FreeSystemPtes', ctypes.c_ulong),
('ResidentSystemCodePage', ctypes.c_ulong),
('TotalSystemDriverPages', ctypes.c_ulong),
('TotalSystemCodePages', ctypes.c_ulong),
('NonPagedPoolLookasideHits', ctypes.c_ulong),
('PagedPoolLookasideHits', ctypes.c_ulong),
('AvailablePagedPoolPages', ctypes.c_ulong),
('ResidentSystemCachePage', ctypes.c_ulong),
('ResidentPagedPoolPage', ctypes.c_ulong),
('ResidentSystemDriverPage', ctypes.c_ulong),
('CcFastReadNoWait', ctypes.c_ulong),
('CcFastReadWait', ctypes.c_ulong),
('CcFastReadResourceMiss', ctypes.c_ulong),
('CcFastReadNotPossible', ctypes.c_ulong),
('CcFastMdlReadNoWait', ctypes.c_ulong),
('CcFastMdlReadWait', ctypes.c_ulong),
('CcFastMdlReadResourceMiss', ctypes.c_ulong),
('CcFastMdlReadNotPossible', ctypes.c_ulong),
('CcMapDataNoWait', ctypes.c_ulong),
('CcMapDataWait', ctypes.c_ulong),
('CcMapDataNoWaitMiss', ctypes.c_ulong),
('CcMapDataWaitMiss', ctypes.c_ulong),
('CcPinMappedDataCount', ctypes.c_ulong),
('CcPinReadNoWait', ctypes.c_ulong),
('CcPinReadWait', ctypes.c_ulong),
('CcPinReadNoWaitMiss', ctypes.c_ulong),
('CcPinReadWaitMiss', ctypes.c_ulong),
('CcCopyReadNoWait', ctypes.c_ulong),
('CcCopyReadWait', ctypes.c_ulong),
('CcCopyReadNoWaitMiss', ctypes.c_ulong),
('CcCopyReadWaitMiss', ctypes.c_ulong),
('CcMdlReadNoWait', ctypes.c_ulong),
('CcMdlReadWait', ctypes.c_ulong),
('CcMdlReadNoWaitMiss', ctypes.c_ulong),
('CcMdlReadWaitMiss', ctypes.c_ulong),
('CcReadAheadIos', ctypes.c_ulong),
('CcLazyWriteIos', ctypes.c_ulong),
('CcLazyWritePages', ctypes.c_ulong),
('CcDataFlushes', ctypes.c_ulong),
('CcDataPages', ctypes.c_ulong),
('ContextSwitches', ctypes.c_ulong),
('FirstLevelTbFills', ctypes.c_ulong),
('SecondLevelTbFills', ctypes.c_ulong),
('SystemCalls', ctypes.c_ulong),
# Windows 8 and above
('CcTotalDirtyPages', ctypes.c_ulonglong),
('CcDirtyPagesThreshold', ctypes.c_ulonglong),
('ResidentAvailablePages', ctypes.c_longlong),
# Windows 10 and above
('SharedCommittedPages', ctypes.c_ulonglong)]
def __virtual__():
'''
Only works on Windows systems with WMI and WinAPI
'''
if not salt.utils.platform.is_windows():
return False, 'win_status.py: Requires Windows'
if not HAS_WMI:
return False, 'win_status.py: Requires WMI and WinAPI'
if not HAS_PSUTIL:
return False, 'win_status.py: Requires psutil'
# Namespace modules from `status.py`
global ping_master, time_
ping_master = _namespaced_function(ping_master, globals())
time_ = _namespaced_function(time_, globals())
return __virtualname__
__func_alias__ = {
'time_': 'time'
}
def cpustats():
'''
Return information about the CPU.
Returns
dict: A dictionary containing information about the CPU stats
CLI Example:
.. code-block:: bash
salt * status.cpustats
'''
# Tries to gather information similar to that returned by a Linux machine
# Avoid using WMI as there's a lot of overhead
# Time related info
user, system, idle, interrupt, dpc = psutil.cpu_times()
cpu = {'user': user,
'system': system,
'idle': idle,
'irq': interrupt,
'dpc': dpc}
# Count related info
ctx_switches, interrupts, soft_interrupts, sys_calls = psutil.cpu_stats()
intr = {'irqs': {'irqs': [],
'total': interrupts}}
soft_irq = {'softirqs': [],
'total': soft_interrupts}
return {'btime': psutil.boot_time(),
'cpu': cpu,
'ctxt': ctx_switches,
'intr': intr,
'processes': len(psutil.pids()),
'softirq': soft_irq,
'syscalls': sys_calls}
def meminfo():
'''
Return information about physical and virtual memory on the system
Returns:
dict: A dictionary of information about memory on the system
CLI Example:
.. code-block:: bash
salt * status.meminfo
'''
# Get physical memory
vm_total, vm_available, vm_percent, vm_used, vm_free = psutil.virtual_memory()
# Get swap memory
swp_total, swp_used, swp_free, swp_percent, _, _ = psutil.swap_memory()
def get_unit_value(memory):
symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i + 1) * 10
for s in reversed(symbols):
if memory >= prefix[s]:
value = float(memory) / prefix[s]
return {'unit': s,
'value': value}
return {'unit': 'B',
'value': memory}
return {'VmallocTotal': get_unit_value(vm_total),
'VmallocUsed': get_unit_value(vm_used),
'VmallocFree': get_unit_value(vm_free),
'VmallocAvail': get_unit_value(vm_available),
'SwapTotal': get_unit_value(swp_total),
'SwapUsed': get_unit_value(swp_used),
'SwapFree': get_unit_value(swp_free)}
def vmstats():
'''
Return information about the virtual memory on the machine
Returns:
dict: A dictionary of virtual memory stats
CLI Example:
.. code-block:: bash
salt * status.vmstats
'''
# Setup the SPI Structure
spi = SYSTEM_PERFORMANCE_INFORMATION()
retlen = ctypes.c_ulong()
# 2 means to query System Performance Information and return it in a
# SYSTEM_PERFORMANCE_INFORMATION Structure
ctypes.windll.ntdll.NtQuerySystemInformation(
2, ctypes.byref(spi), ctypes.sizeof(spi), ctypes.byref(retlen))
# Return each defined field in a dict
ret = {}
for field in spi._fields_:
ret.update({field[0]: getattr(spi, field[0])})
return ret
def loadavg():
'''
Returns counter information related to the load of the machine
Returns:
dict: A dictionary of counters
CLI Example:
.. code-block:: bash
salt * status.loadavg
'''
# Counter List (obj, instance, counter)
counter_list = [
('Memory', None, 'Available Bytes'),
('Memory', None, 'Pages/sec'),
('Paging File', '*', '% Usage'),
('Processor', '*', '% Processor Time'),
('Processor', '*', 'DPCs Queued/sec'),
('Processor', '*', '% Privileged Time'),
('Processor', '*', '% User Time'),
('Processor', '*', '% DPC Time'),
('Processor', '*', '% Interrupt Time'),
('Server', None, 'Work Item Shortages'),
('Server Work Queues', '*', 'Queue Length'),
('System', None, 'Processor Queue Length'),
('System', None, 'Context Switches/sec'),
]
return salt.utils.win_pdh.get_counters(counter_list=counter_list)
def cpuload():
'''
.. versionadded:: 2015.8.0
Return the processor load as a percentage
CLI Example:
.. code-block:: bash
salt '*' status.cpuload
'''
return psutil.cpu_percent()
def diskusage(human_readable=False, path=None):
'''
.. versionadded:: 2015.8.0
Return the disk usage for this minion
human_readable : False
If ``True``, usage will be in KB/MB/GB etc.
CLI Example:
.. code-block:: bash
salt '*' status.diskusage path=c:/salt
'''
if not path:
path = 'c:/'
disk_stats = psutil.disk_usage(path)
total_val = disk_stats.total
used_val = disk_stats.used
free_val = disk_stats.free
percent = disk_stats.percent
if human_readable:
total_val = _byte_calc(total_val)
used_val = _byte_calc(used_val)
free_val = _byte_calc(free_val)
return {'total': total_val,
'used': used_val,
'free': free_val,
'percent': percent}
def procs(count=False):
'''
Return the process data
count : False
If ``True``, this function will simply return the number of processes.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt '*' status.procs
salt '*' status.procs count
'''
with salt.utils.winapi.Com():
wmi_obj = wmi.WMI()
processes = wmi_obj.win32_process()
#this short-circuits the function to get a short, simple proc count.
if count:
return len(processes)
#a proper run of the function, creating a very long output.
process_info = {}
for proc in processes:
process_info[proc.ProcessId] = _get_process_info(proc)
return process_info
def saltmem(human_readable=False):
'''
.. versionadded:: 2015.8.0
Returns the amount of memory that salt is using
human_readable : False
return the value in a nicely formatted number
CLI Example:
.. code-block:: bash
salt '*' status.saltmem
salt '*' status.saltmem human_readable=True
'''
# psutil.Process defaults to current process (`os.getpid()`)
p = psutil.Process()
# Use oneshot to get a snapshot
with p.oneshot():
mem = p.memory_info().rss
if human_readable:
return _byte_calc(mem)
return mem
def uptime(human_readable=False):
'''
.. versionadded:: 2015.8.0
Return the system uptime for the machine
Args:
human_readable (bool):
Return uptime in human readable format if ``True``, otherwise
return seconds. Default is ``False``
.. note::
Human readable format is ``days, hours:min:sec``. Days will only
be displayed if more than 0
Returns:
str:
The uptime in seconds or human readable format depending on the
value of ``human_readable``
CLI Example:
.. code-block:: bash
salt '*' status.uptime
salt '*' status.uptime human_readable=True
'''
# Get startup time
startup_time = datetime.datetime.fromtimestamp(psutil.boot_time())
# Subtract startup time from current time to get the uptime of the system
uptime = datetime.datetime.now() - startup_time
return six.text_type(uptime) if human_readable else uptime.total_seconds()
def _get_process_info(proc):
'''
Return process information
'''
cmd = salt.utils.stringutils.to_unicode(proc.CommandLine or '')
name = salt.utils.stringutils.to_unicode(proc.Name)
info = dict(
cmd=cmd,
name=name,
**_get_process_owner(proc)
)
return info
def _get_process_owner(process):
owner = {}
domain, error_code, user = None, None, None
try:
domain, error_code, user = process.GetOwner()
owner['user'] = salt.utils.stringutils.to_unicode(user)
owner['user_domain'] = salt.utils.stringutils.to_unicode(domain)
except Exception as exc:
pass
if not error_code and all((user, domain)):
owner['user'] = salt.utils.stringutils.to_unicode(user)
owner['user_domain'] = salt.utils.stringutils.to_unicode(domain)
elif process.ProcessId in [0, 4] and error_code == 2:
# Access Denied for System Idle Process and System
owner['user'] = 'SYSTEM'
owner['user_domain'] = 'NT AUTHORITY'
else:
log.warning('Error getting owner of process; PID=\'%s\'; Error: %s',
process.ProcessId, error_code)
return owner
def _byte_calc(val):
if val < 1024:
tstr = six.text_type(val)+'B'
elif val < 1048576:
tstr = six.text_type(val/1024)+'KB'
elif val < 1073741824:
tstr = six.text_type(val/1048576)+'MB'
elif val < 1099511627776:
tstr = six.text_type(val/1073741824)+'GB'
else:
tstr = six.text_type(val/1099511627776)+'TB'
return tstr
def master(master=None, connected=True):
'''
.. versionadded:: 2015.5.0
Fire an event if the minion gets disconnected from its master. This
function is meant to be run via a scheduled job from the minion. If
master_ip is an FQDN/Hostname, is must be resolvable to a valid IPv4
address.
CLI Example:
.. code-block:: bash
salt '*' status.master
'''
def _win_remotes_on(port):
'''
Windows specific helper function.
Returns set of ipv4 host addresses of remote established connections
on local or remote tcp port.
Parses output of shell 'netstat' to get connections
PS C:> netstat -n -p TCP
Active Connections
Proto Local Address Foreign Address State
TCP 10.1.1.26:3389 10.1.1.1:4505 ESTABLISHED
TCP 10.1.1.26:56862 10.1.1.10:49155 TIME_WAIT
TCP 10.1.1.26:56868 169.254.169.254:80 CLOSE_WAIT
TCP 127.0.0.1:49197 127.0.0.1:49198 ESTABLISHED
TCP 127.0.0.1:49198 127.0.0.1:49197 ESTABLISHED
'''
remotes = set()
try:
data = subprocess.check_output(['netstat', '-n', '-p', 'TCP']) # pylint: disable=minimum-python-version
except subprocess.CalledProcessError:
log.error('Failed netstat')
raise
lines = salt.utils.stringutils.to_unicode(data).split('\n')
for line in lines:
if 'ESTABLISHED' not in line:
continue
chunks = line.split()
remote_host, remote_port = chunks[2].rsplit(':', 1)
if int(remote_port) != port:
continue
remotes.add(remote_host)
return remotes
# the default publishing port
port = 4505
master_ips = None
if master:
master_ips = _host_to_ips(master)
if not master_ips:
return
if __salt__['config.get']('publish_port') != '':
port = int(__salt__['config.get']('publish_port'))
master_connection_status = False
connected_ips = _win_remotes_on(port)
# Get connection status for master
for master_ip in master_ips:
if master_ip in connected_ips:
master_connection_status = True
break
# Connection to master is not as expected
if master_connection_status is not connected:
with salt.utils.event.get_event('minion', opts=__opts__, listen=False) as event_bus:
if master_connection_status:
event_bus.fire_event({'master': master}, salt.minion.master_event(type='connected'))
else:
event_bus.fire_event({'master': master}, salt.minion.master_event(type='disconnected'))
return master_connection_status
|
py | 1a4605601221d0985cac6f82789f0345248a80f0 | import random
import time
from typing import Set, List, Optional
from dataclasses import dataclass
from chinilla.types.peer_info import PeerInfo
from chinilla.util.ints import uint64, uint16
@dataclass(frozen=False)
class VettedPeer:
host: str
port: uint16
# 0 means we have not attempted to vet this peer yet
# a negative number means we have failed that many vetting attempts in a row
# a positive number means we have successfully vetted the peer this many
# times in a row
vetted: int = 0
# the timestamp of the last *successful* vetting of this peer
vetted_timestamp: uint64 = uint64(0)
# the last time we attempted to vet this peer, or 0 if we haven't tried yet
# we set this regardless of whether the vetting is successful or not
last_attempt: uint64 = uint64(0)
time_added: uint64 = uint64(0)
def __init__(self, h: str, p: uint16):
self.host = h
self.port = p
def __eq__(self, rhs):
return self.host == rhs.host and self.port == rhs.port
def __hash__(self):
return hash((self.host, self.port))
class IntroducerPeers:
"""
Has the list of known full node peers that are already connected or may be
connected to, and the time that they were last added.
"""
def __init__(self) -> None:
self._peers: Set[VettedPeer] = set()
def add(self, peer: Optional[PeerInfo]) -> bool:
if peer is None or not peer.port:
return False
p = VettedPeer(peer.host, peer.port)
p.time_added = uint64(int(time.time()))
if p in self._peers:
return True
self._peers.add(p)
return True
def remove(self, peer: Optional[VettedPeer]) -> bool:
if peer is None or not peer.port:
return False
try:
self._peers.remove(peer)
return True
except KeyError:
return False
def get_peers(self, max_peers: int = 0, randomize: bool = False, recent_threshold=9999999) -> List[VettedPeer]:
target_peers = [peer for peer in self._peers if time.time() - peer.time_added < recent_threshold]
if not max_peers or max_peers > len(target_peers):
max_peers = len(target_peers)
if randomize:
return random.sample(target_peers, max_peers)
else:
return target_peers[:max_peers]
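# Illustrative usage (a hedged sketch, not from the original module; the host
# and port values below are made up):
# >>> peers = IntroducerPeers()
# >>> peers.add(PeerInfo("203.0.113.5", uint16(8444)))
# True
# >>> [p.host for p in peers.get_peers(max_peers=10, randomize=True)]
# ['203.0.113.5']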
|
py | 1a4605bf362766f090fc2e0d73c84fbf5c52c624 |
from bentoml import BentoService, api, env, artifacts
from bentoml.artifact import PickleArtifact
from bentoml.adapters import FileInput
@artifacts([PickleArtifact('model')])
@env(pip_dependencies=['easyocr'],
conda_channels=["conda-forge"],
conda_dependencies=["ruamel.yaml"])
class TextDetectionService(BentoService):
@api(input=FileInput())
def predict(self, image):
result = self.artifacts.model.detect_text(image[0])
return result
|
py | 1a46062de849434817e9a6d347a1a39274108b22 | """
modify a file with additions or substitutions, making as few other changes
as possible (no formatting, whitespace, or encoding changes)
Authors:
Carl Anderson ([email protected])
"""
import os
import logging
class FileModifier:
"""
class that modifies a file with additions or substitutions, doing so
while making as few other changes as possible (no formatting, whitespace, encoding etc)
"""
COMMENT = "# programmatically added by LookML modifier"
def __init__(self, filename):
"""initialize the FileModifier
Args:
filename (str): filename
"""
if not os.path.exists(filename):
raise IOError("Filename does not exist: %s" % filename)
logging.info("Reading in file %s", filename)
self.lines = open(filename, "r").readlines()
def is_header(self, line, header_type, header_name):
"""looking for start of dimension or header, e.g.
"dimension: header_name {"
Args:
line (str): line from a file
header_type (str): e.g. dimension
header_name (str): e.g. header_name (in example above)
Returns:
bool: is this chunk a header?
"""
start = header_type + ":"
# FIXME this assumes brace is on same line. Valid LookML means that it doesn't have to be
if (
line.strip().startswith(start)
and line.split(start)[1].split("{")[0].strip() == header_name
):
return True
return False
def handle_description_addition(self, definition_type, name, description):
"""add in a new description
Args:
definition_type (str): 'measure' or 'dimension'
name (str): name of measure or dimension
description (str): description to add
Returns:
nothing. Side effect is to add lines to self.lines
"""
new_lines = []
for line in self.lines:
if self.is_header(line, definition_type, name):
line_to_add = ' description: "%s"\t%s\n' % (
description,
FileModifier.COMMENT,
)
logging.info("Adding in line: %s" % line_to_add)
new_lines.append(line) # header
new_lines.append(line_to_add)
else:
new_lines.append(line)
self.lines = new_lines
def handle_description_substitution(
self, num_lines, definition_type, name, description
):
"""as description exists, we need to find the header, then look for description after it,
consume all the lines of the current description, and add the new description
Args:
num_lines (int): number of lines in the existing description
definition_type (str): 'measure' or 'dimension'
name (str): name of measure or dimension
description (str): description to add
Returns:
Nothing. Side effect to save to self.lines
"""
new_lines = []
iterator = iter(self.lines)
while iterator:
try:
line = next(iterator)
if self.is_header(line, definition_type, name):
new_lines.append(line)
ct = 0
while True:
line = next(iterator)
ct += 1
if line.strip().startswith("description"):
logging.info("found description %d lines after header", ct)
# consume the other lines for this existing description
for i in range(num_lines):
line = next(iterator)
# inject our new description
line_to_add = ' description: "%s"\t%s\n' % (
description,
FileModifier.COMMENT,
)
logging.info("Adding in line: %s", line_to_add)
new_lines.append(line_to_add)
break
else:
new_lines.append(line)
new_lines.append(line)
except StopIteration:
break
self.lines = new_lines
def modify(self, num_lines, definition_type, name, description, has_key):
"""
modify an entry
Args:
num_lines (int): number of lines to substitute
definition_type (str): 'measure' or 'dimension'
has_key (bool): do we have a description key for this definition_type and name
name (str): name of dimension, dimension_group, or measure
description (str): correct description
Returns:
nothing. Side effect is to update self.lines with correct info
"""
if not has_key:
self.handle_description_addition(definition_type, name, description)
else:
self.handle_description_substitution(
num_lines, definition_type, name, description
)
def write(self, filename):
"""write modified LookML to filename
Args:
filename (str): filepath of file to write to
Returns:
nothing. Side effect is to write data to file
"""
logging.info("Writing LookML to %s" % filename)
with open(filename, "w") as the_file:
for line in self.lines:
the_file.write(line)
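# Illustrative usage (a hedged sketch; the file name, field name, and
# description below are hypothetical):
# >>> modifier = FileModifier("orders.view.lkml")
# >>> modifier.modify(num_lines=1, definition_type="dimension", name="order_id",
# ...                 description="Primary key of the orders table", has_key=True)
# >>> modifier.write("orders.view.lkml")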
|
py | 1a4606f02a7af06463776786e9debd41bf8a8b6b | import datetime
import json
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404, redirect
from compose.models import DailyEntry, get_current_streak, get_longest_streak
@login_required
def fetch(request):
today = datetime.date.today()
user = request.user
entry = DailyEntry.objects.today(user=user)
total_word_count = sum(e.word_count
for e in DailyEntry.objects.filter(user=user, date__lt=today))
word_count_this_month = sum(e.word_count
for e in DailyEntry.objects.filter(user=user, date__month=today.month))
response = {
'longest_streak': get_longest_streak(user),
'streak_length': get_current_streak(user),
'text': entry.text,
'total_word_count': total_word_count,
'word_count': entry.word_count,
'word_count_goal': entry.word_count_goal,
'word_count_this_month': word_count_this_month,
}
return JsonResponse(response)
@login_required
def update(request):
if request.method == 'POST':
try:
obj = json.loads(request.body.decode('utf-8'))
except (json.JSONDecodeError, UnicodeDecodeError):
return HttpResponse(status=400)
text = obj.get('text')
word_count_goal = obj.get('word_count_goal')
entry = DailyEntry.objects.today(user=request.user)
if text is not None:
entry.text = text
if word_count_goal is not None:
entry.word_count_goal = word_count_goal
entry.save()
return HttpResponse()
else:
return redirect('compose:index')
|
py | 1a46089f943a524d64cbf099d28dc751d9e129ed | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
return
# country = get_country_info(country="Turkey")
# add_country_and_currency("Turkey", country)
|
py | 1a4609bfdf29aeef123b5cf40e88f71bb25c8c68 | import pandas as pd
pd.DataFrame()
class RailwayForm:
formType = "RailwayForm"
def printData(self):
print(f"Name is {self.name}")
print(f"Train is {self.train}")
harrysApplication = RailwayForm()
harrysApplication.name = "Harry"
harrysApplication.train = "Rajdhani Express"
harrysApplication.printData() |
py | 1a4609cd30a8fec7692a73570426eb5a6759a0c2 | import argparse
from argparse import ArgumentParser, Namespace
from typing import Any, Dict, Optional
from emmental.utils.utils import (
nullable_float,
nullable_int,
nullable_string,
str2bool,
str2dict,
)
def parse_args(parser: Optional[ArgumentParser] = None) -> ArgumentParser:
r"""Parse the configuration from command line.
Args:
parser(ArgumentParser): The exterenl argument parser object, defaults to None.
Returns:
ArgumentParser: The updated argument parser object.
"""
if parser is None:
parser = argparse.ArgumentParser(
"Emmental configuration",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
# Load meta configuration
meta_config = parser.add_argument_group("Meta configuration")
meta_config.add_argument(
"--seed",
type=nullable_int,
default=None,
help="Random seed for all numpy/torch/cuda operations in model and learning",
)
meta_config.add_argument(
"--verbose",
type=str2bool,
default=True,
help="Whether to print the log information",
)
meta_config.add_argument(
"--log_path", type=str, default="logs", help="Directory to save running log"
)
# Load data configuration
data_config = parser.add_argument_group("Data configuration")
data_config.add_argument(
"--min_data_len", type=int, default=0, help="Minimal data length"
)
data_config.add_argument(
"--max_data_len",
type=int,
default=0,
help="Maximal data length (0 for no max_len)",
)
# Load model configuration
model_config = parser.add_argument_group("Model configuration")
model_config.add_argument(
"--model_path",
type=nullable_string,
default=None,
help="Path to pretrained model",
)
model_config.add_argument(
"--device",
type=int,
default=0,
help="Which device to use (-1 for cpu or gpu id (e.g., 0 for cuda:0))",
)
model_config.add_argument(
"--dataparallel",
type=str2bool,
default=True,
help="Whether to use dataparallel or not",
)
# Learning configuration
learner_config = parser.add_argument_group("Learning configuration")
learner_config.add_argument(
"--fp16",
type=str2bool,
default=False,
help="Whether to use half precision to train",
)
learner_config.add_argument(
"--n_epochs", type=int, default=1, help="Total number of learning epochs"
)
learner_config.add_argument(
"--train_split",
nargs="+",
type=str,
default=["train"],
help="The split for training",
)
learner_config.add_argument(
"--valid_split",
nargs="+",
type=str,
default=["valid"],
help="The split for validation",
)
learner_config.add_argument(
"--test_split",
nargs="+",
type=str,
default=["test"],
help="The split for testing",
)
learner_config.add_argument(
"--ignore_index",
type=nullable_int,
default=None,
help="The ignore index, uses for masking samples",
)
# Optimizer configuration
optimizer_config = parser.add_argument_group("Optimizer configuration")
optimizer_config.add_argument(
"--optimizer",
type=nullable_string,
default="adam",
choices=[
"asgd",
"adadelta",
"adagrad",
"adam",
"adamw",
"adamax",
"lbfgs",
"rms_prop",
"r_prop",
"sgd",
"sparse_adam",
"bert_adam",
None,
],
help="The optimizer to use",
)
optimizer_config.add_argument("--lr", type=float, default=1e-3, help="Learing rate")
optimizer_config.add_argument(
"--l2", type=float, default=0.0, help="l2 regularization"
)
optimizer_config.add_argument(
"--grad_clip", type=nullable_float, default=None, help="Gradient clipping"
)
# ASGD config
optimizer_config.add_argument(
"--asgd_lambd", type=float, default=0.0001, help="ASGD lambd"
)
optimizer_config.add_argument(
"--asgd_alpha", type=float, default=0.75, help="ASGD alpha"
)
optimizer_config.add_argument(
"--asgd_t0", type=float, default=1000000.0, help="ASGD t0"
)
# Adadelta config
optimizer_config.add_argument(
"--adadelta_rho", type=float, default=0.9, help="Adadelta rho"
)
optimizer_config.add_argument(
"--adadelta_eps", type=float, default=0.000001, help="Adadelta eps"
)
# Adagrad config
optimizer_config.add_argument(
"--adagrad_lr_decay", type=float, default=0, help="Adagrad lr_decay"
)
optimizer_config.add_argument(
"--adagrad_initial_accumulator_value",
type=float,
default=0,
help="Adagrad initial accumulator value",
)
optimizer_config.add_argument(
"--adagrad_eps", type=float, default=0.0000000001, help="Adagrad eps"
)
# Adam config
optimizer_config.add_argument(
"--adam_betas", nargs="+", type=float, default=(0.9, 0.999), help="Adam betas"
)
optimizer_config.add_argument(
"--adam_eps", type=float, default=1e-8, help="Adam eps"
)
optimizer_config.add_argument(
"--adam_amsgrad",
type=str2bool,
default=False,
help="Whether to use the AMSGrad variant of adam",
)
# AdamW config
optimizer_config.add_argument(
"--adamw_betas", nargs="+", type=float, default=(0.9, 0.999), help="AdamW betas"
)
optimizer_config.add_argument(
"--adamw_eps", type=float, default=1e-8, help="AdamW eps"
)
optimizer_config.add_argument(
"--adamw_amsgrad",
type=str2bool,
default=False,
help="Whether to use the AMSGrad variant of AdamW",
)
# Adamax config
optimizer_config.add_argument(
"--adamax_betas",
nargs="+",
type=float,
default=(0.9, 0.999),
help="Adamax betas",
)
optimizer_config.add_argument(
"--adamax_eps", type=float, default=1e-8, help="Adamax eps"
)
# LBFGS config
optimizer_config.add_argument(
"--lbfgs_max_iter", type=int, default=20, help="LBFGS max iter"
)
optimizer_config.add_argument(
"--lbfgs_max_eval", type=nullable_int, default=None, help="LBFGS max eval"
)
optimizer_config.add_argument(
"--lbfgs_tolerance_grad", type=float, default=1e-07, help="LBFGS tolerance grad"
)
optimizer_config.add_argument(
"--lbfgs_tolerance_change",
type=float,
default=1e-09,
help="LBFGS tolerance change",
)
optimizer_config.add_argument(
"--lbfgs_history_size", type=int, default=100, help="LBFGS history size"
)
optimizer_config.add_argument(
"--lbfgs_line_search_fn",
type=nullable_string,
default=None,
help="LBFGS line search fn",
)
# RMSprop config
optimizer_config.add_argument(
"--rms_prop_alpha", type=float, default=0.99, help="RMSprop alpha"
)
optimizer_config.add_argument(
"--rms_prop_eps", type=float, default=1e-08, help="RMSprop eps"
)
optimizer_config.add_argument(
"--rms_prop_momentum", type=float, default=0, help="RMSprop momentum"
)
optimizer_config.add_argument(
"--rms_prop_centered", type=str2bool, default=False, help="RMSprop centered"
)
# Rprop config
optimizer_config.add_argument(
"--r_prop_etas", nargs="+", type=float, default=(0.5, 1.2), help="Rprop etas"
)
optimizer_config.add_argument(
"--r_prop_step_sizes",
nargs="+",
type=float,
default=(1e-06, 50),
help="Rprop step sizes",
)
# SGD config
optimizer_config.add_argument(
"--sgd_momentum", type=float, default=0, help="SGD momentum"
)
optimizer_config.add_argument(
"--sgd_dampening", type=float, default=0, help="SGD dampening"
)
optimizer_config.add_argument(
"--sgd_nesterov", type=str2bool, default=False, help="SGD nesterov"
)
# SparseAdam config
optimizer_config.add_argument(
"--sparse_adam_betas",
nargs="+",
type=float,
default=(0.9, 0.999),
help="SparseAdam betas",
)
optimizer_config.add_argument(
"--sparse_adam_eps", type=float, default=1e-08, help="SparseAdam eps"
)
# BertAdam config
optimizer_config.add_argument(
"--bert_adam_betas",
nargs="+",
type=float,
default=(0.9, 0.999),
help="BertAdam betas",
)
optimizer_config.add_argument(
"--bert_adam_eps", type=float, default=1e-08, help="BertAdam eps"
)
# Scheduler configuration
scheduler_config = parser.add_argument_group("Scheduler configuration")
scheduler_config.add_argument(
"--lr_scheduler",
type=nullable_string,
default=None,
choices=[
"linear",
"exponential",
"plateau",
"step",
"multi_step",
"cyclic",
"one_cycle",
"cosine_annealing",
],
help="Learning rate scheduler",
)
scheduler_config.add_argument(
"--lr_scheduler_step_unit",
type=str,
default="batch",
choices=["batch", "epoch"],
help="Learning rate scheduler step unit",
)
scheduler_config.add_argument(
"--lr_scheduler_step_freq",
type=int,
default=1,
help="Learning rate scheduler step freq",
)
scheduler_config.add_argument(
"--warmup_steps", type=float, default=None, help="Warm up steps"
)
scheduler_config.add_argument(
"--warmup_unit",
type=str,
default="batch",
choices=["batch", "epoch"],
help="Warm up unit",
)
scheduler_config.add_argument(
"--warmup_percentage", type=float, default=None, help="Warm up percentage"
)
scheduler_config.add_argument(
"--min_lr", type=float, default=0.0, help="Minimum learning rate"
)
scheduler_config.add_argument(
"--exponential_lr_scheduler_gamma",
type=float,
default=0.9,
help="Gamma for exponential lr scheduler",
)
# ReduceLROnPlateau lr scheduler config
scheduler_config.add_argument(
"--plateau_lr_scheduler_metric",
type=str,
default="model/train/all/loss",
help="Metric of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_mode",
type=str,
default="min",
choices=["min", "max"],
help="Mode of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_factor",
type=float,
default=0.1,
help="Factor of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_patience",
type=int,
default=10,
help="Patience for plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_threshold",
type=float,
default=0.0001,
help="Threshold of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_threshold_mode",
type=str,
default="rel",
choices=["rel", "abs"],
help="Threshold mode of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_cooldown",
type=int,
default=0,
help="Cooldown of plateau lr scheduler",
)
scheduler_config.add_argument(
"--plateau_lr_scheduler_eps",
type=float,
default=0.00000001,
help="Eps of plateau lr scheduler",
)
# Step lr scheduler config
scheduler_config.add_argument(
"--step_lr_scheduler_step_size",
type=int,
default=1,
help="Period of learning rate decay",
)
scheduler_config.add_argument(
"--step_lr_scheduler_gamma",
type=float,
default=0.1,
help="Multiplicative factor of learning rate decay",
)
scheduler_config.add_argument(
"--step_lr_scheduler_last_epoch",
type=int,
default=-1,
help="The index of last epoch",
)
scheduler_config.add_argument(
"--multi_step_lr_scheduler_milestones",
nargs="+",
type=int,
default=[1000],
help="List of epoch indices. Must be increasing.",
)
scheduler_config.add_argument(
"--multi_step_lr_scheduler_gamma",
type=float,
default=0.1,
help="Multiplicative factor of learning rate decay",
)
scheduler_config.add_argument(
"--multi_step_lr_scheduler_last_epoch",
type=int,
default=-1,
help="The index of last epoch",
)
# Cyclic lr scheduler config
scheduler_config.add_argument(
"--cyclic_lr_scheduler_base_lr",
nargs="+",
type=float,
default=0.001,
help="Base lr of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_max_lr",
nargs="+",
type=float,
default=0.1,
help="Max lr of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_step_size_up",
type=int,
default=2000,
help="Step size up of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_step_size_down",
type=nullable_int,
default=None,
help="Step size down of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_mode",
type=nullable_string,
default="triangular",
help="Mode of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_gamma",
type=float,
default=1.0,
help="Gamma of cyclic lr scheduler",
)
# TODO: support cyclic_lr_scheduler_scale_fn
scheduler_config.add_argument(
"--cyclic_lr_scheduler_scale_mode",
type=str,
default="cycle",
choices=["cycle", "iterations"],
help="Scale mode of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_cycle_momentum",
type=str2bool,
default=True,
help="Cycle momentum of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_base_momentum",
nargs="+",
type=float,
default=0.8,
help="Base momentum of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_max_momentum",
nargs="+",
type=float,
default=0.9,
help="Max momentum of cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cyclic_lr_scheduler_last_epoch",
type=int,
default=-1,
help="Last epoch of cyclic lr scheduler",
)
# One cycle lr scheduler config
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_max_lr",
nargs="+",
type=float,
default=0.1,
help="Max lr of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_pct_start",
type=float,
default=0.3,
help="Percentage start of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_anneal_strategy",
type=str,
default="cos",
choices=["cos", "linear"],
help="Anneal strategyr of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_cycle_momentum",
type=str2bool,
default=True,
help="Cycle momentum of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_base_momentum",
nargs="+",
type=float,
default=0.85,
help="Base momentum of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_max_momentum",
nargs="+",
type=float,
default=0.95,
help="Max momentum of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_div_factor",
type=float,
default=25,
help="Div factor of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_final_div_factor",
type=float,
default=1e4,
help="Final div factor of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--one_cycle_lr_scheduler_last_epoch",
type=int,
default=-1,
help="Last epoch of one cyclic lr scheduler",
)
scheduler_config.add_argument(
"--cosine_annealing_lr_scheduler_last_epoch",
type=int,
default=-1,
help="The index of last epoch",
)
scheduler_config.add_argument(
"--task_scheduler",
type=str,
default="round_robin",
# choices=["sequential", "round_robin", "mixed"],
help="Task scheduler",
)
scheduler_config.add_argument(
"--sequential_scheduler_fillup",
type=str2bool,
default=False,
help="Whether fillup in sequential scheduler",
)
scheduler_config.add_argument(
"--round_robin_scheduler_fillup",
type=str2bool,
default=False,
help="whether fillup in round robin scheduler",
)
scheduler_config.add_argument(
"--mixed_scheduler_fillup",
type=str2bool,
default=False,
help="whether fillup in mixed scheduler scheduler",
)
# Logging configuration
logging_config = parser.add_argument_group("Logging configuration")
logging_config.add_argument(
"--counter_unit",
type=str,
default="epoch",
choices=["epoch", "batch"],
help="Logging unit (epoch, batch)",
)
logging_config.add_argument(
"--evaluation_freq", type=float, default=1, help="Logging evaluation frequency"
)
logging_config.add_argument(
"--writer",
type=str,
default="tensorboard",
choices=["json", "tensorboard"],
help="The writer format (json, tensorboard)",
)
logging_config.add_argument(
"--checkpointing",
type=str2bool,
default=False,
help="Whether to checkpoint the model",
)
logging_config.add_argument(
"--checkpoint_path", type=str, default=None, help="Checkpointing path"
)
logging_config.add_argument(
"--checkpoint_freq",
type=int,
default=1,
help="Checkpointing every k logging time",
)
logging_config.add_argument(
"--checkpoint_metric",
type=str2dict,
default={"model/train/all/loss": "min"},
help=(
"Checkpointing metric (metric_name:mode), "
"e.g., `model/train/all/loss:min`"
),
)
logging_config.add_argument(
"--checkpoint_task_metrics",
type=str2dict,
default=None,
help=(
"Task specific checkpointing metric "
"(metric_name1:mode1,metric_name2:mode2)"
),
)
logging_config.add_argument(
"--checkpoint_runway",
type=float,
default=0,
help="Checkpointing runway (no checkpointing before k checkpointing unit)",
)
logging_config.add_argument(
"--clear_intermediate_checkpoints",
type=str2bool,
default=True,
help="Whether to clear intermediate checkpoints",
)
logging_config.add_argument(
"--clear_all_checkpoints",
type=str2bool,
default=False,
help="Whether to clear all checkpoints",
)
return parser
def parse_args_to_config(args: Namespace) -> Dict[str, Any]:
r"""Parse the arguments to config dict
Args:
args(Namespace): The parsed namespace from argument parser.
Returns:
dict: The config dict.
"""
config = {
"meta_config": {
"seed": args.seed,
"verbose": args.verbose,
"log_path": args.log_path,
},
"data_config": {
"min_data_len": args.min_data_len,
"max_data_len": args.max_data_len,
},
"model_config": {
"model_path": args.model_path,
"device": args.device,
"dataparallel": args.dataparallel,
},
"learner_config": {
"fp16": args.fp16,
"n_epochs": args.n_epochs,
"train_split": args.train_split,
"valid_split": args.valid_split,
"test_split": args.test_split,
"ignore_index": args.ignore_index,
"optimizer_config": {
"optimizer": args.optimizer,
"lr": args.lr,
"l2": args.l2,
"grad_clip": args.grad_clip,
"asgd_config": {
"lambd": args.asgd_lambd,
"alpha": args.asgd_alpha,
"t0": args.asgd_t0,
},
"adadelta_config": {"rho": args.adadelta_rho, "eps": args.adadelta_eps},
"adagrad_config": {
"lr_decay": args.adagrad_lr_decay,
"initial_accumulator_value": args.adagrad_initial_accumulator_value,
"eps": args.adagrad_eps,
},
"adam_config": {
"betas": args.adam_betas,
"amsgrad": args.adam_amsgrad,
"eps": args.adam_eps,
},
"adamw_config": {
"betas": args.adamw_betas,
"amsgrad": args.adamw_amsgrad,
"eps": args.adamw_eps,
},
"adamax_config": {"betas": args.adamax_betas, "eps": args.adamax_eps},
"lbfgs_config": {
"max_iter": args.lbfgs_max_iter,
"max_eval": args.lbfgs_max_eval,
"tolerance_grad": args.lbfgs_tolerance_grad,
"tolerance_change": args.lbfgs_tolerance_change,
"history_size": args.lbfgs_history_size,
"line_search_fn": args.lbfgs_line_search_fn,
},
"rms_prop_config": {
"alpha": args.rms_prop_alpha,
"eps": args.rms_prop_eps,
"momentum": args.rms_prop_momentum,
"centered": args.rms_prop_centered,
},
"r_prop_config": {
"etas": args.r_prop_etas,
"step_sizes": args.r_prop_step_sizes,
},
"sgd_config": {
"momentum": args.sgd_momentum,
"dampening": args.sgd_dampening,
"nesterov": args.sgd_nesterov,
},
"sparse_adam_config": {
"betas": args.sparse_adam_betas,
"eps": args.sparse_adam_eps,
},
"bert_adam_config": {
"betas": args.bert_adam_betas,
"eps": args.bert_adam_eps,
},
},
"lr_scheduler_config": {
"lr_scheduler": args.lr_scheduler,
"lr_scheduler_step_unit": args.lr_scheduler_step_unit,
"lr_scheduler_step_freq": args.lr_scheduler_step_freq,
"warmup_steps": args.warmup_steps,
"warmup_unit": args.warmup_unit,
"warmup_percentage": args.warmup_percentage,
"min_lr": args.min_lr,
"exponential_config": {"gamma": args.exponential_lr_scheduler_gamma},
"plateau_config": {
"metric": args.plateau_lr_scheduler_metric,
"mode": args.plateau_lr_scheduler_mode,
"factor": args.plateau_lr_scheduler_factor,
"patience": args.plateau_lr_scheduler_patience,
"threshold": args.plateau_lr_scheduler_threshold,
"threshold_mode": args.plateau_lr_scheduler_threshold_mode,
"cooldown": args.plateau_lr_scheduler_cooldown,
"eps": args.plateau_lr_scheduler_eps,
},
"step_config": {
"step_size": args.step_lr_scheduler_step_size,
"gamma": args.step_lr_scheduler_gamma,
"last_epoch": args.step_lr_scheduler_last_epoch,
},
"multi_step_config": {
"milestones": args.multi_step_lr_scheduler_milestones,
"gamma": args.multi_step_lr_scheduler_gamma,
"last_epoch": args.multi_step_lr_scheduler_last_epoch,
},
"cyclic_config": {
"base_lr": args.cyclic_lr_scheduler_base_lr,
"max_lr": args.cyclic_lr_scheduler_max_lr,
"step_size_up": args.cyclic_lr_scheduler_step_size_up,
"step_size_down": args.cyclic_lr_scheduler_step_size_down,
"mode": args.cyclic_lr_scheduler_mode,
"gamma": args.cyclic_lr_scheduler_gamma,
"scale_fn": None,
"scale_mode": args.cyclic_lr_scheduler_scale_mode,
"cycle_momentum": args.cyclic_lr_scheduler_cycle_momentum,
"base_momentum": args.cyclic_lr_scheduler_base_momentum,
"max_momentum": args.cyclic_lr_scheduler_max_momentum,
"last_epoch": args.cyclic_lr_scheduler_last_epoch,
},
"one_cycle_config": {
"max_lr": args.one_cycle_lr_scheduler_max_lr,
"pct_start": args.one_cycle_lr_scheduler_pct_start,
"anneal_strategy": args.one_cycle_lr_scheduler_anneal_strategy,
"cycle_momentum": args.one_cycle_lr_scheduler_cycle_momentum,
"base_momentum": args.one_cycle_lr_scheduler_base_momentum,
"max_momentum": args.one_cycle_lr_scheduler_max_momentum,
"div_factor": args.one_cycle_lr_scheduler_div_factor,
"final_div_factor": args.one_cycle_lr_scheduler_final_div_factor,
"last_epoch": args.one_cycle_lr_scheduler_last_epoch,
},
"cosine_annealing_config": {
"last_epoch": args.cosine_annealing_lr_scheduler_last_epoch
},
},
"task_scheduler_config": {
"task_scheduler": args.task_scheduler,
"sequential_scheduler_config": {
"fillup": args.sequential_scheduler_fillup
},
"round_robin_scheduler_config": {
"fillup": args.round_robin_scheduler_fillup
},
"mixed_scheduler_config": {"fillup": args.mixed_scheduler_fillup},
},
},
"logging_config": {
"counter_unit": args.counter_unit,
"evaluation_freq": args.evaluation_freq,
"writer_config": {"writer": args.writer, "verbose": True},
"checkpointing": args.checkpointing,
"checkpointer_config": {
"checkpoint_path": args.checkpoint_path,
"checkpoint_freq": args.checkpoint_freq,
"checkpoint_metric": args.checkpoint_metric,
"checkpoint_task_metrics": args.checkpoint_task_metrics,
"checkpoint_runway": args.checkpoint_runway,
"clear_intermediate_checkpoints": args.clear_intermediate_checkpoints,
"clear_all_checkpoints": args.clear_all_checkpoints,
},
},
}
return config
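# A minimal usage sketch of how the two helpers above are typically combined:
# build the parser, parse an explicit flag list, then convert the flat Namespace
# into Emmental's nested config dict. The flag values below are arbitrary
# examples, not values required by Emmental; any flags defined above would work.
def example_build_config() -> Dict[str, Any]:
    r"""Return a config dict built from a small, explicit set of flags."""
    parser = parse_args()
    # Parsing an explicit argv list keeps this sketch independent of sys.argv.
    args = parser.parse_args(["--n_epochs", "3", "--lr", "0.001"])
    return parse_args_to_config(args)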
|
py | 1a460a7df3318a421431acb3fa972e09608fd8e2 | #!/usr/bin/env python
import os
import scipy.io as sio
import glob
PYTHON_DIR = os.path.dirname(os.path.realpath(__file__))
DATA_DIR = os.path.join(os.path.dirname(PYTHON_DIR), 'pmtkdataCopy')
def load_mat(matName):
"""look for the .mat file in pmtk3/pmtkdataCopy/
currently only support .mat files create by Matlab 5,6,7~7.2,
"""
try:
data = sio.loadmat(os.path.join(DATA_DIR, matName))
except NotImplementedError:
raise
except FileNotFoundError:
raise
return data
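# A minimal usage sketch for load_mat, assuming a dataset such as "crabs.mat"
# exists in pmtk3/pmtkdataCopy/ (the file name here is only an example).
def example_load_mat(name='crabs.mat'):
    """Load a dataset by name and return its variables, dropping metadata keys."""
    data = load_mat(name)
    # scipy.io.loadmat returns a dict of variable name -> numpy array, plus
    # '__header__'/'__version__'/'__globals__' metadata entries we filter out.
    return {k: v for k, v in data.items() if not k.startswith('__')}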
def generate_rst():
"""generate chX.rst in current working directory"""
cwd = os.getcwd()
demo_dir = os.path.join(cwd, 'demos')
chapters = os.listdir(demo_dir)
for chapter in chapters:
if not os.path.isdir(os.path.join(demo_dir, chapter)):
continue
reg_py = os.path.join(demo_dir, chapter, '*.py')
scripts = glob.glob(reg_py)
rst_file = chapter + '.rst'
rst_file = os.path.join(demo_dir, chapter, rst_file)
with open(rst_file, 'w') as f:
f.write(chapter)
f.write('\n========================================\n')
for script in scripts:
script_name = os.path.basename(script)
f.write('\n' + script_name[:-3])
f.write('\n----------------------------------------\n')
reg_png = os.path.join(demo_dir,
chapter,
script_name[:-3] + '*.png')
for img in glob.glob(reg_png):
img_name = os.path.basename(img)
f.write(".. image:: " + img_name + "\n")
f.write(".. literalinclude:: " + script_name + "\n")
if __name__ == '__main__':
generate_rst()
print("Finished generate chX.rst!")
|
py | 1a460ada3f82e90f97730ade91791aadd1243a98 | """Integration tests for auto"""
import socket
from unittest import TestCase
import mock
import pytest
from lexicon.providers.auto import _get_ns_records_domains_for_domain
from lexicon.tests.providers.integration_tests import IntegrationTestsV2
# This fixture ensures that _get_ns_records_domains_for_domain is mocked, in order not to rely
# on the machine on which the test is run, as this function calls nslookup.
# This prevents errors when there is no network or the tested domain no longer exists.
@pytest.fixture(autouse=True)
def _nslookup_mock(request):
if request.node.name == "test_nslookup_resolution":
# Do not mock for the test that specifically test nslookup resolution.
yield
else:
with mock.patch(
"lexicon.providers.auto._get_ns_records_for_domain",
return_value=["ns.ovh.net"],
) as fixture:
yield fixture
# Guys, are we online ?
def _there_is_no_network():
try:
socket.create_connection(("www.google.com", 80))
return False
except (OSError, IOError):
pass
return True
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class AutoProviderTests(TestCase, IntegrationTestsV2):
"""TestCase for auto"""
provider_name = "auto"
domain = "pacalis.net"
def _filter_headers(self):
return ["X-Ovh-Application", "X-Ovh-Consumer", "X-Ovh-Signature"]
def _test_parameters_overrides(self):
return {"auth_entrypoint": "ovh-eu"}
def _test_fallback_fn(self):
return lambda x: "placeholder_" + x if x != "mapping_override" else None
    # Here we do not mock the function _get_ns_records_domains_for_domain
    # in order to effectively test the nslookup call and processing.
@pytest.mark.skipif(
_there_is_no_network(), reason="No network, no nslookup call possible."
)
def test_nslookup_resolution(self):
"""Ensure that nameservers can be resolved through os nslookup call."""
assert _get_ns_records_domains_for_domain("google.com")
|
py | 1a460adfed2bf845b43805604640feadad396745 | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2017-2018 The Astral Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test compact blocks (BIP 152).
Version 1 compact blocks are pre-segwit (txids)
Version 2 compact blocks are post-segwit (wtxids)
"""
from test_framework.mininode import *
from test_framework.test_framework import AstralTestFramework
from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
from test_framework.script import CScript, OP_TRUE
# TestNode: A peer we use to send messages to astrald, and store responses.
class TestNode(NodeConnCB):
def __init__(self):
super().__init__()
self.last_sendcmpct = []
self.block_announced = False
# Store the hashes of blocks we've seen announced.
# This is for synchronizing the p2p message traffic,
# so we can eg wait until a particular block is announced.
self.announced_blockhashes = set()
def on_sendcmpct(self, conn, message):
self.last_sendcmpct.append(message)
def on_cmpctblock(self, conn, message):
self.block_announced = True
self.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
self.announced_blockhashes.add(self.last_message["cmpctblock"].header_and_shortids.header.sha256)
def on_headers(self, conn, message):
self.block_announced = True
for x in self.last_message["headers"].headers:
x.calc_sha256()
self.announced_blockhashes.add(x.sha256)
def on_inv(self, conn, message):
for x in self.last_message["inv"].inv:
if x.type == 2:
self.block_announced = True
self.announced_blockhashes.add(x.hash)
# Requires caller to hold mininode_lock
def received_block_announcement(self):
return self.block_announced
def clear_block_announcement(self):
with mininode_lock:
self.block_announced = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
self.last_message.pop("cmpctblock", None)
def get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
self.connection.send_message(msg)
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [CBlockHeader(b) for b in new_blocks]
self.send_message(headers_message)
def request_headers_and_sync(self, locator, hashstop=0):
self.clear_block_announcement()
self.get_headers(locator, hashstop)
wait_until(self.received_block_announcement, timeout=30, lock=mininode_lock)
self.clear_block_announcement()
# Block until a block announcement for a particular block hash is
# received.
def wait_for_block_announcement(self, block_hash, timeout=30):
def received_hash():
return (block_hash in self.announced_blockhashes)
wait_until(received_hash, timeout=timeout, lock=mininode_lock)
def send_await_disconnect(self, message, timeout=30):
"""Sends a message to the node and wait for disconnect.
This is used when we want to send a message into the node that we expect
will get us disconnected, eg an invalid block."""
self.send_message(message)
wait_until(lambda: not self.connected, timeout=timeout, lock=mininode_lock)
class CompactBlocksTest(AstralTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
# Node0 = pre-segwit, node1 = segwit-aware
self.num_nodes = 2
self.extra_args = [["-vbparams=segwit:0:0"], ["-txindex"]]
self.utxos = []
def build_block_on_tip(self, node, segwit=False):
height = node.getblockcount()
tip = node.getbestblockhash()
mtp = node.getblockheader(tip)['mediantime']
block = create_block(int(tip, 16), create_coinbase(height + 1), mtp + 1)
block.nVersion = 4
if segwit:
add_witness_commitment(block)
block.solve()
return block
# Create 10 more anyone-can-spend utxo's for testing.
def make_utxos(self):
# Doesn't matter which node we use, just use node0.
block = self.build_block_on_tip(self.nodes[0])
self.test_node.send_and_ping(msg_block(block))
assert(int(self.nodes[0].getbestblockhash(), 16) == block.sha256)
self.nodes[0].generate(100)
total_value = block.vtx[0].vout[0].nValue
out_value = total_value // 10
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b''))
for i in range(10):
tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
tx.rehash()
block2 = self.build_block_on_tip(self.nodes[0])
block2.vtx.append(tx)
block2.hashMerkleRoot = block2.calc_merkle_root()
block2.solve()
self.test_node.send_and_ping(msg_block(block2))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)])
return
# Test "sendcmpct" (between peers preferring the same version):
# - No compact block announcements unless sendcmpct is sent.
# - If sendcmpct is sent with version > preferred_version, the message is ignored.
# - If sendcmpct is sent with boolean 0, then block announcements are not
# made with compact blocks.
# - If sendcmpct is then sent with boolean 1, then new block announcements
# are made with compact blocks.
# If old_node is passed in, request compact blocks with version=preferred-1
# and verify that it receives block announcements via compact block.
def test_sendcmpct(self, node, test_node, preferred_version, old_node=None):
# Make sure we get a SENDCMPCT message from our peer
def received_sendcmpct():
return (len(test_node.last_sendcmpct) > 0)
wait_until(received_sendcmpct, timeout=30, lock=mininode_lock)
with mininode_lock:
# Check that the first version received is the preferred one
assert_equal(test_node.last_sendcmpct[0].version, preferred_version)
# And that we receive versions down to 1.
assert_equal(test_node.last_sendcmpct[-1].version, 1)
test_node.last_sendcmpct = []
tip = int(node.getbestblockhash(), 16)
def check_announcement_of_new_block(node, peer, predicate):
peer.clear_block_announcement()
block_hash = int(node.generate(1)[0], 16)
peer.wait_for_block_announcement(block_hash, timeout=30)
assert(peer.block_announced)
with mininode_lock:
assert predicate(peer), (
"block_hash={!r}, cmpctblock={!r}, inv={!r}".format(
block_hash, peer.last_message.get("cmpctblock", None), peer.last_message.get("inv", None)))
# We shouldn't get any block announcements via cmpctblock yet.
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)
# Try one more time, this time after requesting headers.
test_node.request_headers_and_sync(locator=[tip])
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "inv" in p.last_message)
# Test a few ways of using sendcmpct that should NOT
# result in compact block announcements.
# Before each test, sync the headers chain.
test_node.request_headers_and_sync(locator=[tip])
# Now try a SENDCMPCT message with too-high version
sendcmpct = msg_sendcmpct()
sendcmpct.version = preferred_version+1
sendcmpct.announce = True
test_node.send_and_ping(sendcmpct)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)
# Headers sync before next test.
test_node.request_headers_and_sync(locator=[tip])
# Now try a SENDCMPCT message with valid version, but announce=False
sendcmpct.version = preferred_version
sendcmpct.announce = False
test_node.send_and_ping(sendcmpct)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)
# Headers sync before next test.
test_node.request_headers_and_sync(locator=[tip])
# Finally, try a SENDCMPCT message with announce=True
sendcmpct.version = preferred_version
sendcmpct.announce = True
test_node.send_and_ping(sendcmpct)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Try one more time (no headers sync should be needed!)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Try one more time, after turning on sendheaders
test_node.send_and_ping(msg_sendheaders())
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Try one more time, after sending a version-1, announce=false message.
sendcmpct.version = preferred_version-1
sendcmpct.announce = False
test_node.send_and_ping(sendcmpct)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Now turn off announcements
sendcmpct.version = preferred_version
sendcmpct.announce = False
test_node.send_and_ping(sendcmpct)
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "headers" in p.last_message)
if old_node is not None:
# Verify that a peer using an older protocol version can receive
# announcements from this node.
sendcmpct.version = preferred_version-1
sendcmpct.announce = True
old_node.send_and_ping(sendcmpct)
# Header sync
old_node.request_headers_and_sync(locator=[tip])
check_announcement_of_new_block(node, old_node, lambda p: "cmpctblock" in p.last_message)
# This test actually causes astrald to (reasonably!) disconnect us, so do this last.
def test_invalid_cmpctblock_message(self):
self.nodes[0].generate(101)
block = self.build_block_on_tip(self.nodes[0])
cmpct_block = P2PHeaderAndShortIDs()
cmpct_block.header = CBlockHeader(block)
cmpct_block.prefilled_txn_length = 1
# This index will be too high
prefilled_txn = PrefilledTransaction(1, block.vtx[0])
cmpct_block.prefilled_txn = [prefilled_txn]
self.test_node.send_await_disconnect(msg_cmpctblock(cmpct_block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.hashPrevBlock)
# Compare the generated shortids to what we expect based on BIP 152, given
# astrald's choice of nonce.
def test_compactblock_construction(self, node, test_node, version, use_witness_address):
# Generate a bunch of transactions.
node.generate(101)
num_transactions = 25
address = node.getnewaddress()
if use_witness_address:
# Want at least one segwit spend, so move all funds to
# a witness address.
address = node.addwitnessaddress(address)
value_to_send = node.getbalance()
node.sendtoaddress(address, satoshi_round(value_to_send-Decimal(0.1)))
node.generate(1)
segwit_tx_generated = False
for i in range(num_transactions):
txid = node.sendtoaddress(address, 0.1)
hex_tx = node.gettransaction(txid)["hex"]
tx = FromHex(CTransaction(), hex_tx)
if not tx.wit.is_null():
segwit_tx_generated = True
if use_witness_address:
assert(segwit_tx_generated) # check that our test is not broken
# Wait until we've seen the block announcement for the resulting tip
tip = int(node.getbestblockhash(), 16)
test_node.wait_for_block_announcement(tip)
# Make sure we will receive a fast-announce compact block
self.request_cb_announcements(test_node, node, version)
# Now mine a block, and look at the resulting compact block.
test_node.clear_block_announcement()
block_hash = int(node.generate(1)[0], 16)
# Store the raw block in our internal format.
block = FromHex(CBlock(), node.getblock("%02x" % block_hash, False))
for tx in block.vtx:
tx.calc_sha256()
block.rehash()
# Wait until the block was announced (via compact blocks)
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
# Now fetch and check the compact block
header_and_shortids = None
with mininode_lock:
assert("cmpctblock" in test_node.last_message)
# Convert the on-the-wire representation to absolute indexes
header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
# Now fetch the compact block using a normal non-announce getdata
with mininode_lock:
test_node.clear_block_announcement()
inv = CInv(4, block_hash) # 4 == "CompactBlock"
test_node.send_message(msg_getdata([inv]))
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
# Now fetch and check the compact block
header_and_shortids = None
with mininode_lock:
assert("cmpctblock" in test_node.last_message)
# Convert the on-the-wire representation to absolute indexes
header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
def check_compactblock_construction_from_block(self, version, header_and_shortids, block_hash, block):
# Check that we got the right block!
header_and_shortids.header.calc_sha256()
assert_equal(header_and_shortids.header.sha256, block_hash)
# Make sure the prefilled_txn appears to have included the coinbase
assert(len(header_and_shortids.prefilled_txn) >= 1)
assert_equal(header_and_shortids.prefilled_txn[0].index, 0)
# Check that all prefilled_txn entries match what's in the block.
for entry in header_and_shortids.prefilled_txn:
entry.tx.calc_sha256()
# This checks the non-witness parts of the tx agree
assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256)
# And this checks the witness
wtxid = entry.tx.calc_sha256(True)
if version == 2:
assert_equal(wtxid, block.vtx[entry.index].calc_sha256(True))
else:
# Shouldn't have received a witness
assert(entry.tx.wit.is_null())
# Check that the cmpctblock message announced all the transactions.
assert_equal(len(header_and_shortids.prefilled_txn) + len(header_and_shortids.shortids), len(block.vtx))
# And now check that all the shortids are as expected as well.
# Determine the siphash keys to use.
[k0, k1] = header_and_shortids.get_siphash_keys()
index = 0
while index < len(block.vtx):
if (len(header_and_shortids.prefilled_txn) > 0 and
header_and_shortids.prefilled_txn[0].index == index):
# Already checked prefilled transactions above
header_and_shortids.prefilled_txn.pop(0)
else:
tx_hash = block.vtx[index].sha256
if version == 2:
tx_hash = block.vtx[index].calc_sha256(True)
shortid = calculate_shortid(k0, k1, tx_hash)
assert_equal(shortid, header_and_shortids.shortids[0])
header_and_shortids.shortids.pop(0)
index += 1
# Test that astrald requests compact blocks when we announce new blocks
# via header or inv, and that responding to getblocktxn causes the block
# to be successfully reconstructed.
# Post-segwit: upgraded nodes would only make this request of cb-version-2,
# NODE_WITNESS peers. Unupgraded nodes would still make this request of
# any cb-version-1-supporting peer.
def test_compactblock_requests(self, node, test_node, version, segwit):
# Try announcing a block with an inv or header, expect a compactblock
# request
for announce in ["inv", "header"]:
block = self.build_block_on_tip(node, segwit=segwit)
with mininode_lock:
test_node.last_message.pop("getdata", None)
if announce == "inv":
test_node.send_message(msg_inv([CInv(2, block.sha256)]))
wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.send_header_for_blocks([block])
else:
test_node.send_header_for_blocks([block])
wait_until(lambda: "getdata" in test_node.last_message, timeout=30, lock=mininode_lock)
assert_equal(len(test_node.last_message["getdata"].inv), 1)
assert_equal(test_node.last_message["getdata"].inv[0].type, 4)
assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
# Send back a compactblock message that omits the coinbase
comp_block = HeaderAndShortIDs()
comp_block.header = CBlockHeader(block)
comp_block.nonce = 0
[k0, k1] = comp_block.get_siphash_keys()
coinbase_hash = block.vtx[0].sha256
if version == 2:
coinbase_hash = block.vtx[0].calc_sha256(True)
comp_block.shortids = [
calculate_shortid(k0, k1, coinbase_hash) ]
test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
# Expect a getblocktxn message.
with mininode_lock:
assert("getblocktxn" in test_node.last_message)
absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, [0]) # should be a coinbase request
# Send the coinbase, and verify that the tip advances.
if version == 2:
msg = msg_witness_blocktxn()
else:
msg = msg_blocktxn()
msg.block_transactions.blockhash = block.sha256
msg.block_transactions.transactions = [block.vtx[0]]
test_node.send_and_ping(msg)
assert_equal(int(node.getbestblockhash(), 16), block.sha256)
# Create a chain of transactions from given utxo, and add to a new block.
def build_block_with_transactions(self, node, utxo, num_transactions):
block = self.build_block_on_tip(node)
for i in range(num_transactions):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE])))
tx.rehash()
utxo = [tx.sha256, 0, tx.vout[0].nValue]
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
return block
# Test that we only receive getblocktxn requests for transactions that the
# node needs, and that responding to them causes the block to be
# reconstructed.
def test_getblocktxn_requests(self, node, test_node, version):
with_witness = (version==2)
def test_getblocktxn_response(compact_block, peer, expected_result):
msg = msg_cmpctblock(compact_block.to_p2p())
peer.send_and_ping(msg)
with mininode_lock:
assert("getblocktxn" in peer.last_message)
absolute_indexes = peer.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, expected_result)
def test_tip_after_message(node, peer, msg, tip):
peer.send_and_ping(msg)
assert_equal(int(node.getbestblockhash(), 16), tip)
# First try announcing compactblocks that won't reconstruct, and verify
# that we receive getblocktxn messages back.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block, use_witness=with_witness)
test_getblocktxn_response(comp_block, test_node, [1, 2, 3, 4, 5])
msg_bt = msg_blocktxn()
if with_witness:
msg_bt = msg_witness_blocktxn() # serialize with witnesses
msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[1:])
test_tip_after_message(node, test_node, msg_bt, block.sha256)
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
# Now try interspersing the prefilled transactions
comp_block.initialize_from_block(block, prefill_list=[0, 1, 5], use_witness=with_witness)
test_getblocktxn_response(comp_block, test_node, [2, 3, 4])
msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[2:5])
test_tip_after_message(node, test_node, msg_bt, block.sha256)
# Now try giving one transaction ahead of time.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
test_node.send_and_ping(msg_tx(block.vtx[1]))
assert(block.vtx[1].hash in node.getrawmempool())
# Prefill 4 out of the 6 transactions, and verify that only the one
# that was not in the mempool is requested.
comp_block.initialize_from_block(block, prefill_list=[0, 2, 3, 4], use_witness=with_witness)
test_getblocktxn_response(comp_block, test_node, [5])
msg_bt.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]])
test_tip_after_message(node, test_node, msg_bt, block.sha256)
# Now provide all transactions to the node before the block is
# announced and verify reconstruction happens immediately.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 10)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
for tx in block.vtx[1:]:
test_node.send_message(msg_tx(tx))
test_node.sync_with_ping()
# Make sure all transactions were accepted.
mempool = node.getrawmempool()
for tx in block.vtx[1:]:
assert(tx.hash in mempool)
# Clear out last request.
with mininode_lock:
test_node.last_message.pop("getblocktxn", None)
# Send compact block
comp_block.initialize_from_block(block, prefill_list=[0], use_witness=with_witness)
test_tip_after_message(node, test_node, msg_cmpctblock(comp_block.to_p2p()), block.sha256)
with mininode_lock:
# Shouldn't have gotten a request for any transaction
assert("getblocktxn" not in test_node.last_message)
# Incorrectly responding to a getblocktxn shouldn't cause the block to be
# permanently failed.
def test_incorrect_blocktxn_response(self, node, test_node, version):
if (len(self.utxos) == 0):
self.make_utxos()
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 10)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
# Relay the first 5 transactions from the block in advance
for tx in block.vtx[1:6]:
test_node.send_message(msg_tx(tx))
test_node.sync_with_ping()
# Make sure all transactions were accepted.
mempool = node.getrawmempool()
for tx in block.vtx[1:6]:
assert(tx.hash in mempool)
# Send compact block
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block, prefill_list=[0], use_witness=(version == 2))
test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
absolute_indexes = []
with mininode_lock:
assert("getblocktxn" in test_node.last_message)
absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, [6, 7, 8, 9, 10])
# Now give an incorrect response.
# Note that it's possible for astrald to be smart enough to know we're
# lying, since it could check to see if the shortid matches what we're
# sending, and eg disconnect us for misbehavior. If that behavior
# change were made, we could just modify this test by having a
# different peer provide the block further down, so that we're still
# verifying that the block isn't marked bad permanently. This is good
# enough for now.
msg = msg_blocktxn()
if version==2:
msg = msg_witness_blocktxn()
msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]] + block.vtx[7:])
test_node.send_and_ping(msg)
# Tip should not have updated
assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
# We should receive a getdata request
wait_until(lambda: "getdata" in test_node.last_message, timeout=10, lock=mininode_lock)
assert_equal(len(test_node.last_message["getdata"].inv), 1)
assert(test_node.last_message["getdata"].inv[0].type == 2 or test_node.last_message["getdata"].inv[0].type == 2|MSG_WITNESS_FLAG)
assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
# Deliver the block
if version==2:
test_node.send_and_ping(msg_witness_block(block))
else:
test_node.send_and_ping(msg_block(block))
assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def test_getblocktxn_handler(self, node, test_node, version):
# astrald will not send blocktxn responses for blocks whose height is
# more than 10 blocks deep.
MAX_GETBLOCKTXN_DEPTH = 10
chain_height = node.getblockcount()
current_height = chain_height
while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH):
block_hash = node.getblockhash(current_height)
block = FromHex(CBlock(), node.getblock(block_hash, False))
msg = msg_getblocktxn()
msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [])
num_to_request = random.randint(1, len(block.vtx))
msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
test_node.send_message(msg)
wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10, lock=mininode_lock)
[tx.calc_sha256() for tx in block.vtx]
with mininode_lock:
assert_equal(test_node.last_message["blocktxn"].block_transactions.blockhash, int(block_hash, 16))
all_indices = msg.block_txn_request.to_absolute()
for index in all_indices:
tx = test_node.last_message["blocktxn"].block_transactions.transactions.pop(0)
tx.calc_sha256()
assert_equal(tx.sha256, block.vtx[index].sha256)
if version == 1:
# Witnesses should have been stripped
assert(tx.wit.is_null())
else:
# Check that the witness matches
assert_equal(tx.calc_sha256(True), block.vtx[index].calc_sha256(True))
test_node.last_message.pop("blocktxn", None)
current_height -= 1
# Next request should send a full block response, as we're past the
# allowed depth for a blocktxn response.
block_hash = node.getblockhash(current_height)
msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0])
with mininode_lock:
test_node.last_message.pop("block", None)
test_node.last_message.pop("blocktxn", None)
test_node.send_and_ping(msg)
with mininode_lock:
test_node.last_message["block"].block.calc_sha256()
assert_equal(test_node.last_message["block"].block.sha256, int(block_hash, 16))
assert "blocktxn" not in test_node.last_message
def test_compactblocks_not_at_tip(self, node, test_node):
# Test that requesting old compactblocks doesn't work.
MAX_CMPCTBLOCK_DEPTH = 5
new_blocks = []
for i in range(MAX_CMPCTBLOCK_DEPTH + 1):
test_node.clear_block_announcement()
new_blocks.append(node.generate(1)[0])
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
node.generate(1)
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
with mininode_lock:
test_node.last_message.pop("block", None)
test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock)
with mininode_lock:
test_node.last_message["block"].block.calc_sha256()
assert_equal(test_node.last_message["block"].block.sha256, int(new_blocks[0], 16))
# Generate an old compactblock, and verify that it's not accepted.
cur_height = node.getblockcount()
hashPrevBlock = int(node.getblockhash(cur_height-5), 16)
block = self.build_block_on_tip(node)
block.hashPrevBlock = hashPrevBlock
block.solve()
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block)
test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
tips = node.getchaintips()
found = False
for x in tips:
if x["hash"] == block.hash:
assert_equal(x["status"], "headers-only")
found = True
break
assert(found)
# Requesting this block via getblocktxn should silently fail
# (to avoid fingerprinting attacks).
msg = msg_getblocktxn()
msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
with mininode_lock:
test_node.last_message.pop("blocktxn", None)
test_node.send_and_ping(msg)
with mininode_lock:
assert "blocktxn" not in test_node.last_message
def activate_segwit(self, node):
node.generate(144*3)
assert_equal(get_bip9_status(node, "segwit")["status"], 'active')
def test_end_to_end_block_relay(self, node, listeners):
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 10)
[l.clear_block_announcement() for l in listeners]
# ToHex() won't serialize with witness, but this block has no witnesses
# anyway. TODO: repeat this test with witness tx's to a segwit node.
node.submitblock(ToHex(block))
for l in listeners:
wait_until(lambda: l.received_block_announcement(), timeout=30, lock=mininode_lock)
with mininode_lock:
for l in listeners:
assert "cmpctblock" in l.last_message
l.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
assert_equal(l.last_message["cmpctblock"].header_and_shortids.header.sha256, block.sha256)
# Test that we don't get disconnected if we relay a compact block with valid header,
# but invalid transactions.
def test_invalid_tx_in_compactblock(self, node, test_node, use_segwit):
assert(len(self.utxos))
utxo = self.utxos[0]
block = self.build_block_with_transactions(node, utxo, 5)
del block.vtx[3]
block.hashMerkleRoot = block.calc_merkle_root()
if use_segwit:
# If we're testing with segwit, also drop the coinbase witness,
# but include the witness commitment.
add_witness_commitment(block)
block.vtx[0].wit.vtxinwit = []
block.solve()
# Now send the compact block with all transactions prefilled, and
# verify that we don't get disconnected.
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block, prefill_list=[0, 1, 2, 3, 4], use_witness=use_segwit)
msg = msg_cmpctblock(comp_block.to_p2p())
test_node.send_and_ping(msg)
# Check that the tip didn't advance
assert(int(node.getbestblockhash(), 16) is not block.sha256)
test_node.sync_with_ping()
# Helper for enabling cb announcements
# Send the sendcmpct request and sync headers
def request_cb_announcements(self, peer, node, version):
tip = node.getbestblockhash()
peer.get_headers(locator=[int(tip, 16)], hashstop=0)
msg = msg_sendcmpct()
msg.version = version
msg.announce = True
peer.send_and_ping(msg)
def test_compactblock_reconstruction_multiple_peers(self, node, stalling_peer, delivery_peer):
assert(len(self.utxos))
def announce_cmpct_block(node, peer):
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(node, utxo, 5)
cmpct_block = HeaderAndShortIDs()
cmpct_block.initialize_from_block(block)
msg = msg_cmpctblock(cmpct_block.to_p2p())
peer.send_and_ping(msg)
with mininode_lock:
assert "getblocktxn" in peer.last_message
return block, cmpct_block
block, cmpct_block = announce_cmpct_block(node, stalling_peer)
for tx in block.vtx[1:]:
delivery_peer.send_message(msg_tx(tx))
delivery_peer.sync_with_ping()
mempool = node.getrawmempool()
for tx in block.vtx[1:]:
assert(tx.hash in mempool)
delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
assert_equal(int(node.getbestblockhash(), 16), block.sha256)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
# Now test that delivering an invalid compact block won't break relay
block, cmpct_block = announce_cmpct_block(node, stalling_peer)
for tx in block.vtx[1:]:
delivery_peer.send_message(msg_tx(tx))
delivery_peer.sync_with_ping()
cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [ CTxInWitness() ]
cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
cmpct_block.use_witness = True
delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
assert(int(node.getbestblockhash(), 16) != block.sha256)
msg = msg_blocktxn()
msg.block_transactions.blockhash = block.sha256
msg.block_transactions.transactions = block.vtx[1:]
stalling_peer.send_and_ping(msg)
assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def run_test(self):
# Setup the p2p connections and start up the network thread.
self.test_node = TestNode()
self.segwit_node = TestNode()
self.old_node = TestNode() # version 1 peer <--> segwit node
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
self.segwit_node, services=NODE_NETWORK|NODE_WITNESS))
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
self.old_node, services=NODE_NETWORK))
self.test_node.add_connection(connections[0])
self.segwit_node.add_connection(connections[1])
self.old_node.add_connection(connections[2])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
self.test_node.wait_for_verack()
# We will need UTXOs to construct transactions in later tests.
self.make_utxos()
self.log.info("Running tests, pre-segwit activation:")
self.log.info("Testing SENDCMPCT p2p message... ")
self.test_sendcmpct(self.nodes[0], self.test_node, 1)
sync_blocks(self.nodes)
self.test_sendcmpct(self.nodes[1], self.segwit_node, 2, old_node=self.old_node)
sync_blocks(self.nodes)
self.log.info("Testing compactblock construction...")
self.test_compactblock_construction(self.nodes[0], self.test_node, 1, False)
sync_blocks(self.nodes)
self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, False)
sync_blocks(self.nodes)
self.log.info("Testing compactblock requests... ")
self.test_compactblock_requests(self.nodes[0], self.test_node, 1, False)
sync_blocks(self.nodes)
self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, False)
sync_blocks(self.nodes)
self.log.info("Testing getblocktxn requests...")
self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1)
sync_blocks(self.nodes)
self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2)
sync_blocks(self.nodes)
self.log.info("Testing getblocktxn handler...")
self.test_getblocktxn_handler(self.nodes[0], self.test_node, 1)
sync_blocks(self.nodes)
self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2)
self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1)
sync_blocks(self.nodes)
self.log.info("Testing compactblock requests/announcements not at chain tip...")
self.test_compactblocks_not_at_tip(self.nodes[0], self.test_node)
sync_blocks(self.nodes)
self.test_compactblocks_not_at_tip(self.nodes[1], self.segwit_node)
self.test_compactblocks_not_at_tip(self.nodes[1], self.old_node)
sync_blocks(self.nodes)
self.log.info("Testing handling of incorrect blocktxn responses...")
self.test_incorrect_blocktxn_response(self.nodes[0], self.test_node, 1)
sync_blocks(self.nodes)
self.test_incorrect_blocktxn_response(self.nodes[1], self.segwit_node, 2)
sync_blocks(self.nodes)
# End-to-end block relay tests
self.log.info("Testing end-to-end block relay...")
self.request_cb_announcements(self.test_node, self.nodes[0], 1)
self.request_cb_announcements(self.old_node, self.nodes[1], 1)
self.request_cb_announcements(self.segwit_node, self.nodes[1], 2)
self.test_end_to_end_block_relay(self.nodes[0], [self.segwit_node, self.test_node, self.old_node])
self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node])
self.log.info("Testing handling of invalid compact blocks...")
self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False)
self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, False)
self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, False)
self.log.info("Testing reconstructing compact blocks from all peers...")
self.test_compactblock_reconstruction_multiple_peers(self.nodes[1], self.segwit_node, self.old_node)
sync_blocks(self.nodes)
# Advance to segwit activation
self.log.info("Advancing to segwit activation")
self.activate_segwit(self.nodes[1])
self.log.info("Running tests, post-segwit activation...")
self.log.info("Testing compactblock construction...")
self.test_compactblock_construction(self.nodes[1], self.old_node, 1, True)
self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, True)
sync_blocks(self.nodes)
self.log.info("Testing compactblock requests (unupgraded node)... ")
self.test_compactblock_requests(self.nodes[0], self.test_node, 1, True)
self.log.info("Testing getblocktxn requests (unupgraded node)...")
self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1)
# Need to manually sync node0 and node1, because post-segwit activation,
# node1 will not download blocks from node0.
self.log.info("Syncing nodes...")
assert(self.nodes[0].getbestblockhash() != self.nodes[1].getbestblockhash())
while (self.nodes[0].getblockcount() > self.nodes[1].getblockcount()):
block_hash = self.nodes[0].getblockhash(self.nodes[1].getblockcount()+1)
self.nodes[1].submitblock(self.nodes[0].getblock(block_hash, False))
assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
self.log.info("Testing compactblock requests (segwit node)... ")
self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, True)
self.log.info("Testing getblocktxn requests (segwit node)...")
self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2)
sync_blocks(self.nodes)
self.log.info("Testing getblocktxn handler (segwit node should return witnesses)...")
self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2)
self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1)
# Test that if we submitblock to node1, we'll get a compact block
# announcement to all peers.
# (Post-segwit activation, blocks won't propagate from node0 to node1
# automatically, so don't bother testing a block announced to node0.)
self.log.info("Testing end-to-end block relay...")
self.request_cb_announcements(self.test_node, self.nodes[0], 1)
self.request_cb_announcements(self.old_node, self.nodes[1], 1)
self.request_cb_announcements(self.segwit_node, self.nodes[1], 2)
self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node])
self.log.info("Testing handling of invalid compact blocks...")
self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False)
self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, True)
self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, True)
self.log.info("Testing invalid index in cmpctblock message...")
self.test_invalid_cmpctblock_message()
if __name__ == '__main__':
CompactBlocksTest().main()
|
py | 1a460b82eab12c87afe20ffb63926afa500368fb | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
network config settings, used in train.py and eval.py
"""
class DataConfig:
"""data config"""
data_vocab_size = 184965
train_num_of_parts = 21
test_num_of_parts = 3
batch_size = 16000
data_field_size = 39
data_format = 1
class ModelConfig:
"""model config"""
batch_size = DataConfig.batch_size
data_field_size = DataConfig.data_field_size
data_vocab_size = DataConfig.data_vocab_size
data_emb_dim = 80
deep_layer_args = [[1024, 512, 256, 128], "relu"]
init_args = [-0.01, 0.01]
weight_bias_init = ['normal', 'normal']
keep_prob = 0.9
class TrainConfig:
"""train config"""
batch_size = DataConfig.batch_size
l2_coef = 8e-5
learning_rate = 5e-4
epsilon = 5e-8
loss_scale = 1024.0
train_epochs = 5
save_checkpoint = True
ckpt_file_name_prefix = "deepfm"
save_checkpoint_steps = 1
keep_checkpoint_max = 50
eval_callback = True
loss_callback = True
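# Illustrative usage sketch (train.py / eval.py are not part of this file and the import
# path below is an assumption): these config classes are plain attribute namespaces,
# typically consumed roughly like
#
#     from src.config import DataConfig, ModelConfig, TrainConfig   # hypothetical path
#     data_config, model_config, train_config = DataConfig(), ModelConfig(), TrainConfig()
#     print(train_config.batch_size, model_config.data_emb_dim)     # -> 16000 80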
|
py | 1a460bcab4af5647d2b60313796f8eb59e7d8574 | import sys
from config_parser.config_parser import ConfigGenerator
from torch_runner.experiment_setup import setup_experiment, load_config, clean_experiment_directory
config, config_object = load_config()
print(config)
clean_experiment_directory(config)
|
py | 1a460c9eb6bfca9541be7e2c6f73586e9654533e | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Binary for running temperature scaling, writing temperature param to disk."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v2 as tf
from uq_benchmark_2019 import array_utils
from uq_benchmark_2019 import calibration_lib
from uq_benchmark_2019 import metrics_lib
from uq_benchmark_2019 import uq_utils
gfile = tf.io.gfile
FLAGS = flags.FLAGS
NUM_EXAMPLES = 20000
def _declare_flags():
"""Declare flags; not invoked when this module is imported as a library."""
flags.DEFINE_string('prediction_path', None, 'Path to predictions file.')
def run(prediction_path):
"""Run temperature scaling."""
stats = array_utils.load_stats_from_tfrecords(prediction_path)
probs = stats['probs'].astype(np.float32)
labels = stats['labels'].astype(np.int32)
if len(labels.shape) > 1:
labels = np.squeeze(labels, -1)
if probs.shape[0] > NUM_EXAMPLES:
probs = probs[:NUM_EXAMPLES, :]
labels = labels[:NUM_EXAMPLES]
probs = metrics_lib.soften_probabilities(probs=probs)
logits = uq_utils.np_inverse_softmax(probs)
temp = calibration_lib.find_scaling_temperature(labels, logits)
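# (Temperature scaling fits a single scalar T so that softmax(logits / T) is better
# calibrated on held-out data; only this temperature is written to disk, the model
# itself is left unchanged.)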
with gfile.GFile(
os.path.join(os.path.dirname(prediction_path),
'temperature_hparam.json'), 'w') as fh:
fh.write(json.dumps({'temperature': temp}))
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
tf.enable_v2_behavior()
run(FLAGS.prediction_path)
if __name__ == '__main__':
_declare_flags()
app.run(main)
|
py | 1a460e1d0a17e8d4e0bac02c8eb38aa18e234872 | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class LdpConnectedInterface(Base):
"""LDP Interface level Configuration
The LdpConnectedInterface class encapsulates a list of ldpConnectedInterface resources that are managed by the user.
A list of resources can be retrieved from the server using the LdpConnectedInterface.find() method.
The list can be managed by using the LdpConnectedInterface.add() and LdpConnectedInterface.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'ldpConnectedInterface'
_SDM_ATT_MAP = {
'Active': 'active',
'Authentication': 'authentication',
'BasicHelloInterval': 'basicHelloInterval',
'BasicHoldTime': 'basicHoldTime',
'ConnectedVia': 'connectedVia',
'Count': 'count',
'DescriptiveName': 'descriptiveName',
'EnableBfdRegistration': 'enableBfdRegistration',
'Errors': 'errors',
'LabelSpaceID': 'labelSpaceID',
'LocalRouterID': 'localRouterID',
'MD5Key': 'mD5Key',
'Multiplier': 'multiplier',
'Name': 'name',
'OperationMode': 'operationMode',
'SessionStatus': 'sessionStatus',
'StackedLayers': 'stackedLayers',
'StateCounts': 'stateCounts',
'Status': 'status',
}
def __init__(self, parent):
super(LdpConnectedInterface, self).__init__(parent)
@property
def Active(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Activate/Deactivate Configuration
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
@property
def Authentication(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): The type of cryptographic authentication to be used on this link interface
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Authentication']))
@property
def BasicHelloInterval(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): The number of seconds between this router's Hello packets.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BasicHelloInterval']))
@property
def BasicHoldTime(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Maximum length of time that a sending LSR will retain the record of Hellos sent by the receiving LSR, without receiving another Hello message.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BasicHoldTime']))
@property
def ConnectedVia(self):
"""DEPRECATED
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
"""
return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def Count(self):
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DescriptiveName(self):
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def EnableBfdRegistration(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Enable BFD Registration
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableBfdRegistration']))
@property
def Errors(self):
"""
Returns
-------
- list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def LabelSpaceID(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Identifies the set of labels that will be used. Part of the LDP Identifier.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LabelSpaceID']))
@property
def LocalRouterID(self):
"""
Returns
-------
- list(str): Router ID
"""
return self._get_attribute(self._SDM_ATT_MAP['LocalRouterID'])
@property
def MD5Key(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): A value to be used as the secret MD5 Key.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['MD5Key']))
@property
def Multiplier(self):
"""
Returns
-------
- number: Number of layer instances per parent instance (multiplier)
"""
return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def OperationMode(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): The type of LDP Label Advertisement.
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OperationMode']))
@property
def SessionStatus(self):
"""
Returns
-------
- list(str[down | notStarted | up]): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionStatus'])
@property
def StackedLayers(self):
"""
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of secondary (many to one) child layer protocols
"""
return self._get_attribute(self._SDM_ATT_MAP['StackedLayers'])
@StackedLayers.setter
def StackedLayers(self, value):
self._set_attribute(self._SDM_ATT_MAP['StackedLayers'], value)
@property
def StateCounts(self):
"""
Returns
-------
- dict(total:number,notStarted:number,down:number,up:number): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
"""
return self._get_attribute(self._SDM_ATT_MAP['StateCounts'])
@property
def Status(self):
"""
Returns
-------
- str(configured | error | mixed | notStarted | started | starting | stopping): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
"""
return self._get_attribute(self._SDM_ATT_MAP['Status'])
def update(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
"""Updates ldpConnectedInterface resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
"""Adds a new ldpConnectedInterface resource on the server and adds it to the container.
Args
----
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
Returns
-------
- self: This instance with all currently retrieved ldpConnectedInterface resources using find and the newly added ldpConnectedInterface resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained ldpConnectedInterface resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, ConnectedVia=None, Count=None, DescriptiveName=None, Errors=None, LocalRouterID=None, Multiplier=None, Name=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None):
"""Finds and retrieves ldpConnectedInterface resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ldpConnectedInterface resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all ldpConnectedInterface resources from the server.
Args
----
- ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
- Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
- DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
- Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred
- LocalRouterID (list(str)): Router ID
- Multiplier (number): Number of layer instances per parent instance (multiplier)
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
- SessionStatus (list(str[down | notStarted | up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
- StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
- StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
- Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
Returns
-------
- self: This instance with matching ldpConnectedInterface resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of ldpConnectedInterface data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the ldpConnectedInterface resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def get_device_ids(self, PortNames=None, Active=None, Authentication=None, BasicHelloInterval=None, BasicHoldTime=None, EnableBfdRegistration=None, LabelSpaceID=None, MD5Key=None, OperationMode=None):
"""Base class infrastructure that gets a list of ldpConnectedInterface device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args
----
- PortNames (str): optional regex of port names
- Active (str): optional regex of active
- Authentication (str): optional regex of authentication
- BasicHelloInterval (str): optional regex of basicHelloInterval
- BasicHoldTime (str): optional regex of basicHoldTime
- EnableBfdRegistration (str): optional regex of enableBfdRegistration
- LabelSpaceID (str): optional regex of labelSpaceID
- MD5Key (str): optional regex of mD5Key
- OperationMode (str): optional regex of operationMode
Returns
-------
- list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._get_ngpf_device_ids(locals())
def Abort(self, *args, **kwargs):
"""Executes the abort operation on the server.
Abort CPF control plane (equals to demote to kUnconfigured state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
abort(SessionIndices=list)
--------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
abort(SessionIndices=string)
----------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('abort', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
"""Executes the restartDown operation on the server.
Stop and start interfaces and sessions that are in Down state.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
restartDown(SessionIndices=list)
--------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
restartDown(SessionIndices=string)
----------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('restartDown', payload=payload, response_object=None)
def ResumeBasicHello(self, *args, **kwargs):
"""Executes the resumeBasicHello operation on the server.
Resume sending LDP Basic Hellos
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
resumeBasicHello(SessionIndices=list)
-------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
resumeBasicHello(SessionIndices=string)
---------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('resumeBasicHello', payload=payload, response_object=None)
def Resumebasichello(self, *args, **kwargs):
"""Executes the resumebasichello operation on the server.
Resume Basic LDP Hello Messages.
resumebasichello(Arg2=list)list
-------------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('resumebasichello', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Start CPF control plane (equals to promote to negotiated state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(SessionIndices=list)
--------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
start(SessionIndices=string)
----------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
"""Executes the stop operation on the server.
Stop CPF control plane (equals to demote to PreValidated-DoDDone state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stop(SessionIndices=list)
-------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
stop(SessionIndices=string)
---------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
def StopBasicHello(self, *args, **kwargs):
"""Executes the stopBasicHello operation on the server.
Stop sending LDP Basic Hellos
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stopBasicHello(SessionIndices=list)
-----------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
stopBasicHello(SessionIndices=string)
-------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stopBasicHello', payload=payload, response_object=None)
def Stopbasichello(self, *args, **kwargs):
"""Executes the stopbasichello operation on the server.
Stops Basic LDP Hello Messages.
stopbasichello(Arg2=list)list
-----------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stopbasichello', payload=payload, response_object=None)
|
py | 1a460e8bdee04ff55da382e7352b1f0536fe214f | #this was initiated by atom(conan)
#partially modified by opkr
import os
import math
from cereal import car, log
from common.params import Params
from selfdrive.car.hyundai.spdcontroller import SpdController
import common.log as trace1
from selfdrive.controls.lib.events import Events
EventName = car.CarEvent.EventName
LaneChangeState = log.LateralPlan.LaneChangeState
class Spdctrl(SpdController):
def __init__(self, CP=None):
super().__init__( CP )
self.cv_Raio = 0.4
self.cv_Dist = -5
self.steer_mode = ""
self.cruise_gap = 0.0
self.cut_in = False
self.map_enable = False
self.map_spdlimit_offset = 0
self.target_speed = 0
self.target_speed_camera = 0
self.target_speed_map = 0.0
self.target_speed_map_counter = 0
self.target_speed_map_counter1 = 0
self.target_speed_map_counter2 = 0
self.hesitant_status = False
self.hesitant_timer = 0
self.map_decel_only = False
self.map_spdlimit_offset = int(Params().get("OpkrSpeedLimitOffset", encoding="utf8"))
self.map_enabled = False
self.second = 0
def update_lead(self, sm, CS, dRel, yRel, vRel, CC):
self.map_decel_only = CS.out.cruiseState.modeSel == 4
plan = sm['longitudinalPlan']
dRele = plan.dRel1 #EON Lead
yRele = plan.yRel1 #EON Lead
vRele = plan.vRel1 * 3.6 + 0.5 #EON Lead
dRelef = plan.dRel2 #EON Lead
yRelef = plan.yRel2 #EON Lead
vRelef = plan.vRel2 * 3.6 + 0.5 #EON Lead
lead2_status = plan.status2
self.second += 1
if self.second > 100:
self.map_enabled = Params().get_bool("OpkrMapEnable")
self.second = 0
if self.map_enabled:
self.target_speed_camera = plan.targetSpeedCamera + round(plan.targetSpeedCamera*0.01*self.map_spdlimit_offset)
else:
self.target_speed_camera = CS.out.safetySign + round(CS.out.safetySign*0.01*self.map_spdlimit_offset)
if self.target_speed_camera <= 29:
self.map_enable = False
self.target_speed = 0
elif self.target_speed_camera > 29 and (plan.onSpeedControl if self.map_enabled else CS.on_speed_control):
self.target_speed = self.target_speed_camera
self.map_enable = True
else:
self.target_speed = 0
self.map_enable = False
lead_set_speed = int(round(self.cruise_set_speed_kph))
lead_wait_cmd = 300
dRel = 150
vRel = 0
dRel2 = 140
vRel2 = 0
#dRel, yRel, vRel = self.get_lead( sm, CS )
if 1 < dRele < 149:
dRel = int(dRele) # use the dRele (EON inter-vehicle gap) value
vRel = int(vRele)
elif 1 < CS.lead_distance < 149:
dRel = int(CS.lead_distance) # use the CS.lead_distance (radar inter-vehicle gap) value
vRel = int(CS.lead_objspd)
else:
dRel = 150
vRel = 0
if 1 < dRelef < 140:
dRel2 = int(dRelef)
vRel2 = int(vRelef) # for cut-in detection??
dst_lead_distance = int(CS.clu_Vanz*self.cv_Raio) # reference keep distance
dst_lead_distance2 = int(CS.clu_Vanz*0.4) # reference keep distance
if dst_lead_distance > 100:
dst_lead_distance = 100
#elif dst_lead_distance < 15:
#dst_lead_distance = 15
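# Worked example (assuming clu_Vanz is in km/h): at 100 km/h with cv_Raio = 0.4 the
# reference gap above is int(100 * 0.4) = 40 m, and it is capped at 100 m.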
if 1 < dRel < 149: # if the gap to the lead car is under 150 m, i.e. a lead car is detected,
self.time_no_lean = 0
d_delta = dRel - dst_lead_distance # d_delta = lead gap (EON value) - keep distance
lead_objspd = vRel # relative speed of the lead vehicle.
else:
d_delta = 0
lead_objspd = 0
if 1 < dRel2 < 140:
d_delta2 = dRel2 - dst_lead_distance2
else:
d_delta2 = 0
if CS.driverAcc_time and not self.map_decel_only: # when the driver presses the accelerator, sync the cruise set speed to current speed + 1
if int(CS.VSetDis) < int(round(CS.clu_Vanz)) + 1:
lead_set_speed = int(round(CS.clu_Vanz)) + 1
self.seq_step_debug = "운전자가속"
lead_wait_cmd = 10
elif int(round(self.target_speed)) < int(CS.VSetDis) and self.map_enable and ((int(round(self.target_speed)) < int(round(self.cruise_set_speed_kph))) and self.target_speed != 0):
Events().add(EventName.camSpeedDown)
self.seq_step_debug = "맵기반감속"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -1)
elif CC.res_speed != 0 and CC.res_speed < int(CS.VSetDis):
self.seq_step_debug = "RES속도조정"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -1)
# distance-keeping conditions
elif d_delta < 0 or d_delta2 < 0 and not self.map_decel_only: # we have come closer than the reference keep distance (current speed * 0.4)
if (int(CS.clu_Vanz)-1) <= int(CS.VSetDis) and dRele - dRelef > 3 and lead2_status:
self.seq_step_debug = "끼어들기감지"
#lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 15, -5)
self.cut_in = True
elif lead_objspd < 0 and self.cut_in == True and (int(CS.clu_Vanz)-6) <= int(CS.VSetDis) and dRele < int(CS.clu_Vanz)*0.25 and int(CS.clu_Vanz) > 80:
self.seq_step_debug = "거리확보3"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -1)
elif lead_objspd < 0 and self.cut_in == True and (int(CS.clu_Vanz)-4) <= int(CS.VSetDis) and dRele < int(CS.clu_Vanz)*0.3 and int(CS.clu_Vanz) > 50:
self.seq_step_debug = "거리확보2"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -1)
elif lead_objspd < 0 and self.cut_in == True and (int(CS.clu_Vanz)-2) <= int(CS.VSetDis) and dRele < int(CS.clu_Vanz)*0.35 and int(CS.clu_Vanz) > 20:
self.seq_step_debug = "거리확보1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -1)
elif lead_objspd <= 0 and self.cut_in == True and (int(CS.clu_Vanz)-3) <= int(CS.VSetDis):
self.seq_step_debug = "끼어들기감속중"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 10, -1)
elif lead_objspd < -30 or (dRel < 60 and CS.clu_Vanz > 60 and lead_objspd < -5) and (int(CS.clu_Vanz)-5) <= int(CS.VSetDis): # case where the car that cut in is braking hard
self.seq_step_debug = "기준내,-5"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 15, -5)
self.cut_in = False
elif lead_objspd < -20 or (dRel < 80 and CS.clu_Vanz > 80 and lead_objspd < -5) and (int(CS.clu_Vanz)-4) <= int(CS.VSetDis): # case where the car that cut in is braking hard
self.seq_step_debug = "기준내,-4"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 15, -4)
self.cut_in = False
elif lead_objspd < -10 and (int(CS.clu_Vanz)-3) <= int(CS.VSetDis):
self.seq_step_debug = "기준내,-3"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 15, -3)
self.cut_in = False
elif lead_objspd < 0 and (int(CS.clu_Vanz)-1) <= int(CS.VSetDis):
self.seq_step_debug = "기준내,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 35, -1)
self.cut_in = False
elif lead_objspd > 3 and int(CS.clu_Vanz) <= int(CS.VSetDis):
self.seq_step_debug = "기준내,앞차가속"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 60, 1)
self.cut_in = False
elif lead_objspd >= 0 and int(CS.clu_Vanz) <= int(CS.VSetDis):
self.seq_step_debug = "기준내>=0,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 290, -1)
self.cut_in = False
else:
self.seq_step_debug = "거리유지"
self.cut_in = False
# deceleration conditions while the lead vehicle is still far away
elif 20 <= dRel < 149 and lead_objspd < -20 and not self.map_decel_only: # when a stopped or hard-braking vehicle is detected
self.cut_in = False
if dRel >= 50:
self.seq_step_debug = "정차차량 감속"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 8, -10)
elif dRel >= 30:
self.seq_step_debug = "정차차량 감속"
lead_wait_cmd, lead_set_speed = self.get_tm_speed(CS, 20, -10)
elif self.cruise_set_speed_kph > int(round((CS.clu_Vanz))) and not self.map_decel_only: # when the EON set speed is higher than the current vehicle speed
self.cut_in = False
if 10 > dRel > 3 and lead_objspd <= 0 and 1 < int(CS.clu_Vanz) <= 7 and CS.VSetDis < 45 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "출발속도조정"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 7, 5)
elif 20 > dRel > 3 and lead_objspd > 5 and CS.clu_Vanz <= 25 and CS.VSetDis < 55 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "SS>VS,출발"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 110, 1)
#elif lead_objspd > 9 and CS.clu_Vanz > 20 and CS.VSetDis < 45: # bump the set speed up a lot when the lead car accelerates hard right after launch
# self.seq_step_debug = "SS>VS,초가"
# lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 10, 5)
#elif lead_objspd > 8 and CS.clu_Vanz > 45 and CS.VSetDis < 60: # bump the set speed up a lot when the lead car accelerates hard at medium speed
# self.seq_step_debug = "SS>VS,중가"
# lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 15, 5)
#elif lead_objspd > 7 and CS.clu_Vanz > 65 and CS.VSetDis < 80:
# self.seq_step_debug = "SS>VS,종가"
# lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 15, 5)
elif lead_objspd > 0 and int(CS.clu_Vanz//lead_objspd) >= int(CS.VSetDis//lead_objspd) and int(CS.clu_Vanz*0.4) < dRel < 149 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "SS>VS,++1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 15, 1)
elif lead_objspd > 0 and int(CS.clu_Vanz)+lead_objspd >= int(CS.VSetDis) and int(CS.clu_Vanz*0.4) < dRel < 149 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0) and not self.hesitant_status:
self.seq_step_debug = "SS>VS,+1"
if int(CS.VSetDis) > int(CS.clu_Vanz)+14:
self.hesitant_status = True
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 15, 1)
elif CS.clu_Vanz > 80 and lead_objspd < -1 and (int(CS.clu_Vanz)-1) <= int(CS.VSetDis) and int(CS.clu_Vanz) >= dRel*1.7 and 1 < dRel < 149: # decel outside the keep-distance range: the lead is braking and the gap has closed below roughly half the current speed, back off gradually with the relative speed
self.seq_step_debug = "SS>VS,v>80,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, max(15, 50+(lead_objspd*2)), -1)
elif CS.clu_Vanz > 60 and lead_objspd < -1 and (int(CS.clu_Vanz)-1) <= int(CS.VSetDis) and int(CS.clu_Vanz) >= dRel*1.9 and 1 < dRel < 149: # decel outside the keep-distance range: the lead is braking and the gap has closed below roughly half the current speed, back off gradually with the relative speed
self.seq_step_debug = "SS>VS,v>60,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, max(15, 50+(lead_objspd*2)), -1)
elif CS.clu_Vanz > 40 and lead_objspd < -1 and (int(CS.clu_Vanz)-1) <= int(CS.VSetDis) and int(CS.clu_Vanz) >= dRel*2.2 and 1 < dRel < 149: # decel outside the keep-distance range: the lead is braking and the gap has closed below roughly half the current speed, back off gradually with the relative speed
self.seq_step_debug = "SS>VS,v>40,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, max(15, 50+(lead_objspd*2)), -1)
elif 60 > CS.clu_Vanz > 30 and lead_objspd < -1 and (int(CS.clu_Vanz)-1) <= int(CS.VSetDis) and int(CS.clu_Vanz) >= dRel*0.85 and 1 < dRel < 149:
self.seq_step_debug = "SS>VS,60>v>30,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, max(15, 150-(abs(lead_objspd**3))), -1)
elif 7 < int(CS.clu_Vanz) < 30 and lead_objspd < 0 and CS.VSetDis > 30:
self.seq_step_debug = "SS>VS,30이하"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 10, -5)
elif lead_objspd <= 0 and int(CS.clu_Vanz)+5 <= int(CS.VSetDis) and int(CS.clu_Vanz) > 40 and 1 < dRel < 149: # when matched with the lead car's speed, hold the cruise set speed at current speed + 5
self.seq_step_debug = "SS>VS,vRel<=0"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 25, -1)
elif d_delta == 0 and lead_objspd == 0 and int(CS.clu_Vanz//10) >= int(CS.VSetDis//10) and dRel > 149 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "선행차없음"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 15, 5)
elif d_delta == 0 and lead_objspd == 0 and self.cruise_set_speed_kph > int(CS.VSetDis) and int(CS.clu_Vanz//10) >= int(CS.VSetDis//10) and dRel > 149 and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "점진가속"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 30, 1)
elif lead_objspd == 0 and int(CS.clu_Vanz) == 0 and dRel <= 6:
self.seq_step_debug = "출발대기"
else:
self.seq_step_debug = "SS>VS,거리유지"
if self.hesitant_status and self.hesitant_timer > 150:
self.hesitant_status = False
self.hesitant_timer = 0
elif self.hesitant_status:
self.hesitant_timer += 1
elif lead_objspd >= 0 and CS.clu_Vanz >= int(CS.VSetDis) and int(CS.clu_Vanz * 0.5) < dRel < 149 and not self.map_decel_only:
self.cut_in = False
self.seq_step_debug = "속도유지"
elif lead_objspd < 0 and int(CS.clu_Vanz * 0.5) >= dRel > 1 and not self.map_decel_only:
self.cut_in = False
self.seq_step_debug = "일반감속,-1"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 50, -1)
elif self.map_decel_only and self.cruise_set_speed_kph > int(round(CS.VSetDis)) and ((int(round(self.target_speed)) > int(CS.VSetDis) and self.target_speed != 0) or self.target_speed == 0):
self.seq_step_debug = "속도원복"
lead_wait_cmd, lead_set_speed = self.get_tm_speed( CS, 10, 1)
else:
self.cut_in = False
self.seq_step_debug = "속도유지"
return lead_wait_cmd, lead_set_speed
def update_curv(self, CS, sm, curve_speed):
wait_time_cmd = 0
set_speed = self.cruise_set_speed_kph
# 2. 커브 감속.
#if self.cruise_set_speed_kph >= 100:
if CS.out.cruiseState.modeSel == 1 and sm['lateralPlan'].laneChangeState == LaneChangeState.off and not (CS.out.leftBlinker or CS.out.rightBlinker)and not self.map_decel_only:
if curve_speed < 40 and CS.clu_Vanz > 40 and CS.lead_distance >= 15:
set_speed = min(45, self.cruise_set_speed_kph - int(CS.clu_Vanz * 0.25))
self.seq_step_debug = "커브감속-5"
wait_time_cmd = 10
elif curve_speed < 60 and CS.clu_Vanz > 40 and CS.lead_distance >= 15:
set_speed = min(55, self.cruise_set_speed_kph - int(CS.clu_Vanz * 0.2))
self.seq_step_debug = "커브감속-4"
wait_time_cmd = 20
elif curve_speed < 70 and CS.clu_Vanz > 40 and CS.lead_distance >= 15:
set_speed = min(65, self.cruise_set_speed_kph - int(CS.clu_Vanz * 0.15))
self.seq_step_debug = "커브감속-3"
wait_time_cmd = 30
elif curve_speed < 80 and CS.clu_Vanz > 40 and CS.lead_distance >= 15:
set_speed = min(75, self.cruise_set_speed_kph - int(CS.clu_Vanz * 0.1))
self.seq_step_debug = "커브감속-2"
wait_time_cmd = 40
elif curve_speed < 90 and CS.clu_Vanz > 40 and CS.lead_distance >= 15:
set_speed = min(85, self.cruise_set_speed_kph - int(CS.clu_Vanz * 0.05))
self.seq_step_debug = "커브감속-1"
wait_time_cmd = 50
return wait_time_cmd, set_speed
def update_log(self, CS, set_speed, target_set_speed, long_wait_cmd ):
if CS.out.cruiseState.modeSel == 0:
self.steer_mode = "오파모드"
elif CS.out.cruiseState.modeSel == 1:
self.steer_mode = "차간+커브"
elif CS.out.cruiseState.modeSel == 2:
self.steer_mode = "차간ONLY"
elif CS.out.cruiseState.modeSel == 3:
self.steer_mode = "편도1차선"
elif CS.out.cruiseState.modeSel == 4:
self.steer_mode = "맵감속ONLY"
if self.cruise_gap != CS.cruiseGapSet:
self.cruise_gap = CS.cruiseGapSet
str3 = 'MODE={:s} VL={:03.0f}/{:03.0f} TM={:03.0f}/{:03.0f} TS={:03.0f}'.format( self.steer_mode, set_speed, CS.VSetDis, long_wait_cmd, self.long_curv_timer, int(round(self.target_speed)) )
str4 = ' RD=D:{:03.0f}/V:{:03.0f} CG={:1.0f} DG={:s}'.format( CS.lead_distance, CS.lead_objspd, self.cruise_gap, self.seq_step_debug )
str5 = str3 + str4
trace1.printf2( str5 )
|
py | 1a460ed9257a693950b3f81255dd0e6e9d0a8b0e | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
ZMQ example using python3's asyncio
monkycoind should be started with the command line arguments:
monkycoind -testnet -daemon \
-zmqpubrawtx=tcp://127.0.0.1:28332 \
-zmqpubrawblock=tcp://127.0.0.1:28332 \
-zmqpubhashtx=tcp://127.0.0.1:28332 \
-zmqpubhashblock=tcp://127.0.0.1:28332
We use the asyncio library here. `self.handle()` installs itself as a
future at the end of the function. Since it never returns with the event
loop having an empty stack of futures, this creates an infinite loop. An
alternative is to wrap the contents of `handle` inside `while True`.
A blocking example using python 2.7 can be obtained from the git history:
https://github.com/bitcoin/bitcoin/blob/37a7fe9e440b83e2364d5498931253937abe9294/contrib/zmq/zmq_sub.py
"""
import binascii
import asyncio
import zmq
import zmq.asyncio
import signal
import struct
import sys
if (sys.version_info.major, sys.version_info.minor) < (3, 5):
print("This example only works with Python 3.5 and greater")
sys.exit(1)
port = 28332
class ZMQHandler():
def __init__(self):
self.loop = asyncio.get_event_loop()
self.zmqContext = zmq.asyncio.Context()
self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
self.zmqSubSocket.setsockopt(zmq.RCVHWM, 0)
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashtx")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx")
self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)
async def handle(self):
msg = await self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
sequence = "Unknown"
if len(msg[-1]) == 4:
msgSequence = struct.unpack('<I', msg[-1])[-1]
sequence = str(msgSequence)
if topic == b"hashblock":
print('- HASH BLOCK ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"hashtx":
print('- HASH TX ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"rawblock":
print('- RAW BLOCK HEADER ('+sequence+') -')
print(binascii.hexlify(body[:80]))
elif topic == b"rawtx":
print('- RAW TX ('+sequence+') -')
print(binascii.hexlify(body))
# schedule ourselves to receive the next message
asyncio.ensure_future(self.handle())
def start(self):
self.loop.add_signal_handler(signal.SIGINT, self.stop)
self.loop.create_task(self.handle())
self.loop.run_forever()
def stop(self):
self.loop.stop()
self.zmqContext.destroy()
daemon = ZMQHandler()
daemon.start()
|
py | 1a460f373ef9c0f642aead62ae7a217d71e6d13f | import os
from tqdm import tqdm
import requests
from bs4 import BeautifulSoup
# get list of download urls from the database
data_url = "https://www1.ncdc.noaa.gov/pub/data/swdi/stormevents/csvfiles/"
r = requests.get(data_url)
soup = BeautifulSoup(r.text, features="html.parser")
urls = [link.get('href') for link in soup.findAll('a')]
# filter url by making sure urls are not directories or query links
urls = [url for url in urls if "/" not in url.split(".")[-1] and "?" not in url]
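# (Illustrative: this keeps plain file links such as
#  "StormEvents_details-ftp_v1.0_d2010_c20220425.csv.gz" and drops directory links or the
#  index page's "?C=N;O=D" sort links; the exact file name above is an assumption.)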
for i in urls:
print(i)
if not os.path.isdir("data"):
os.mkdir("data")
if not os.path.isdir("data"):
os.mkdir("data")
for url in tqdm(urls):
os.system(f"cd data && wget {data_url}{url} > /dev/null 2>&1")
os.system("gunzip data/*.gz") |
py | 1a460f400222d2d362236cd7c6e888ac26a6d472 | # -*- coding: utf-8 -*-
"""
Not so simple tkinter based gui around the pdf2xlsx.do_it function.
"""
from tkinter import Tk, ttk, filedialog, messagebox, StringVar, Toplevel, END
import os
import shutil
from .managment import do_it, do_it2
from .config import config
__version__ = '0.2.0'
class ConfOption:
"""
This widget places a single configuration option in the ConfigWindow. It contains
a label showing which configuration item it is, and an entry backed by a StringVar that
lets the user override the value. The value of the config :class:`JsonDict` is converted to a string for the entry.
If the value of a configuration is a list, it is converted to a comma separated string.
:param Frame root: Tk parent frame
:param str key: Key to the "config" :class:`JsonDict`
:param int row: Parameter for grid window manager
"""
def __init__(self, root, key, row):
self.key = key
dict_value = config[key]
ttk.Label(root, text=dict_value['text']).grid(row=row, column=0, sticky='w')
self.sv = StringVar()
if isinstance(dict_value['value'], list):
self.sv.set(", ".join(map(str, dict_value['value'])))
else:
self.sv.set(str(dict_value['value']))
self.entry = ttk.Entry(root, textvariable=self.sv, width=54)
self.entry.grid(row=row, column=1, sticky='e')
if dict_value['conf_method'] == 'filedialog':
ttk.Button(root, text='Sel',
command=self.browse_callback,
width=4).grid(row=row, column=2, sticky='w')
def update_config(self):
"""
Write the current entry value to the configuration. The original type of the
config value is checked, and the string is converted to this value (int, list of
int, list of string...)
"""
if isinstance(config[self.key]['value'], list):
if isinstance(config[self.key]['value'][0], int):
config[self.key]['value'] = list(map(int, self.sv.get().split(', ')))
else:
config[self.key]['value'] = self.sv.get().split(', ')
elif isinstance(config[self.key]['value'], int):
config[self.key]['value'] = int(self.sv.get())
else:
config[self.key]['value'] = self.sv.get()
def browse_callback(self):
"""
Opens a file dialog so the user can pick a path for this configuration option.
The entry widget of this ConfOption is updated with the selected path.
"""
path = filedialog.askopenfilename(initialdir='.\\',
title="Choose file...",)
self.entry.delete(0, END)
self.entry.insert(0, path)
class ConfigWindow:
"""
Sub window for settings. The window is hidden by default and is activated when the user
clicks the Settings button. It contains the configuration options.
There are two buttons, Save (which also hides the window) and Accept; both of them update
and store the configuration. The configuration items are stored in a list.
:param master: Tk parent class
"""
def __init__(self, master):
self.master = master
self.window = Toplevel(self.master)
self.window.resizable(False, False)
self.window.withdraw()
self.window.protocol("WM_DELETE_WINDOW", self._on_closing)
self.window.title('Settings...')
self.conf_list = []
self.main_frame = ttk.Frame(self.window)
self.main_frame.pack(padx=5, pady=5)
self.main_frame.grid_columnconfigure(1, minsize=20, weight=1)
ttk.Label(self.main_frame, text='Configuration:').grid(row=0, column=0,
columnspan=2, sticky='w')
row = 1
for conf_element in config:
if config[conf_element]['Display'] is True:
self.conf_list.append(
ConfOption(root=self.main_frame, key=conf_element, row=row))
row += 1
ttk.Button(self.main_frame, text='Save',
command=self.save_callback).grid(row=row, column=0, sticky='e')
ttk.Button(self.main_frame, text='Accept',
command=self.accept_callback).grid(row=row, column=1, sticky='w')
def save_callback(self):
"""
Hides the ConfigWindow and updates and stores the configuration
"""
self.window.withdraw()
self.accept_callback()
def accept_callback(self):
"""
Goes through on every configuration item and updates them one by one. Stores the updated
configuration.
"""
for conf in self.conf_list:
conf.update_config()
config.store()
def _on_closing(self):
self.window.withdraw()
class PdfXlsxGui:
"""
Simple GUI which lets the user select the source file (zip or xlsx, depending on the task)
and start the conversion. Contains a file dialog for selecting the file to work with.
There is a button to start the conversion, and also a Settings button to open the
settings window.
:param master: Tk parent class
"""
def __init__(self, master):
self.master = master
self.master.title('Convert Zip -> Pdf -> Xlsx')
self.master.resizable(False, False)
self.main_frame = ttk.Frame(self.master)
self.main_frame.pack(padx=5, pady=5)
self.option_list = ["zip pdf to xlsx", "order details"]
self.selected_task = StringVar(self.main_frame)
self.selected_task.set("zip Pdf to xlsx") # default value
self.box = ttk.Combobox(self.main_frame, textvariable=self.selected_task, values=self.option_list)
self.box.bind("<<ComboboxSelected>>", self.update_task)
self.box.grid(row=0, column=0, columnspan=2)
self.task_do = self.process_pdf
ttk.Label(self.main_frame, text='Source File:').grid(row=1, column=0, sticky='w')
self.src_entry = ttk.Entry(self.main_frame, width=54)
self.src_entry.grid(row=1, column=0, sticky='e')
self.src_entry.insert(0, '.\\src.zip')
ttk.Button(self.main_frame, text='Browse...',
command=self.browse_src_callback).grid(row=1, column=1, sticky='w')
ttk.Button(self.main_frame, text='Start conversion',
command=self.execute_task).grid(row=5, column=0, sticky='w')
ttk.Button(self.main_frame, text='Settings',
command=self.config_callback).grid(row=5, column=1, columnspan=1, sticky='e')
self.config_window = ConfigWindow(self.master)
self.filetypes = (("zip files", "*.zip"), ("all files", "*.*"))
def update_task(self, event):
print(event.widget.get())
if event.widget.get() == self.option_list[0]:
self.task_do = self.process_pdf
self.filetypes = (("zip files", "*.zip"), ("all files", "*.*"))
elif event.widget.get() == self.option_list[1]:
self.task_do = self.convert_xlsx
self.filetypes = (("xlsx files", "*.xlsx"), ("all files", "*.*"))
else:
self.task_do = self.unknown_task
def config_callback(self):
"""
Bring the configuration window up
"""
self.config_window.window.state('normal')
self.config_window.window.lift(self.master)
def browse_src_callback(self):
"""
Asks for the source zip file, the opened dialog filters for zip files by default
The src_entry attribute is updated based on selection
"""
path = filedialog.askopenfilename(initialdir=config['last_path']['value'],
title="Choose the Zip file...",
filetypes=self.filetypes)
config['last_path']['value'] = os.path.dirname(path)
config.store()
self.src_entry.delete(0, END)
self.src_entry.insert(0, path)
def execute_task(self):
self.task_do()
def process_pdf(self):
"""
Facade for the do_it function. Only the src file and destination dir is updated
the other parameters are left for defaults.
"""
try:
logger = do_it(src_name=self.src_entry.get(),
dst_dir=config['tmp_dir']['value'],
xlsx_name=config['xlsx_name']['value'],
tmp_dir=config['tmp_dir']['value'],
file_extension=config['file_extension']['value'])
# tmp_str = '{1} Invoices were found with the following number of Entries:\n{0!s}'
# messagebox.showinfo(title='Conversion Completed',
# message=tmp_str.format(logger, len(logger.invo_list)))
except PermissionError as exc:
messagebox.showerror('Exception', exc)
def convert_xlsx(self):
print("Convert those xlsx: {}".format(self.box.get()))
try:
logger = do_it2(src_name=self.src_entry.get(),
dst_dir=config['tmp_dir']['value'],
xlsx_name=config['xlsx_name']['value'],
tmp_dir=config['tmp_dir']['value'])
# tmp_str = '{1} Invoices were found with the following number of Entries:\n{0!s}'
# messagebox.showinfo(title='Conversion Completed',
# message=tmp_str.format(logger, len(logger.invo_list)))
except PermissionError as exc:
messagebox.showerror('Exception', exc)
def unknown_task(self):
print("Unknown task selected: {}".format(self.box.get()))
def main():
root = Tk()
def _post_clean_up():
try:
shutil.rmtree(config['tmp_dir']['value'])
except FileNotFoundError:
print("You did nothing, you dummy, why did you start me up???")
finally:
root.destroy()
root.protocol("WM_DELETE_WINDOW", _post_clean_up)
gui = PdfXlsxGui(root)
root.mainloop()
if __name__ == '__main__':
main()
|
py | 1a460f712706b36a979270ce15a7161d762c9459 | import tensorflow as tf
from ...utils.masking.gen_mask_tf import gen_mask_tf
from ...utils.multicoil.smap_extract import extract_smaps
from ....models.utils.fourier import tf_unmasked_adj_op
def generic_from_kspace_to_masked_kspace_and_mask(AF=4, scale_factor=1, parallel=True):
def from_kspace_to_masked_kspace_and_mask(images, kspaces):
mask = gen_mask_tf(kspaces, accel_factor=AF, multicoil=not parallel)
if parallel:
images = tf.abs(tf_unmasked_adj_op(kspaces[..., None]))[..., 0]
else:
smaps = extract_smaps(kspaces, low_freq_percentage=AF)
kspaces_masked = tf.cast(mask, kspaces.dtype) * kspaces
kspaces_scaled = kspaces_masked * scale_factor
images_scaled = images * scale_factor
kspaces_channeled = kspaces_scaled[..., None]
images_channeled = images_scaled[..., None]
if parallel:
return (kspaces_channeled, mask), images_channeled
else:
return (kspaces_channeled, mask, smaps), images_channeled
return from_kspace_to_masked_kspace_and_mask
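# Illustrative usage (the dataset variable is hypothetical): the returned closure is meant
# to be mapped over a tf.data pipeline of (images, kspaces) pairs, e.g.
#   preproc = generic_from_kspace_to_masked_kspace_and_mask(AF=4, parallel=False)
#   ds = ds.map(preproc, num_parallel_calls=tf.data.experimental.AUTOTUNE)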
# TODO: adapt to multicoil
# def generic_prepare_mask_and_kspace(scale_factor=1):
# def prepare(mask, kspaces):
# shape = tf.shape(kspaces)
# num_cols = shape[-1]
# mask_shape = tf.ones_like(shape)
# # TODO: this could be refactored with gen_mask_tf
# final_mask_shape = tf.concat([
# mask_shape[:2],
# tf.expand_dims(num_cols, axis=0),
# ], axis=0)
# final_mask_reshaped = tf.reshape(mask, final_mask_shape)
# fourier_mask = tf.tile(final_mask_reshaped, [shape[0], shape[1], 1])
# fourier_mask = tf.dtypes.cast(fourier_mask, 'complex64')
# kspaces_scaled = kspaces * scale_factor
# kspaces_channeled = kspaces_scaled[..., None]
# return kspaces_channeled, fourier_mask
# return prepare
|
py | 1a46107452bcb73afd43cead8a457dc1579fbfaa | """Home Assistant control object."""
import asyncio
from ipaddress import IPv4Address
import logging
from pathlib import Path
import shutil
import tarfile
from tempfile import TemporaryDirectory
from typing import Optional
from uuid import UUID
from awesomeversion import AwesomeVersion, AwesomeVersionException
from securetar import atomic_contents_add, secure_path
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..const import (
ATTR_ACCESS_TOKEN,
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_BOOT,
ATTR_IMAGE,
ATTR_PORT,
ATTR_REFRESH_TOKEN,
ATTR_SSL,
ATTR_TYPE,
ATTR_UUID,
ATTR_VERSION,
ATTR_WAIT_BOOT,
ATTR_WATCHDOG,
FILE_HASSIO_HOMEASSISTANT,
BusEvent,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
ConfigurationFileError,
HomeAssistantError,
HomeAssistantWSError,
)
from ..hardware.const import PolicyGroup
from ..hardware.data import Device
from ..jobs.decorator import Job
from ..utils import remove_folder
from ..utils.common import FileConfiguration
from ..utils.json import read_json_file, write_json_file
from .api import HomeAssistantAPI
from .const import WSType
from .core import HomeAssistantCore
from .secrets import HomeAssistantSecrets
from .validate import SCHEMA_HASS_CONFIG
from .websocket import HomeAssistantWebSocket
_LOGGER: logging.Logger = logging.getLogger(__name__)
HOMEASSISTANT_BACKUP_EXCLUDE = [
"*.db-shm",
"*.corrupt.*",
"__pycache__/*",
"*.log",
"*.log.*",
"OZW_Log.txt",
]
class HomeAssistant(FileConfiguration, CoreSysAttributes):
"""Home Assistant core object for handle it."""
def __init__(self, coresys: CoreSys):
"""Initialize Home Assistant object."""
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
self.coresys: CoreSys = coresys
self._api: HomeAssistantAPI = HomeAssistantAPI(coresys)
self._websocket: HomeAssistantWebSocket = HomeAssistantWebSocket(coresys)
self._core: HomeAssistantCore = HomeAssistantCore(coresys)
self._secrets: HomeAssistantSecrets = HomeAssistantSecrets(coresys)
@property
def api(self) -> HomeAssistantAPI:
"""Return API handler for core."""
return self._api
@property
def websocket(self) -> HomeAssistantWebSocket:
"""Return Websocket handler for core."""
return self._websocket
@property
def core(self) -> HomeAssistantCore:
"""Return Core handler for docker."""
return self._core
@property
def secrets(self) -> HomeAssistantSecrets:
"""Return Secrets Manager for core."""
return self._secrets
@property
def machine(self) -> str:
"""Return the system machines."""
return self.core.instance.machine
@property
def arch(self) -> str:
"""Return arch of running Home Assistant."""
return self.core.instance.arch
@property
def error_state(self) -> bool:
"""Return True if system is in error."""
return self.core.error_state
@property
def ip_address(self) -> IPv4Address:
"""Return IP of Home Assistant instance."""
return self.core.instance.ip_address
@property
def api_port(self) -> int:
"""Return network port to Home Assistant instance."""
return self._data[ATTR_PORT]
@api_port.setter
def api_port(self, value: int) -> None:
"""Set network port for Home Assistant instance."""
self._data[ATTR_PORT] = value
@property
def api_ssl(self) -> bool:
"""Return if we need ssl to Home Assistant instance."""
return self._data[ATTR_SSL]
@api_ssl.setter
def api_ssl(self, value: bool):
"""Set SSL for Home Assistant instance."""
self._data[ATTR_SSL] = value
@property
def api_url(self) -> str:
"""Return API url to Home Assistant."""
return (
f"{'https' if self.api_ssl else 'http'}://{self.ip_address}:{self.api_port}"
)
@property
def ws_url(self) -> str:
"""Return API url to Home Assistant."""
return f"{'wss' if self.api_ssl else 'ws'}://{self.ip_address}:{self.api_port}/api/websocket"
@property
def watchdog(self) -> bool:
"""Return True if the watchdog should protect Home Assistant."""
return self._data[ATTR_WATCHDOG]
@watchdog.setter
def watchdog(self, value: bool):
"""Return True if the watchdog should protect Home Assistant."""
self._data[ATTR_WATCHDOG] = value
@property
def wait_boot(self) -> int:
"""Return time to wait for Home Assistant startup."""
return self._data[ATTR_WAIT_BOOT]
@wait_boot.setter
def wait_boot(self, value: int):
"""Set time to wait for Home Assistant startup."""
self._data[ATTR_WAIT_BOOT] = value
@property
def latest_version(self) -> Optional[AwesomeVersion]:
"""Return last available version of Home Assistant."""
return self.sys_updater.version_homeassistant
@property
def image(self) -> str:
"""Return image name of the Home Assistant container."""
if self._data.get(ATTR_IMAGE):
return self._data[ATTR_IMAGE]
return f"ghcr.io/home-assistant/{self.sys_machine}-homeassistant"
@image.setter
def image(self, value: Optional[str]) -> None:
"""Set image name of Home Assistant container."""
self._data[ATTR_IMAGE] = value
@property
def version(self) -> Optional[AwesomeVersion]:
"""Return version of local version."""
return self._data.get(ATTR_VERSION)
@version.setter
def version(self, value: AwesomeVersion) -> None:
"""Set installed version."""
self._data[ATTR_VERSION] = value
@property
def boot(self) -> bool:
"""Return True if Home Assistant boot is enabled."""
return self._data[ATTR_BOOT]
@boot.setter
def boot(self, value: bool):
"""Set Home Assistant boot options."""
self._data[ATTR_BOOT] = value
@property
def uuid(self) -> UUID:
"""Return a UUID of this Home Assistant instance."""
return self._data[ATTR_UUID]
@property
def supervisor_token(self) -> Optional[str]:
"""Return an access token for the Supervisor API."""
return self._data.get(ATTR_ACCESS_TOKEN)
@supervisor_token.setter
def supervisor_token(self, value: str) -> None:
"""Set the access token for the Supervisor API."""
self._data[ATTR_ACCESS_TOKEN] = value
@property
def refresh_token(self) -> Optional[str]:
"""Return the refresh token to authenticate with Home Assistant."""
return self._data.get(ATTR_REFRESH_TOKEN)
@refresh_token.setter
def refresh_token(self, value: Optional[str]):
"""Set Home Assistant refresh_token."""
self._data[ATTR_REFRESH_TOKEN] = value
@property
def path_pulse(self):
"""Return path to asound config."""
return Path(self.sys_config.path_tmp, "homeassistant_pulse")
@property
def path_extern_pulse(self):
"""Return path to asound config for Docker."""
return Path(self.sys_config.path_extern_tmp, "homeassistant_pulse")
@property
def audio_output(self) -> Optional[str]:
"""Return a pulse profile for output or None."""
return self._data[ATTR_AUDIO_OUTPUT]
@audio_output.setter
def audio_output(self, value: Optional[str]):
"""Set audio output profile settings."""
self._data[ATTR_AUDIO_OUTPUT] = value
@property
def audio_input(self) -> Optional[str]:
"""Return pulse profile for input or None."""
return self._data[ATTR_AUDIO_INPUT]
@audio_input.setter
def audio_input(self, value: Optional[str]):
"""Set audio input settings."""
self._data[ATTR_AUDIO_INPUT] = value
@property
def need_update(self) -> bool:
"""Return true if a Home Assistant update is available."""
try:
return self.version < self.latest_version
except (AwesomeVersionException, TypeError):
return False
async def load(self) -> None:
"""Prepare Home Assistant object."""
await asyncio.wait([self.secrets.load(), self.core.load()])
# Register for events
self.sys_bus.register_event(BusEvent.HARDWARE_NEW_DEVICE, self._hardware_events)
def write_pulse(self):
"""Write asound config to file and return True on success."""
pulse_config = self.sys_plugins.audio.pulse_client(
input_profile=self.audio_input, output_profile=self.audio_output
)
# Cleanup wrong maps
if self.path_pulse.is_dir():
shutil.rmtree(self.path_pulse, ignore_errors=True)
# Write pulse config
try:
self.path_pulse.write_text(pulse_config, encoding="utf-8")
except OSError as err:
_LOGGER.error("Home Assistant can't write pulse/client.config: %s", err)
else:
_LOGGER.info("Update pulse/client.config: %s", self.path_pulse)
async def _hardware_events(self, device: Device) -> None:
"""Process hardware requests."""
if (
not self.sys_hardware.policy.is_match_cgroup(PolicyGroup.UART, device)
or not self.version
or self.version < "2021.9.0"
):
return
configuration = await self.sys_homeassistant.websocket.async_send_command(
{ATTR_TYPE: "get_config"}
)
if not configuration or "usb" not in configuration.get("components", []):
return
self.sys_homeassistant.websocket.send_message({ATTR_TYPE: "usb/scan"})
@Job()
async def backup(self, tar_file: tarfile.TarFile) -> None:
"""Backup Home Assistant Core config/ directory."""
# Let Home Assistant Core know we are about to backup
try:
await self.websocket.async_send_command({ATTR_TYPE: WSType.BACKUP_START})
except HomeAssistantWSError:
_LOGGER.warning(
"Preparing backup of Home Assistant Core failed. Check HA Core logs."
)
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
# Store local configs/state
try:
write_json_file(temp_path.joinpath("homeassistant.json"), self._data)
except ConfigurationFileError as err:
raise HomeAssistantError(
f"Can't save meta for Home Assistant Core: {err!s}", _LOGGER.error
) from err
# Backup data config folder
def _write_tarfile():
with tar_file as backup:
# Backup metadata
backup.add(temp, arcname=".")
# Backup data
atomic_contents_add(
backup,
self.sys_config.path_homeassistant,
excludes=HOMEASSISTANT_BACKUP_EXCLUDE,
arcname="data",
)
try:
_LOGGER.info("Backing up Home Assistant Core config folder")
await self.sys_run_in_executor(_write_tarfile)
_LOGGER.info("Backup Home Assistant Core config folder done")
finally:
try:
await self.sys_homeassistant.websocket.async_send_command(
{ATTR_TYPE: WSType.BACKUP_END}
)
except HomeAssistantWSError:
_LOGGER.warning(
"Error during Home Assistant Core backup. Check HA Core logs."
)
async def restore(self, tar_file: tarfile.TarFile) -> None:
"""Restore Home Assistant Core config/ directory."""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
temp_data = temp_path.joinpath("data")
temp_meta = temp_path.joinpath("homeassistant.json")
# extract backup
def _extract_tarfile():
"""Extract tar backup."""
with tar_file as backup:
backup.extractall(path=temp_path, members=secure_path(backup))
try:
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise HomeAssistantError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
# Check old backup format v1
if not temp_data.exists():
temp_data = temp_path
# Restore data
def _restore_data():
"""Restore data."""
shutil.copytree(
temp_data, self.sys_config.path_homeassistant, symlinks=True
)
_LOGGER.info("Restore Home Assistant Core config folder")
await remove_folder(self.sys_config.path_homeassistant)
try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
raise HomeAssistantError(
f"Can't restore origin data: {err}", _LOGGER.error
) from err
_LOGGER.info("Restore Home Assistant Core config folder done")
if not temp_meta.exists():
return
_LOGGER.info("Restore Home Assistant Core metadata")
# Read backup data
try:
data = read_json_file(temp_meta)
except ConfigurationFileError as err:
raise HomeAssistantError() from err
# Validate
try:
data = SCHEMA_HASS_CONFIG(data)
except vol.Invalid as err:
raise HomeAssistantError(
f"Can't validate backup data: {humanize_error(data, err)}",
                    _LOGGER.error,
) from err
# Restore metadata
for attr in (
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_PORT,
ATTR_SSL,
ATTR_REFRESH_TOKEN,
ATTR_WATCHDOG,
ATTR_WAIT_BOOT,
):
self._data[attr] = data[attr]
|
py | 1a46117fc3a47f314d3c3b86fc213f29313e8706 | # Ana 1.
class bankAccount():
def __init__(self, ownerName, balance):
self.ownerName = ownerName
self.balance = balance
def bankAccountDetails(self):
print("Account Holder :", self.ownerName)
print("Available Balance :", self.balance)
def deposit(self):
depositMoney = int(input("Enter amount to be deposited : "))
self.balance += depositMoney
print("Available Net Balance :", self.balance)
def withdraw(self):
withdrawMoney = int(input("Enter amount to be Withdrawn : "))
if self.balance >= withdrawMoney:
self.balance -= withdrawMoney
print("Withdrawn Money :", withdrawMoney)
print("Avalable Balance :", self.balance)
print("Transaction Successful !!!")
else :
print("Insufficient Balance")
def bankingServices(self):
transaction = "n"
cashDeposit = "n"
cashWithdraw = "n"
transaction = input("Start the transaction [Y/N] - ")
        if transaction.lower() == "y":
            while cashDeposit.lower() != "y":
                self.deposit()
                cashDeposit = input("End the transaction [Y/N] - ")
            while cashWithdraw.lower() != "y":
                self.withdraw()
                cashWithdraw = input("End the transaction [Y/N] - ")
        print("Thank you for using our banking services")
details = bankAccount("Abhi",5000)
details.bankAccountDetails()
details.deposit()
details.withdraw()
details.bankingServices()
# Ans 2.
import math
# Minimal placeholder base class, assumed here only so this example runs;
# the exercise presumably defines `parameters` elsewhere.
class parameters(object):
    def __init__(self, shape_name):
        self.shape_name = shape_name
class cone(parameters):
    def __init__(self, radius, height):
        parameters.__init__(self, "Cone")
        self.radius = radius
        self.height = height
    def volume(self):
        print("Volume of cone :", math.pi * (self.radius * self.radius) * self.height / 3)
def surfaceArea(self):
print("Surface Area of Cone :", math.pi * self.radius * math.sqrt(self.radius * self.radius + self.height * self.height))
abc = cone(5,10)
abc.volume()
abc.surfaceArea() |
py | 1a4611864775b27bf676ea962a726255df8afc96 | import unittest
import nideconv
import numpy as np
from scipy import signal
def double_gamma_with_d(x, a1=6, a2=12, b1=0.9, b2=0.9, c=0.35, d1=5.4, d2=10.8):
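    """Double-gamma haemodynamic response function: a gamma-shaped peak (a1, b1, d1) minus a scaled undershoot (a2, b2, d2, weight c)."""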
return (x/(d1))**a1 * np.exp(-(x-d1)/b1) - c*(x/(d2))**a2 * np.exp(-(x-d2)/b2)
class ResponseFytterTest(unittest.TestCase):
"""Tests for ResponseFytter"""
def create_signals(self,
signal_sample_frequency=4,
event_1_gain=1,
event_2_gain=1,
event_1_sd=0,
event_2_sd=0,
noise_gain=1.5,
deconv_sample_frequency=4,
deconvolution_interval=[-5, 25]):
"""creates signals to be used for the deconvolution of
2 specific impulse response shapes, with covariates.
It's supposed to create a signal that's long enough to
result in testable outcomes even with moderate
amounts of noise.
"""
self.signal_sample_frequency = signal_sample_frequency
# deconvolution parameters
self.deconvolution_interval = deconvolution_interval
# create some exponentially distributed random ISI events (Dale, 1999)
# of which we will create and deconvolve responses.
period_durs = np.random.gamma(4.0, 8, size=1000)
events = period_durs.cumsum()
self.events_1, self.events_2 = events[0::2], events[1::2]
self.durations_1, self.durations_2 = np.ones(self.events_1.shape[0])/signal_sample_frequency, \
np.ones(self.events_2.shape[0])/signal_sample_frequency
#self.durations_1 -= 1e-5
#self.durations_2 -= 1e-5
#self.durations_1, self.durations_2 = None, None
# these events are scaled with their own underlying covariate.
# for instance, you could have a model-based variable that scales the signal on a per-trial basis.
self.events_gains_1 = event_1_gain * np.ones(len(self.events_1)) + \
np.random.randn(len(self.events_1)) * event_1_sd
self.events_gains_2 = event_2_gain * np.ones(len(self.events_2)) + \
np.random.randn(len(self.events_2)) * event_2_sd
times = np.arange(0, events.max()+45.0, 1.0 /
self.signal_sample_frequency)
event_1_in_times = np.array([((times > te) * (times < te+d)) * eg
for te, d, eg in zip(self.events_1, self.durations_1, self.events_gains_1)]).sum(axis=0)
event_2_in_times = np.array([((times > te) * (times < te+d)) * eg
for te, d, eg in zip(self.events_2, self.durations_2, self.events_gains_2)]).sum(axis=0)
# create hrfs
time_points_hrf = np.arange(0, 20, 1.0/self.signal_sample_frequency)
self.hrf_1 = double_gamma_with_d(
time_points_hrf, a1=4.5, a2=10, d1=7.0, d2=10.0)
self.hrf_2 = double_gamma_with_d(
time_points_hrf, a1=1.5, a2=10, d1=5.0, d2=10.0)
self.hrf_1 /= self.hrf_1.max()
self.hrf_2 /= self.hrf_2.max()
signal_1 = signal.convolve(event_1_in_times, self.hrf_1, 'full')[
:times.shape[0]]
signal_2 = signal.convolve(event_2_in_times, self.hrf_2, 'full')[
:times.shape[0]]
# combine the two signals with one another, z-score and add noise
self.input_data = signal_1 + signal_2
# input_data = (input_data - np.mean(input_data)) / input_data.std()
self.input_data += np.random.randn(
self.input_data.shape[0]) * noise_gain
def test_vanilla_deconvolve(self,
event_1_gain=1,
event_2_gain=1,
noise_gain=1.5,
signal_sample_frequency=4,
**kwargs):
"""The simplest of possible tests, two impulse response functions
with different shapes, both with gain = 1
"""
self.create_signals(signal_sample_frequency=signal_sample_frequency,
event_1_gain=event_1_gain,
event_2_gain=event_2_gain,
event_1_sd=0,
event_2_sd=0,
noise_gain=noise_gain)
self.rfy = nideconv.ResponseFitter(
input_signal=self.input_data,
sample_rate=self.signal_sample_frequency)
# first event type, no covariate
self.rfy.add_event(
event_name='1',
onset_times=self.events_1,
durations=self.durations_1,
# durations=None,
interval=self.deconvolution_interval,
**kwargs
)
# second
self.rfy.add_event(
event_name='2',
onset_times=self.events_2,
durations=self.durations_2,
# durations=None,
interval=self.deconvolution_interval,
**kwargs
)
self.rfy.regress()
#self.assertAlmostEqual(rfy.event_types['1'].timecourses['int'], event_1_gain)
#self.assertAlmostEqual(rfy.event_types['2'].timecourses['int'], event_2_gain)
if __name__ == '__main__':
unittest.main()
|
py | 1a4612845e1deddc98aa1e6d3af37e77b0ef9239 | # Generated by Django 3.2.3 on 2021-05-24 20:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("jbank", "0008_auto_20210512_2208"),
]
operations = [
migrations.AddField(
model_name="referencepaymentbatchfile",
name="cached_total_amount",
field=models.DecimalField(blank=True, decimal_places=2, default=None, max_digits=10, null=True, verbose_name="total amount"),
),
]
|
py | 1a46135392ec4b8e3a8be61ebc0fb2cf8c3d1d58 | n = str(input('Digite seu nome completo: ')).strip()
print(f'Seu nome tem Silva? {"SILVA" in n.upper()}')
|
py | 1a46139118ebe86e3759ea55c400e227c556c0f9 | from django.urls import path
from . import views
urlpatterns = [
path("", views.index, name="index")
path("print")
] |
py | 1a4614b603310c312ad0889a095cf3a8b57e2e2f | # coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .provider import *
# Make subpackages available:
from . import (
meta_v1,
operator,
)
|
py | 1a4615201bde16cf40a1f29ca3b641719e8f74f9 | from .captcha2UploadCaseSensitive import CaptchaUploadCaseSensitive
|
py | 1a4615d0429ea107459d3ac517315e1cefbbc690 | import os
from transformers import BertTokenizer
from utils import get_rank, mkdir, synchronize
class CustomBertTokenizer(BertTokenizer):
def __init__(self, *args, **kwargs):
super(CustomBertTokenizer, self).__init__(*args, **kwargs)
def decode(self, token_ids, skip_special_tokens=True,
clean_up_tokenization_spaces=True, end_flags=[]):
filtered_tokens = self.convert_ids_to_tokens(
token_ids,
skip_special_tokens=skip_special_tokens,
end_flags=end_flags)
# To avoid mixing byte-level and unicode for byte-level BPT
# we need to build string separately for added tokens and byte-level tokens
# cf. https://github.com/huggingface/transformers/issues/1133
sub_texts = []
current_sub_text = []
for token in filtered_tokens:
            if skip_special_tokens and token in self.all_special_tokens:
continue
if token in self.added_tokens_encoder:
if current_sub_text:
sub_texts.append(self.convert_tokens_to_string(current_sub_text))
current_sub_text = []
sub_texts.append(" " + token)
else:
current_sub_text.append(token)
if current_sub_text:
sub_texts.append(self.convert_tokens_to_string(current_sub_text))
text = ''.join(sub_texts)
if clean_up_tokenization_spaces:
clean_text = self.clean_up_tokenization(text)
return clean_text
else:
return text
def convert_ids_to_tokens(self, ids, skip_special_tokens=False, end_flags=[]):
if isinstance(ids, int):
if ids in self.added_tokens_decoder:
return self.added_tokens_decoder[ids]
else:
return self._convert_id_to_token(ids)
tokens = []
for index in ids:
if skip_special_tokens and index in self.all_special_ids:
continue
if index in end_flags:
tokens.append('.')
break
if index in self.added_tokens_decoder:
tokens.append(self.added_tokens_decoder[index])
else:
tokens.append(self._convert_id_to_token(index))
return tokens
def get_tokenizer(config):
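    # Distributed-safe loading: non-zero ranks block at the barrier below while
    # rank 0 downloads and caches the pretrained tokenizer; rank 0 then joins the
    # matching synchronize() so all processes read from the shared local cache.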
if get_rank() != 0:
synchronize()
pretrained_cache_dir = '.cache_uncased/'
bert_base_path = 'bert-base-uncased' # 30522
if not os.path.exists(pretrained_cache_dir):
mkdir(pretrained_cache_dir)
tokenizer = CustomBertTokenizer.from_pretrained(bert_base_path)
tokenizer.save_pretrained(save_directory=pretrained_cache_dir)
else:
tokenizer = CustomBertTokenizer.from_pretrained(pretrained_cache_dir)
if get_rank() == 0:
synchronize()
SEP = tokenizer.sep_token_id
PAD = tokenizer.pad_token_id
MASK = tokenizer.mask_token_id
EOS = tokenizer.convert_tokens_to_ids('.')
num_tokens = tokenizer.vocab_size
return tokenizer, SEP, EOS, MASK, PAD, num_tokens
|
py | 1a4615efadf1ac1e2259cfdb4d94d7b590eeabb7 | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 29 09:35:23 2019
@author: giles
"""
#Everything in python is an object!
#x = 1
#
##help(x)
##dir(x)
##
#y = [1,2,3]
##help(y)
##dir(y)
##
#z = {'a':1}
#help(z)
#dir(z)
# Objects combine functions with data. Think of a list
# Class is the blueprint of an object
# Inside the class there are attributes, which are the variables for that class
#class Patient(object):
# ''' Medical centre patient'''
# pass
#class variables and instance variables:
#class Patient(object):
# ''' Medical centre patient'''
#
# def __init__(self,name,age):
#
# self.name = name
# self.age = age
#
#steve = Patient('Steven Hughes',48)
#abigail = Patient('Abigail Sandwick',32)
#class Patient(object):
# ''' Medical centre patient'''
#
# status = 'patient'
#
# def __init__(self,name,age):
# self.name = name
# self.age = age
##
##
#steve = Patient('Steven Hughes',48)
#abigail = Patient('Abigail Sandwick',32)
#
#
#
#class Patient(object):
# ''' Medical centre patient'''
#
# status = 'patient'
#
# def __init__(self,name,age):
# self.name = name
# self.age = age
#
#
# def get_details(self):
# print(f'Patient record: {self.name}, {self.age} years.')
#
#steve = Patient('Steven Hughes',48)
#abigail = Patient('Abigail Sandwick',32)
class Patient(object):
''' Medical centre patient'''
status = 'patient'
def __init__(self,name,age):
self.name = name
self.age = age
self.conditions = []
def get_details(self):
print(f'Patient record: {self.name}, {self.age} years.' \
f' Current information: {self.conditions}.')
def add_info(self,information):
self.conditions.append(information)
#
#
#steve = Patient('Steven Hughes',48)
#abigail = Patient('Abigail Sandwick',32)
class Infant(Patient):
''' Patient under 2 years'''
def __init__(self,name,age):
self.vaccinations = []
super().__init__(name,age)
def add_vac(self,vaccine):
self.vaccinations.append(vaccine)
def get_details(self):
print(f'Patient record: {self.name}, {self.age} years.' \
f' Patient has had {self.vaccinations} vaccines.' \
f' Current information: {self.conditions}.' \
f'\n{self.name} IS AN INFANT, HAS HE HAD ALL HIS CHECKS?')
#archie = Infant('Archie Fittleworth',0)
#archie.add_vac('MMR')
class Patient(object):
'''
Attributes
----------
name: Patient name
age: Patient age
conditions: Existing medical conditions
'''
status = 'patient'
def __init__(self,name,age):
self.name = name
self.age = age
self.conditions = []
def get_details(self):
print(f'Patient record: {self.name}, {self.age} years.' \
f' Current information: {self.conditions}.')
def add_info(self,information):
self.conditions.append(information)
|
py | 1a46162219b158be9908c0d3d13163a4a267bed7 | #!/usr/bin/python3
import sys
from collections import OrderedDict
from eth_typing import Hash32
from eth_utils import big_endian_to_int
import rlp
from Crypto.Hash import keccak
from rlp.sedes import BigEndianInt, big_endian_int, Binary, binary
from rlp import encode
from eth_utils import to_bytes, to_hex
from web3 import IPCProvider, Web3
_BYTES = 4 # bytes in word
DATASET_BYTES_INIT = 2**30 # bytes in dataset at genesis
DATASET_BYTES_GROWTH = 2**23 # dataset growth per epoch
CACHE_BYTES_INIT = 2**24 # bytes in cache at genesis
CACHE_BYTES_GROWTH = 2**17 # cache growth per epoch
CACHE_MULTIPLIER=1024 # Size of the DAG relative to the cache
EPOCH_LENGTH = 30000 # blocks per epoch
MIX_BYTES = 128 # width of mix
HASH_BYTES = 64 # hash length in bytes
DATASET_PARENTS = 256 # number of parents of each dataset element
CACHE_ROUNDS = 3 # number of rounds in cache production
ACCESSES = 64 # number of accesses in hashimoto loop
address = Binary.fixed_length(20, allow_empty=True)
hash32 = Binary.fixed_length(32)
uint256 = BigEndianInt(256)
trie_root = Binary.fixed_length(32, allow_empty=True)
class MiningBlockHeader(rlp.Serializable):
fields = [
('parent_hash', hash32),
('uncles_hash', hash32),
('coinbase', address),
('state_root', trie_root),
('transaction_root', trie_root),
('receipt_root', trie_root),
('bloom', uint256),
('difficulty', big_endian_int),
('block_number', big_endian_int),
('gas_limit', big_endian_int),
('gas_used', big_endian_int),
('timestamp', big_endian_int),
('extra_data', binary),
#('mix_hash', binary), we have removed these 2 fields because we want a mining block header only
#('nonce', Binary(8, allow_empty=True)
]
provider = Web3.IPCProvider('/home/chronic/TMP_Stuff/geth.ipc')
w3 = Web3(provider)
print(w3.isConnected())
blockNumber = int(sys.argv[1], 10)
myHeader = MiningBlockHeader(
parent_hash = to_bytes(int(w3.eth.getBlock(blockNumber).parentHash.hex(), 16)),
uncles_hash = to_bytes(int(w3.eth.getBlock(blockNumber).sha3Uncles.hex(), 16)),
coinbase = to_bytes(int(w3.eth.getBlock(blockNumber).miner, 16)),
state_root = to_bytes(int(w3.eth.getBlock(blockNumber).stateRoot.hex(), 16)),
transaction_root = to_bytes(int(w3.eth.getBlock(blockNumber).transactionsRoot.hex(), 16)),
receipt_root = to_bytes(int(w3.eth.getBlock(blockNumber).receiptsRoot.hex(), 16)),
bloom = int(w3.eth.getBlock(blockNumber).logsBloom.hex(), 16),
difficulty = w3.eth.getBlock(blockNumber).difficulty,
block_number = w3.eth.getBlock(blockNumber).number,
gas_limit = w3.eth.getBlock(blockNumber).gasLimit,
gas_used = w3.eth.getBlock(blockNumber).gasUsed,
timestamp = w3.eth.getBlock(blockNumber).timestamp,
extra_data = to_bytes(int(w3.eth.getBlock(blockNumber).extraData.hex(), 16)),
#mix_hash = to_bytes(int(w3.eth.getBlock(blockNumber).mixHash.hex(), 16)),
#nonce = to_bytes(int(w3.eth.getBlock(blockNumber).nonce.hex(), 16)),
)
from pyethash import hashimoto_light, mkcache_bytes
# Type annotation here is to ensure we don't accidentally use strings instead of bytes.
cache_by_epoch: 'OrderedDict[int, bytearray]' = OrderedDict() #here we cache by epoch order
CACHE_MAX_ITEMS = 10 #and limit the items to 10
def get_cache(block_number: int) -> bytes:
epoch_index = block_number // EPOCH_LENGTH #this is where we get the block number
# Get the cache if already generated, marking it as recently used
if epoch_index in cache_by_epoch:
c = cache_by_epoch.pop(epoch_index) # pop and append at end
cache_by_epoch[epoch_index] = c
return c
# Generate the cache if it was not already in memory
# Simulate requesting mkcache by block number: multiply index by epoch length
    c = mkcache_bytes(epoch_index * EPOCH_LENGTH)
    cache_by_epoch[epoch_index] = c  # store the cache bytes generated
    # Limit memory usage for cache
    if len(cache_by_epoch) > CACHE_MAX_ITEMS:  # this is related to the length of the cache dict
        cache_by_epoch.popitem(last=False)  # remove the least recently used entry
    return c
#now we will write the check-proof-of-work function. Here we need to check whether the block data meets the consensus requirements
def check_pow(block_number: int,
mining_hash: Hash32,
mix_hash: Hash32,
nonce: bytes,
difficulty: int) -> None:
cache = get_cache(block_number) #we get cache by block number
mining_output = hashimoto_light(block_number,
cache,
mining_hash,
big_endian_to_int(nonce)) # MISTAKE not int_to_big_endian but the other way around
#big_endian_to_int(nonce)
#int_to_big_endian(nonce)) #this is the hashimoto light mining output. It takes block_number, cache, mining_hash, int_to_big_endian(nonce) and hash it
print("MIX Digest: ", mining_output[b'mix digest'])
print("MIX HASH: ", w3.eth.getBlock(block_number).mixHash.hex())
print("RESULT: ", mining_output[b'result'])
print("CONDITION: ", (2**256) // difficulty)
    if mining_output[b'mix digest'] != mix_hash:  # the recomputed mix digest must match the header's mix hash
        return False
    elif big_endian_to_int(mining_output[b'result']) > (2**256 // difficulty):  # the result, read as an integer, must be less than or equal to 2^256 divided by the difficulty
        return False
    else:
        return True  # if it returns True, then all good! We could do more checks but this is enough for now. For additional checks see here https://github.com/ethereum/py-evm/blob/d553bd405bbf41a1da0c227a614baba7b43e9449/eth/consensus/pow.py
#the next section's objective is to make sure the data is formatted correctly and that we can compute the proper header hash from it
block_number = blockNumber
myHash = "0x" + keccak.new(data=rlp.encode(myHeader), digest_bits=256).hexdigest()
mining_hash = to_bytes(int(myHash, 16))
mix_hash = to_bytes(int(w3.eth.getBlock(block_number).mixHash.hex(), 16))
nonce = to_bytes(int(w3.eth.getBlock(block_number).nonce.hex(), 16))
difficulty = myHeader.difficulty
check_pow(block_number, mining_hash, mix_hash, nonce, difficulty)
|
py | 1a46166a443a6e1fe2feb395e93e023b542cff67 | # Generated by Django 2.0 on 2018-10-30 01:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('report', '0003_auto_20181026_1558'),
]
operations = [
migrations.AddField(
model_name='url',
name='user_timings_migrated',
field=models.DateTimeField(null=True),
),
]
|
py | 1a4616cb4bf950f3e3ab55a4217366608258ef0d | """
Define application-wide configuration.
"""
import os
import pytz
basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or 'sqlite:///' + os.path.join(basedir, 'data.sqlite')
SQLALCHEMY_TRACK_MODIFICATIONS = False
# DATABASE TIMEZONE. All datetimes are converted to this before being entered in the database
TIMEZONE = pytz.timezone("UTC")
|
py | 1a4617155a2523ef2c7e3a43f4b973004e4df7b4 | r"""Machine learning based ADP for finite-horizon discrete-time stochastic optimal control problems
An optimal control problem within the scope of :mod:`ml_adp` of $T$ *steps* consists of $T+1$ *state functions*
$$F_0(s_0,a_0,\xi_1),\dots ,F_T(s_T, a_T, \xi_{T+1}),$$
together with $T+1$ *cost functions*
$$k_0(s_0, a_0, \xi_0),\dots, k_T(s_T, a_T).$$
Given an initial value $S_0$ and independent "random effects" $\Xi_1,\dots, \Xi_{T+1}$, a "dynamic" choice of controls $A_0,\dots, A_T$ defines a controlled random *state evolution*
$$S_{t+1} = F_{t+1}(S_t, A_t, \Xi_{t+1}), \quad t=0,\dots, T$$
with which the random *total cost*
$$k^{F, A}(S_0, \Xi_1, \dots, \Xi_{T+1}) = \sum_{t=0}^T k_t(S_t, A_t)$$
is associated.
A choice of controls is optimal, if, in prospective expectation, the associated cost is minimal.
The theory establishes that, in sufficiently well-behaved situations, choices of controls
in the form of *control functions* $A_0(s_0), \dots, A_T(s_T)$ (via $A_t = A_t(S_t)$),
drawn from function classes within which common neural network architectures have universal approximation capabilities, exhibit the required dynamicity and are not restricted in regard to their potential optimality.
:mod:`ml_adp` wraps the state functions $F=(F_0,\dots, F_T)$ and a choice of controls $A=(A_0,\dots, A_T)$ into :class:`ml_adp.cost.Propagator`'s to provide the numerical simulation of the state evolution and bundles a such :class:`ml_adp.cost.Propagator`-instance with the data $k=(k_0,\dots, k_T)$ into :class:`ml_adp.cost.CostToGo`'s to provide the numerical simulation of the total cost.
"""
__version__ = "0.3.0a0" |
py | 1a461739804d6cf15f46428a3bb0423d6bf859ea | from settings import *
def display_image(X):
""" displays the image from the image matrix X """
im = X.reshape(28, 28)
temp = plt.imshow(im)
plt.show()
def generate_2D(X):
""" generate 2D image matrix from the 1D vector """
no_of_images = len(X)
data = np.zeros((no_of_images, 28, 28))
for i in xrange(no_of_images):
data[i] = np.copy(X[i].reshape(28, 28))
return data
def get_mean_image(data):
""" returns the mean image """
no_of_images = len(data)
mean_im = np.zeros((28, 28))
for i in xrange(no_of_images):
mean_im = mean_im + data[i, 0:28, 0:28]
mean_im = mean_im / no_of_images
return mean_im
def substract_mean(data, mean_im):
""" substract the mean image from all images """
no_of_images = len(data)
for i in xrange(no_of_images):
data[i] = data[i] - mean_im
return data |
py | 1a461777e08e3fe353d9d08bbf07b706192d1e0b | """
Tests shared by MaskedArray subclasses.
"""
import numpy as np
import pandas as pd
import pandas._testing as tm
from pandas.tests.extension.base import BaseOpsUtil
class ComparisonOps(BaseOpsUtil):
def _compare_other(self, data, op_name, other):
op = self.get_op_from_name(op_name)
# array
result = pd.Series(op(data, other))
expected = pd.Series(op(data._data, other), dtype="boolean")
# fill the nan locations
expected[data._mask] = pd.NA
tm.assert_series_equal(result, expected)
# series
ser = pd.Series(data)
result = op(ser, other)
expected = op(pd.Series(data._data), other)
# fill the nan locations
expected[data._mask] = pd.NA
expected = expected.astype("boolean")
tm.assert_series_equal(result, expected)
# subclass will override to parametrize 'other'
def test_scalar(self, other, all_compare_operators, dtype):
op = self.get_op_from_name(all_compare_operators)
left = pd.array([1, 0, None], dtype=dtype)
result = op(left, other)
if other is pd.NA:
expected = pd.array([None, None, None], dtype="boolean")
else:
values = op(left._data, other)
expected = pd.arrays.BooleanArray(values, left._mask, copy=True)
tm.assert_extension_array_equal(result, expected)
# ensure we haven't mutated anything inplace
result[0] = pd.NA
tm.assert_extension_array_equal(left, pd.array([1, 0, None], dtype=dtype))
class NumericOps:
# Shared by IntegerArray and FloatingArray, not BooleanArray
def test_no_shared_mask(self, data):
result = data + 1
assert np.shares_memory(result._mask, data._mask) is False
def test_array(self, all_compare_operators, dtype):
op = self.get_op_from_name(all_compare_operators)
left = pd.array([0, 1, 2, None, None, None], dtype=dtype)
right = pd.array([0, 1, None, 0, 1, None], dtype=dtype)
result = op(left, right)
values = op(left._data, right._data)
mask = left._mask | right._mask
expected = pd.arrays.BooleanArray(values, mask)
tm.assert_extension_array_equal(result, expected)
# ensure we haven't mutated anything inplace
result[0] = pd.NA
tm.assert_extension_array_equal(
left, pd.array([0, 1, 2, None, None, None], dtype=dtype)
)
tm.assert_extension_array_equal(
right, pd.array([0, 1, None, 0, 1, None], dtype=dtype)
)
def test_compare_with_booleanarray(self, all_compare_operators, dtype):
op = self.get_op_from_name(all_compare_operators)
left = pd.array([True, False, None] * 3, dtype="boolean")
right = pd.array([0] * 3 + [1] * 3 + [None] * 3, dtype=dtype)
other = pd.array([False] * 3 + [True] * 3 + [None] * 3, dtype="boolean")
expected = op(left, other)
result = op(left, right)
tm.assert_extension_array_equal(result, expected)
# reversed op
expected = op(other, left)
result = op(right, left)
tm.assert_extension_array_equal(result, expected)
def test_compare_to_string(self, dtype):
# GH#28930
ser = pd.Series([1, None], dtype=dtype)
result = ser == "a"
expected = pd.Series([False, pd.NA], dtype="boolean")
self.assert_series_equal(result, expected)
|
py | 1a4617f8fc0d5bbd4c66a6d9799714ab5e50c307 | import imghdr
from flask.ext.wtf import Form
from flask.ext.login import current_user
from wtforms.fields import StringField, DecimalField, BooleanField, SubmitField, TextAreaField, FileField, IntegerField, RadioField, SelectField
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.validators import DataRequired, URL, Email, Length, Regexp
from wtforms import ValidationError, widgets, SelectMultipleField
from ..models import Listing, User
from .. import db
PRICE_MESSAGE = "This value needs to be filled and needs to be a number"
class ChangeListingInformation(Form):
listing_name = StringField('Item Name', validators=[DataRequired(), Length(1, 1000)])
listing_description = TextAreaField('Item Description',
validators=[DataRequired(), Length(1, 2500)])
listing_price = DecimalField('Item Price', places=2,
validators=[DataRequired(message=PRICE_MESSAGE)])
listing_unit = StringField('Item Unit', validators=[DataRequired(), Length(1, 1000)])
listing_quantity = IntegerField('Item Quantity (per unit)',
validators=[DataRequired(message=PRICE_MESSAGE)])
listing_available = BooleanField('Available?')
submit = SubmitField('Update Item Information')
class NewItemForm(Form):
listing_name = StringField('Item Name',
validators=[DataRequired(), Length(1, 1000)])
listing_description = TextAreaField('Item Description',
validators=[DataRequired(), Length(1, 2500)])
listing_price = DecimalField('Item Price', places=2,
validators=[DataRequired(message=PRICE_MESSAGE)])
listing_unit = StringField('Item Unit', validators=[DataRequired(), Length(1, 1000)])
listing_quantity = IntegerField('Item Quantity (per unit)',
validators=[DataRequired(message=PRICE_MESSAGE)])
listing_productID = IntegerField('Item Product ID',
validators=[DataRequired(message='Provide a valid numerical Product Identification')])
submit = SubmitField('Create New Item')
def validate_listing_name(self, field):
if current_user.listings.filter_by(name=field.data).first():
raise ValidationError('You already have an item with this name.')
class EditProfileForm(Form):
image = FileField('Image File (note cannot be more than 5 MB)')
pdf = FileField('Credit Application File (note cannot be more than 5 MB)')
bio = TextAreaField('Bio')
address = StringField('Address')
phone_number = StringField('Phone Number')
website = StringField('Website (http://www.example.com)',
validators=[URL('This URL is invalid. Please enter a valid website name')])
email = StringField('Email', validators=[Email('Please enter a valid email address')])
featured1 = TextAreaField('Specials')
submit = SubmitField('Save')
class NewCSVForm(Form):
file_upload = FileField(validators=[DataRequired()])
replace_or_merge = SelectField('Would you like to replace all of your current items on the system with this upload or merge this upload with the items on the system?', choices=[('replace', 'Replace'), ('merge', 'Merge')], validators=[DataRequired()])
submit = SubmitField('Submit Upload')
|
py | 1a4618c698383c719acf32f9a73ef595ccea79ea |
# Copyright (c) 2019, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import rmm
from cuml.test.utils import array_equal, unit_param, quality_param, \
stress_param
from cuml.neighbors import NearestNeighbors as cuKNN
from sklearn.neighbors import NearestNeighbors as skKNN
from sklearn.datasets.samples_generator import make_blobs
import cudf
import pandas as pd
import numpy as np
import sklearn
import cuml
from cuml.common import has_scipy
def predict(neigh_ind, _y, n_neighbors):
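    # Majority vote over the k nearest neighbours: the mode of the neighbour
    # labels is the prediction, and the mode count divided by n_neighbors gives
    # a crude per-sample confidence.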
import scipy.stats as stats
neigh_ind = neigh_ind.astype(np.int32)
ypred, count = stats.mode(_y[neigh_ind], axis=1)
return ypred.ravel(), count.ravel() * 1.0 / n_neighbors
def valid_metrics():
cuml_metrics = cuml.neighbors.VALID_METRICS["brute"]
sklearn_metrics = sklearn.neighbors.VALID_METRICS["brute"]
return [value for value in cuml_metrics if value in sklearn_metrics]
@pytest.mark.parametrize("datatype", ["dataframe", "numpy"])
@pytest.mark.parametrize("nrows", [500, 1000, 10000])
@pytest.mark.parametrize("ncols", [100, 1000])
@pytest.mark.parametrize("n_neighbors", [10, 50])
@pytest.mark.parametrize("n_clusters", [2, 10])
def test_neighborhood_predictions(nrows, ncols, n_neighbors, n_clusters,
datatype):
if not has_scipy():
pytest.skip('Skipping test_neighborhood_predictions because ' +
'Scipy is missing')
X, y = make_blobs(n_samples=nrows, centers=n_clusters,
n_features=ncols, random_state=0)
X = X.astype(np.float32)
if datatype == "dataframe":
X = cudf.DataFrame.from_gpu_matrix(rmm.to_device(X))
knn_cu = cuKNN()
knn_cu.fit(X)
neigh_ind = knn_cu.kneighbors(X, n_neighbors=n_neighbors,
return_distance=False)
if datatype == "dataframe":
assert isinstance(neigh_ind, cudf.DataFrame)
neigh_ind = neigh_ind.as_gpu_matrix().copy_to_host()
else:
assert isinstance(neigh_ind, np.ndarray)
labels, probs = predict(neigh_ind, y, n_neighbors)
assert array_equal(labels, y)
def test_return_dists():
n_samples = 50
n_feats = 50
k = 5
X, y = make_blobs(n_samples=n_samples,
n_features=n_feats, random_state=0)
knn_cu = cuKNN()
knn_cu.fit(X)
ret = knn_cu.kneighbors(X, k, return_distance=False)
assert not isinstance(ret, tuple)
assert ret.shape == (n_samples, k)
ret = knn_cu.kneighbors(X, k, return_distance=True)
assert isinstance(ret, tuple)
assert len(ret) == 2
@pytest.mark.parametrize('input_type', ['dataframe', 'ndarray'])
@pytest.mark.parametrize('nrows', [unit_param(500), quality_param(5000),
stress_param(500000)])
@pytest.mark.parametrize('n_feats', [unit_param(3), quality_param(100),
stress_param(1000)])
@pytest.mark.parametrize('k', [unit_param(3), quality_param(30),
stress_param(50)])
@pytest.mark.parametrize("metric", valid_metrics())
def test_cuml_against_sklearn(input_type, nrows, n_feats, k, metric):
X, _ = make_blobs(n_samples=nrows,
n_features=n_feats, random_state=0)
p = 5 # Testing 5-norm of the minkowski metric only
knn_sk = skKNN(metric=metric, p=p) # Testing
knn_sk.fit(X)
D_sk, I_sk = knn_sk.kneighbors(X, k)
X_orig = X
if input_type == "dataframe":
X = cudf.DataFrame.from_gpu_matrix(rmm.to_device(X))
knn_cu = cuKNN(metric=metric, p=p)
knn_cu.fit(X)
D_cuml, I_cuml = knn_cu.kneighbors(X, k)
if input_type == "dataframe":
assert isinstance(D_cuml, cudf.DataFrame)
assert isinstance(I_cuml, cudf.DataFrame)
D_cuml_arr = D_cuml.as_gpu_matrix().copy_to_host()
I_cuml_arr = I_cuml.as_gpu_matrix().copy_to_host()
else:
assert isinstance(D_cuml, np.ndarray)
assert isinstance(I_cuml, np.ndarray)
D_cuml_arr = D_cuml
I_cuml_arr = I_cuml
# Assert the cuml model was properly reverted
np.testing.assert_allclose(knn_cu.X_m.to_output("numpy"), X_orig,
atol=1e-5, rtol=1e-4)
# Allow a max relative diff of 10% and absolute diff of 1%
np.testing.assert_allclose(D_cuml_arr, D_sk, atol=1e-2,
rtol=1e-1)
assert I_cuml_arr.all() == I_sk.all()
def test_knn_fit_twice():
"""
Test that fitting a model twice does not fail.
This is necessary since the NearestNeighbors class
needs to free Cython allocated heap memory when
fit() is called more than once.
"""
n_samples = 1000
n_feats = 50
k = 5
X, y = make_blobs(n_samples=n_samples,
n_features=n_feats, random_state=0)
knn_cu = cuKNN()
knn_cu.fit(X)
knn_cu.fit(X)
knn_cu.kneighbors(X, k)
del knn_cu
@pytest.mark.parametrize('input_type', ['ndarray'])
@pytest.mark.parametrize('nrows', [unit_param(500), quality_param(5000),
stress_param(500000)])
@pytest.mark.parametrize('n_feats', [unit_param(20), quality_param(100),
stress_param(1000)])
def test_nn_downcast_fails(input_type, nrows, n_feats):
X, y = make_blobs(n_samples=nrows,
n_features=n_feats, random_state=0)
knn_cu = cuKNN()
if input_type == 'dataframe':
X_pd = pd.DataFrame({'fea%d' % i: X[0:, i] for i in range(X.shape[1])})
X_cudf = cudf.DataFrame.from_pandas(X_pd)
knn_cu.fit(X_cudf, convert_dtype=True)
with pytest.raises(Exception):
knn_cu.fit(X, convert_dtype=False)
# Test fit() fails when downcast corrupted data
X = np.array([[np.finfo(np.float32).max]], dtype=np.float64)
knn_cu = cuKNN()
with pytest.raises(Exception):
knn_cu.fit(X, convert_dtype=False)
|
py | 1a4618d821197ff9e7a221bfb335d700f7ca9877 | """Test converting quaternions to and from rotation matrices"""
from __future__ import division, print_function, absolute_import
import unittest
import numpy as np
import os
import rowan
zero = np.array([0, 0, 0, 0])
one = np.array([1, 0, 0, 0])
half = np.array([0.5, 0.5, 0.5, 0.5])
# Load test files
TESTDATA_FILENAME = os.path.join(
os.path.dirname(__file__),
'files/test_arrays.npz')
with np.load(TESTDATA_FILENAME) as data:
input1 = data['input1']
vector_inputs = data['vector_inputs']
class TestMatrix(unittest.TestCase):
"""Test rotation matrix conversions"""
def test_from_matrix(self):
self.assertTrue(np.all(
rowan.from_matrix(np.eye(3)) == one
))
with self.assertRaises(ValueError):
self.assertTrue(np.allclose(
rowan.from_matrix(
2*np.eye(3)
)
))
mat = np.array([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
self.assertTrue(
np.logical_or(
np.allclose(rowan.from_matrix(mat), half),
np.allclose(rowan.from_matrix(mat), -half)
)
)
mat = np.array([[0, 1, 0],
[0, 0, -1],
[-1, 0, 0]])
v = np.copy(half)
v[3] *= -1
self.assertTrue(np.allclose(
rowan.from_matrix(mat), v
))
def test_to_matrix(self):
v = np.copy(zero)
with self.assertRaises(ZeroDivisionError):
rowan.to_matrix(v)
v = 2*np.ones(4)
with self.assertRaises(ValueError):
rowan.to_matrix(v)
v = np.copy(one)
self.assertTrue(np.all(
rowan.to_matrix(v) == np.eye(3)
))
v = np.copy(half)
self.assertTrue(np.allclose(
rowan.to_matrix(v),
np.array([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
))
v[3] *= -1
self.assertTrue(np.allclose(
rowan.to_matrix(v),
np.array([[0, 1, 0],
[0, 0, -1],
[-1, 0, 0]])
))
def test_to_from_matrix(self):
# The equality is only guaranteed up to a sign
converted = rowan.from_matrix(
rowan.to_matrix(
input1))
self.assertTrue(
np.all(
np.logical_or(
np.isclose(input1 - converted, 0),
np.isclose(input1 + converted, 0),
)
)
)
def test_rotation(self):
quat_rotated = rowan.rotate(
input1,
vector_inputs)
matrices = rowan.to_matrix(
input1)
matrix_rotated = np.einsum(
'ijk,ki->ij',
matrices,
vector_inputs.T
)
self.assertTrue(np.allclose(matrix_rotated, quat_rotated))
|
py | 1a4619cd350fe2beae1828d2d836b8523032be74 | from vnpy_tap.api import *
|
py | 1a461aaf45df44bddbcea7968f0406858feb8346 | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.ads.google_ads.v2.proto.services import click_view_service_pb2_grpc
class ClickViewServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.ads.googleads.v2.services ClickViewService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = (
)
def __init__(self, channel=None, credentials=None,
address='googleads.googleapis.com:443'):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
'The `channel` and `credentials` arguments are mutually '
'exclusive.',
)
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
'click_view_service_stub': click_view_service_pb2_grpc.ClickViewServiceStub(channel),
}
@classmethod
def create_channel(
cls,
address='googleads.googleapis.com:443',
credentials=None,
**kwargs):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address,
credentials=credentials,
scopes=cls._OAUTH_SCOPES,
**kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def get_click_view(self):
"""Return the gRPC stub for :meth:`ClickViewServiceClient.get_click_view`.
Returns the requested click view in full detail.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs['click_view_service_stub'].GetClickView |
py | 1a461b369b0f6b5fc419e8020a55d6798fce20f8 | import os
import shutil
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
class Command(BaseCommand):
    help = "Clear all files in the migrations directories under the project path"
def handle(self, *args, **options):
for app in settings.INSTALLED_APPS:
path = os.path.join(os.path.join(settings.BASE_DIR, app.replace(".", "/")), "migrations")
if os.path.exists(path):
shutil.rmtree(path)
os.makedirs(path)
with open(os.path.join(path, "__init__.py"), "w+") as file:
pass
self.stdout.write(self.style.SUCCESS(f"Clear {path}"))
self.stdout.write(self.style.SUCCESS('Successfully cleared!'))
|
py | 1a461b3ad5da692fd6684195d1f4ce15e8eb6cd0 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkretailcloud.endpoint import endpoint_data
class DeleteAppDetailRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'retailcloud', '2018-03-13', 'DeleteAppDetail','retailcloud')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AppId(self):
return self.get_query_params().get('AppId')
def set_AppId(self,AppId):
self.add_query_param('AppId',AppId)
def get_Force(self):
return self.get_query_params().get('Force')
def set_Force(self,Force):
self.add_query_param('Force',Force) |
py | 1a461b88007d55b305013aecc4d726e22fd1638c | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
from veriloggen import *
def mkTest():
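    # Builds a self-contained Verilog testbench: CLK toggles every 5 time units,
    # RST is pulsed high once after 100 units, then `count` increments on each
    # rising clock edge until it reaches 1024 and $finish is called.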
m = Module('test')
clk = m.Reg('CLK')
rst = m.Reg('RST')
count = m.Reg('count', width=32)
m.Initial(
Systask('dumpfile', 'uut.vcd'),
Systask('dumpvars', 0, clk, rst, count),
)
m.Initial(
clk(0),
Forever(clk(Not(clk), ldelay=5)) # forever #5 CLK = ~CLK;
)
m.Initial(
rst(0),
Delay(100),
rst(1),
Delay(100),
rst(0),
Delay(1000),
count(0),
While(count < 1024)(
count( count + 1 ),
Event(Posedge(clk))
),
Systask('finish'),
)
return m
if __name__ == '__main__':
test = mkTest()
verilog = test.to_verilog('')
print(verilog)
|
py | 1a461bea4bdb3b73fd065fe29b32f430af28d794 | class Solution:
def numRollsToTarget(self, d: int, f: int, target: int) -> int:
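        # Bottom-up DP: dp[i][k] counts the ways to reach sum k with i dice, so
        # dp[i][k] = sum over faces j in 1..f of dp[i - 1][k - j], taken modulo 10**9 + 7.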
m = 10 ** 9 + 7
dp = [[0] * (target + 1) for _ in range(d + 1)]
dp[0][0] = 1
for i in range(1, d + 1):
for j in range(1, f + 1):
for k in range(j, target + 1):
dp[i][k] = (dp[i][k] + dp[i - 1][k - j]) % m
return dp[d][target]
d = 1
f = 6
target = 3
res = Solution().numRollsToTarget(d, f, target)
print(res) |
py | 1a461c3b56e6156984b14e2277605d6c1be80129 | """
Module for working with the Grafana v4 API
.. versionadded:: 2017.7.0
:depends: requests
:configuration: This module requires a configuration profile to be configured
in the minion config, minion pillar, or master config.
The module will use the 'grafana' key by default, if defined.
For example:
.. code-block:: yaml
grafana:
grafana_url: http://grafana.localhost
grafana_user: admin
grafana_password: admin
grafana_timeout: 3
"""
try:
import requests
HAS_LIBS = True
except ImportError:
HAS_LIBS = False
__virtualname__ = "grafana4"
def __virtual__():
"""
Only load if requests is installed
"""
if HAS_LIBS:
return __virtualname__
else:
return (
False,
'The "{}" module could not be loaded: "requests" is not installed.'.format(
__virtualname__
),
)
def _get_headers(profile):
headers = {"Content-type": "application/json"}
if profile.get("grafana_token", False):
headers["Authorization"] = "Bearer {}".format(profile["grafana_token"])
return headers
def _get_auth(profile):
if profile.get("grafana_token", False):
return None
return requests.auth.HTTPBasicAuth(
profile["grafana_user"], profile["grafana_password"]
)
def get_users(profile="grafana"):
"""
List all users.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_users
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.get(
"{}/api/users".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_user(login, profile="grafana"):
"""
Show a single user.
login
Login of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_user <login>
"""
data = get_users(profile)
for user in data:
if user["login"] == login:
return user
return None
def get_user_data(userid, profile="grafana"):
"""
Get user data.
userid
Id of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_user_data <user_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.get(
"{}/api/users/{}".format(profile["grafana_url"], userid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def create_user(profile="grafana", **kwargs):
"""
Create a new user.
login
Login of the new user.
password
Password of the new user.
email
Email of the new user.
name
Optional - Full name of the new user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.create_user login=<login> password=<password> email=<email>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.post(
"{}/api/admin/users".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_user(userid, profile="grafana", **kwargs):
"""
Update an existing user.
userid
Id of the user.
login
Optional - Login of the user.
email
Optional - Email of the user.
name
Optional - Full name of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user <user_id> login=<login> email=<email>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.put(
"{}/api/users/{}".format(profile["grafana_url"], userid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_user_password(userid, profile="grafana", **kwargs):
"""
Update a user password.
userid
Id of the user.
password
New password of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user_password <user_id> password=<password>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.put(
"{}/api/admin/users/{}/password".format(profile["grafana_url"], userid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_user_permissions(userid, profile="grafana", **kwargs):
"""
Update a user password.
userid
Id of the user.
isGrafanaAdmin
Whether user is a Grafana admin.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user_permissions <user_id> isGrafanaAdmin=<true|false>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.put(
"{}/api/admin/users/{}/permissions".format(profile["grafana_url"], userid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def delete_user(userid, profile="grafana"):
"""
Delete a user.
userid
Id of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_user <user_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.delete(
"{}/api/admin/users/{}".format(profile["grafana_url"], userid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_user_orgs(userid, profile="grafana"):
"""
    Get the list of organisations a user belongs to.
userid
Id of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_user_orgs <user_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.get(
"{}/api/users/{}/orgs".format(profile["grafana_url"], userid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def delete_user_org(userid, orgid, profile="grafana"):
"""
Remove a user from an organization.
userid
Id of the user.
orgid
Id of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_user_org <user_id> <org_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.delete(
"{}/api/orgs/{}/users/{}".format(profile["grafana_url"], orgid, userid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_orgs(profile="grafana"):
"""
List all organizations.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_orgs
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.get(
"{}/api/orgs".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_org(name, profile="grafana"):
"""
Show a single organization.
name
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_org <name>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.get(
"{}/api/orgs/name/{}".format(profile["grafana_url"], name),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def switch_org(orgname, profile="grafana"):
"""
Switch the current organization.
    orgname
Name of the organization to switch to.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.switch_org <name>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
org = get_org(orgname, profile)
response = requests.post(
"{}/api/user/using/{}".format(profile["grafana_url"], org["id"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return org
def get_org_users(orgname=None, profile="grafana"):
"""
Get the list of users that belong to the organization.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_org_users <orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.get(
"{}/api/org/users".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def create_org_user(orgname=None, profile="grafana", **kwargs):
"""
Add user to the organization.
loginOrEmail
Login or email of the user.
role
Role of the user for this organization. Should be one of:
- Admin
- Editor
- Read Only Editor
- Viewer
orgname
Name of the organization in which users are added.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.create_org_user <orgname> loginOrEmail=<loginOrEmail> role=<role>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.post(
"{}/api/org/users".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_org_user(userid, orgname=None, profile="grafana", **kwargs):
"""
Update user role in the organization.
userid
Id of the user.
loginOrEmail
Login or email of the user.
role
Role of the user for this organization. Should be one of:
- Admin
- Editor
- Read Only Editor
- Viewer
orgname
Name of the organization in which users are updated.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_org_user <user_id> <orgname> loginOrEmail=<loginOrEmail> role=<role>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.patch(
"{}/api/org/users/{}".format(profile["grafana_url"], userid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def delete_org_user(userid, orgname=None, profile="grafana"):
"""
Remove user from the organization.
userid
Id of the user.
orgname
        Name of the organization from which the user is removed.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_org_user <user_id> <orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.delete(
"{}/api/org/users/{}".format(profile["grafana_url"], userid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_org_address(orgname=None, profile="grafana"):
"""
Get the organization address.
orgname
        Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_org_address <orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.get(
"{}/api/org/address".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_org_address(orgname=None, profile="grafana", **kwargs):
"""
Update the organization address.
orgname
        Name of the organization whose address is updated.
address1
Optional - address1 of the org.
address2
Optional - address2 of the org.
city
Optional - city of the org.
zip_code
Optional - zip_code of the org.
state
Optional - state of the org.
country
Optional - country of the org.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_org_address <orgname> country=<country>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.put(
"{}/api/org/address".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_org_prefs(orgname=None, profile="grafana"):
"""
Get the organization preferences.
orgname
        Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_org_prefs <orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.get(
"{}/api/org/preferences".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_org_prefs(orgname=None, profile="grafana", **kwargs):
"""
Update the organization preferences.
orgname
        Name of the organization whose preferences are updated.
theme
Selected theme for the org.
homeDashboardId
Home dashboard for the org.
timezone
Timezone for the org (one of: "browser", "utc", or "").
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_org_prefs <orgname> theme=<theme> timezone=<timezone>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.put(
"{}/api/org/preferences".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def create_org(profile="grafana", **kwargs):
"""
Create a new organization.
name
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.create_org <name>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.post(
"{}/api/orgs".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_org(orgid, profile="grafana", **kwargs):
"""
Update an existing organization.
orgid
Id of the organization.
name
New name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_org <org_id> name=<name>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.put(
"{}/api/orgs/{}".format(profile["grafana_url"], orgid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def delete_org(orgid, profile="grafana"):
"""
Delete an organization.
orgid
Id of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_org <org_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.delete(
"{}/api/orgs/{}".format(profile["grafana_url"], orgid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_datasources(orgname=None, profile="grafana"):
"""
List all datasources in an organisation.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_datasources <orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.get(
"{}/api/datasources".format(profile["grafana_url"]),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_datasource(name, orgname=None, profile="grafana"):
"""
Show a single datasource in an organisation.
name
Name of the datasource.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_datasource <name> <orgname>
"""
data = get_datasources(orgname=orgname, profile=profile)
for datasource in data:
if datasource["name"] == name:
return datasource
return None
def create_datasource(orgname=None, profile="grafana", **kwargs):
"""
Create a new datasource in an organisation.
name
Name of the data source.
type
Type of the datasource ('graphite', 'influxdb' etc.).
access
Use proxy or direct.
url
The URL to the data source API.
user
Optional - user to authenticate with the data source.
password
Optional - password to authenticate with the data source.
database
Optional - database to use with the data source.
basicAuth
Optional - set to True to use HTTP basic auth to authenticate with the
data source.
basicAuthUser
Optional - HTTP basic auth username.
basicAuthPassword
Optional - HTTP basic auth password.
jsonData
Optional - additional json data to post (eg. "timeInterval").
isDefault
Optional - set data source as default.
withCredentials
Optional - Whether credentials such as cookies or auth headers should
be sent with cross-site requests.
typeLogoUrl
Optional - Logo to use for this datasource.
orgname
Name of the organization in which the data source should be created.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.create_datasource
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.post(
"{}/api/datasources".format(profile["grafana_url"]),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def update_datasource(datasourceid, orgname=None, profile="grafana", **kwargs):
"""
Update a datasource.
datasourceid
Id of the datasource.
name
Name of the data source.
type
Type of the datasource ('graphite', 'influxdb' etc.).
access
Use proxy or direct.
url
The URL to the data source API.
user
Optional - user to authenticate with the data source.
password
Optional - password to authenticate with the data source.
database
Optional - database to use with the data source.
basicAuth
Optional - set to True to use HTTP basic auth to authenticate with the
data source.
basicAuthUser
Optional - HTTP basic auth username.
basicAuthPassword
Optional - HTTP basic auth password.
jsonData
Optional - additional json data to post (eg. "timeInterval").
isDefault
Optional - set data source as default.
withCredentials
Optional - Whether credentials such as cookies or auth headers should
be sent with cross-site requests.
typeLogoUrl
Optional - Logo to use for this datasource.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_datasource <datasourceid>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.put(
"{}/api/datasources/{}".format(profile["grafana_url"], datasourceid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
# temporary fix for https://github.com/grafana/grafana/issues/6869
# return response.json()
return {}
def delete_datasource(datasourceid, orgname=None, profile="grafana"):
"""
Delete a datasource.
datasourceid
Id of the datasource.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_datasource <datasource_id>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
response = requests.delete(
"{}/api/datasources/{}".format(profile["grafana_url"], datasourceid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def get_dashboard(slug, orgname=None, profile="grafana"):
"""
Get a dashboard.
slug
Slug (name) of the dashboard.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.get_dashboard <slug>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.get(
"{}/api/dashboards/db/{}".format(profile["grafana_url"], slug),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
data = response.json()
if response.status_code == 404:
return None
if response.status_code >= 400:
response.raise_for_status()
return data.get("dashboard")
def delete_dashboard(slug, orgname=None, profile="grafana"):
"""
Delete a dashboard.
slug
Slug (name) of the dashboard.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.delete_dashboard <slug>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.delete(
"{}/api/dashboards/db/{}".format(profile["grafana_url"], slug),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
def create_update_dashboard(orgname=None, profile="grafana", **kwargs):
"""
Create or update a dashboard.
dashboard
A dict that defines the dashboard to create/update.
overwrite
Whether the dashboard should be overwritten if already existing.
orgname
Name of the organization.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.create_update_dashboard dashboard=<dashboard> overwrite=True orgname=<orgname>
"""
if isinstance(profile, str):
profile = __salt__["config.option"](profile)
if orgname:
switch_org(orgname, profile)
response = requests.post(
"{}/api/dashboards/db".format(profile.get("grafana_url")),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get("grafana_timeout", 3),
)
if response.status_code >= 400:
response.raise_for_status()
return response.json()
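# Illustrative note (not part of the original module): every function above reads
# its connection settings from a Salt configuration profile. Only 'grafana_url'
# and the optional 'grafana_timeout' are referenced directly in this file; the
# credential keys are resolved by _get_auth()/_get_headers() and depend on whether
# token or basic auth is used, so treat any key names beyond these as assumptions.
#
# Example profile in the minion/master configuration (illustrative values):
#
#   grafana:
#     grafana_url: https://grafana.example.com
#     grafana_timeout: 3
#
# Example CLI call using that profile:
#
#   salt '*' grafana4.get_orgs profile=grafana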
|
py | 1a461d2a43205fbc6009f1c09d2ea1dcce11241e | from migen import *
from migen.genlib.cdc import MultiReg
from misoc.interconnect.csr import *
from migen.fhdl.decorators import ClockDomainsRenamer
class SDTriggerOutputDriver(Module, AutoCSR):
def __init__(self, trig_out, latch_in, posedge_in):
posedge_prev = Signal()
self.sync += [
posedge_prev.eq(posedge_in),
If(posedge_in & ~posedge_prev,
trig_out.eq(latch_in)
).Else(
trig_out.eq(0)
)
]
class SDTrigger(Module, AutoCSR):
"""Add-on core for generating trigger signals timed in sync with
the SDEmulator's data output completion.
"""
def __init__(self, sd_linklayer, pins):
self._latch = CSRStorage(len(pins))
self.clock_domains.cd_sd = ClockDomain(reset_less=True)
self.comb += self.cd_sd.clk.eq(sd_linklayer.cd_sd.clk)
sdcd_latch = Signal(len(pins))
self.specials += MultiReg(self._latch.storage, sdcd_latch, odomain="sd", n=3)
# Output circuit itself is entirely in SD clock domain
self.submodules.drv = ClockDomainsRenamer("sd")(
SDTriggerOutputDriver(pins, sdcd_latch, sd_linklayer.data_out_done))
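# Illustrative wiring sketch (assumptions: `platform.request("sd_trigger")` yields
# the output pins and `self.sd_linklayer` is an SDEmulator link layer exposing
# `cd_sd` and `data_out_done`, as the constructor above expects):
#
#   self.submodules.sd_trigger = SDTrigger(self.sd_linklayer,
#                                          platform.request("sd_trigger"))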
|
py | 1a461de54d0b18975d63ef12ddee79a1932b2b33 | import urllib
import urllib2
url="http://licensing.research.ncsu.edu/technologies"
values1={"limit":200,"offset":0}
values2={"limit":200,"offset":200}
data1=urllib.urlencode(values1)
data2=urllib.urlencode(values2)
theurl1=url+"?"+data1
theurl2=url+"?"+data2
r1=urllib2.urlopen(theurl1)
r2=urllib2.urlopen(theurl2)
f1=open("1.html","w")
f1.write(r1.read())
f1.close()
f2=open("2.html","w")
f2.write(r2.read())
f2.close()
|
py | 1a461e1cc86205d0ec51be1d9677deed996d0549 | import os
import pickle
import pandas as pd
from sklearn.model_selection import KFold, train_test_split
from pymatgen.io.vasp import Poscar
from tqdm import tqdm
def generate_graph_cache(poscar_path, save_path, save_name):
all_data = {crystal_name: Poscar.from_file(os.path.join(poscar_path, crystal_name)).as_dict()
for crystal_name in tqdm(os.listdir(poscar_path))}
with open(os.path.join(save_path, f'{save_name}.pickle'), 'wb') as f:
pickle.dump(all_data, f)
def split_and_save_data(file_path, seed):
kfold = KFold(n_splits=9, shuffle=True, random_state=seed)
if not os.path.exists(f'./calculate/seed_{seed}/'):
os.makedirs(f'./calculate/seed_{seed}')
data = pd.read_csv(file_path)
train_val_data, test_data = train_test_split(data, test_size=0.1, random_state=seed)
test_data.to_csv(f'./calculate/seed_{seed}/test.csv', index=None)
for fold_num, (train_index, valid_index) in enumerate(kfold.split(train_val_data)):
train_data, valid_data = train_val_data.iloc[train_index], train_val_data.iloc[valid_index]
train_data.to_csv(f'./calculate/seed_{seed}/train_fold_{fold_num + 1}.csv', index=None)
valid_data.to_csv(f'./calculate/seed_{seed}/valid_fold_{fold_num + 1}.csv', index=None)
def fine_tune_split_data(file_path, seed):
kfold = KFold(n_splits=9, shuffle=True, random_state=seed)
if not os.path.exists(f'./seed_{seed}/'):
os.makedirs(f'./seed_{seed}')
data = pd.read_csv(file_path)
for fold_num, (train_index, valid_index) in enumerate(kfold.split(data)):
train_data, valid_data = data.iloc[train_index], data.iloc[valid_index]
train_data.to_csv(f'./seed_{seed}/finetune_train_fold_{fold_num + 1}.csv', index=None)
valid_data.to_csv(f'./seed_{seed}/finetune_valid_fold_{fold_num + 1}.csv', index=None)
if __name__ == "__main__":
#split_and_save_data('./calculate/property_rm_outliers.csv', seed=333)
#split_and_save_data('./calculate/property.csv', seed=333)
generate_graph_cache(poscar_path='./poscar_size_influence/seed_333/400_big', save_path='./poscar_size_influence/seed_333/',
save_name='graph_cache_big_size')
|
py | 1a461f80efe5d3413ffa422e7ad00022cd621be0 | #!/usr/bin/python2.7
class Test:
def run(self, b):
self.a = 10 + b
return self.a
print "Hello World"
a = Test()
|
py | 1a4620efc7035ca22566f6c266a9d8adb740707d | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Maths Keras layers
~~~~~~~~~~~~~~~~~~
"""
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ipu.ops import math_ops as ipu_math_ops
class SerialDense(Layer):
"""Densely-connected NN layer where the dot operation is serialized to reduce
the size of this operation.
`Dense` implements the operation:
`output = activation(dot(input, kernel) + bias)`
where `activation` is the element-wise activation function
passed as the `activation` argument, `kernel` is a weights matrix
created by the layer, and `bias` is a bias vector created by the layer
(only applicable if `use_bias` is `True`).
Given the `input` tensor with shape `[..., m, k]` and `kernel` tensor with
shape `[k, n]`, the matrix multiplication can be serialized as follows:
* Along the `m` dimension of `input`, by setting `serialization_dimension` to
`input_columns`.
* Along the `k` dimension of `input` and `kernel` by setting
`serialization_dimension` to `input_rows_kernel_columns`.
* Along `n` dimension of `kernel`, by setting `serialization_dimension` to
`kernel_rows`.
Example:
.. code-block:: python
# as first layer in a sequential model:
model = Sequential()
model.add(SerialDense(32, input_shape=(16,)))
# now the model will take as input arrays of shape (*, 16)
# and output arrays of shape (*, 32)
# after the first layer, you don't need to specify
# the size of the input anymore:
model.add(SerialDense(32))
Arguments:
units: Positive integer, dimensionality of the output space.
serialization_factor: An integer indicating the number of smaller matrix
multiplies this operation is broken up into. Must divide the dimension
along which the operation is serialized on.
serialization_dimension: A string, must be one of `input_columns`,
`input_rows_kernel_columns` or `kernel_rows`. Indicates the dimension
      along which the operation is serialized.
activation: Activation function to use.
If you don't specify anything, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the `kernel` weights matrix.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to
the `kernel` weights matrix.
bias_regularizer: Regularizer function applied to the bias vector.
activity_regularizer: Regularizer function applied to
the output of the layer (its "activation").
kernel_constraint: Constraint function applied to
the `kernel` weights matrix.
bias_constraint: Constraint function applied to the bias vector.
Input shape:
N-D tensor with shape: `(batch_size, ..., input_dim)`.
The most common situation would be
a 2D input with shape `(batch_size, input_dim)`.
Output shape:
N-D tensor with shape: `(batch_size, ..., units)`.
For instance, for a 2D input with shape `(batch_size, input_dim)`,
the output would have shape `(batch_size, units)`.
"""
def __init__(self,
units,
serialization_factor,
serialization_dimension,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs):
if 'input_shape' not in kwargs and 'input_dim' in kwargs:
kwargs['input_shape'] = (kwargs.pop('input_dim'),)
super().__init__(
activity_regularizer=regularizers.get(activity_regularizer), **kwargs)
self.serialization_factor = int(serialization_factor)
self.serialization_dimension = serialization_dimension
self.units = int(units) if not isinstance(units, int) else units
self.activation = activations.get(activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
self.supports_masking = True
self.input_spec = InputSpec(min_ndim=2)
def build(self, input_shape):
dtype = dtypes.as_dtype(self.dtype or K.floatx())
if not (dtype.is_floating or dtype.is_complex):
raise TypeError('Unable to build `SerialDense` layer with non-floating '
'point dtype %s' % (dtype,))
input_shape = tensor_shape.TensorShape(input_shape)
if tensor_shape.dimension_value(input_shape[-1]) is None:
raise ValueError('The last dimension of the inputs to `SerialDense` '
'should be defined. Found `None`.')
last_dim = tensor_shape.dimension_value(input_shape[-1])
self.input_spec = InputSpec(min_ndim=2, axes={-1: last_dim})
self.kernel = self.add_weight('kernel',
shape=[last_dim, self.units],
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
dtype=self.dtype,
trainable=True)
if self.use_bias:
self.bias = self.add_weight('bias',
shape=[
self.units,
],
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
dtype=self.dtype,
trainable=True)
else:
self.bias = None
self.built = True
def call(self, inputs, **kwargs):
"""
Args:
inputs: The tensor to apply the dense weights to.
Returns:
The tensor resulting from applying the dense weights.
"""
if K.is_sparse(inputs):
raise TypeError(
'Unable to build `SerialDense` layer with sparse inputs.')
if self.serialization_factor < 1:
raise ValueError(
'serialization_factor has to be at least 1, but was {}.'.format(
self.serialization_factor))
inputs = math_ops.cast(inputs, self._compute_dtype)
# Transform the dimension name.
serialization_dimension = self.serialization_dimension
if serialization_dimension == "input_columns":
serialization_dimension = "a_columns"
elif serialization_dimension == "input_rows_kernel_columns":
serialization_dimension = "a_rows_b_columns"
elif serialization_dimension == "kernel_rows":
serialization_dimension = "b_rows"
else:
raise ValueError('Invalid serialization_dimension={}, expected one of: '
'\'input_columns\', \'input_rows_kernel_columns\', '
'\'kernel_rows\'.'.format(serialization_dimension))
outputs = ipu_math_ops.serialized_matmul(inputs, self.kernel,
self.serialization_factor,
serialization_dimension)
if self.use_bias:
outputs = nn.bias_add(outputs, self.bias)
if self.activation is not None:
return self.activation(outputs) # pylint: disable=not-callable
return outputs
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
input_shape = input_shape.with_rank_at_least(2)
if tensor_shape.dimension_value(input_shape[-1]) is None:
raise ValueError(
'The innermost dimension of input_shape must be defined, but saw: %s'
% input_shape)
return input_shape[:-1].concatenate(self.units)
def get_config(self):
config = {
'units': self.units,
'serialization_factor': self.serialization_factor,
'serialization_dimension': self.serialization_dimension,
'activation': activations.serialize(self.activation),
'use_bias': self.use_bias,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
'activity_regularizer':
regularizers.serialize(self.activity_regularizer),
'kernel_constraint': constraints.serialize(self.kernel_constraint),
'bias_constraint': constraints.serialize(self.bias_constraint)
}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
|
py | 1a4621441935a3d8b85669c9ccb162d22686c90a | import copy
from django.conf import settings
from django.contrib import messages
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import redirect
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views.decorators.http import require_GET
from django.views.generic import View
from memoized import memoized
from corehq.apps.accounting.decorators import always_allow_project_access
from corehq.apps.ota.rate_limiter import restore_rate_limiter
from dimagi.utils.web import get_ip, json_request, json_response
from corehq import feature_previews, privileges, toggles
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.domain.calculations import (
CALC_FNS,
CALC_ORDER,
CALCS,
dom_calc,
)
from corehq.apps.domain.decorators import (
domain_admin_required,
login_and_domain_required,
login_required,
require_superuser,
)
from corehq.apps.domain.forms import DomainInternalForm, TransferDomainForm
from corehq.apps.domain.models import Domain, TransferDomainRequest
from corehq.apps.domain.views.settings import (
BaseAdminProjectSettingsView,
BaseProjectSettingsView,
)
from corehq.apps.hqwebapp.decorators import use_jquery_ui, use_multiselect
from corehq.apps.hqwebapp.tasks import send_html_email_async, send_mail_async
from corehq.apps.hqwebapp.views import BasePageView
from corehq.apps.receiverwrapper.rate_limiter import submission_rate_limiter
from corehq.apps.toggle_ui.views import ToggleEditView
from corehq.apps.users.models import CouchUser
class BaseInternalDomainSettingsView(BaseProjectSettingsView):
strict_domain_fetching = True
@method_decorator(always_allow_project_access)
@method_decorator(login_and_domain_required)
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)
@property
def main_context(self):
context = super(BaseInternalDomainSettingsView, self).main_context
context.update({
'project': self.domain_object,
})
return context
@property
def page_name(self):
return mark_safe("%s <small>Internal</small>" % self.page_title)
class EditInternalDomainInfoView(BaseInternalDomainSettingsView):
urlname = 'domain_internal_settings'
page_title = ugettext_lazy("Project Information")
template_name = 'domain/internal_settings.html'
strict_domain_fetching = True
@method_decorator(always_allow_project_access)
@method_decorator(login_and_domain_required)
@method_decorator(require_superuser)
@use_jquery_ui # datepicker
@use_multiselect
def dispatch(self, request, *args, **kwargs):
return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)
@property
@memoized
def internal_settings_form(self):
can_edit_eula = toggles.CAN_EDIT_EULA.enabled(self.request.couch_user.username)
if self.request.method == 'POST':
return DomainInternalForm(self.request.domain, can_edit_eula, self.request.POST)
initial = {
'countries': self.domain_object.deployment.countries,
'is_test': self.domain_object.is_test,
'use_custom_auto_case_update_hour': 'Y' if self.domain_object.auto_case_update_hour else 'N',
'auto_case_update_hour': self.domain_object.auto_case_update_hour,
'use_custom_auto_case_update_limit': 'Y' if self.domain_object.auto_case_update_limit else 'N',
'auto_case_update_limit': self.domain_object.auto_case_update_limit,
'use_custom_odata_feed_limit': 'Y' if self.domain_object.odata_feed_limit else 'N',
'odata_feed_limit': self.domain_object.odata_feed_limit,
'granted_messaging_access': self.domain_object.granted_messaging_access,
}
internal_attrs = [
'sf_contract_id',
'sf_account_id',
'initiative',
'self_started',
'area',
'sub_area',
'organization_name',
'notes',
'phone_model',
'commtrack_domain',
'performance_threshold',
'experienced_threshold',
'amplifies_workers',
'amplifies_project',
'data_access_threshold',
'business_unit',
'workshop_region',
'partner_technical_competency',
'support_prioritization',
'gs_continued_involvement',
'technical_complexity',
'app_design_comments',
'training_materials',
'partner_comments',
'partner_contact',
'dimagi_contact',
]
if can_edit_eula:
internal_attrs += [
'custom_eula',
'can_use_data',
]
for attr in internal_attrs:
val = getattr(self.domain_object.internal, attr)
if isinstance(val, bool):
val = 'true' if val else 'false'
initial[attr] = val
return DomainInternalForm(self.request.domain, can_edit_eula, initial=initial)
@property
def page_context(self):
return {
'project': self.domain_object,
'form': self.internal_settings_form,
'areas': dict([(a["name"], a["sub_areas"]) for a in settings.INTERNAL_DATA["area"]]),
}
def send_handoff_email(self):
partner_contact = self.internal_settings_form.cleaned_data['partner_contact']
dimagi_contact = self.internal_settings_form.cleaned_data['dimagi_contact']
recipients = [partner_contact, dimagi_contact]
params = {'contact_name': CouchUser.get_by_username(dimagi_contact).human_friendly_name}
send_html_email_async.delay(
subject="Project Support Transition",
recipient=recipients,
html_content=render_to_string(
"domain/email/support_handoff.html", params),
text_content=render_to_string(
"domain/email/support_handoff.txt", params),
email_from=settings.SUPPORT_EMAIL,
)
messages.success(self.request,
_("Sent hand-off email to {}.").format(" and ".join(recipients)))
def post(self, request, *args, **kwargs):
if self.internal_settings_form.is_valid():
old_attrs = copy.copy(self.domain_object.internal)
self.internal_settings_form.save(self.domain_object)
eula_props_changed = (bool(old_attrs.custom_eula) != bool(self.domain_object.internal.custom_eula) or
bool(old_attrs.can_use_data) != bool(self.domain_object.internal.can_use_data))
if eula_props_changed and settings.EULA_CHANGE_EMAIL:
message = '\n'.join([
'{user} changed either the EULA or data sharing properties for domain {domain}.',
'',
'The properties changed were:',
'- Custom eula: {eula_old} --> {eula_new}',
'- Can use data: {can_use_data_old} --> {can_use_data_new}'
]).format(
user=self.request.couch_user.username,
domain=self.domain,
eula_old=old_attrs.custom_eula,
eula_new=self.domain_object.internal.custom_eula,
can_use_data_old=old_attrs.can_use_data,
can_use_data_new=self.domain_object.internal.can_use_data,
)
send_mail_async.delay(
'Custom EULA or data use flags changed for {}'.format(self.domain),
message, settings.DEFAULT_FROM_EMAIL, [settings.EULA_CHANGE_EMAIL]
)
messages.success(request,
_("The internal information for project %s was successfully updated!") % self.domain)
if self.internal_settings_form.cleaned_data['send_handoff_email']:
self.send_handoff_email()
return redirect(self.urlname, self.domain)
else:
messages.error(request, _(
"Your settings are not valid, see below for errors. Correct them and try again!"))
return self.get(request, *args, **kwargs)
class EditInternalCalculationsView(BaseInternalDomainSettingsView):
urlname = 'domain_internal_calculations'
page_title = ugettext_lazy("Calculated Properties")
template_name = 'domain/internal_calculations.html'
@method_decorator(always_allow_project_access)
@method_decorator(login_and_domain_required)
@method_decorator(require_superuser)
def dispatch(self, request, *args, **kwargs):
return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs)
@property
def page_context(self):
return {
'calcs': CALCS,
'order': CALC_ORDER,
}
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(require_superuser, name='dispatch')
class FlagsAndPrivilegesView(BaseAdminProjectSettingsView):
urlname = 'feature_flags_and_privileges'
page_title = ugettext_lazy("Feature Flags and Privileges")
template_name = 'domain/admin/flags_and_privileges.html'
def _get_toggles(self):
def _sort_key(toggle):
return (not (toggle['domain_enabled'] or toggle['user_enabled']),
toggle['tag_index'],
toggle['label'])
unsorted_toggles = [{
'slug': toggle.slug,
'label': toggle.label,
'description': toggle.description,
'help_link': toggle.help_link,
'tag': toggle.tag.name,
'tag_index': toggle.tag.index,
'tag_description': toggle.tag.description,
'tag_css_class': toggle.tag.css_class,
'has_domain_namespace': toggles.NAMESPACE_DOMAIN in toggle.namespaces,
'domain_enabled': toggle.enabled(self.domain, namespace=toggles.NAMESPACE_DOMAIN),
'user_enabled': toggle.enabled(self.request.couch_user.username,
namespace=toggles.NAMESPACE_USER),
} for toggle in toggles.all_toggles()]
return sorted(unsorted_toggles, key=_sort_key)
def _get_privileges(self):
return sorted([
(privileges.Titles.get_name_from_privilege(privilege),
domain_has_privilege(self.domain, privilege))
for privilege in privileges.MAX_PRIVILEGES
], key=lambda name_has: (not name_has[1], name_has[0]))
@property
def page_context(self):
return {
'toggles': self._get_toggles(),
'use_sql_backend': self.domain_object.use_sql_backend,
'privileges': self._get_privileges(),
}
@method_decorator(always_allow_project_access, name='dispatch')
@method_decorator(require_superuser, name='dispatch')
class ProjectLimitsView(BaseAdminProjectSettingsView):
urlname = 'internal_project_limits_summary'
page_title = ugettext_lazy("Project Limits")
template_name = 'domain/admin/project_limits.html'
@property
def page_context(self):
return get_project_limits_context([
('Submission Rate Limits', submission_rate_limiter),
('Restore Rate Limits', restore_rate_limiter),
], self.domain)
def get_project_limits_context(name_limiter_tuple_list, scope=None):
return {
'project_limits': [
(name, _get_rate_limits(scope, rate_limiter))
for (name, rate_limiter) in name_limiter_tuple_list
]
}
def _get_rate_limits(scope, rate_limiter):
return [
{'key': key, 'current_usage': int(current_usage), 'limit': int(limit),
'percent_usage': round(100 * current_usage / limit, 1)}
for key, current_usage, limit in rate_limiter.iter_rates(scope)
]
class TransferDomainView(BaseAdminProjectSettingsView):
urlname = 'transfer_domain_view'
page_title = ugettext_lazy("Transfer Project")
template_name = 'domain/admin/transfer_domain.html'
@property
@memoized
def active_transfer(self):
return TransferDomainRequest.get_active_transfer(self.domain,
self.request.user.username)
@property
@memoized
def transfer_domain_form(self):
return TransferDomainForm(self.domain,
self.request.user.username,
self.request.POST or None)
def get(self, request, *args, **kwargs):
if self.active_transfer:
self.template_name = 'domain/admin/transfer_domain_pending.html'
if request.GET.get('resend', None):
self.active_transfer.send_transfer_request()
messages.info(request,
_("Resent transfer request for project '{domain}'").format(domain=self.domain))
return super(TransferDomainView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
form = self.transfer_domain_form
if form.is_valid():
# Initiate domain transfer
transfer = form.save()
transfer.send_transfer_request()
return HttpResponseRedirect(self.page_url)
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
@property
def page_context(self):
if self.active_transfer:
return {'transfer': self.active_transfer.as_dict()}
else:
return {'form': self.transfer_domain_form}
@method_decorator(domain_admin_required)
def dispatch(self, request, *args, **kwargs):
if not toggles.TRANSFER_DOMAIN.enabled(request.domain):
raise Http404()
return super(TransferDomainView, self).dispatch(request, *args, **kwargs)
class ActivateTransferDomainView(BasePageView):
urlname = 'activate_transfer_domain'
page_title = 'Activate Domain Transfer'
template_name = 'domain/activate_transfer_domain.html'
@property
@memoized
def active_transfer(self):
return TransferDomainRequest.get_by_guid(self.guid)
@property
def page_context(self):
if self.active_transfer:
return {'transfer': self.active_transfer.as_dict()}
else:
return {}
@property
def page_url(self):
return self.request.get_full_path()
def get(self, request, guid, *args, **kwargs):
self.guid = guid
if (self.active_transfer and
self.active_transfer.to_username != request.user.username and
not request.user.is_superuser):
return HttpResponseRedirect(reverse("no_permissions"))
return super(ActivateTransferDomainView, self).get(request, *args, **kwargs)
def post(self, request, guid, *args, **kwargs):
self.guid = guid
if not self.active_transfer:
raise Http404()
if self.active_transfer.to_username != request.user.username and not request.user.is_superuser:
return HttpResponseRedirect(reverse("no_permissions"))
self.active_transfer.transfer_domain(ip=get_ip(request))
messages.success(request, _("Successfully transferred ownership of project '{domain}'")
.format(domain=self.active_transfer.domain))
return HttpResponseRedirect(reverse('dashboard_default', args=[self.active_transfer.domain]))
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ActivateTransferDomainView, self).dispatch(*args, **kwargs)
class DeactivateTransferDomainView(View):
def post(self, request, guid, *args, **kwargs):
transfer = TransferDomainRequest.get_by_guid(guid)
if not transfer:
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
if (transfer.to_username != request.user.username and
transfer.from_username != request.user.username and
not request.user.is_superuser):
return HttpResponseRedirect(reverse("no_permissions"))
transfer.active = False
transfer.save()
referer = request.META.get('HTTP_REFERER', '/')
# Do not want to send them back to the activate page
if referer.endswith(reverse('activate_transfer_domain', args=[guid])):
messages.info(request,
_("Declined ownership of project '{domain}'").format(domain=transfer.domain))
return HttpResponseRedirect('/')
else:
return HttpResponseRedirect(referer)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(DeactivateTransferDomainView, self).dispatch(*args, **kwargs)
@login_and_domain_required
@require_superuser
@require_GET
def toggle_diff(request, domain):
params = json_request(request.GET)
other_domain = params.get('domain')
diff = []
if Domain.get_by_name(other_domain):
diff = [{
'slug': t.slug,
'label': t.label,
'url': reverse(ToggleEditView.urlname, args=[t.slug]),
'tag_name': _('Preview'),
'tag_css_class': 'default',
'tag_index': -1,
} for t in feature_previews.all_previews() if _can_copy_toggle(t, request.domain, other_domain)]
diff.extend([{
'slug': t.slug,
'label': t.label,
'url': reverse(ToggleEditView.urlname, args=[t.slug]),
'tag_name': t.tag.name,
'tag_css_class': t.tag.css_class,
'tag_index': t.tag.index,
} for t in toggles.all_toggles() if _can_copy_toggle(t, request.domain, other_domain)])
diff.sort(key=lambda x: (x['tag_index'], x['label']))
return json_response(diff)
def _can_copy_toggle(toggle, domain, other_domain):
return (
toggle.enabled(domain, toggles.NAMESPACE_DOMAIN)
and not toggle.enabled(other_domain, toggles.NAMESPACE_DOMAIN)
)
@login_and_domain_required
@require_superuser
def calculated_properties(request, domain):
calc_tag = request.GET.get("calc_tag", '').split('--')
extra_arg = calc_tag[1] if len(calc_tag) > 1 else ''
calc_tag = calc_tag[0]
if not calc_tag or calc_tag not in list(CALC_FNS):
data = {"error": 'This tag does not exist'}
else:
data = {"value": dom_calc(calc_tag, domain, extra_arg)}
return json_response(data)
|
py | 1a462179a1b8413f6354041dd416048f0c09b1ef | class ServiceException(Exception):
pass
def assert_(expr, msg):
try:
assert expr
except AssertionError:
raise ServiceException(msg)
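# Illustrative usage (hypothetical values): re-raise a failed precondition as a
# service-level error instead of a bare AssertionError.
#
#   assert_(user_id is not None, "user_id is required")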
|
py | 1a462212bd6e2d83148d8f8b66b5c062c610f36d | DEFAULT_HEADER = \
'''
/*
* This file is generated automatically. Modifications of this file will be overwritten.
* Generated with n5a - https://github.com/sschaetz/n5a
*/
#include <cereal/cereal.hpp>
'''
def _generate_save_load(obj, signature, nl, tab):
# Generates save load methods.
s = '{nl}{tab}template <class Archive>{nl}'.format(nl=nl, tab=tab)
s += '{tab}{signature}{nl}{tab}{{{nl}'.format(
signature=signature, nl=nl, tab=tab
)
# Prevent unused warning by compiler.
s +='{tab}{tab}(void)(version);{nl}'.format(nl=nl, tab=tab)
for member in obj.members:
s += '{tab}{tab}ar(::cereal::make_nvp("{name}", {name}));{nl}'.format(
name=member.name,
nl=nl,
tab=tab
)
s += '{tab}}}{nl}'.format(nl=nl, tab=tab)
return s
def generate(obj, header=DEFAULT_HEADER, footer=None, nl='\n', tab='\t'):
# Generates a class definition.
s = ''
if header is not None:
s+=header
s += '{nl}struct {name}{nl}{{{nl}'.format(name=obj.name, nl=nl)
# Generate members in struct.
for member in obj.members:
s += '{tab}{n5atype} {name};{nl}'.format(
n5atype=member.n5atype.cpptype,
name=member.name,
nl=nl,
tab=tab
)
# Generate save method.
s += _generate_save_load(
obj,
'void save(Archive& ar, std::uint32_t const version) const',
nl,
tab
)
# Generate load method.
s += _generate_save_load(
obj,
'void load(Archive& ar, std::uint32_t const version)',
nl,
tab
)
s += '}};{0}'.format(nl)
if footer is not None:
s += footer
return s
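if __name__ == "__main__":
    # Minimal illustrative sketch (not part of the generator): generate() only
    # reads obj.name, obj.members, member.name and member.n5atype.cpptype, so
    # simple namedtuple stand-ins (hypothetical, for preview only) are enough
    # to see the emitted C++ struct with its cereal save/load methods.
    from collections import namedtuple
    N5AType = namedtuple("N5AType", "cpptype")
    Member = namedtuple("Member", "name n5atype")
    Obj = namedtuple("Obj", "name members")
    point = Obj(name="Point", members=[
        Member(name="x", n5atype=N5AType(cpptype="double")),
        Member(name="y", n5atype=N5AType(cpptype="double")),
    ])
    print(generate(point, tab="    "))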
|
py | 1a4622f2ca8286686d9c8e35e065068b6256adac | from .triangle_metric import *
from .triangle_condition_metric import *
class TriangleShapeMetric(TriangleMetric):
def __init__(self):
super(TriangleShapeMetric, self).__init__(
name='Triangle Shape',
dimension='1',
acceptable_range=Range(min=0.25, max=1),
normal_range=Range(min=0, max=1),
full_range=Range(min=0, max=1),
q_for_unit=1,
)
def eval(self, P, T):
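        # Shape is computed as the reciprocal of the condition-number metric,
        # so a unit (equilateral) triangle scores 1 and the value drops toward 0
        # as the triangle degenerates (acceptable range above is 0.25-1).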
q = TriangleConditionMetric().eval(P, T)
return torch.reciprocal(q)
|
py | 1a46232e4fbc3c355d27a325613b1e4fe6150acf | from lambdaHandlers.alexa_handler import alexa_lambda_handler
from lambdaHandlers.web_handler import web_lambda_handler
|
py | 1a462389096eea74c7e652361be02cbc7fbe661c | from flask import Flask, Response, request
import requests
import random
app = Flask(__name__)
@app.route('/chance', methods=['GET'])
def chance():
# Gets a shot
shot_response = requests.get("http://service-2:5001/shooter")
shot = (shot_response.text)
# Gets the dive
dive_response = requests.get("http://service-3:5002/goalie")
dive = (dive_response.text)
# Gets shot_dive
shot_dive = shot + "-" + dive
chance = None
if shot_dive == "Left-Left":
chance = "90%"
elif shot_dive == "Left-Middle":
chance = "100%"
elif shot_dive == "Left-Right":
chance = "44%"
elif shot_dive == "Middle-Left":
chance = "81%"
elif shot_dive == "Middle-Middle":
chance = "0%"
elif shot_dive == "Middle-Right":
chance = "89%"
elif shot_dive == "Right-Left":
chance = "63%"
elif shot_dive == "Right-Middle":
chance = "100%"
elif shot_dive == "Right-Right":
chance = "94%"
return Response(chance, mimetype="text/plain")
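# Illustrative entrypoint (an assumption, not in the original file): the peer
# services above listen on 5001 and 5002, so port 5000 is a guess for this one.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)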
|
py | 1a4623ec2294a93b98675b691ec8acd4ecff96fe | # -*- coding: utf-8 -*-
from maya import mel
from maya import cmds
from . import lang
from . import common
import os
import json
import re
class WeightCopyPaste():
def main(self, skinMeshes, mode='copy', saveName='default', method='index', weightFile='auto',
threshold=0.2, engine='maya', tgt=1, path='default', viewmsg=False):
if viewmsg:
cmds.inViewMessage( amg='<hl>Simple Weight</hl> : '+mode, pos='midCenterTop', fade=True, ta=0.75, a=0.5)
'''
        Function for saving and loading weight data.
        mode -> whether to copy or paste: 'copy' or 'paste'
        saveName -> folder name for the saved weight data; specify it to keep data separate per tool or model
        method -> how to paste: 'index', 'nearest', 'barycentric' or 'over'
        The 'index' method maps weights onto the object using vertex indices. It is the most convenient method when the target object and the exported data share the same topology.
        The 'nearest' method finds the nearest vertex in the loaded data and sets the weight to that value. It works best when mapping a high-resolution mesh onto a low-resolution mesh.
        The 'barycentric' method is only supported on polygon meshes. It finds the nearest triangle on the target geometry and
        rescales the weights according to the distance between the source point and its vertices. It is typically used for a coarse mesh being mapped onto a high-resolution mesh.
        The 'over' method is similar to 'index', but the target mesh weights are not cleared before mapping, so weights of unmatched indices are kept as they are.
        nearest and barycentric are currently unusable because of a bug (the process never finishes), as of 2016/11/03
        -> barycentric and bilinear are available from Maya 2016 Extension 2
        weightFile -> path to a weight file when you want to specify it manually instead of searching by mesh name; intended to be used together with the nearest/barycentric methods.
        -> note that specifying a file name when copying in Maya prevents saving multiple meshes.
        threshold -> position search range for nearest/barycentric
'''
self.skinMeshes = skinMeshes
self.saveName = saveName
self.method = method
self.weightFile = weightFile
self.threshold = threshold
self.engine = engine
self.memShapes = {}
self.target = tgt
self.pasteMode = {'index':1, 'nearest':3}
        # Convert to a list if it is not already a list
if not isinstance(self.skinMeshes, list):
temp = self.skinMeshes
self.skinMeshes = []
self.skinMeshes.append(temp)
        # Build the file paths in advance
if path == 'default':
self.filePath = os.getenv('MAYA_APP_DIR') + os.sep +'Scripting_Files'+ os.sep + 'weight' + os.sep + self.saveName
elif path == 'project':
self.scene_path = os.sep.join(cmds.file(q=True, sceneName=True).split(os.sep)[:-1])
self.protect_path = os.path.join(self.scene_path, 'weight_protector')
try:
if not os.path.exists(self.protect_path):
os.makedirs(self.protect_path)
except Exception as e:
print(e.message)
return
            self.filePath = self.protect_path + os.sep + self.saveName
self.fileName = os.path.join(self.filePath, self.saveName + '.json')
self.apiName = os.path.join(self.filePath, self.saveName + '.skn')
        # Call either copy or paste
if mode == 'copy':
self.weightCopy()
if mode == 'paste':
self.weightPaste()
def weightPaste(self):
dummy = cmds.spaceLocator()
for skinMesh in self.skinMeshes:
            # Pick the save file name to read; if 'auto', use the mesh name
if self.weightFile == 'auto':
weightFile = skinMesh
else:
weightFile = self.weightFile
dstSkinCluster = cmds.ls(cmds.listHistory(skinMesh), type='skinCluster')
            # If there is no skinCluster, bind using the information saved beforehand
if not dstSkinCluster:
meshName = str(weightFile).replace('|', '__pipe__')
if os.path.exists(self.fileName):
try:
                        with open(self.fileName, 'r') as f:  # open the file ('r' = read mode, 'w' = write mode)
                            saveData = json.load(f)  # load
                            # self.visibility = saveData['visibility']  # read from saved data
skinningMethod = saveData[';skinningMethod']
dropoffRate = saveData[';dropoffRate']
maintainMaxInfluences = saveData[';maintainMaxInfluences']
maxInfluences = saveData[';maxInfluences']
bindMethod = saveData[';bindMethod']
normalizeWeights = saveData[';normalizeWeights']
influences = saveData[';influences']
                            # If the child nodes are transforms, parent them to the dummy temporarily to get them out of the way
common.TemporaryReparent().main(skinMesh, dummyParent=dummy, mode='cut')
influences = cmds.ls(influences, l=True, tr=True)
                            # Bind
dstSkinCluster = cmds.skinCluster(
skinMesh,
influences,
omi=maintainMaxInfluences,
mi=maxInfluences,
dr=dropoffRate,
sm=skinningMethod,
nw=normalizeWeights,
tsb=True,
)
dstSkinCluster = dstSkinCluster[0]
                            # Restore the original parenting
common.TemporaryReparent().main(skinMesh, dummyParent=dummy, mode='parent')
                        tempSkinNode = skinMesh  # keep the node that has the skinCluster so its parent can be retrieved later
except Exception as e:
print(e.message)
print('Error !! Skin bind failed : ' + skinMesh)
continue
else:
dstSkinCluster = dstSkinCluster[0]
                tempSkinNode = skinMesh  # keep the node that has the skinCluster so its parent can be retrieved later
if self.engine == 'maya':
files = os.listdir(self.filePath)
print(files)
if len(files) == 2:
for file in files:
name, ext = os.path.splitext(file)
if ext == '.xml':
xml_name = file
else:
                    # A pipe character cannot be used in a file name, so convert it
meshName = str(weightFile).replace('|', '__pipe__')
                    # A colon cannot be used in a file name, so convert it
meshName = str(meshName).replace(':', '__colon__')
xml_name = meshName + '.xml'
if os.path.isfile(self.filePath + os.sep + xml_name):
if self.method == 'index' or self.method == 'over':
cmds.deformerWeights(xml_name,
im=True,
method=self.method,
deformer=dstSkinCluster,
path=self.filePath + os.sep)
else:
cmds.deformerWeights(xml_name,
im=True,
deformer=dstSkinCluster,
method=self.method,
worldSpace=True,
positionTolerance=self.threshold,
path=self.filePath + os.sep)
cmds.skinCluster(dstSkinCluster, e=True, forceNormalizeWeights=True)
print('Weight paste to : ' + str(skinMesh))
else:
                    print('Saved weight XML file does not exist : ' + skinMesh)
        # Delete the dummy parent
cmds.delete(dummy)
cmds.select(self.skinMeshes, r=True)
    # Method that saves the weight information
def weightCopy(self):
saveData = {}
        # Create the save directory if it does not exist
if not os.path.exists(self.filePath):
            os.makedirs(os.path.dirname(self.filePath + os.sep))  # note: the trailing separator is required
        else:  # if it already exists, delete its contents
files = os.listdir(self.filePath)
if files is not None:
for file in files:
os.remove(self.filePath + os.sep + file)
skinFlag = False
all_influences = []
for skinMesh in self.skinMeshes:
try:
cmds.bakePartialHistory(skinMesh, ppt=True)
except:
pass
        # Get the skin cluster from the node's history (the node directly above inMesh is not always the skinCluster)
srcSkinCluster = cmds.ls(cmds.listHistory(skinMesh), type='skinCluster')
if not srcSkinCluster:
            continue  # no skin cluster, move on to the next mesh
        tempSkinNode = skinMesh  # keep the node that has the skin cluster so its parent can be looked up later
        # Collect the various skin cluster parameters
srcSkinCluster = srcSkinCluster[0]
skinningMethod = cmds.getAttr(srcSkinCluster + ' .skm')
dropoffRate = cmds.getAttr(srcSkinCluster + ' .dr')
maintainMaxInfluences = cmds.getAttr(srcSkinCluster + ' .mmi')
maxInfluences = cmds.getAttr(srcSkinCluster + ' .mi')
bindMethod = cmds.getAttr(srcSkinCluster + ' .bm')
normalizeWeights = cmds.getAttr(srcSkinCluster + ' .nw')
influences = cmds.skinCluster(srcSkinCluster, q=True, inf=True)
saveData[';skinningMethod'] = skinningMethod
saveData[';dropoffRate'] = dropoffRate
saveData[';maintainMaxInfluences'] = maintainMaxInfluences
saveData[';maxInfluences'] = maxInfluences
saveData[';bindMethod'] = bindMethod
saveData[';normalizeWeights'] = normalizeWeights
all_influences += influences
#saveData[';influences'] = influences
skinFlag = True
all_influences = list(set(all_influences))
saveData[';influences'] = all_influences
    # Add missing influences beforehand so the copy tolerates differences in influence counts
for skinMesh in self.skinMeshes:
srcSkinCluster = cmds.ls(cmds.listHistory(skinMesh), type='skinCluster')
if not srcSkinCluster:
            continue  # no skin cluster, move on to the next iteration
srcSkinCluster = srcSkinCluster[0]
influences = cmds.skinCluster(srcSkinCluster, q=True, inf=True)
sub_influences = list(set(all_influences) - set(influences))
if sub_influences:
cmds.skinCluster(skinMesh, e=True, ai=sub_influences, lw=True, ug=True, wt=0, ps=0)
if self.engine == 'maya':
            # Choose which save file to read; 'auto' uses the mesh name
if self.weightFile == 'auto':
weightFile = skinMesh
else:
weightFile = self.weightFile
            # Pipe characters are not allowed in file names, so convert them
meshName = str(weightFile).replace('|', '__pipe__')
            # Colons are not allowed in file names, so convert them
meshName = str(meshName).replace(':', '__colon__')
cmds.deformerWeights(meshName + '.xml', export=True, deformer=srcSkinCluster, path=self.filePath + os.sep)
with open(self.fileName, 'w') as f: # ファイル開く'r'読み込みモード'w'書き込みモード
json.dump(saveData, f)
def transfer_weight(skinMesh, transferedMesh, transferWeight=True, returnInfluences=False, logTransfer=True):
    '''
    Skin weight transfer function.
    If the destination object is not bound, it is bound automatically using the source's bind information.
    Arguments:
    skinMesh -> source mesh (one mesh; a list is also accepted)
    transferedMesh -> destination mesh(es) (list form, multiple allowed; a single non-list object also works)
    transferWeight -> whether to transfer the weights. Optional, default is True
    logTransfer -> whether to print log messages
    returnInfluences -> whether to return the bound influence information. Optional, default is False
    '''
massege01 = lang.Lang(
en=': It does not perform the transfer of weight because it is not a skin mesh.',
ja=u': スキンメッシュではないのでウェイトの転送を行いません'
).output()
massege02 = lang.Lang(
en='Transfer the weight:',
ja=u'ウェイトを転送:'
).output()
massege03 = lang.Lang(
en='Transfer bind influences:',
ja=u'バインド状態を転送:'
).output()
    if isinstance(skinMesh, list):  # if the source is a list, take only the first mesh
        skinMesh = skinMesh[0]  # safeguard for when a list is passed in
    # Get the skin cluster from the node's history (the node directly above inMesh is not always the skinCluster)
srcSkinCluster = cmds.ls(cmds.listHistory(skinMesh), type='skinCluster')
# srcSkinCluster = cmds.listConnections(skinMesh+'.inMesh', s=True, d=False)
if not srcSkinCluster:
if logTransfer:
print(skinMesh + massege01)
        return False  # no skin cluster, leave the function
    # Collect the various skin cluster parameters
srcSkinCluster = srcSkinCluster[0]
skinningMethod = cmds.getAttr(srcSkinCluster + ' .skm')
dropoffRate = cmds.getAttr(srcSkinCluster + ' .dr')
maintainMaxInfluences = cmds.getAttr(srcSkinCluster + ' .mmi')
maxInfluences = cmds.getAttr(srcSkinCluster + ' .mi')
bindMethod = cmds.getAttr(srcSkinCluster + ' .bm')
normalizeWeights = cmds.getAttr(srcSkinCluster + ' .nw')
    influences = cmds.skinCluster(srcSkinCluster, q=True, inf=True)  # the q flag is query mode; e would be edit mode
    # Convert the argument to a list if it is not one already
if not isinstance(transferedMesh, list):
temp = transferedMesh
transferedMesh = []
transferedMesh.append(temp)
for dst in transferedMesh:
        # Prepare a dummy parent to hold the child nodes temporarily
dummy = common.TemporaryReparent().main(mode='create')
common.TemporaryReparent().main(dst,dummyParent=dummy, mode='cut')
shapes = cmds.listRelatives(dst, s=True, pa=True, type='mesh')
        if not shapes:  # if there is no mesh
            continue  # skip this object and move on to the next one
        # Check whether a skin cluster exists
dstSkinCluster = cmds.ls(cmds.listHistory(shapes[0]), type='skinCluster')
        # If there is no skin cluster, bind using the information obtained beforehand
if not dstSkinCluster:
            # Bind
dstSkinCluster = cmds.skinCluster(
dst,
influences,
omi=maintainMaxInfluences,
mi=maxInfluences,
dr=dropoffRate,
sm=skinningMethod,
nw=normalizeWeights,
tsb=True,
)
if logTransfer:
print(massege03 + '[' + skinMesh + '] >>> [' + dst + ']')
dstSkinCluster = dstSkinCluster[0]
if transferWeight:
cmds.copySkinWeights(
ss=srcSkinCluster,
ds=dstSkinCluster,
surfaceAssociation='closestPoint',
influenceAssociation=['name', 'closestJoint', 'oneToOne'],
normalize=True,
noMirror=True
)
if logTransfer:
print(massege02 + '[' + skinMesh + '] >>> [' + dst + ']')
        # Restore the original parenting
common.TemporaryReparent().main(dst,dummyParent=dummy, mode='parent')
        # Delete the dummy parent
common.TemporaryReparent().main(dummyParent=dummy, mode='delete')
if returnInfluences:
return influences
else:
return True
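# Illustrative usage of transfer_weight (a sketch only; the mesh names below are
# hypothetical and not defined in this module):
#   influences = transfer_weight('body_geo', ['jacket_geo', 'boots_geo'],
#                                transferWeight=True, returnInfluences=True)
# The unbound targets get bound with body_geo's influences, the weights are
# copied by closest point, and the influence list is returned.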
def symmetry_weight(srcNode=None, dstNode=None, symWeight=True):
    '''
    Weight mirroring (symmetry) function.
    srcNode -> mirror source
    dstNode -> mirror destination
    symWeight -> whether to mirror the weights
    '''
    # Get the skin cluster
if srcNode is None:
return
srcShapes = cmds.listRelatives(srcNode, s=True, pa=True, type='mesh')
if srcShapes:
srcSkinCluster = cmds.ls(cmds.listHistory(srcNode), type='skinCluster')
        # If a skin cluster exists, set the joint labels and then mirror the weights
if srcSkinCluster:
            # Call the function that transfers the bind state
            skinJointAll = cmds.skinCluster(srcSkinCluster, q=True, inf=True)  # get the joints
for skinJoint in skinJointAll:
                # Call the joint label setup function
joint_label(skinJoint, visibility=False)
if symWeight is False or dstNode is None:
return
transfer_weight(srcNode, dstNode, transferWeight=False, returnInfluences=True)
dstShapes = cmds.listRelatives(dstNode, s=True, pa=True, type='mesh')
dstSkinCluster = cmds.listConnections(dstShapes[0] + '.inMesh', s=True, d=False)
cmds.copySkinWeights(ss=srcSkinCluster[0], ds=dstSkinCluster[0],
mirrorMode='YZ', surfaceAssociation='closestComponent',
influenceAssociation='label', normalize=True)
def load_joint_label_rules():
    # Default values used when the rules cannot be loaded
start_l_list = ['L_', 'l_', 'Left_', 'left_']
start_r_list = ['R_', 'r_', 'Right_', 'right_']
mid_l_list = ['_L_', '_l_', '_Left_', '_left_']
mid_r_list = ['_R_', '_r_', '_Right_', '_right_']
end_l_list = ['_L', '_l', '_L.', '_l.', '_L..', '_l..', '_Left', '_left']
end_r_list = ['_R', '_r', '_R.', '_r.', '_R..', '_r..', '_Right', '_right']
def_left_list_list = [start_l_list, mid_l_list, end_l_list]
def_right_list_list = [start_r_list, mid_r_list, end_r_list]
    # Load the rules from the left/right symmetry settings files
dir_path = os.path.join(
os.getenv('MAYA_APP_DIR'),
'Scripting_Files')
start_file = dir_path+os.sep+'joint_rule_start.json'
middle_file = dir_path+os.sep+'joint_rule_middle.json'
end_file = dir_path+os.sep+'joint_rule_end.json'
save_files = [start_file, middle_file, end_file]
left_list_list = []
right_list_list = []
for i, save_file in enumerate(save_files):
        if os.path.exists(save_file):  # if the save file exists
try:
with open(save_file, 'r') as f:
save_data = json.load(f)
l_list = save_data.keys()
r_list = save_data.values()
left_list_list.append(l_list)
right_list_list.append(r_list)
except Exception as e:
print(e.message)
left_list_list.append(def_left_list_list[i])
right_list_list.append(def_right_list_list[i])
else:
left_list_list.append(def_left_list_list[i])
right_list_list.append(def_right_list_list[i])
return left_list_list, right_list_list
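# Note on the rule files: judging from the keys()/values() usage above, each
# joint_rule_*.json is assumed to hold a flat mapping of left-side pattern to
# right-side pattern, e.g. a hypothetical joint_rule_end.json:
#   {"_L": "_R", "_l": "_r", "_Left": "_Right"}
# When a file is missing or unreadable, the hard-coded defaults above are used.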
def joint_label(object, visibility=False):
    '''
    Joint label setup function.
    object -> object(s); a list is also accepted
    visibility -> label visibility, optional. Default is False.
    '''
    # Load the labelling rules first
left_list_list, right_list_list = load_joint_label_rules()
    # Convert the argument to a list if it is not one already
if not isinstance(object, list):
temp = object
object = []
object.append(temp)
for skinJoint in object:
objTypeName = cmds.objectType(skinJoint)
if objTypeName == 'joint':
split_name = skinJoint.split('|')[-1]
            # Check whether the skeleton name contains a left/right marker
side = 0
side_name = ''
for i, (l_list, r_list) in enumerate(zip(left_list_list, right_list_list)):
for j, lr_list in enumerate([l_list, r_list]):
for k, lr in enumerate(lr_list):
if i == 0:
if re.match(lr, split_name):
side = j + 1
if i == 1:
if re.search(lr, split_name):
side = j + 1
if i == 2:
if re.match(lr[::-1], split_name[::-1]):
side = j + 1
                        if side:  # once a match is found, break out of all the loops
side_name = lr
break
if side:
break
if side:
break
#print('joint setting :', split_name, side, side_name)
            # Set the left/right label; use centre when it is neither
cmds.setAttr(skinJoint + '.side', side)
            # Set the label type to "Other"
cmds.setAttr(skinJoint + '.type', 18)
new_joint_name = split_name.replace(side_name.replace('.', ''), '')
            # Set the skeleton name
cmds.setAttr(skinJoint + '.otherType', new_joint_name, type='string')
            # Set the label visibility
cmds.setAttr(skinJoint + '.drawLabel', visibility)
else:
print(str(skinJoint) + ' : ' + str(objTypeName) + ' Skip Command')
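# Illustrative usage of joint_label (a sketch; the joint names are hypothetical):
#   joint_label(['L_arm_jnt', 'R_arm_jnt'], visibility=True)
# Each joint gets its .side / .type / .otherType attributes set from the
# left/right rules so that copySkinWeights can match influences by label.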
# Toggle skinning weight mute
def toggle_mute_skinning():
msg01 = lang.Lang(
en='No mesh selection.\nWould you like to process all of mesh in this scene?.',
ja=u'選択メッシュがありません。\nシーン内のすべてのメッシュを処理しますか?').output()
msg02 = lang.Lang(en='Yes', ja=u'はい').output()
msg03 = lang.Lang(en='No', ja=u'いいえ').output()
msg04 = lang.Lang(
en='Skinning is disabled',
ja=u'スキニングは無効になりました') .output()
msg05 = lang.Lang(
en='Skinning is enabled',
ja=u'スキニングが有効になりました') .output()
cmds.selectMode(o=True)
objects = cmds.ls(sl=True, l=True)
ad_node = []
for node in objects:
children = cmds.ls(cmds.listRelatives(node, ad=True, f=True), type ='transform')
ad_node += [node]+children
#print(len(ad_node))
objects = set(ad_node)
#print(len(objects))
if not objects:
all_mesh = cmds.confirmDialog(m=msg01, t='', b= [msg02, msg03], db=msg02, cb=msg03, icn='question',ds=msg03)
if all_mesh == msg02:
objects = cmds.ls(type='transform')
if not objects:
return
mute_flag = 1
skin_list = []
for node in objects:
skin = cmds.ls(cmds.listHistory(node), type='skinCluster')
if not skin:
continue
skin_list.append(skin)
if cmds.getAttr(skin[0]+'.envelope') > 0:
mute_flag = 0
for skin in skin_list:
cmds.setAttr(skin[0]+'.envelope', mute_flag)
if mute_flag == 0:
cmds.confirmDialog(m=msg04)
if mute_flag == 1:
cmds.confirmDialog(m=msg05)
|
py | 1a4624a41d2cf5b9ec9d6b4f63454fd65a464e9d |
# Write a program that reads a young person's year of birth
# and, based on their age, reports whether
# they still have time before enlisting for military service,
# whether it is time to enlist now,
# or whether the enlistment deadline has already passed.
# The program must also show how much time is left
# or how long past the deadline they are.
from datetime import date
anoNascimento=int(input("Digite o ano que voce nasceu com 4 digitos: "))
anoAtual=date.today().year
idade=anoAtual-anoNascimento
if idade>18:
saldo=idade-18
print("\nVoce tem {} anos em {}. Deve Procurar a Junta do Serviço Militar (JSM). Voce ja deveria ter se alistado ha {} ano(s).".format(idade,anoAtual,saldo))
ano=anoAtual-saldo
print("Voce deveria ter se alistado no ano de {}.".format(ano))
elif idade<18:
saldo=18-idade
print("\nVoce tem {} anos em {} e NAO deve se Alistar. Falta(m) {} ano(s) para o seu Alistamento.".format(idade,anoAtual,saldo))
ano=anoAtual+saldo
print("Voce deve se Alistar em {}.".format(ano))
else:
print("\nVoce tem {} anos em {}. E DEVE Fazer o Alistamento Militar ESTE ANO.".format(idade,anoAtual))
|
py | 1a46253ed9f5216789f067d671ce08dbd0f43cc0 | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import petstore_api
from petstore_api.model.danish_pig import DanishPig
class TestDanishPig(unittest.TestCase):
"""DanishPig unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def test_DanishPig(self):
"""Test DanishPig"""
# FIXME: construct object with mandatory attributes with example values
# model = DanishPig() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
py | 1a4625b55cc7964aee126ab1d828aef4f455a561 | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from akg.utils import kernel_exec as utils
import numpy as np
from akg.ops.array import tile
from tests.common.tensorio import compare_tensor
from tests.common.base import get_rtol_atol
from tests.common.gen_random import random_gaussian
def tile_execute(shape, dtype, multiples, attrs):
if 'tuning' in attrs.keys():
t = attrs.get("tuning", False)
kernel_name = attrs.get("kernel_name", False)
mod = tile_compile(shape, dtype, multiples, attrs, kernel_name=kernel_name, tuning=t)
if t:
exp_output, inputs, output = gen_data(dtype, multiples, shape)
return mod, exp_output, (inputs, output)
else:
return mod
else:
mod = tile_compile(shape, dtype, multiples, attrs)
exp_output, inputs, output = gen_data(dtype, multiples, shape)
acu_output = utils.mod_launch(mod, [inputs, output], expect=exp_output)
rtol, atol = get_rtol_atol("tile", dtype)
return inputs, acu_output, exp_output, compare_tensor(acu_output, exp_output, rtol=rtol, atol=atol, equal_nan=True)
def gen_data(dtype, multiples, shape):
inputs = random_gaussian(shape, miu=1, sigma=0.1).astype(dtype)
exp_output = np.tile(inputs, multiples)
output = np.full(exp_output.shape, np.nan, dtype)
return exp_output, inputs, output
def tile_compile(shape, dtype, multiples, attrs, kernel_name="tile", tuning=False):
return utils.op_build_test(tile.tile, [shape], [dtype], [multiples], kernel_name=kernel_name, attrs=attrs, tuning=tuning)
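# Illustrative call (a sketch; the shape, dtype and multiples are hypothetical):
#   inputs, actual, expect, ok = tile_execute((16, 16), "float16", (2, 2), attrs={})
# This builds the tile kernel, runs it on random input and compares the result
# against numpy.tile within the tolerance returned by get_rtol_atol.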
|
py | 1a4625cb485b842012f87c64dd34e291c67d0afe | from __future__ import print_function
import argparse
import signal
import sys
import time
parser = argparse.ArgumentParser()
parser.add_argument('--sigint', dest='sigint', help='Terminate on SIGINT instead of SIGTERM', action='store_true')
parser.add_argument('--timed', dest='timed', help='Terminate after a 60s logged countdown instead of immediately', action='store_true')
parser.add_argument('--error', dest='error', help='Exits with non-zero exit code', action='store_true')
parser.set_defaults(sigint=False, timed=False, error=False)
args = parser.parse_args()
def exitHelper():
if args.timed:
for i in range(30):
time.sleep(1)
print(time.strftime("%H:%M:%S", time.localtime()), file=sys.stderr)
time.sleep(30)
print(time.strftime("%H:%M:%S", time.localtime()), file=sys.stderr)
exit(0 if not args.error else 1)
exit(0 if not args.error else 1)
def receiveSignal(signalNumber, frame):
print("Received", signalNumber, file=sys.stderr)
if not args.sigint and signalNumber == signal.SIGTERM :
exitHelper()
elif args.sigint and signalNumber == signal.SIGINT:
exitHelper()
if __name__ == '__main__':
signal.signal(signal.SIGHUP, receiveSignal)
signal.signal(signal.SIGINT, receiveSignal)
signal.signal(signal.SIGQUIT, receiveSignal)
signal.signal(signal.SIGILL, receiveSignal)
signal.signal(signal.SIGTRAP, receiveSignal)
signal.signal(signal.SIGABRT, receiveSignal)
signal.signal(signal.SIGBUS, receiveSignal)
signal.signal(signal.SIGFPE, receiveSignal)
# Can't register handler for SIGKILL
# signal.signal(signal.SIGKILL, receiveSignal)
signal.signal(signal.SIGUSR1, receiveSignal)
signal.signal(signal.SIGSEGV, receiveSignal)
signal.signal(signal.SIGUSR2, receiveSignal)
signal.signal(signal.SIGPIPE, receiveSignal)
signal.signal(signal.SIGALRM, receiveSignal)
signal.signal(signal.SIGTERM, receiveSignal)
print('SIGINT' if args.sigint else 'SIGTERM', file=sys.stderr)
print('Timed exit' if args.timed else 'Immediate exit', file=sys.stderr)
print('Non-zero exit code' if args.error else 'Zero exit code', file=sys.stderr)
while True:
time.sleep(30)
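# Example invocations (sketches; "terminator.py" stands for whatever name this
# script is saved under):
#   python terminator.py                    # exit with code 0 on SIGTERM
#   python terminator.py --sigint --timed   # exit on SIGINT after a logged 60s countdown
#   python terminator.py --error            # exit with a non-zero code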
|
py | 1a4626df92acd8dcdf151d9869e19a7cfb165d1e | from aspen import Response
from aspen.resources import PAGE_BREAK
from aspen.resources.resource import Resource
class StringDefaultingList(list):
def __getitem__(self, key):
try:
return list.__getitem__(self, key)
except KeyError:
return str(key)
ORDINALS = StringDefaultingList([ 'zero' , 'one' , 'two', 'three', 'four'
, 'five', 'six', 'seven', 'eight', 'nine'
])
class DynamicResource(Resource):
"""This is the base for JSON, negotiating, socket, and rendered resources.
"""
min_pages = None # set on subclass
max_pages = None
def __init__(self, *a, **kw):
Resource.__init__(self, *a, **kw)
self.pages = self.parse_into_pages(self.raw)
self.pages = self.compile_pages(self.pages)
def respond(self, request, response=None):
"""Given a Request and maybe a Response, return or raise a Response.
"""
response = response or Response(charset=self.website.charset_dynamic)
# Populate context.
# =================
context = self.populate_context(request, response)
# Exec page two.
# ==============
try:
exec self.pages[1] in context
except Response, response:
self.process_raised_response(response)
raise
# Hook.
# =====
try:
response = self.get_response(context)
except Response, response:
self.process_raised_response(response)
raise
else:
return response
def populate_context(self, request, response):
"""Factored out to support testing.
"""
context = request.context
context.update(self.pages[0])
context['request'] = request
context['response'] = response
context['resource'] = self
return context
def parse_into_pages(self, raw):
"""Given a bytestring, return a list of pages.
Subclasses extend this to implement additional semantics.
"""
# Support caret-L in addition to .
uncareted = raw.replace("^L", PAGE_BREAK)
pages = uncareted.split(PAGE_BREAK)
npages = len(pages)
# Check for too few pages. This is a sanity check as get_resource_class
# should guarantee this. Bug if it fails.
assert npages >= self.min_pages, npages
# Check for too many pages. This is user error.
if self.max_pages is not None and npages > self.max_pages:
type_name = self.__class__.__name__[:-len('resource')]
msg = "%s resources must have at most %s pages; %s has %s."
msg %= ( type_name
, ORDINALS[self.max_pages]
, self.fs
, ORDINALS[npages]
)
raise SyntaxError(msg)
return pages
def compile_pages(self, pages):
"""Given a list of bytestrings, replace the bytestrings with objects.
All dynamic resources compile the first two pages the same way. It's
the third and following pages that differ, so we require subclasses to
supply a method for that: compile_page.
"""
# Standardize newlines.
# =====================
# compile requires \n, and doing it now makes the next line easier. In
# general it's nice to standardize this, I think. XXX Should we be
# going back to \r\n for the wire? That's HTTP, right?
for i, page in enumerate(pages):
pages[i] = page.replace('\r\n', '\n')
one = pages[0]
two = pages[1]
# Compute paddings and pad the second and third pages.
# ====================================================
# This is so we get accurate tracebacks. We pass padding to the
# compile_page hook; the SocketResource subclass uses it, since it has
# an additional logic page that it wants to pad. We don't simply pad
# all pages because then for content pages the user would view source
# in their browser and see nothing but whitespace until they scroll way
# down.
paddings = self._compute_paddings(pages)
two = paddings[1] + two
# Exec the first page and compile the second.
# ===========================================
context = dict()
context['__file__'] = self.fs
context['website'] = self.website
one = compile(one, self.fs, 'exec')
exec one in context # mutate context
one = context # store it
two = compile(two, self.fs, 'exec')
pages[0] = one
pages[1] = two
# Subclasses are responsible for the rest.
# ========================================
for i, page in enumerate(pages[2:]):
i += 2 # no start kw to enumerate in Python 2.5
pages[i] = self.compile_page(page, paddings[i])
return pages
def _compute_paddings(pages):
"""Given a list of bytestrings, return a 1-shorter list of bytestrings.
"""
if not pages:
return []
# A file with many, many lines would flog this algorithm.
lines_in = lambda s: '\n' * s.count('\n')
paddings = [''] # first page doesn't need padding
paddings += [paddings[-1] + lines_in(page) for page in pages[:-1]]
return paddings
_compute_paddings = staticmethod(_compute_paddings)
# Hooks
# =====
def compile_page(self, *a):
"""Given a bytestring, return an object.
"""
raise NotImplementedError
def process_raised_response(self, response):
"""Given a response object, mutate it as needed.
"""
pass
def get_response(self, context):
"""Given a context dictionary, return a Response object.
"""
raise NotImplementedError
|
py | 1a4628119cb2efda6766edcecc37fc3595b3d83d | # NOTE: bad django practice but /ee specifically depends on /posthog so it should be fine
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from rest_framework import serializers
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from ee.clickhouse.client import sync_execute
from ee.clickhouse.models.action import format_action_filter, format_entity_filter
from ee.clickhouse.models.cohort import format_filter_query
from ee.clickhouse.models.person import ClickhousePersonSerializer
from ee.clickhouse.models.property import parse_prop_clauses
from ee.clickhouse.queries.util import get_trunc_func_ch, parse_timestamps
from ee.clickhouse.sql.person import GET_LATEST_PERSON_SQL, PEOPLE_SQL, PEOPLE_THROUGH_DISTINCT_SQL, PERSON_TREND_SQL
from ee.clickhouse.sql.stickiness.stickiness_people import STICKINESS_PEOPLE_SQL
from posthog.api.action import ActionSerializer, ActionViewSet
from posthog.api.utils import get_target_entity
from posthog.constants import ENTITY_ID, ENTITY_TYPE, TREND_FILTER_TYPE_ACTIONS
from posthog.models.action import Action
from posthog.models.cohort import Cohort
from posthog.models.entity import Entity
from posthog.models.filters import Filter
from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.property import Property
from posthog.models.team import Team
class ClickhouseActionSerializer(ActionSerializer):
is_calculating = serializers.SerializerMethodField()
def get_count(self, action: Action) -> Optional[int]:
if self.context.get("view") and self.context["view"].action != "list":
query, params = format_action_filter(action)
if query == "":
return None
return sync_execute(
"SELECT count(1) FROM events WHERE team_id = %(team_id)s AND {}".format(query),
{"team_id": action.team_id, **params},
)[0][0]
return None
def get_is_calculating(self, action: Action) -> bool:
return False
class ClickhouseActionsViewSet(ActionViewSet):
serializer_class = ClickhouseActionSerializer
# Don't calculate actions in Clickhouse as it's on the fly
def _calculate_action(self, action: Action) -> None:
pass
def list(self, request: Request, *args: Any, **kwargs: Any) -> Response:
actions = self.get_queryset()
actions_list: List[Dict[Any, Any]] = self.serializer_class(actions, many=True, context={"request": request}).data # type: ignore
return Response({"results": actions_list})
@action(methods=["GET"], detail=False)
def people(self, request: Request, *args: Any, **kwargs: Any) -> Response:
team = self.team
filter = Filter(request=request)
entity = get_target_entity(request)
# adhoc date handling. parsed differently with django orm
date_from = filter.date_from or timezone.now()
data = {}
if filter.interval == "month":
data.update(
{"date_to": (date_from + relativedelta(months=1) - timedelta(days=1)).strftime("%Y-%m-%d %H:%M:%S")}
)
elif filter.interval == "week":
data.update({"date_to": (date_from + relativedelta(weeks=1)).strftime("%Y-%m-%d %H:%M:%S")})
elif filter.interval == "hour":
data.update({"date_to": date_from + timedelta(hours=1)})
elif filter.interval == "minute":
data.update({"date_to": date_from + timedelta(minutes=1)})
filter = Filter(data={**filter._data, **data})
current_url = request.get_full_path()
serialized_people = self._calculate_entity_people(team, entity, filter)
current_url = request.get_full_path()
next_url: Optional[str] = request.get_full_path()
offset = filter.offset
if len(serialized_people) > 100 and next_url:
if "offset" in next_url:
next_url = next_url[1:]
next_url = next_url.replace("offset=" + str(offset), "offset=" + str(offset + 100))
else:
next_url = request.build_absolute_uri(
"{}{}offset={}".format(next_url, "&" if "?" in next_url else "?", offset + 100)
)
else:
next_url = None
return Response(
{
"results": [{"people": serialized_people[0:100], "count": len(serialized_people[0:99])}],
"next": next_url,
"previous": current_url[1:],
}
)
def _calculate_entity_people(self, team: Team, entity: Entity, filter: Filter):
parsed_date_from, parsed_date_to, _ = parse_timestamps(filter=filter, team_id=team.pk)
entity_sql, entity_params = format_entity_filter(entity=entity)
person_filter = ""
person_filter_params: Dict[str, Any] = {}
if filter.breakdown_type == "cohort" and filter.breakdown_value != "all":
cohort = Cohort.objects.get(pk=filter.breakdown_value, team_id=team.pk)
person_filter, person_filter_params = format_filter_query(cohort)
person_filter = "AND distinct_id IN ({})".format(person_filter)
elif (
filter.breakdown_type == "person"
and isinstance(filter.breakdown, str)
and isinstance(filter.breakdown_value, str)
):
person_prop = Property(**{"key": filter.breakdown, "value": filter.breakdown_value, "type": "person"})
filter.properties.append(person_prop)
prop_filters, prop_filter_params = parse_prop_clauses(filter.properties, team.pk)
params: Dict = {"team_id": team.pk, **prop_filter_params, **entity_params, "offset": filter.offset}
content_sql = PERSON_TREND_SQL.format(
entity_filter=f"AND {entity_sql}",
parsed_date_from=parsed_date_from,
parsed_date_to=parsed_date_to,
filters=prop_filters,
breakdown_filter="",
person_filter=person_filter,
)
people = sync_execute(
PEOPLE_THROUGH_DISTINCT_SQL.format(
content_sql=content_sql, latest_person_sql=GET_LATEST_PERSON_SQL.format(query="")
),
{**params, **person_filter_params},
)
serialized_people = ClickhousePersonSerializer(people, many=True).data
return serialized_people
class LegacyClickhouseActionsViewSet(ClickhouseActionsViewSet):
legacy_team_compatibility = True
|
py | 1a4628b4cb007967fbbe30b1b61e71271fd84695 | """
Gateway for Binance Crypto Exchange.
"""
import urllib
import hashlib
import hmac
import time
from copy import copy
from datetime import datetime, timedelta
from enum import Enum
from threading import Lock
import pytz
from vnpy.api.rest import RestClient, Request
from vnpy.api.websocket import WebsocketClient
from vnpy.trader.constant import (
Direction,
Exchange,
Product,
Status,
OrderType,
Interval
)
from vnpy.trader.gateway import BaseGateway
from vnpy.trader.object import (
TickData,
OrderData,
TradeData,
AccountData,
ContractData,
BarData,
OrderRequest,
CancelRequest,
SubscribeRequest,
HistoryRequest
)
from vnpy.trader.event import EVENT_TIMER
from vnpy.event import Event
REST_HOST = "https://www.binance.com"
WEBSOCKET_TRADE_HOST = "wss://stream.binance.com:9443/ws/"
WEBSOCKET_DATA_HOST = "wss://stream.binance.com:9443/stream?streams="
STATUS_BINANCE2VT = {
"NEW": Status.NOTTRADED,
"PARTIALLY_FILLED": Status.PARTTRADED,
"FILLED": Status.ALLTRADED,
"CANCELED": Status.CANCELLED,
"REJECTED": Status.REJECTED
}
ORDERTYPE_VT2BINANCE = {
OrderType.LIMIT: "LIMIT",
OrderType.MARKET: "MARKET"
}
ORDERTYPE_BINANCE2VT = {v: k for k, v in ORDERTYPE_VT2BINANCE.items()}
DIRECTION_VT2BINANCE = {
Direction.LONG: "BUY",
Direction.SHORT: "SELL"
}
DIRECTION_BINANCE2VT = {v: k for k, v in DIRECTION_VT2BINANCE.items()}
INTERVAL_VT2BINANCE = {
Interval.MINUTE: "1m",
Interval.HOUR: "1h",
Interval.DAILY: "1d",
}
TIMEDELTA_MAP = {
Interval.MINUTE: timedelta(minutes=1),
Interval.HOUR: timedelta(hours=1),
Interval.DAILY: timedelta(days=1),
}
CHINA_TZ = pytz.timezone("Asia/Shanghai")
class Security(Enum):
NONE = 0
SIGNED = 1
API_KEY = 2
symbol_name_map = {}
class BinanceGateway(BaseGateway):
"""
VN Trader Gateway for Binance connection.
"""
default_setting = {
"key": "",
"secret": "",
"session_number": 3,
"proxy_host": "",
"proxy_port": 0,
}
exchanges = [Exchange.BINANCE]
def __init__(self, event_engine):
"""Constructor"""
super().__init__(event_engine, "BINANCE")
self.trade_ws_api = BinanceTradeWebsocketApi(self)
self.market_ws_api = BinanceDataWebsocketApi(self)
self.rest_api = BinanceRestApi(self)
def connect(self, setting: dict):
""""""
key = setting["key"]
secret = setting["secret"]
session_number = setting["session_number"]
proxy_host = setting["proxy_host"]
proxy_port = setting["proxy_port"]
self.rest_api.connect(key, secret, session_number,
proxy_host, proxy_port)
self.market_ws_api.connect(proxy_host, proxy_port)
self.event_engine.register(EVENT_TIMER, self.process_timer_event)
def subscribe(self, req: SubscribeRequest):
""""""
self.market_ws_api.subscribe(req)
def send_order(self, req: OrderRequest):
""""""
return self.rest_api.send_order(req)
def cancel_order(self, req: CancelRequest):
""""""
self.rest_api.cancel_order(req)
def query_account(self):
""""""
pass
def query_position(self):
""""""
pass
def query_history(self, req: HistoryRequest):
""""""
return self.rest_api.query_history(req)
def close(self):
""""""
self.rest_api.stop()
self.trade_ws_api.stop()
self.market_ws_api.stop()
def process_timer_event(self, event: Event):
""""""
self.rest_api.keep_user_stream()
class BinanceRestApi(RestClient):
"""
BINANCE REST API
"""
def __init__(self, gateway: BinanceGateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.trade_ws_api = self.gateway.trade_ws_api
self.key = ""
self.secret = ""
self.user_stream_key = ""
self.keep_alive_count = 0
self.recv_window = 5000
self.time_offset = 0
self.order_count = 1_000_000
self.order_count_lock = Lock()
self.connect_time = 0
def sign(self, request):
"""
Generate BINANCE signature.
"""
security = request.data["security"]
if security == Security.NONE:
request.data = None
return request
if request.params:
path = request.path + "?" + urllib.parse.urlencode(request.params)
else:
request.params = dict()
path = request.path
if security == Security.SIGNED:
timestamp = int(time.time() * 1000)
if self.time_offset > 0:
timestamp -= abs(self.time_offset)
elif self.time_offset < 0:
timestamp += abs(self.time_offset)
request.params["timestamp"] = timestamp
query = urllib.parse.urlencode(sorted(request.params.items()))
signature = hmac.new(self.secret, query.encode(
"utf-8"), hashlib.sha256).hexdigest()
query += "&signature={}".format(signature)
path = request.path + "?" + query
request.path = path
request.params = {}
request.data = {}
# Add headers
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/json",
"X-MBX-APIKEY": self.key
}
if security in [Security.SIGNED, Security.API_KEY]:
request.headers = headers
return request
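    # Signing sketch for the SIGNED branch above (the values are hypothetical):
    #   query = "symbol=BTCUSDT&timestamp=1600000000000"
    #   sig = hmac.new(secret, query.encode("utf-8"), hashlib.sha256).hexdigest()
    #   path = "/api/v3/order?" + query + "&signature=" + sig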
def connect(
self,
key: str,
secret: str,
session_number: int,
proxy_host: str,
proxy_port: int
):
"""
Initialize connection to REST server.
"""
self.key = key
self.secret = secret.encode()
self.proxy_port = proxy_port
self.proxy_host = proxy_host
self.connect_time = (
int(datetime.now(CHINA_TZ).strftime("%y%m%d%H%M%S")) * self.order_count
)
self.init(REST_HOST, proxy_host, proxy_port)
self.start(session_number)
        self.gateway.write_log("REST API started successfully")
self.query_time()
self.query_account()
self.query_order()
self.query_contract()
self.start_user_stream()
def query_time(self):
""""""
data = {
"security": Security.NONE
}
path = "/api/v1/time"
return self.add_request(
"GET",
path,
callback=self.on_query_time,
data=data
)
def query_account(self):
""""""
data = {"security": Security.SIGNED}
self.add_request(
method="GET",
path="/api/v3/account",
callback=self.on_query_account,
data=data
)
def query_order(self):
""""""
data = {"security": Security.SIGNED}
self.add_request(
method="GET",
path="/api/v3/openOrders",
callback=self.on_query_order,
data=data
)
def query_contract(self):
""""""
data = {
"security": Security.NONE
}
self.add_request(
method="GET",
path="/api/v1/exchangeInfo",
callback=self.on_query_contract,
data=data
)
def _new_order_id(self):
""""""
with self.order_count_lock:
self.order_count += 1
return self.order_count
def send_order(self, req: OrderRequest):
""""""
orderid = "NKD8FYX4-" + str(self.connect_time + self._new_order_id())
order = req.create_order_data(
orderid,
self.gateway_name
)
self.gateway.on_order(order)
data = {
"security": Security.SIGNED
}
params = {
"symbol": req.symbol.upper(),
"timeInForce": "GTC",
"side": DIRECTION_VT2BINANCE[req.direction],
"type": ORDERTYPE_VT2BINANCE[req.type],
"price": str(req.price),
"quantity": str(req.volume),
"newClientOrderId": orderid,
"newOrderRespType": "ACK"
}
self.add_request(
method="POST",
path="/api/v3/order",
callback=self.on_send_order,
data=data,
params=params,
extra=order,
on_error=self.on_send_order_error,
on_failed=self.on_send_order_failed
)
return order.vt_orderid
def cancel_order(self, req: CancelRequest):
""""""
data = {
"security": Security.SIGNED
}
params = {
"symbol": req.symbol.upper(),
"origClientOrderId": req.orderid
}
self.add_request(
method="DELETE",
path="/api/v3/order",
callback=self.on_cancel_order,
params=params,
data=data,
extra=req
)
def start_user_stream(self):
""""""
data = {
"security": Security.API_KEY
}
self.add_request(
method="POST",
path="/api/v1/userDataStream",
callback=self.on_start_user_stream,
data=data
)
def keep_user_stream(self):
""""""
self.keep_alive_count += 1
if self.keep_alive_count < 600:
return
self.keep_alive_count = 0
data = {
"security": Security.API_KEY
}
params = {
"listenKey": self.user_stream_key
}
self.add_request(
method="PUT",
path="/api/v1/userDataStream",
callback=self.on_keep_user_stream,
params=params,
data=data
)
def on_query_time(self, data, request):
""""""
local_time = int(time.time() * 1000)
server_time = int(data["serverTime"])
self.time_offset = local_time - server_time
def on_query_account(self, data, request):
""""""
for account_data in data["balances"]:
account = AccountData(
accountid=account_data["asset"],
balance=float(account_data["free"]) + float(account_data["locked"]),
frozen=float(account_data["locked"]),
gateway_name=self.gateway_name
)
if account.balance:
self.gateway.on_account(account)
        self.gateway.write_log("Account balance query successful")
def on_query_order(self, data, request):
""""""
for d in data:
order = OrderData(
orderid=d["clientOrderId"],
symbol=d["symbol"].lower(),
exchange=Exchange.BINANCE,
price=float(d["price"]),
volume=float(d["origQty"]),
type=ORDERTYPE_BINANCE2VT[d["type"]],
direction=DIRECTION_BINANCE2VT[d["side"]],
traded=float(d["executedQty"]),
status=STATUS_BINANCE2VT.get(d["status"], None),
datetime=generate_datetime(d["time"]),
gateway_name=self.gateway_name,
)
self.gateway.on_order(order)
        self.gateway.write_log("Open order query successful")
def on_query_contract(self, data, request):
""""""
for d in data["symbols"]:
base_currency = d["baseAsset"]
quote_currency = d["quoteAsset"]
name = f"{base_currency.upper()}/{quote_currency.upper()}"
pricetick = 1
min_volume = 1
for f in d["filters"]:
if f["filterType"] == "PRICE_FILTER":
pricetick = float(f["tickSize"])
elif f["filterType"] == "LOT_SIZE":
min_volume = float(f["stepSize"])
contract = ContractData(
symbol=d["symbol"].lower(),
exchange=Exchange.BINANCE,
name=name,
pricetick=pricetick,
size=1,
min_volume=min_volume,
product=Product.SPOT,
history_data=True,
gateway_name=self.gateway_name,
)
self.gateway.on_contract(contract)
symbol_name_map[contract.symbol] = contract.name
        self.gateway.write_log("Contract information query successful")
def on_send_order(self, data, request):
""""""
pass
def on_send_order_failed(self, status_code: str, request: Request):
"""
Callback when sending order failed on server.
"""
order = request.extra
order.status = Status.REJECTED
self.gateway.on_order(order)
        msg = f"Order failed, status code: {status_code}, message: {request.response.text}"
self.gateway.write_log(msg)
def on_send_order_error(
self, exception_type: type, exception_value: Exception, tb, request: Request
):
"""
Callback when sending order caused exception.
"""
order = request.extra
order.status = Status.REJECTED
self.gateway.on_order(order)
# Record exception if not ConnectionError
if not issubclass(exception_type, ConnectionError):
self.on_error(exception_type, exception_value, tb, request)
def on_cancel_order(self, data, request):
""""""
pass
def on_start_user_stream(self, data, request):
""""""
self.user_stream_key = data["listenKey"]
self.keep_alive_count = 0
url = WEBSOCKET_TRADE_HOST + self.user_stream_key
self.trade_ws_api.connect(url, self.proxy_host, self.proxy_port)
def on_keep_user_stream(self, data, request):
""""""
pass
def query_history(self, req: HistoryRequest):
""""""
history = []
limit = 1000
start_time = int(datetime.timestamp(req.start))
while True:
# Create query params
params = {
"symbol": req.symbol.upper(),
"interval": INTERVAL_VT2BINANCE[req.interval],
"limit": limit,
"startTime": start_time * 1000, # convert to millisecond
}
# Add end time if specified
if req.end:
end_time = int(datetime.timestamp(req.end))
params["endTime"] = end_time * 1000 # convert to millisecond
# Get response from server
resp = self.request(
"GET",
"/api/v1/klines",
data={"security": Security.NONE},
params=params
)
# Break if request failed with other status code
if resp.status_code // 100 != 2:
                msg = f"Failed to fetch history data, status code: {resp.status_code}, message: {resp.text}"
self.gateway.write_log(msg)
break
else:
data = resp.json()
if not data:
                    msg = f"No history data returned, start time: {start_time}"
self.gateway.write_log(msg)
break
buf = []
for l in data:
bar = BarData(
symbol=req.symbol,
exchange=req.exchange,
datetime=generate_datetime(l[0]),
interval=req.interval,
volume=float(l[5]),
open_price=float(l[1]),
high_price=float(l[2]),
low_price=float(l[3]),
close_price=float(l[4]),
gateway_name=self.gateway_name
)
buf.append(bar)
history.extend(buf)
begin = buf[0].datetime
end = buf[-1].datetime
                msg = f"History data fetched for {req.symbol} - {req.interval.value}, {begin} - {end}"
self.gateway.write_log(msg)
# Break if total data count less than limit (latest date collected)
if len(data) < limit:
break
# Update start time
start_dt = bar.datetime + TIMEDELTA_MAP[req.interval]
start_time = int(datetime.timestamp(start_dt))
return history
class BinanceTradeWebsocketApi(WebsocketClient):
""""""
def __init__(self, gateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
def connect(self, url, proxy_host, proxy_port):
""""""
self.init(url, proxy_host, proxy_port)
self.start()
def on_connected(self):
""""""
        self.gateway.write_log("Trade Websocket API connected")
def on_packet(self, packet: dict): # type: (dict)->None
""""""
if packet["e"] == "outboundAccountInfo":
self.on_account(packet)
elif packet["e"] == "executionReport":
self.on_order(packet)
def on_account(self, packet):
""""""
for d in packet["B"]:
account = AccountData(
accountid=d["a"],
balance=float(d["f"]) + float(d["l"]),
frozen=float(d["l"]),
gateway_name=self.gateway_name
)
if account.balance:
self.gateway.on_account(account)
def on_order(self, packet: dict):
""""""
if packet["C"] == "":
orderid = packet["c"]
else:
orderid = packet["C"]
order = OrderData(
symbol=packet["s"].lower(),
exchange=Exchange.BINANCE,
orderid=orderid,
type=ORDERTYPE_BINANCE2VT[packet["o"]],
direction=DIRECTION_BINANCE2VT[packet["S"]],
price=float(packet["p"]),
volume=float(packet["q"]),
traded=float(packet["z"]),
status=STATUS_BINANCE2VT[packet["X"]],
datetime=generate_datetime(packet["O"]),
gateway_name=self.gateway_name
)
self.gateway.on_order(order)
# Push trade event
trade_volume = float(packet["l"])
if not trade_volume:
return
trade = TradeData(
symbol=order.symbol,
exchange=order.exchange,
orderid=order.orderid,
tradeid=packet["t"],
direction=order.direction,
price=float(packet["L"]),
volume=trade_volume,
datetime=generate_datetime(packet["T"]),
gateway_name=self.gateway_name,
)
self.gateway.on_trade(trade)
class BinanceDataWebsocketApi(WebsocketClient):
""""""
def __init__(self, gateway):
""""""
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.ticks = {}
def connect(self, proxy_host: str, proxy_port: int):
""""""
self.proxy_host = proxy_host
self.proxy_port = proxy_port
def on_connected(self):
""""""
        self.gateway.write_log("Market data Websocket API connection refreshed")
def subscribe(self, req: SubscribeRequest):
""""""
if req.symbol not in symbol_name_map:
            self.gateway.write_log(f"Cannot find the contract symbol {req.symbol}")
return
# Create tick buf data
tick = TickData(
symbol=req.symbol,
name=symbol_name_map.get(req.symbol, ""),
exchange=Exchange.BINANCE,
datetime=datetime.now(CHINA_TZ),
gateway_name=self.gateway_name,
)
self.ticks[req.symbol] = tick
# Close previous connection
if self._active:
self.stop()
self.join()
# Create new connection
channels = []
for ws_symbol in self.ticks.keys():
channels.append(ws_symbol + "@ticker")
channels.append(ws_symbol + "@depth5")
url = WEBSOCKET_DATA_HOST + "/".join(channels)
self.init(url, self.proxy_host, self.proxy_port)
self.start()
def on_packet(self, packet):
""""""
stream = packet["stream"]
data = packet["data"]
symbol, channel = stream.split("@")
tick = self.ticks[symbol]
if channel == "ticker":
tick.volume = float(data['v'])
tick.open_price = float(data['o'])
tick.high_price = float(data['h'])
tick.low_price = float(data['l'])
tick.last_price = float(data['c'])
tick.datetime = generate_datetime(float(data['E']))
else:
bids = data["bids"]
for n in range(5):
price, volume = bids[n]
tick.__setattr__("bid_price_" + str(n + 1), float(price))
tick.__setattr__("bid_volume_" + str(n + 1), float(volume))
asks = data["asks"]
for n in range(5):
price, volume = asks[n]
tick.__setattr__("ask_price_" + str(n + 1), float(price))
tick.__setattr__("ask_volume_" + str(n + 1), float(volume))
if tick.last_price:
self.gateway.on_tick(copy(tick))
def generate_datetime(timestamp: float) -> datetime:
""""""
dt = datetime.fromtimestamp(timestamp / 1000)
dt = dt.replace(tzinfo=CHINA_TZ)
return dt
|
py | 1a4629161ea2ebb526f6e4a10d3124a3e52a4fe0 | """Module to hold constants for testing."""
import os
from esa import SAW
# Handle pathing.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
CASE_DIR = os.path.join(THIS_DIR, 'cases')
DUMMY_CASE = os.path.join(CASE_DIR, 'dummy_case.pwb')
DATA_DIR = os.path.join(THIS_DIR, 'data')
SNIPPET_DIR = os.path.join(THIS_DIR, '..', 'docs', 'rst', 'snippets')
SNIPPET_FILES = [os.path.join(SNIPPET_DIR, x) for x in
os.listdir(SNIPPET_DIR) if x.endswith('.rst')]
# Use the dummy case to get the version of Simulator.
saw = SAW(DUMMY_CASE)
VERSION = saw.version
saw.exit()
del saw
# Path to IEEE 14 bus model.
PATH_14 = os.path.join(CASE_DIR, 'ieee_14',
'IEEE 14 bus_pws_version_{}.pwb'.format(VERSION))
# TODO: Update all PWD files with versions when we create them.
PATH_14_PWD = os.path.join(CASE_DIR, 'ieee_14', 'IEEE 14 bus.pwd')
# Path to the Texas 2000 bus model.
PATH_2000 = os.path.join(CASE_DIR, 'tx2000',
'tx2000_base_pws_version_{}.pwb'.format(VERSION))
# Path to the WSCC model.
PATH_9 = os.path.join(CASE_DIR, 'wscc_9',
'WSCC 9 bus_pws_version_{}.pwb'.format(VERSION))
# Aux file for filtering buses by area.
AREA_AUX_FILE = os.path.join(THIS_DIR, 'area_filter.aux')
# Map cases for doc testing.
CASE_MAP = {'14': PATH_14, '2000': PATH_2000}
# Path to file containing lines for one of the examples.
CANDIDATE_LINES = os.path.join(DATA_DIR, 'CandidateLines.csv')
|
py | 1a46293910a8e251dfba12864862418ce26d9ffb | """Example minimal input plugin for the Mjolnir-Config-Template."""
# Local imports
import brokkr.pipeline.baseinput
class ExampleMinimalInput(brokkr.pipeline.baseinput.ValueInputStep):
def __init__(
self,
example_argument=True,
**value_input_kwargs):
super().__init__(binary_decoder=False, **value_input_kwargs)
# YOUR INIT LOGIC AND ARGUMENT HANDLING HERE
self._example_attribute = example_argument
def read_raw_data(self, input_data=None):
# YOUR DATA READING LOGIC HERE
if not self._example_attribute:
return None
raw_data = []
for data_type in self.data_types:
try:
raw_data_value = data_type.example_value
except Exception as e:
self.logger.error("%s occurred: %s", type(e).__name__, e)
raw_data_value = None
raw_data.append(raw_data_value)
return raw_data
|
py | 1a462a847f3c03da5191a09b916d1fce70184379 | """
BALLAST: Builder Assistant to Lay out, Label and Arrange Spectra
Together
This is a simple program to combine and display spectra together.
"""
import sys
import os
import re
import argparse
import typing as tp
import configparser as cfg
from math import *
import numpy as np
import matplotlib.pyplot as plt
from estampes.base.spectrum import Spectrum
from estampes.tools.char import convert_expr
from estampes.visual.plotspec import SpecLayout
def fscale(expr: str, var: str) -> tp.Callable[[float], float]:
"""Returns a scaling function.
Analyzes the mathematical expression in `expr` and returns a
function compatible with `var`.
Parameters
----------
expr
Mathematical expression.
var
Variable of interest.
Returns
-------
function
Mathematical function
Raises
------
NameError
Unsupported mathematical function.
"""
try:
_expr = convert_expr(expr, var, natural=True)
except ValueError:
return NameError('Wrong mathematical functions detected.')
return eval('lambda {}: {}'.format(var, _expr))
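# Illustrative behaviour of fscale (a sketch, assuming convert_expr leaves the
# arithmetic untouched):
#   f = fscale('2*x + 1', 'x')
#   f(3.0)   # -> 7.0
# The expression is compiled into a one-argument lambda, so a curve can be
# rescaled point-wise with a user-supplied formula.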
def build_opts(parser: argparse.ArgumentParser) -> tp.NoReturn:
"""Builds commandline options.
Builds commandline options inside input `parser`.
Parameters
----------
parser
Parser to update.
"""
parser.add_argument('optfile', nargs='?',
help='Option file (INI style).')
# parser.add_argument('-o', '--output',
# help='Output file.')
parser.add_argument('-c', '--colors', action='append',
help='Spectral colors.')
msg = '''\
Colors of the spectra. By default, it follows the order of input files.
It is possible to change the order by putting a number followed by ":".
Ex. '3:Test' means that the label 'Test' is for the 3rd file (start at 1).
'r'/'e'/'0' refers to the reference data.
'''
parser.add_argument('-i', '--inpfile', action='append',
help='Input data file.')
msg = '''\
Labels for the legend. By default, it follows the order of input files.
It is possible to change the order by putting a number followed by ":".
Ex. '3:Test' means that the label 'Test' is for the 3rd file (start at 1).
'r'/'e'/'0' refers to the reference data.
'''
parser.add_argument('-l', '--label', action='append',
help=msg)
parser.add_argument('-r', '--refdata',
help='Reference spectrum file.')
def parse_args(args: tp.Sequence[str]) -> argparse.Namespace:
"""Parses arguments.
Parses commandline arguments
Parameters
----------
args
Commandline arguments
Returns
-------
:obj:`argparse.Namespace`
Object holding results as attributes
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
build_opts(parser)
return parser.parse_args(args)
def parse_subid(ident: str, ncols: int = 1
) -> tp.Tuple[tp.Union[int, tp.Tuple[int, int]],
tp.Union[int, tp.Tuple[int, int]]]:
"""Parses a subplot identifier.
Takes a subplot identifier and returns the corresponding row and
column.
Parameters
----------
ident
Identifier string.
ncols
Number of columns.
Returns
-------
int, tuple
Row index (starting from 1) or interval as tuple of integers.
int, tuple
Column index (starting from 1) or interval as tuple of integers.
Raises
------
ValueError
Unable to parse the subplot specification.
"""
def split_coord(coord: str) -> tp.Union[int, tp.Tuple[int, int]]:
"""Splits correctly a single coordinate."""
if not coord.strip():
return 1
else:
res = coord.split('-')
if len(res) == 2:
if not res[0].strip():
i = 1
else:
i = max(int(res[0]), 1)
if not res[1].strip():
j = -1
else:
j = max(int(res[1]), 1)
if i == j:
return i
else:
return (i, j)
else:
return max(int(res[0]), 1)
grid = ident.split(',')
if len(grid) == 2:
row = split_coord(grid[0])
col = split_coord(grid[1])
elif len(grid) == 1:
        i = int(grid[0])
row = max(int(ceil(i/ncols)), 1)
col = max(i - (row-1)*ncols, 1)
else:
raise ValueError('Incorrect subplot specification.')
return row, col
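# Illustrative behaviour of parse_subid (a sketch, with ncols=2):
#   parse_subid('1,2', 2)    # -> (1, 2)       row 1, column 2
#   parse_subid('1-2,1', 2)  # -> ((1, 2), 1)  rows 1 to 2, column 1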
def parse_inifile(fname: str
) -> tp.Tuple[tp.Dict[str, tp.Any],
tp.List[tp.List[SpecLayout]],
tp.Dict[str, tp.Any]]:
"""Parses INI file.
Parses a INI configuration file.
Parameters
----------
fname
Filename.
Returns
-------
dict
Figure data.
list
List of lists of spectrum layout parameters (grif format).
dict
Curves data.
Raises
------
FileNotFoundError
INI file or input file missing.
ValueError
Incorrect parameter.
"""
if not os.path.exists(fname):
raise FileNotFoundError('Missing INI file')
opts = cfg.ConfigParser()
opts.read(fname)
secs = {key.strip().lower(): key for key in opts.sections()}
figdat = {
'title': None,
'geom': None,
'shareaxes': False,
'subp': (1, 1),
'fname': None,
'show': True
}
if 'figure' in secs:
optsec = opts[secs['figure']]
figdat['title'] = optsec.get('maintitle', fallback=None)
figdat['fname'] = optsec.get('imagefile', fallback=None)
figdat['show'] = optsec.getboolean('showfigure', fallback=True)
res = optsec.get('mergeaxes', fallback='None').lower()
if res == 'none':
val = False
elif res == 'x':
val = 'X'
elif res == 'y':
val = 'Y'
elif res == 'all':
val = True
else:
raise ValueError('Unrecognized value for MergeAxes')
figdat['shareaxes'] = val
optkey = optsec.get('subplots', None)
if optkey is not None:
res = optkey.replace('(', '').replace(')', '').split(',')
if len(res) == 1:
val = (max(int(res[0]), 1), 1)
else:
val = (max(int(res[0]), 1), max(int(res[1]), 1))
else:
val = None
else:
val = None
if val is not None:
figdat['subp'] = val
# nrows and ncols are needed for the subplot specifications,
# they must not be changed
nrows, ncols = figdat['subp']
figdat['nums'] = nrows * ncols
# Check geometry now since it may be proportional to number of rows/cols
if 'figure' in secs:
optkey = optsec.get('geometry', None)
if optkey is not None:
res = optkey.replace('(', '').replace(')', '').split(',')
if len(res) == 1:
raise ValueError('Incorrect value for geometry.')
if '*' in res[0] or 'x' in res[0]:
val1 = float(res[0].replace('x', '').replace('*', ''))*ncols
else:
val1 = float(res[0])
if '*' in res[1] or 'x' in res[1]:
val2 = float(res[1].replace('x', '').replace('*', ''))*nrows
else:
val2 = float(res[1])
val = (val1, val2)
figdat['geom'] = val
spcdat = []
for _ in range(nrows):
spcdat.append([None for j in range(ncols)])
# The layout system works in a slightly different way than curves
# Besides using defaults, users can use the generic [layout] to define
# a common layout.
# We first build some default setup, which will be used for all others.
# The keys correspond to SpecLayout
spckeys = {
'title': ('title', ),
'xleft': ('xleft', 'xmin'),
'xright': ('xright', 'xmax'),
'ytop': ('ytop', 'ymax'),
'ybottom': ('ybottom', 'ymin'),
'xscale': ('xscale', ),
'yscale': ('yscale', ),
'xlabel': ('xlabel', ),
'ylabel': ('ylabel', ),
'legpos': ('legend', ),
'legcol': ('legend_cols', ),
'plottag': ('panel', )
}
spcbase = {
'title': None,
'xleft': None,
'xright': None,
'ytop': None,
'ybottom': None,
'xscale': 'linear',
'yscale': 'linear',
'xlabel': None,
'ylabel': None,
'legpos': 'best',
'legcol': 1,
'plottag': None
}
if 'layout' in secs:
optsec = opts[secs['layout']]
for key in spckeys:
for alias in spckeys[key]:
if alias in optsec:
spcbase[key] = optsec[alias]
break
for sec in secs:
if sec.startswith('layout'):
res = sec.split(':')
if len(res) == 2: # Ignore default case here
row, col = parse_subid(res[1], ncols)
if isinstance(row, tuple) or isinstance(col, tuple):
msg = 'Subplot ranges not supported in layout specs.'
raise ValueError(msg)
if row > nrows or col > ncols:
break
# correct to Python indexes
row -= 1
col -= 1
optsec = opts[secs[sec]]
val = {}
for key in spckeys:
for alias in spckeys[key]:
if alias in optsec:
val[key] = optsec[alias]
break
else:
val[key] = spcbase[key]
spcdat[row][col] = SpecLayout(**val)
for row in range(nrows):
for col in range(ncols):
if spcdat[row][col] is None:
spcdat[row][col] = SpecLayout(**spcbase)
# If axes merged, removed unnecessary labels
if figdat['shareaxes'] in ('Y', True) and ncols > 1:
for i in range(nrows):
for j in range(1, ncols):
spcdat[i][j].ylabel = None
if figdat['shareaxes'] in ('X', True) and nrows > 1:
for i in range(nrows-1):
for j in range(ncols):
spcdat[i][j].xlabel = None
curves = {}
for sec in secs:
if sec.startswith('curve'):
res = sec.split(':', maxsplit=1)
if res[0] != 'curve':
print(sec, 'will be ignored as a curve definition.')
continue # This is not a right keyword, ignore.
if len(res) != 2:
key = ' '
else:
key = secs[sec].split(':', maxsplit=1)[1].strip()
optsec = opts[secs[sec]]
# Check if curve to be shown
if not optsec.getboolean('show', fallback=True):
continue
# Subplot - check if subplot within range
res = optsec.get('subplot', fallback=None)
if res is not None:
val1 = parse_subid(res, ncols)
val = [[None, None], [None, None]]
for i, item in enumerate(val1):
if isinstance(item, int):
val[i] = (item-1, item-1)
else:
val[i][0] = item[0] - 1
if item[1] == -1:
if i == 0:
val[i][1] = nrows - 1
else:
val[i][1] = ncols - 1
else:
val[i][1] = item[1] - 1
row, col = val
if row[-1] >= nrows or col[-1] >= ncols:
continue
curves[key] = {'subplot': (row, col)}
else:
curves[key] = {'subplot': ((0, nrows-1), (0, ncols-1))}
if 'file' not in optsec:
print(f'WARNING: Missing file for "{sec}". Ignoring.')
continue
elif not os.path.exists(optsec['file']):
fmt = 'ERROR: File "{}" not found in "{}".'
print(fmt.format(optsec['file'], sec))
spc = optsec.get('spectroscopy', fallback=None)
lvl = optsec.get('level', fallback=None)
if spc is None or lvl is None:
raise ValueError('Spectroscopy not defined')
yid = optsec.get('yaxis', None)
if yid is not None:
yid = 'y' + yid
curves[key]['data'] = Spectrum(optsec['file'], spc, lvl, yid)
if optsec.getboolean('broaden', fallback=False):
func = optsec.get('function', None)
hwhm = optsec.getfloat('hwhm', fallback=10.)
xmin = optsec.getfloat('newxmin', fallback=None)
xmax = optsec.getfloat('newxmax', fallback=None)
xres = optsec.getfloat('grain', fallback=4.)
curves[key]['data'].set_broadening(hwhm, func, 'default', xres,
xmin, xmax)
vizdata = {}
for item in ('color', 'linestyle', 'linewidth'):
if optsec.get(item, False):
vizdata[item] = optsec.get(item, False)
if vizdata:
curves[key]['data'].set_display(**vizdata)
if optsec.get('label', None) is not None:
curves[key]['data'].label = optsec.get('label')
curves[key]['xshift'] = optsec.getfloat('xshift', fallback=None)
res = optsec.get('xscale', None)
if res is not None:
data = res.split(',')
try:
curves[key]['xscale'] = fscale(data[-1], 'x')
except NameError:
msg = 'Incorrect scaling factor for X'
raise ValueError(msg) from None
if len(data) > 1:
val = data[0].lower()
if val in ('rel', 'relative'):
curves[key]['xrelscale'] = True
elif val in ('abs', 'absolute'):
curves[key]['xrelscale'] = False
else:
msg = 'Incorrect scaling method for X'
raise ValueError(msg)
else:
curves[key]['xrelscale'] = False
else:
curves[key]['xscale'] = None
res = optsec.get('yshift', None)
if res is not None:
try:
val = float(res)
except ValueError:
if res.lower() in ('base', 'baseline'):
val = 'base'
else:
msg = 'Unsupported value for YShift'
raise ValueError(msg) from None
else:
val = None
curves[key]['yshift'] = val
res = optsec.get('yscale', None)
if res is not None:
data = res.split(',')
try:
curves[key]['yscale'] = fscale(data[-1], 'y')
except NameError:
msg = 'Incorrect scaling factor for Y'
raise ValueError(msg) from None
if len(data) > 1:
val = data[0].lower()
if val in ('rel', 'relative'):
curves[key]['yrelscale'] = True
elif val in ('abs', 'absolute'):
curves[key]['yrelscale'] = False
else:
msg = 'Incorrect scaling method for Y'
raise ValueError(msg)
else:
curves[key]['yrelscale'] = True
else:
curves[key]['yscale'] = None
curves[key]['ynorm'] = optsec.getboolean('normalize',
fallback=False)
if 'outputfile' in optsec:
curves[key]['outfile'] = \
optsec.get('outputfile').format(curve=key)
return figdat, spcdat, curves
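# Minimal INI sketch accepted by parse_inifile (section and key names follow the
# parsing above; the file names and values are hypothetical):
#   [figure]
#   MainTitle = IR spectra
#   Subplots  = (1, 2)
#
#   [layout]
#   xlabel = Wavenumber / cm-1
#
#   [curve: anharm]
#   file = anharm.log
#   spectroscopy = IR
#   level = Anharm
#   broaden = yes
#   hwhm = 5.0
#   subplot = 1,2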
def main() -> tp.NoReturn:
"""Main function.
"""
args = parse_args(sys.argv[1:])
if not args.inpfile and not args.optfile:
print('ERROR: Missing files or option file.')
sys.exit(2)
elif args.inpfile and args.optfile:
msg = 'ERROR: Option file and single files cannot be treated' \
+ ' together'
print(msg)
sys.exit(2)
elif args.inpfile:
print('ERROR: Files in input not yet supported')
else:
figdata, spcdata, curves = parse_inifile(args.optfile)
nrows, ncols = figdata['subp']
y0lines = np.full((nrows, ncols), False)
pars = {'tight_layout': True}
res = figdata['shareaxes']
if res == 'X' or res is True:
pars['sharex'] = True
if 'gridspec_kw' not in pars:
pars['gridspec_kw'] = {}
pars['gridspec_kw']['hspace'] = 0.0
if res == 'Y' or res is True:
pars['sharey'] = True
if 'gridspec_kw' not in pars:
pars['gridspec_kw'] = {}
pars['gridspec_kw']['wspace'] = 0.0
fig, subp = plt.subplots(nrows, ncols, **pars)
if figdata['geom'] is not None:
fig.set_size_inches(figdata['geom'])
# Build the curves, one at a time and then include in all relevant
# plot to avoid multiple iterations of heavy operations like broaden.
for idcurve, key in enumerate(curves):
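            # X axis: optional scaling (relative scaling measures the values
            # from the point closest to zero), then an optional rigid shift.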
xaxis = np.array(curves[key]['data'].xaxis)
if curves[key]['xscale'] is not None:
if curves[key]['xrelscale']:
shift = min(xaxis, key=lambda x: abs(x))
xaxis -= shift
func = np.vectorize(curves[key]['xscale'])
xaxis = func(xaxis)
if curves[key]['xrelscale']:
xaxis += func(shift)
if curves[key]['xshift'] is not None:
xaxis += curves[key]['xshift']
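            # Y axis: optional scaling, normalization and vertical shift.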
yaxis = np.array(curves[key]['data'].yaxis)
ymin = np.min(yaxis)
ymax = np.max(yaxis)
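            # A y=0 guide line is useful only if the curve takes both signs
            # and the smaller lobe is at least 10% of the larger one.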
add_y0 = ymin*ymax < 0 and (
min(abs(ymin), ymax)/max(abs(ymin), ymax) > .1)
if curves[key]['yscale'] is not None:
if curves[key]['yrelscale']:
shift = min(yaxis, key=lambda x: abs(x))
yaxis -= shift
func = np.vectorize(curves[key]['yscale'])
yaxis = func(yaxis)
if curves[key]['yrelscale']:
yaxis += func(shift)
if curves[key]['ynorm']:
yshift = min(yaxis, key=lambda x: abs(x))
yaxis -= yshift
ymax = np.max(np.abs(yaxis))
yaxis /= ymax
yaxis += yshift/ymax
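            # 'base' derives the offset from the unscaled data extrema of
            # single-signed curves; numeric values are applied directly.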
if curves[key]['yshift'] is not None:
if curves[key]['yshift'] == 'base':
if ymin*ymax >= 0:
if ymin >= 0:
yshift = - ymin
else:
yshift = + ymax
else:
yshift = 0
else:
yshift = curves[key]['yshift']
yaxis += yshift
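            # Stick spectra are drawn as vertical lines instead of a curve.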
stick = curves[key]['data'].get_broadening('func') == 'stick'
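            # Optionally dump the processed curve as two-column text data.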
if 'outfile' in curves[key]:
fmt = '{:12.5f}, {:15.6e}\n'
with open(curves[key]['outfile'], 'w') as fobj:
for i in range(len(xaxis)):
fobj.write(fmt.format(xaxis[i], yaxis[i]))
data = {}
if curves[key]['data'].label is not None:
data['label'] = curves[key]['data'].label
if curves[key]['data'].linecolor is not None:
data['color'] = curves[key]['data'].linecolor
elif stick:
# stick is done with vertical lines, always black by default
# For this reason, we set a color. Otherwise, let the normal
# plotting tools select automatically.
data['color'] = 'C{:d}'.format(idcurve)
if curves[key]['data'].linewidth is not None:
data['linewidth'] = curves[key]['data'].linewidth
if not stick and curves[key]['data'].linestyle is not None:
data['linestyle'] = curves[key]['data'].linestyle
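            # Add the curve to every subplot in its assigned row/column block.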
irow, icol = curves[key]['subplot']
for row in range(irow[0], min(irow[1]+1, nrows)):
for col in range(icol[0], min(icol[1]+1, ncols)):
y0lines[row, col] = y0lines[row, col] or add_y0
if nrows > 1 and ncols > 1:
sub = subp[row, col]
elif nrows > 1:
sub = subp[row]
elif ncols > 1:
sub = subp[col]
else:
sub = subp
if stick:
zeros = np.zeros(len(yaxis))
sub.vlines(xaxis, zeros, yaxis, **data)
else:
sub.plot(xaxis, yaxis, **data)
# Now set the plot grid.
for row in range(nrows):
for col in range(ncols):
if nrows > 1 and ncols > 1:
sub = subp[row, col]
elif nrows > 1:
sub = subp[row]
elif ncols > 1:
sub = subp[col]
else:
sub = subp
sub.legend()
if y0lines[row, col]:
sub.axhline(0, c='.5', zorder=-10.0)
spcdata[row][col].set_plot(sub)
if figdata['title'] is not None:
fig.suptitle(figdata['title'], fontweight='bold')
if figdata['fname'] is not None:
plt.savefig(figdata['fname'], bbox_inches='tight')
if figdata['show']:
plt.show()
if __name__ == '__main__':
main()