"""
Terminal Escape Sequences for input and display
"""
from __future__ import division, print_function
import re
try:
from urwid import str_util
except ImportError:
from urwid import old_str_util as str_util
from urwid.compat import bytes, bytes3
# NOTE: because of circular imports (urwid.util -> urwid.escape -> urwid.util)
# from urwid.util import is_mouse_event -- will not work here
import urwid.util
within_double_byte = str_util.within_double_byte
SO = "\x0e"
SI = "\x0f"
IBMPC_ON = "\x1b[11m"
IBMPC_OFF = "\x1b[10m"
DEC_TAG = "0"
DEC_SPECIAL_CHARS = u'▮◆▒␉␌␍␊°±␋┘┐┌└┼⎺⎻─⎼⎽├┤┴┬│≤≥π≠£·'
ALT_DEC_SPECIAL_CHARS = u"_`abcdefghijklmnopqrstuvwxyz{|}~"
DEC_SPECIAL_CHARMAP = {}
assert len(DEC_SPECIAL_CHARS) == len(ALT_DEC_SPECIAL_CHARS), repr((DEC_SPECIAL_CHARS, ALT_DEC_SPECIAL_CHARS))
for c, alt in zip(DEC_SPECIAL_CHARS, ALT_DEC_SPECIAL_CHARS):
DEC_SPECIAL_CHARMAP[ord(c)] = SO + alt + SI
SAFE_ASCII_DEC_SPECIAL_RE = re.compile(u"^[ -~%s]*$" % DEC_SPECIAL_CHARS)
DEC_SPECIAL_RE = re.compile(u"[%s]" % DEC_SPECIAL_CHARS)
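# Illustration (not part of the original module): the charmap feeds
# unicode.translate(), so e.g. u"\u2500".translate(DEC_SPECIAL_CHARMAP)
# yields the mapped DEC graphics letter ('q' in the standard DEC set)
# wrapped in the SO/SI shift codes.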
###################
## Input sequences
###################
class MoreInputRequired(Exception):
pass
def escape_modifier( digit ):
mode = ord(digit) - ord("1")
return "shift "*(mode&1) + "meta "*((mode&2)//2) + "ctrl "*((mode&4)//4)
input_sequences = [
('[A','up'),('[B','down'),('[C','right'),('[D','left'),
('[E','5'),('[F','end'),('[G','5'),('[H','home'),
('[1~','home'),('[2~','insert'),('[3~','delete'),('[4~','end'),
('[5~','page up'),('[6~','page down'),
('[7~','home'),('[8~','end'),
('[[A','f1'),('[[B','f2'),('[[C','f3'),('[[D','f4'),('[[E','f5'),
('[11~','f1'),('[12~','f2'),('[13~','f3'),('[14~','f4'),
('[15~','f5'),('[17~','f6'),('[18~','f7'),('[19~','f8'),
('[20~','f9'),('[21~','f10'),('[23~','f11'),('[24~','f12'),
('[25~','f13'),('[26~','f14'),('[28~','f15'),('[29~','f16'),
('[31~','f17'),('[32~','f18'),('[33~','f19'),('[34~','f20'),
('OA','up'),('OB','down'),('OC','right'),('OD','left'),
('OH','home'),('OF','end'),
('OP','f1'),('OQ','f2'),('OR','f3'),('OS','f4'),
('Oo','/'),('Oj','*'),('Om','-'),('Ok','+'),
('[Z','shift tab'),
('On', '.'),
('[200~', 'begin paste'), ('[201~', 'end paste'),
] + [
(prefix + letter, modifier + key)
for prefix, modifier in zip('O[', ('meta ', 'shift '))
for letter, key in zip('abcd', ('up', 'down', 'right', 'left'))
] + [
("[" + digit + symbol, modifier + key)
for modifier, symbol in zip(('shift ', 'meta '), '$^')
for digit, key in zip('235678',
('insert', 'delete', 'page up', 'page down', 'home', 'end'))
] + [
('O' + chr(ord('p')+n), str(n)) for n in range(10)
] + [
# modified cursor keys + home, end, 5 -- [#X and [1;#X forms
(prefix+digit+letter, escape_modifier(digit) + key)
for prefix in ("[", "[1;")
for digit in "12345678"
for letter,key in zip("ABCDEFGH",
('up','down','right','left','5','end','5','home'))
] + [
# modified F1-F4 keys -- O#X form
("O"+digit+letter, escape_modifier(digit) + key)
for digit in "12345678"
for letter,key in zip("PQRS",('f1','f2','f3','f4'))
] + [
# modified F1-F13 keys -- [XX;#~ form
("["+str(num)+";"+digit+"~", escape_modifier(digit) + key)
for digit in "12345678"
for num,key in zip(
(3,5,6,11,12,13,14,15,17,18,19,20,21,23,24,25,26,28,29,31,32,33,34),
('delete', 'page up', 'page down',
'f1','f2','f3','f4','f5','f6','f7','f8','f9','f10','f11',
'f12','f13','f14','f15','f16','f17','f18','f19','f20'))
] + [
# mouse reporting (special handling done in KeyqueueTrie)
('[M', 'mouse'),
# report status response
('[0n', 'status ok')
]
class KeyqueueTrie(object):
def __init__( self, sequences ):
self.data = {}
for s, result in sequences:
assert type(result) != dict
self.add(self.data, s, result)
def add(self, root, s, result):
assert type(root) == dict, "trie conflict detected"
assert len(s) > 0, "trie conflict detected"
if ord(s[0]) in root:
return self.add(root[ord(s[0])], s[1:], result)
if len(s)>1:
d = {}
root[ord(s[0])] = d
return self.add(d, s[1:], result)
root[ord(s)] = result
def get(self, keys, more_available):
result = self.get_recurse(self.data, keys, more_available)
if not result:
result = self.read_cursor_position(keys, more_available)
return result
def get_recurse(self, root, keys, more_available):
if type(root) != dict:
if root == "mouse":
return self.read_mouse_info(keys,
more_available)
return (root, keys)
if not keys:
# get more keys
if more_available:
raise MoreInputRequired()
return None
if keys[0] not in root:
return None
return self.get_recurse(root[keys[0]], keys[1:], more_available)
def read_mouse_info(self, keys, more_available):
if len(keys) < 3:
if more_available:
raise MoreInputRequired()
return None
b = keys[0] - 32
x, y = (keys[1] - 33)%256, (keys[2] - 33)%256 # supports 0-255
prefix = ""
if b & 4: prefix = prefix + "shift "
if b & 8: prefix = prefix + "meta "
if b & 16: prefix = prefix + "ctrl "
if (b & MOUSE_MULTIPLE_CLICK_MASK)>>9 == 1: prefix = prefix + "double "
if (b & MOUSE_MULTIPLE_CLICK_MASK)>>9 == 2: prefix = prefix + "triple "
# 0->1, 1->2, 2->3, 64->4, 65->5
        button = ((b&64)//64*3) + (b & 3) + 1
if b & 3 == 3:
action = "release"
button = 0
elif b & MOUSE_RELEASE_FLAG:
action = "release"
elif b & MOUSE_DRAG_FLAG:
action = "drag"
elif b & MOUSE_MULTIPLE_CLICK_MASK:
action = "click"
else:
action = "press"
return ( (prefix + "mouse " + action, button, x, y), keys[3:] )
def read_cursor_position(self, keys, more_available):
"""
Interpret cursor position information being sent by the
user's terminal. Returned as ('cursor position', x, y)
where (x, y) == (0, 0) is the top left of the screen.
"""
if not keys:
if more_available:
raise MoreInputRequired()
return None
if keys[0] != ord('['):
return None
# read y value
y = 0
i = 1
for k in keys[i:]:
i += 1
if k == ord(';'):
if not y:
return None
break
if k < ord('0') or k > ord('9'):
return None
if not y and k == ord('0'):
return None
y = y * 10 + k - ord('0')
if not keys[i:]:
if more_available:
raise MoreInputRequired()
return None
# read x value
x = 0
for k in keys[i:]:
i += 1
if k == ord('R'):
if not x:
return None
return (("cursor position", x-1, y-1), keys[i:])
if k < ord('0') or k > ord('9'):
return None
if not x and k == ord('0'):
return None
x = x * 10 + k - ord('0')
if not keys[i:]:
if more_available:
raise MoreInputRequired()
return None
# This is added to button value to signal mouse release by curses_display
# and raw_display when we know which button was released. NON-STANDARD
MOUSE_RELEASE_FLAG = 2048
# This 2-bit mask is used to check if the mouse release from curses or gpm
# is a double or triple release. 00 means single click, 01 double,
# 10 triple. NON-STANDARD
MOUSE_MULTIPLE_CLICK_MASK = 1536
# This is added to button value at mouse release to differentiate between
# single, double and triple press. Double release adds this times one,
# triple release adds this times two. NON-STANDARD
MOUSE_MULTIPLE_CLICK_FLAG = 512
# xterm adds this to the button value to signal a mouse drag event
MOUSE_DRAG_FLAG = 32
#################################################
# Build the input trie from input_sequences list
input_trie = KeyqueueTrie(input_sequences)
#################################################
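# Illustrative only: the byte values for "[A" (the part after ESC) resolve to
# the 'up' key with no codes left over:
#     input_trie.get([ord(c) for c in "[A"], False)  ==  ('up', [])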
_keyconv = {
-1:None,
8:'backspace',
9:'tab',
10:'enter',
13:'enter',
127:'backspace',
# curses-only keycodes follow.. (XXX: are these used anymore?)
258:'down',
259:'up',
260:'left',
261:'right',
262:'home',
263:'backspace',
265:'f1', 266:'f2', 267:'f3', 268:'f4',
269:'f5', 270:'f6', 271:'f7', 272:'f8',
273:'f9', 274:'f10', 275:'f11', 276:'f12',
277:'shift f1', 278:'shift f2', 279:'shift f3', 280:'shift f4',
281:'shift f5', 282:'shift f6', 283:'shift f7', 284:'shift f8',
285:'shift f9', 286:'shift f10', 287:'shift f11', 288:'shift f12',
330:'delete',
331:'insert',
338:'page down',
339:'page up',
343:'enter', # on numpad
350:'5', # on numpad
360:'end',
}
def process_keyqueue(codes, more_available):
"""
codes -- list of key codes
more_available -- if True then raise MoreInputRequired when in the
middle of a character sequence (escape/utf8/wide) and caller
will attempt to send more key codes on the next call.
returns (list of input, list of remaining key codes).
"""
code = codes[0]
if code >= 32 and code <= 126:
key = chr(code)
return [key], codes[1:]
if code in _keyconv:
return [_keyconv[code]], codes[1:]
if code >0 and code <27:
return ["ctrl %s" % chr(ord('a')+code-1)], codes[1:]
if code >27 and code <32:
return ["ctrl %s" % chr(ord('A')+code-1)], codes[1:]
em = str_util.get_byte_encoding()
if (em == 'wide' and code < 256 and
within_double_byte(chr(code),0,0)):
if not codes[1:]:
if more_available:
raise MoreInputRequired()
if codes[1:] and codes[1] < 256:
db = chr(code)+chr(codes[1])
if within_double_byte(db, 0, 1):
return [db], codes[2:]
if em == 'utf8' and code>127 and code<256:
if code & 0xe0 == 0xc0: # 2-byte form
need_more = 1
elif code & 0xf0 == 0xe0: # 3-byte form
need_more = 2
elif code & 0xf8 == 0xf0: # 4-byte form
need_more = 3
else:
return ["<%d>"%code], codes[1:]
for i in range(need_more):
if len(codes)-1 <= i:
if more_available:
raise MoreInputRequired()
else:
return ["<%d>"%code], codes[1:]
k = codes[i+1]
if k>256 or k&0xc0 != 0x80:
return ["<%d>"%code], codes[1:]
s = bytes3(codes[:need_more+1])
assert isinstance(s, bytes)
try:
return [s.decode("utf-8")], codes[need_more+1:]
except UnicodeDecodeError:
return ["<%d>"%code], codes[1:]
if code >127 and code <256:
key = chr(code)
return [key], codes[1:]
if code != 27:
return ["<%d>"%code], codes[1:]
result = input_trie.get(codes[1:], more_available)
if result is not None:
result, remaining_codes = result
return [result], remaining_codes
if codes[1:]:
# Meta keys -- ESC+Key form
run, remaining_codes = process_keyqueue(codes[1:],
more_available)
if urwid.util.is_mouse_event(run[0]):
return ['esc'] + run, remaining_codes
if run[0] == "esc" or run[0].find("meta ") >= 0:
return ['esc']+run, remaining_codes
return ['meta '+run[0]]+run[1:], remaining_codes
return ['esc'], codes[1:]
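# Illustrative only (assumed inputs): ESC "[A" decodes to a single 'up'
# keypress, and a printable byte decodes to itself:
#     process_keyqueue([27, 91, 65], False)  ==  (['up'], [])
#     process_keyqueue([104, 105], False)    ==  (['h'], [105])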
####################
## Output sequences
####################
ESC = "\x1b"
CURSOR_HOME = ESC+"[H"
CURSOR_HOME_COL = "\r"
APP_KEYPAD_MODE = ESC+"="
NUM_KEYPAD_MODE = ESC+">"
SWITCH_TO_ALTERNATE_BUFFER = ESC+"7"+ESC+"[?47h"
RESTORE_NORMAL_BUFFER = ESC+"[?47l"+ESC+"8"
#RESET_SCROLL_REGION = ESC+"[;r"
#RESET = ESC+"c"
REPORT_STATUS = ESC + "[5n"
REPORT_CURSOR_POSITION = ESC+"[6n"
INSERT_ON = ESC + "[4h"
INSERT_OFF = ESC + "[4l"
def set_cursor_position( x, y ):
assert type(x) == int
assert type(y) == int
return ESC+"[%d;%dH" %(y+1, x+1)
def move_cursor_right(x):
if x < 1: return ""
return ESC+"[%dC" % x
def move_cursor_up(x):
if x < 1: return ""
return ESC+"[%dA" % x
def move_cursor_down(x):
if x < 1: return ""
return ESC+"[%dB" % x
HIDE_CURSOR = ESC+"[?25l"
SHOW_CURSOR = ESC+"[?25h"
MOUSE_TRACKING_ON = ESC+"[?1000h"+ESC+"[?1002h"
MOUSE_TRACKING_OFF = ESC+"[?1002l"+ESC+"[?1000l"
DESIGNATE_G1_SPECIAL = ESC+")0"
ERASE_IN_LINE_RIGHT = ESC+"[K"
|
from __future__ import print_function
import os
import platform
import sys
from glob import glob
from optparse import OptionParser
lib_suffix = 'so'
if (sys.platform == 'darwin'):
lib_suffix = 'dylib'
link_static = 'ROS_BOOST_LINK' in os.environ and os.environ['ROS_BOOST_LINK'] == 'static'
if (link_static):
lib_suffix = 'a'
no_L_or_I = 'ROS_BOOST_NO_L_OR_I' in os.environ
boost_version = None
if ('ROS_BOOST_VERSION' in os.environ and len(os.environ['ROS_BOOST_VERSION']) > 0):
ver = os.environ['ROS_BOOST_VERSION']
ver = ver.split('.')
boost_version = [int(v) for v in ver]
if (len(boost_version) == 2):
boost_version.append(0)
def print_usage_and_exit():
print('Usage: rosboost-cfg --lflags [thread,regex,graph,...]')
print(' rosboost-cfg --cflags')
print(' rosboost-cfg --libs [thread,regex,graph,...]')
print(' rosboost-cfg --include_dirs')
print(' rosboost-cfg --lib_dirs')
print(' rosboost-cfg --root')
sys.exit(1)
class BoostError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Version(object):
def __init__(self, major, minor, patch, root, include_dir, lib_dir, is_default_search_location):
self.major = major
self.minor = minor
self.patch = patch
self.root = root
self.include_dir = include_dir
self.lib_dir = lib_dir
self.is_default_search_location = is_default_search_location
self.is_system_install = os.path.split(self.include_dir)[0] == self.root
    def _astuple(self):
        return (self.major, self.minor, self.patch)
    def __lt__(self, other):
        # Python 3 sorts via rich comparison
        return self._astuple() < other._astuple()
    def __cmp__(self, other):
        # Python 2 fallback; never consulted on Python 3
        return cmp(self._astuple(), other._astuple())
def __repr__(self):
return repr((self.major, self.minor, self.patch, self.root, self.include_dir, self.is_default_search_location, self.is_system_install))
def find_lib_dir(root_dir, multiarch=''):
# prefer lib64 unless explicitly specified in the environment
if ('ROS_BOOST_LIB_DIR_NAME' in os.environ):
possible_dirs = [os.path.join(root_dir, os.environ['ROS_BOOST_LIB_DIR_NAME'])]
else:
possible_dirs = [os.path.join(root_dir, 'lib64'), os.path.join(root_dir, 'lib')]
if multiarch:
possible_dirs = [os.path.join(root_dir, 'lib/%s' % multiarch)] + possible_dirs
for p in possible_dirs:
glob_files = glob('%s*' % (os.path.join(p, 'libboost*')))
if (len(glob_files) > 0):
return p
return None
def extract_versions(dir, is_default_search_location, multiarch=''):
version_paths = [os.path.join(dir, 'version.hpp'),
os.path.join(dir, 'boost', 'version.hpp')]
glob_dirs = glob('%s*' % (os.path.join(dir, 'boost-')))
    for gdir in glob_dirs:
        version_paths.append(os.path.join(gdir, 'boost', 'version.hpp'))
versions = []
for p in version_paths:
ver_string = ''
if (os.path.isfile(p)):
fh = open(p, 'r')
lines = fh.readlines()
fh.close()
for line in lines:
if line.find('#define BOOST_VERSION ') > -1:
def_string = line.split()
ver_string = def_string[2]
ver_int = int(ver_string)
patch = ver_int % 100
                    minor = ver_int // 100 % 1000
                    major = ver_int // 100000
include_dir = os.path.split(os.path.split(p)[0])[0]
root_dir = os.path.split(dir)[0]
lib_dir = find_lib_dir(root_dir, multiarch)
versions.append(Version(major, minor, patch, root_dir, include_dir, lib_dir, is_default_search_location))
return versions
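# BOOST_VERSION encodes the version as MMmmmpp, e.g. 105800 -> 1.58.0
# (105800 // 100000 == 1, 105800 // 100 % 1000 == 58, 105800 % 100 == 0).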
def find_versions(search_paths, multiarch=''):
vers = []
for path, system in search_paths:
path = os.path.join(path, 'include')
pvers = extract_versions(path, system, multiarch)
        vers.extend(pvers)
if (len(vers) == 0):
return None
if (boost_version is not None):
for v in vers:
if (v.major == boost_version[0] and v.minor == boost_version[1] and v.patch == boost_version[2]):
return [v]
raise BoostError('Could not find boost version %s required by ROS_BOOST_VERSION environment variable' % (boost_version))
vers.sort()
return vers
def find_boost(search_paths, multiarch=''):
result = find_versions(search_paths, multiarch)
if result is None:
return None
if len(result) > 1:
        sys.stderr.write("WARN: found multiple boost versions '%s', using latest\n" % result)
return result[-1]
def search_paths(sysroot):
_search_paths = [(sysroot+'/usr', True),
(sysroot+'/usr/local', True),
(None if 'INCLUDE_DIRS' not in os.environ else os.environ['INCLUDE_DIRS'], True),
(None if 'CPATH' not in os.environ else os.environ['CPATH'], True),
(None if 'C_INCLUDE_PATH' not in os.environ else os.environ['C_INCLUDE_PATH'], True),
(None if 'CPLUS_INCLUDE_PATH' not in os.environ else os.environ['CPLUS_INCLUDE_PATH'], True),
(None if 'ROS_BOOST_ROOT' not in os.environ else os.environ['ROS_BOOST_ROOT'], False)]
search_paths = []
    for (path_str, system) in _search_paths:
        if (path_str is not None):
            dirs = path_str.split(':')
for dir in dirs:
if (len(dir) > 0):
if (dir.endswith('/include')):
dir = dir[:-len('/include')]
search_paths.append((dir, system))
return search_paths
def lib_dir(ver):
return ver.lib_dir
def find_lib(ver, name, full_lib=link_static):
global lib_suffix
global link_static
dynamic_search_paths = []
static_search_paths = []
if (ver.is_system_install):
dynamic_search_paths = ['libboost_%s-mt.%s' % (name, lib_suffix),
'libboost_%s.%s' % (name, lib_suffix)]
static_search_paths = ['libboost_%s-mt.a' % (name),
'libboost_%s.a' % (name)]
else:
dynamic_search_paths = ['libboost_%s*%s_%s*.%s' % (name, ver.major, ver.minor, lib_suffix),
'libboost_%s-mt*.%s' % (name, lib_suffix),
'libboost_%s*.%s' % (name, lib_suffix)]
static_search_paths = ['libboost_%s*%s_%s*.a' % (name, ver.major, ver.minor),
'libboost_%s-mt*.a' % (name),
'libboost_%s*.a' % (name)]
# Boost.Python needs some special handling on some systems (Karmic), since it may have per-python-version libs
if (name == 'python'):
python_ver = platform.python_version().split('.')
dynamic_search_paths = ['libboost_%s-mt-py%s%s.%s' % (name, python_ver[0], python_ver[1], lib_suffix),
'libboost_%s-py%s%s.%s' % (name, python_ver[0], python_ver[1], lib_suffix)] + dynamic_search_paths
static_search_paths = ['libboost_%s-mt-py%s%s.a' % (name, python_ver[0], python_ver[1]),
'libboost_%s-py%s%s.a' % (name, python_ver[0], python_ver[1])] + static_search_paths
search_paths = static_search_paths if link_static else dynamic_search_paths
dir = lib_dir(ver)
if dir is None:
raise BoostError('Could not locate library [%s], version %s' % (name, ver))
for p in search_paths:
globstr = os.path.join(dir, p)
libs = glob(globstr)
if (len(libs) > 0):
if (full_lib):
return libs[0]
else:
return os.path.basename(libs[0])
raise BoostError('Could not locate library [%s], version %s in lib directory [%s]' % (name, ver, dir))
def include_dirs(ver, prefix=''):
if ver.is_system_install or no_L_or_I:
return ''
return ' %s%s' % (prefix, ver.include_dir)
def cflags(ver):
return include_dirs(ver, '-I')
def lib_dir_flags(ver):
if not ver.is_default_search_location:
dir = lib_dir(ver)
return ' -L%s -Wl,-rpath,%s' % (dir, dir)
return ''
def lib_flags(ver, name):
lib = find_lib(ver, name)
if (link_static):
return ' %s' % (lib)
else:
# Cut off "lib" and extension (.so/.a/.dylib/etc.)
return ' -l%s' % (os.path.splitext(lib)[0][len('lib'):])
def lflags(ver, libs):
s = lib_dir_flags(ver) + ' '
for lib in libs:
s += lib_flags(ver, lib) + ' '
return s
def libs(ver, libs):
s = ''
for lib in libs:
s += find_lib(ver, lib, True) + ' '
return s
def lib_dirs(ver):
if (ver.is_default_search_location or no_L_or_I):
return ''
return lib_dir(ver)
OPTIONS = ['libs', 'include_dirs', 'lib_dirs', 'cflags', 'lflags', 'root', 'print_versions', 'version']
def check_one_option(options, key):
for k in dir(options):
if (k in OPTIONS):
v = getattr(options, k)
if (k != key and v):
raise BoostError('Only one option (excepting sysroot) is allowed at a time')
def main():
if (len(sys.argv) < 2):
print_usage_and_exit()
parser = OptionParser()
parser.add_option('-l', '--libs', dest='libs', type='string', help='')
parser.add_option('-i', '--include_dirs', dest='include_dirs', action='store_true', default=False, help='')
parser.add_option('-d', '--lib_dirs', dest='lib_dirs', action='store_true', help='')
parser.add_option('-c', '--cflags', dest='cflags', action='store_true', default=False, help='')
parser.add_option('-f', '--lflags', dest='lflags', type='string', help='')
parser.add_option('-r', '--root', dest='root', action='store_true', default=False, help='')
parser.add_option('-p', '--print_versions', dest='print_versions', action='store_true', default=False, help='')
parser.add_option('-v', '--version', dest='version', action='store_true', default=False, help='')
parser.add_option('-s', '--sysroot', dest='sysroot', type='string', default='', help='Location of the system root (usually toolchain root).')
parser.add_option('-m', '--multiarch', dest='multiarch', type='string', default='', help="Name of multiarch to search below 'lib' folder for libraries.")
(options, args) = parser.parse_args()
if (options.print_versions):
check_one_option(options, 'print_versions')
for ver in find_versions(search_paths(options.sysroot), options.multiarch):
print('%s.%s.%s root=%s include_dir=%s' % (ver.major, ver.minor, ver.patch, ver.root, ver.include_dir))
return
ver = find_boost(search_paths(options.sysroot), options.multiarch)
if ver is None:
raise BoostError('Cannot find boost in any of %s' % search_paths(options.sysroot))
sys.exit(0)
if options.version:
check_one_option(options, 'version')
print('%s.%s.%s root=%s include_dir=%s' % (ver.major, ver.minor, ver.patch, ver.root, ver.include_dir))
return
if ver.major < 1 or (ver.major == 1 and ver.minor < 37):
raise BoostError('Boost version %s.%s.%s does not meet the minimum requirements of boost 1.37.0' % (ver.major, ver.minor, ver.patch))
output = ''
if (options.root):
check_one_option(options, 'root')
output = ver.root
elif (options.libs):
check_one_option(options, 'libs')
output = libs(ver, options.libs.split(','))
elif (options.include_dirs):
check_one_option(options, 'include_dirs')
output = include_dirs(ver)
elif (options.lib_dirs):
check_one_option(options, 'lib_dirs')
output = lib_dirs(ver)
elif (options.cflags):
check_one_option(options, 'cflags')
output = cflags(ver)
elif (options.lflags):
check_one_option(options, 'lflags')
output = lflags(ver, options.lflags.split(','))
else:
print_usage_and_exit()
print(output.strip())
if __name__ == '__main__':
main()
|
import copy
import urllib2
from urlparse import urlparse
import diamond.collector
from diamond.collector import str_to_bool
# json is guarded so collect() can log a clear error if it is unavailable
try:
    import json
except ImportError:
    json = None
class MesosCollector(diamond.collector.Collector):
def __init__(self, config=None, handlers=[], name=None, configfile=None):
self.known_frameworks = {}
self.executors_prev_read = {}
super(MesosCollector, self).__init__(config, handlers, name, configfile)
def process_config(self):
super(MesosCollector, self).process_config()
self.master = str_to_bool(self.config['master'])
def get_default_config_help(self):
config_help = super(MesosCollector,
self).get_default_config_help()
config_help.update({
'host': 'Hostname, using http scheme by default. For https pass '
'e.g. "https://localhost"',
'port': 'Port (default is 5050; set to 5051 for mesos-agent)',
'master': 'True if host is master (default is True).'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MesosCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 5050,
'path': 'mesos',
'master': True
})
return config
def collect(self):
if json is None:
self.log.error('Unable to import json')
return
self._collect_metrics_snapshot()
if not self.master:
self._collect_slave_state()
self._collect_slave_statistics()
def _collect_metrics_snapshot(self):
result = self._get('metrics/snapshot')
if not result:
return
for key in result:
value = result[key]
self.publish(key.replace('/', '.'),
value, precision=self._precision(value))
def _collect_slave_state(self):
# slave(1) is generated here
# https://github.com/apache/mesos/blob/1.1.0/src/slave/slave.cpp#L153
# https://github.com/apache/mesos/blob/1.1.0/3rdparty/libprocess/src/process.cpp#L165
result = self._get('slave(1)/state')
if not result:
return
for framework in result['frameworks']:
self.known_frameworks[framework['id']] = framework['name']
for key in ['failed_tasks', 'finished_tasks', 'staged_tasks',
'started_tasks', 'lost_tasks']:
value = result.get(key)
if value is not None:
self.publish(key, value, precision=self._precision(value))
def _add_cpu_usage(self, cur_read):
"""Compute cpu usage based on cpu time spent compared to elapsed time
"""
for executor_id, cur_data in cur_read.items():
if executor_id in self.executors_prev_read:
prev_data = self.executors_prev_read[executor_id]
prev_stats = prev_data['statistics']
cur_stats = cur_data['statistics']
# from sum of current cpus time subtract previous sum
cpus_time_diff_s = cur_stats['cpus_user_time_secs']
cpus_time_diff_s += cur_stats['cpus_system_time_secs']
cpus_time_diff_s -= prev_stats['cpus_user_time_secs']
cpus_time_diff_s -= prev_stats['cpus_system_time_secs']
ts_diff = cur_stats['timestamp'] - prev_stats['timestamp']
if ts_diff != 0:
cur_stats['cpus_utilisation'] = cpus_time_diff_s / ts_diff
self.executors_prev_read[executor_id] = cur_read[executor_id]
def _add_cpu_percent(self, cur_read):
"""Compute cpu percent basing on the provided utilisation
"""
for executor_id, cur_data in cur_read.items():
stats = cur_data['statistics']
cpus_limit = stats.get('cpus_limit')
cpus_utilisation = stats.get('cpus_utilisation')
            if cpus_utilisation is not None and cpus_limit:
stats['cpus_percent'] = cpus_utilisation / cpus_limit
def _add_mem_percent(self, cur_read):
"""Compute memory percent utilisation based on the
mem_rss_bytes and mem_limit_bytes
"""
for executor_id, cur_data in cur_read.items():
stats = cur_data['statistics']
mem_rss_bytes = stats.get('mem_rss_bytes')
mem_limit_bytes = stats.get('mem_limit_bytes')
            if mem_rss_bytes is not None and mem_limit_bytes:
stats['mem_percent'] = mem_rss_bytes / float(mem_limit_bytes)
def _group_and_publish_tasks_statistics(self, result):
"""This function group statistics of same tasks by adding them.
It also add 'instances_count' statistic to get information about
how many instances is running on the server
Args:
result: result of mesos query. List of dictionaries with
'executor_id', 'framework_id' as a strings and 'statistics'
as dictionary of labeled numbers
"""
for i in result:
executor_id = i['executor_id']
i['executor_id'] = executor_id[:executor_id.rfind('.')]
i['statistics']['instances_count'] = 1
r = {}
for i in result:
executor_id = i['executor_id']
r[executor_id] = r.get(executor_id, {})
r[executor_id]['framework_id'] = i['framework_id']
r[executor_id]['statistics'] = r[executor_id].get('statistics', {})
r[executor_id]['statistics'] = self._sum_statistics(
i['statistics'], r[executor_id]['statistics'])
self._add_cpu_usage(r)
self._add_cpu_percent(r)
self._add_mem_percent(r)
self._publish(r)
def _sum_statistics(self, x, y):
stats = set(x) | set(y)
summed_stats = dict([(key, x.get(key, 0) + y.get(key, 0))
for key in stats])
return summed_stats
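    # e.g. _sum_statistics({'a': 1}, {'a': 2, 'b': 3}) == {'a': 3, 'b': 3}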
def _collect_slave_statistics(self):
result = self._get('monitor/statistics')
if not result:
return
result_copy = copy.deepcopy(result)
self._group_and_publish_tasks_statistics(result)
self._publish_tasks_statistics(result_copy)
def _get_url(self, path):
parsed = urlparse(self.config['host'])
scheme = parsed.scheme or 'http'
host = parsed.hostname or self.config['host']
return "%s://%s:%s/%s" % (
scheme, host, self.config['port'], path)
def _get(self, path):
"""
Execute a Mesos API call.
"""
url = self._get_url(path)
try:
response = urllib2.urlopen(url)
except Exception as err:
self.log.error("%s: %s", url, err)
return False
try:
doc = json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from Mesos as a"
" json object")
return False
return doc
def _precision(self, value):
"""
Return the precision of the number
"""
value = str(value)
decimal = value.rfind('.')
if decimal == -1:
return 0
return len(value) - decimal - 1
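    # e.g. _precision(8) == 0 and _precision(0.25) == 2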
def _sanitize_metric_name(self, name):
return name.replace('.', '_').replace('/', '_')
def _publish_tasks_statistics(self, result):
for executor in result:
parts = executor['executor_id'].rsplit('.', 1)
executor_id = '%s.%s' % (self._sanitize_metric_name(parts[0]),
parts[1])
metrics = {executor_id: {}}
metrics[executor_id]['framework_id'] = executor['framework_id']
metrics[executor_id]['statistics'] = executor['statistics']
self._add_cpu_usage(metrics)
self._add_cpu_percent(metrics)
self._add_mem_percent(metrics)
self._publish(metrics, False)
def _publish(self, result, sanitize_executor_id=True):
for executor_id, executor in result.iteritems():
executor_statistics = executor['statistics']
for key in executor_statistics:
value = executor_statistics[key]
framework_id = self.known_frameworks[executor['framework_id']]
framework = self._sanitize_metric_name(framework_id)
if sanitize_executor_id:
executor_name = self._sanitize_metric_name(executor_id)
else:
executor_name = executor_id
metric = 'frameworks.%s.executors.%s.%s' % (
framework, executor_name, key)
self.publish(metric, value, precision=self._precision(value))
|
import sys
try:
import iptc
except TypeError:
print(
"Failed to import iptc. This happens sometimes during a python upgrade or during bootstrapping"
)
sys.exit(0)
from paasta_tools import iptables
from paasta_tools.utils import get_docker_client
def get_container_from_dport(dport, docker_client):
    for container in docker_client.containers():
        try:
            ports = container["Ports"]
            for port in ports:
                if "PublicPort" in port:
                    if port["PublicPort"] == int(dport):
                        return container
        except KeyError:
            # container had no usable port data; log it and keep scanning
            print(container)
def target_rule_to_dport(rule):
try:
# (('tcp', (('dport', ('31493',)),)),)
return rule.matches[0][1][0][1][0]
except IndexError:
return None
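# e.g. for the rule shape commented above, this returns '31493'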
def kill_containers_with_duplicate_iptables_rules(docker_client):
chain_name = "DOCKER"
table = iptc.Table(iptc.Table.NAT)
chain = iptc.Chain(table, chain_name)
targets_seen = {}
raw_rules_seen = {}
for iptables_rule in chain.rules:
rule = iptables.Rule.from_iptc(iptables_rule)
target = rule.target_parameters
if target not in targets_seen:
targets_seen[target] = rule
raw_rules_seen[target] = iptables_rule
else:
dport = target_rule_to_dport(rule)
if dport is None:
continue
print(
"This is the second time we've seen a rule with the same target_parameters!"
)
print(rule)
container1 = get_container_from_dport(dport, docker_client)
print("The other rule with that target is:")
print(targets_seen[target])
dport2 = target_rule_to_dport(targets_seen[target])
container2 = get_container_from_dport(dport2, docker_client)
if container1 is None:
print(
"We have a duplicate iptables rule going to a container1, but no container1!"
)
print(rule)
print("Deleting this rule")
chain.delete_rule(iptables_rule)
elif container2 is None:
print(
"We have a iptables rule going to a container2, but no container2!"
)
print(targets_seen[target])
print("Deleting this rule")
chain.delete_rule(raw_rules_seen[target])
elif container1["Id"] == container2["Id"]:
print("The same container is getting traffic for both ports!")
print(container1)
print("Killing the container")
docker_client.kill(container1["Id"])
print("Deleting both iptables rules")
chain.delete_rule(iptables_rule)
chain.delete_rule(raw_rules_seen[target])
elif container1["Id"] != container2["Id"]:
print(
"These are two different containers, which means we have duplicate ips:"
)
print(container1)
print(container2)
print("Not sure which to kill, killing both")
docker_client.kill(container1["Id"])
docker_client.kill(container2["Id"])
print("Deleting the both iptables rules for good measure")
chain.delete_rule(iptables_rule)
chain.delete_rule(raw_rules_seen[target])
else:
print("Something unexpected went wrong. Exiting 1")
sys.exit(1)
def main():
docker_client = get_docker_client()
kill_containers_with_duplicate_iptables_rules(docker_client)
if __name__ == "__main__":
sys.exit(main())
|
import urllib.parse
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QUrl
class PastebinClient(QObject):
"""A client for Stikked pastebins using HTTPClient.
Attributes:
_client: The HTTPClient used.
Class attributes:
API_URL: The base API URL.
Signals:
success: Emitted when the paste succeeded.
arg: The URL of the paste, as string.
error: Emitted when the paste failed.
arg: The error message, as string.
"""
API_URL = 'https://crashes.qutebrowser.org/api/'
MISC_API_URL = 'https://paste.the-compiler.org/api/'
success = pyqtSignal(str)
error = pyqtSignal(str)
def __init__(self, client, parent=None, api_url=API_URL):
"""Constructor.
Args:
client: The HTTPClient to use. Will be reparented.
api_url: The Stikked pastebin endpoint to use.
"""
super().__init__(parent)
client.setParent(self)
client.error.connect(self.error)
client.success.connect(self.on_client_success)
self._client = client
self._api_url = api_url
def paste(self, name, title, text, parent=None, private=False):
"""Paste the text into a pastebin and return the URL.
Args:
name: The username to post as.
title: The post title.
text: The text to post.
parent: The parent paste to reply to.
private: Whether to paste privately.
"""
data = {
'text': text,
'title': title,
'name': name,
'apikey': 'ihatespam',
}
if parent is not None:
data['reply'] = parent
if private:
data['private'] = '1'
url = QUrl(urllib.parse.urljoin(self._api_url, 'create'))
self._client.post(url, data)
@pyqtSlot(str)
def on_client_success(self, data):
"""Process the data and finish when the client finished.
Args:
data: A string with the received data.
"""
if data.startswith('http://') or data.startswith('https://'):
self.success.emit(data)
else:
self.error.emit("Invalid data received in reply!")
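# Hypothetical wiring sketch (HTTPClient and its signals are assumed from
# qutebrowser's httpclient module, not defined here):
#     client = PastebinClient(HTTPClient(), api_url=PastebinClient.MISC_API_URL)
#     client.success.connect(lambda url: print("pasted to", url))
#     client.paste("fred", "crash report", "traceback ...", private=True)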
|
import ldap
from flask import current_app
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.common.utils import validate_conf, get_psuedo_random_string
class LdapPrincipal:
"""
Provides methods for authenticating against an LDAP server.
"""
def __init__(self, args):
self._ldap_validate_conf()
# setup ldap config
if not args["username"]:
raise Exception("missing ldap username")
if not args["password"]:
self.error_message = "missing ldap password"
raise Exception("missing ldap password")
self.ldap_principal = args["username"]
self.ldap_email_domain = current_app.config.get("LDAP_EMAIL_DOMAIN", None)
if "@" not in self.ldap_principal:
self.ldap_principal = "%s@%s" % (
self.ldap_principal,
self.ldap_email_domain,
)
self.ldap_username = args["username"]
if "@" in self.ldap_username:
self.ldap_username = args["username"].split("@")[0]
self.ldap_password = args["password"]
self.ldap_server = current_app.config.get("LDAP_BIND_URI", None)
self.ldap_base_dn = current_app.config.get("LDAP_BASE_DN", None)
self.ldap_use_tls = current_app.config.get("LDAP_USE_TLS", False)
self.ldap_cacert_file = current_app.config.get("LDAP_CACERT_FILE", None)
self.ldap_default_role = current_app.config.get("LEMUR_DEFAULT_ROLE", None)
self.ldap_required_group = current_app.config.get("LDAP_REQUIRED_GROUP", None)
self.ldap_groups_to_roles = current_app.config.get("LDAP_GROUPS_TO_ROLES", None)
self.ldap_is_active_directory = current_app.config.get(
"LDAP_IS_ACTIVE_DIRECTORY", False
)
self.ldap_attrs = ["memberOf"]
self.ldap_client = None
self.ldap_groups = None
def _update_user(self, roles):
"""
create or update a local user instance.
"""
# try to get user from local database
user = user_service.get_by_email(self.ldap_principal)
# create them a local account
if not user:
user = user_service.create(
self.ldap_username,
get_psuedo_random_string(),
self.ldap_principal,
True,
"", # thumbnailPhotoUrl
list(roles),
)
else:
# we add 'lemur' specific roles, so they do not get marked as removed
for ur in user.roles:
if not ur.third_party:
roles.add(ur)
# update any changes to the user
user_service.update(
user.id,
self.ldap_username,
self.ldap_principal,
user.active,
user.profile_picture,
list(roles),
)
return user
def _authorize(self):
"""
check groups and roles to confirm access.
return a list of roles if ok.
raise an exception on error.
"""
if not self.ldap_principal:
return None
if self.ldap_required_group:
# ensure the user has the required group in their group list
if self.ldap_required_group not in self.ldap_groups:
return None
roles = set()
if self.ldap_default_role:
role = role_service.get_by_name(self.ldap_default_role)
if role:
if not role.third_party:
role = role_service.set_third_party(role.id, third_party_status=True)
roles.add(role)
# update their 'roles'
role = role_service.get_by_name(self.ldap_principal)
if not role:
description = "auto generated role based on owner: {0}".format(
self.ldap_principal
)
role = role_service.create(
self.ldap_principal, description=description, third_party=True
)
if not role.third_party:
role = role_service.set_third_party(role.id, third_party_status=True)
roles.add(role)
if not self.ldap_groups_to_roles:
return roles
for ldap_group_name, role_name in self.ldap_groups_to_roles.items():
role = role_service.get_by_name(role_name)
if role:
if ldap_group_name in self.ldap_groups:
current_app.logger.debug(
"assigning role {0} to ldap user {1}".format(
self.ldap_principal, role
)
)
if not role.third_party:
role = role_service.set_third_party(
role.id, third_party_status=True
)
roles.add(role)
return roles
def authenticate(self):
"""
orchestrate the ldap login.
raise an exception on error.
"""
self._bind()
roles = self._authorize()
if not roles:
raise Exception("ldap authorization failed")
return self._update_user(roles)
def _bind(self):
"""
authenticate an ldap user.
list groups for a user.
raise an exception on error.
"""
if "@" not in self.ldap_principal:
self.ldap_principal = "%s@%s" % (
self.ldap_principal,
self.ldap_email_domain,
)
ldap_filter = "userPrincipalName=%s" % self.ldap_principal
# query ldap for auth
try:
# build a client
if not self.ldap_client:
self.ldap_client = ldap.initialize(self.ldap_server)
# perform a synchronous bind
self.ldap_client.set_option(ldap.OPT_REFERRALS, 0)
if self.ldap_use_tls:
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
self.ldap_client.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
self.ldap_client.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_DEMAND)
self.ldap_client.set_option(ldap.OPT_X_TLS_DEMAND, True)
self.ldap_client.set_option(ldap.OPT_DEBUG_LEVEL, 255)
if self.ldap_cacert_file:
self.ldap_client.set_option(
ldap.OPT_X_TLS_CACERTFILE, self.ldap_cacert_file
)
self.ldap_client.simple_bind_s(self.ldap_principal, self.ldap_password)
except ldap.INVALID_CREDENTIALS:
self.ldap_client.unbind()
raise Exception("The supplied ldap credentials are invalid")
except ldap.SERVER_DOWN:
raise Exception("ldap server unavailable")
except ldap.LDAPError as e:
raise Exception("ldap error: {0}".format(e))
if self.ldap_is_active_directory:
# Lookup user DN, needed to search for group membership
userdn = self.ldap_client.search_s(
self.ldap_base_dn,
ldap.SCOPE_SUBTREE,
ldap_filter,
["distinguishedName"],
)[0][1]["distinguishedName"][0]
userdn = userdn.decode("utf-8")
# Search all groups that have the userDN as a member
groupfilter = "(&(objectclass=group)(member:1.2.840.113556.1.4.1941:={0}))".format(
userdn
)
lgroups = self.ldap_client.search_s(
self.ldap_base_dn, ldap.SCOPE_SUBTREE, groupfilter, ["cn"]
)
# Create a list of group CN's from the result
self.ldap_groups = []
for group in lgroups:
(dn, values) = group
if type(values) == dict:
self.ldap_groups.append(values["cn"][0].decode("utf-8"))
else:
lgroups = self.ldap_client.search_s(
self.ldap_base_dn, ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs
)[0][1]["memberOf"]
# lgroups is a list of utf-8 encoded strings
# convert to a single string of groups to allow matching
self.ldap_groups = b"".join(lgroups).decode("ascii")
self.ldap_client.unbind()
def _ldap_validate_conf(self):
"""
Confirms required ldap config settings exist.
"""
required_vars = ["LDAP_BIND_URI", "LDAP_BASE_DN", "LDAP_EMAIL_DOMAIN"]
validate_conf(current_app, required_vars)
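# Minimal Flask config sketch for this module (placeholder values; the three
# settings below are the ones _ldap_validate_conf() requires):
#     LDAP_BIND_URI = "ldaps://ldap.example.com"
#     LDAP_BASE_DN = "dc=example,dc=com"
#     LDAP_EMAIL_DOMAIN = "example.com"
# Optional: LDAP_USE_TLS, LDAP_CACERT_FILE, LDAP_REQUIRED_GROUP,
# LDAP_GROUPS_TO_ROLES, LDAP_IS_ACTIVE_DIRECTORY (see __init__ above).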
|
from datetime import timedelta
import mbddns
import voluptuous as vol
from homeassistant.const import (
CONF_DOMAIN,
CONF_HOST,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
DOMAIN = "mythicbeastsdns"
DEFAULT_INTERVAL = timedelta(minutes=10)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize the Mythic Beasts component."""
domain = config[DOMAIN][CONF_DOMAIN]
password = config[DOMAIN][CONF_PASSWORD]
host = config[DOMAIN][CONF_HOST]
update_interval = config[DOMAIN][CONF_SCAN_INTERVAL]
session = async_get_clientsession(hass)
result = await mbddns.update(domain, password, host, session=session)
if not result:
return False
async def update_domain_interval(now):
"""Update the DNS entry."""
await mbddns.update(domain, password, host, session=session)
async_track_time_interval(hass, update_domain_interval, update_interval)
return True
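# Example configuration.yaml entry implied by CONFIG_SCHEMA above (sketch):
#     mythicbeastsdns:
#       domain: example.com
#       password: YOUR_PASSWORD
#       host: www
#       scan_interval: 00:10:00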
|
from __future__ import print_function
from logilab.common import modutils, Execute as spawn
from logilab.common.astutils import *
import os.path
MY_DICT = {}
def global_access(key, val):
"""function test"""
local = 1
MY_DICT[key] = val
for i in val:
if i:
del MY_DICT[i]
continue
else:
break
else:
print('!!!')
class YO:
"""hehe"""
a=1
def __init__(self):
try:
self.yo = 1
except ValueError as ex:
pass
except (NameError, TypeError):
raise XXXError()
except:
raise
#print('*****>',YO.__dict__)
class YOUPI(YO):
class_attr = None
def __init__(self):
self.member = None
def method(self):
"""method test"""
global MY_DICT
try:
MY_DICT = {}
local = None
autre = [a for a, b in MY_DICT if b]
if b in autre:
print('yo', end=' ')
elif a in autre:
print('hehe')
global_access(local, val=autre)
finally:
return local
def static_method():
"""static method test"""
assert MY_DICT, '???'
static_method = staticmethod(static_method)
def class_method(cls):
"""class method test"""
exec(a, b)
class_method = classmethod(class_method)
|
from . import SleepIQSensor
from .const import DOMAIN, SENSOR_TYPES, SIDES, SLEEP_NUMBER
ICON = "mdi:bed"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SleepIQ sensors."""
if discovery_info is None:
return
data = hass.data[DOMAIN]
data.update()
dev = []
for bed_id, bed in data.beds.items():
for side in SIDES:
if getattr(bed, side) is not None:
dev.append(SleepNumberSensor(data, bed_id, side))
add_entities(dev)
class SleepNumberSensor(SleepIQSensor):
"""Implementation of a SleepIQ sensor."""
def __init__(self, sleepiq_data, bed_id, side):
"""Initialize the sensor."""
SleepIQSensor.__init__(self, sleepiq_data, bed_id, side)
self._state = None
self.type = SLEEP_NUMBER
self._name = SENSOR_TYPES[self.type]
self.update()
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data from SleepIQ and updates the states."""
SleepIQSensor.update(self)
self._state = self.side.sleep_number
|
from django.core import mail
from django.urls import reverse
from weblate.auth.models import Permission, Role
from .test_views import ViewTestCase
class NewLangTest(ViewTestCase):
expected_lang_code = "pt_BR"
def create_component(self):
return self.create_po_new_base(new_lang="add")
def test_no_permission(self):
# Remove permission to add translations
Role.objects.get(name="Power user").permissions.remove(
Permission.objects.get(codename="translation.add")
)
# Test there is no add form
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Start new translation")
self.assertContains(response, "permission to start a new translation")
# Test adding fails
response = self.client.post(
reverse("new-language", kwargs=self.kw_component), {"lang": "af"}
)
self.assertEqual(response.status_code, 403)
self.assertFalse(
self.component.translation_set.filter(language__code="af").exists()
)
def test_none(self):
self.component.new_lang = "none"
self.component.save()
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertNotContains(response, "Start new translation")
def test_url(self):
self.component.new_lang = "url"
self.component.save()
self.project.instructions = "http://example.com/instructions"
self.project.save()
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Start new translation")
self.assertContains(response, "http://example.com/instructions")
def test_contact(self):
        # Make admin receive notifications
self.project.add_user(self.anotheruser, "@Administration")
self.component.new_lang = "contact"
self.component.save()
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Start new translation")
self.assertContains(response, "/new-lang/")
response = self.client.post(
reverse("new-language", kwargs=self.kw_component), {"lang": "af"}
)
self.assertRedirects(response, self.component.get_absolute_url())
# Verify mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject, "[Weblate] New language request in Test/Test"
)
def test_add(self):
        # Make admin receive notifications
self.project.add_user(self.anotheruser, "@Administration")
self.assertFalse(
self.component.translation_set.filter(language__code="af").exists()
)
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Start new translation")
self.assertContains(response, "/new-lang/")
lang = {"lang": "af"}
response = self.client.post(
reverse("new-language", kwargs=self.kw_component), lang
)
lang.update(self.kw_component)
self.assertRedirects(response, reverse("translation", kwargs=lang))
self.assertTrue(
self.component.translation_set.filter(language__code="af").exists()
)
# Verify mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject, "[Weblate] New language added to Test/Test"
)
# Not selected language
response = self.client.post(
reverse("new-language", kwargs=self.kw_component), {"lang": ""}, follow=True
)
self.assertContains(response, "Please fix errors in the form")
# Existing language
response = self.client.post(
reverse("new-language", kwargs=self.kw_component),
{"lang": "af"},
follow=True,
)
self.assertContains(response, "Please fix errors in the form")
def test_add_owner(self):
self.component.project.add_user(self.user, "@Administration")
# None chosen
response = self.client.post(
reverse("new-language", kwargs=self.kw_component), follow=True
)
self.assertContains(response, "Please fix errors in the form")
# One chosen
response = self.client.post(
reverse("new-language", kwargs=self.kw_component),
{"lang": "af"},
follow=True,
)
self.assertNotContains(response, "Please fix errors in the form")
# More chosen
response = self.client.post(
reverse("new-language", kwargs=self.kw_component),
{"lang": ["nl", "fr", "uk"]},
follow=True,
)
self.assertNotContains(response, "Please fix errors in the form")
self.assertEqual(
self.component.translation_set.filter(
language__code__in=("af", "nl", "fr", "uk")
).count(),
4,
)
def test_add_rejected(self):
self.component.project.add_user(self.user, "@Administration")
self.component.language_regex = "^cs$"
self.component.save()
# One chosen
response = self.client.post(
reverse("new-language", kwargs=self.kw_component),
{"lang": "af"},
follow=True,
)
self.assertContains(
response, "The given language is filtered by the language filter."
)
def test_add_code(self):
def perform(style, code, expected):
self.component.language_code_style = style
self.component.save()
self.assertFalse(
self.component.translation_set.filter(language__code=code).exists(),
f"Translation with code {code} already exists",
)
self.client.post(
reverse("new-language", kwargs=self.kw_component), {"lang": code}
)
translation = self.component.translation_set.get(language__code=code)
self.assertEqual(translation.language_code, expected)
translation.remove(self.user)
perform("", "pt_BR", self.expected_lang_code)
perform("posix", "pt_BR", "pt_BR")
perform("posix_long", "ms", "ms_MY")
perform("bcp", "pt_BR", "pt-BR")
perform("bcp_long", "ms", "ms-MY")
perform("android", "pt_BR", "pt-rBR")
self.project.language_aliases = "ia_FOO:ia"
self.project.save()
perform("android", "ia", "ia_FOO")
class AndroidNewLangTest(NewLangTest):
expected_lang_code = "pt-rBR"
def create_component(self):
return self.create_android(new_lang="add")
|
from test import unittest
from diamond.metric import Metric
class TestMetric(unittest.TestCase):
def testgetPathPrefix(self):
metric = Metric('servers.com.example.www.cpu.total.idle',
0,
host='com.example.www')
actual_value = metric.getPathPrefix()
expected_value = 'servers'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def testgetPathPrefixCustom(self):
metric = Metric('custom.path.prefix.com.example.www.cpu.total.idle',
0,
host='com.example.www')
actual_value = metric.getPathPrefix()
expected_value = 'custom.path.prefix'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def testgetCollectorPath(self):
metric = Metric('servers.com.example.www.cpu.total.idle',
0,
host='com.example.www')
actual_value = metric.getCollectorPath()
expected_value = 'cpu'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def testgetMetricPath(self):
metric = Metric('servers.com.example.www.cpu.total.idle',
0,
host='com.example.www')
actual_value = metric.getMetricPath()
expected_value = 'total.idle'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
# Test hostname of none
def testgetPathPrefixHostNone(self):
metric = Metric('servers.host.cpu.total.idle',
0)
actual_value = metric.getPathPrefix()
expected_value = 'servers'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def testgetCollectorPathHostNone(self):
metric = Metric('servers.host.cpu.total.idle',
0)
actual_value = metric.getCollectorPath()
expected_value = 'cpu'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def testgetMetricPathHostNone(self):
metric = Metric('servers.host.cpu.total.idle',
0)
actual_value = metric.getMetricPath()
expected_value = 'total.idle'
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def test_parse(self):
metric = Metric('test.parse', 0)
actual_value = str(metric).strip()
expected_value = str(Metric.parse(actual_value)).strip()
message = 'Actual %s, expected %s' % (actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
def test_issue_723(self):
metrics = [
9.97143369909e-05,
'9.97143369909e-05',
0.0000997143369909,
'0.0000997143369909',
]
for precision in xrange(0, 100):
for m in metrics:
metric = Metric('test.723', m, timestamp=0)
actual_value = str(metric).strip()
expected_value = 'test.723 0 0'
message = 'Actual %s, expected %s' % (actual_value,
expected_value)
self.assertEqual(actual_value, expected_value, message)
|
from __future__ import print_function
import argparse
import os
import shutil
import sys
def pprint(path):
if path.startswith(os.environ['HOME']):
return '~' + path.split(os.environ['HOME'], 1)[-1]
return path
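# e.g. with HOME=/home/user, pprint('/home/user/docs') returns '~/docs'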
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('source', nargs='+', help='one or more files or directories to be copied')
ap.add_argument('dest', help='destination file or folder')
ns = ap.parse_args(args)
files = ns.source
dest = ns.dest
if len(files) > 1:
# Copying multiple files, destination must be an existing directory.
if os.path.isdir(dest):
full_dest = os.path.abspath(dest)
for filef in files:
full_file = os.path.abspath(filef)
file_name = os.path.basename(full_file)
new_name = os.path.join(full_dest, file_name)
try:
if os.path.isdir(full_file):
shutil.copytree(full_file, new_name)
else:
shutil.copy(full_file, new_name)
except Exception as err:
print("cp: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
else:
print("cp: {}: No such directory".format(pprint(dest)), file=sys.stderr)
else:
# Copying a single file to a (pre-existing) directory or a file
filef = files[0]
full_file = os.path.abspath(filef)
file_name = os.path.basename(full_file)
full_dest = os.path.abspath(dest)
new_name = os.path.join(full_dest, file_name)
if os.path.exists(full_file):
try:
if os.path.exists(full_dest):
# Destination already exists
if os.path.isdir(full_dest):
# Destination is a directory
if os.path.isdir(full_file):
shutil.copytree(full_file, new_name)
else:
shutil.copy(full_file, new_name)
else:
# Destination is a file
shutil.copy(full_file, full_dest)
else:
# Destination does not yet exist
if os.path.isdir(full_file):
# Source is a directory, destination should become a directory
shutil.copytree(full_file, full_dest)
else:
# Source is a file, destination should become a file
shutil.copy(full_file, full_dest)
except Exception as err:
print("cp: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
else:
print("cp: {}: No such file".format(pprint(filef)), file=sys.stderr)
if __name__ == "__main__":
main(sys.argv[1:])
|
import os
import os.path as op
import pytest
from mne.utils import (_fetch_file, requires_good_network, catch_logging,
sizeof_fmt)
@pytest.mark.timeout(60)
@requires_good_network
@pytest.mark.parametrize('url', (
'https://raw.githubusercontent.com/mne-tools/mne-python/master/README.rst',
))
def test_fetch_file(url, tmpdir):
"""Test URL retrieval."""
tempdir = str(tmpdir)
archive_name = op.join(tempdir, "download_test")
with catch_logging() as log:
_fetch_file(url, archive_name, timeout=30., verbose=True)
log = log.getvalue()
assert ', resuming at' not in log
with open(archive_name, 'rb') as fid:
data = fid.read()
stop = len(data) // 2
assert 0 < stop < len(data)
with open(archive_name + '.part', 'wb') as fid:
fid.write(data[:stop])
with catch_logging() as log:
_fetch_file(url, archive_name, timeout=30., verbose=True)
log = log.getvalue()
assert ', resuming at %s' % sizeof_fmt(stop) in log
with pytest.raises(Exception, match='Cannot use'):
_fetch_file('NOT_AN_ADDRESS', op.join(tempdir, 'test'), verbose=False)
resume_name = op.join(tempdir, "download_resume")
# touch file
with open(resume_name + '.part', 'w'):
os.utime(resume_name + '.part', None)
_fetch_file(url, resume_name, resume=True, timeout=30.,
verbose=False)
with pytest.raises(ValueError, match='Bad hash value'):
_fetch_file(url, archive_name, hash_='a', verbose=False)
with pytest.raises(RuntimeError, match='Hash mismatch'):
_fetch_file(url, archive_name, hash_='a' * 32, verbose=False)
|
import logging
from typing import Callable, List, Optional
import attr
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import (
ATTR_DISCOVERY_HASH,
ATTR_DISCOVERY_TOPIC,
CONF_CONNECTIONS,
CONF_DEVICE,
CONF_IDENTIFIERS,
CONF_PAYLOAD,
CONF_QOS,
DOMAIN,
cleanup_device_registry,
debug_info,
trigger as mqtt_trigger,
)
from .discovery import MQTT_DISCOVERY_UPDATED, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
CONF_AUTOMATION_TYPE = "automation_type"
CONF_DISCOVERY_ID = "discovery_id"
CONF_SUBTYPE = "subtype"
CONF_TOPIC = "topic"
DEFAULT_ENCODING = "utf-8"
DEVICE = "device"
MQTT_TRIGGER_BASE = {
# Trigger when MQTT message is received
CONF_PLATFORM: DEVICE,
CONF_DOMAIN: DOMAIN,
}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_PLATFORM): DEVICE,
vol.Required(CONF_DOMAIN): DOMAIN,
vol.Required(CONF_DEVICE_ID): str,
vol.Required(CONF_DISCOVERY_ID): str,
vol.Required(CONF_TYPE): cv.string,
vol.Required(CONF_SUBTYPE): cv.string,
}
)
TRIGGER_DISCOVERY_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_AUTOMATION_TYPE): str,
vol.Required(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_PAYLOAD, default=None): vol.Any(None, cv.string),
vol.Required(CONF_TYPE): cv.string,
vol.Required(CONF_SUBTYPE): cv.string,
},
mqtt.validate_device_has_at_least_one_identifier,
)
DEVICE_TRIGGERS = "mqtt_device_triggers"
@attr.s(slots=True)
class TriggerInstance:
"""Attached trigger settings."""
action: AutomationActionType = attr.ib()
automation_info: dict = attr.ib()
trigger: "Trigger" = attr.ib()
remove: Optional[CALLBACK_TYPE] = attr.ib(default=None)
async def async_attach_trigger(self):
"""Attach MQTT trigger."""
mqtt_config = {
mqtt_trigger.CONF_TOPIC: self.trigger.topic,
mqtt_trigger.CONF_ENCODING: DEFAULT_ENCODING,
mqtt_trigger.CONF_QOS: self.trigger.qos,
}
if self.trigger.payload:
mqtt_config[CONF_PAYLOAD] = self.trigger.payload
if self.remove:
self.remove()
self.remove = await mqtt_trigger.async_attach_trigger(
self.trigger.hass,
mqtt_config,
self.action,
self.automation_info,
)
@attr.s(slots=True)
class Trigger:
"""Device trigger settings."""
device_id: str = attr.ib()
discovery_data: dict = attr.ib()
hass: HomeAssistantType = attr.ib()
payload: str = attr.ib()
qos: int = attr.ib()
remove_signal: Callable[[], None] = attr.ib()
subtype: str = attr.ib()
topic: str = attr.ib()
type: str = attr.ib()
trigger_instances: List[TriggerInstance] = attr.ib(factory=list)
async def add_trigger(self, action, automation_info):
"""Add MQTT trigger."""
instance = TriggerInstance(action, automation_info, self)
self.trigger_instances.append(instance)
if self.topic is not None:
# If we know about the trigger, subscribe to MQTT topic
await instance.async_attach_trigger()
@callback
def async_remove() -> None:
"""Remove trigger."""
if instance not in self.trigger_instances:
raise HomeAssistantError("Can't remove trigger twice")
if instance.remove:
instance.remove()
self.trigger_instances.remove(instance)
return async_remove
async def update_trigger(self, config, discovery_hash, remove_signal):
"""Update MQTT device trigger."""
self.remove_signal = remove_signal
self.type = config[CONF_TYPE]
self.subtype = config[CONF_SUBTYPE]
self.payload = config[CONF_PAYLOAD]
self.qos = config[CONF_QOS]
topic_changed = self.topic != config[CONF_TOPIC]
self.topic = config[CONF_TOPIC]
        # Unsubscribe+subscribe if this trigger is in use and the topic has
        # changed. If the topic is unchanged, unsubscribe+subscribe would
        # execute in the wrong order, because unsubscribe is done with the
        # help of async_create_task.
if topic_changed:
for trig in self.trigger_instances:
await trig.async_attach_trigger()
def detach_trigger(self):
"""Remove MQTT device trigger."""
# Mark trigger as unknown
self.topic = None
# Unsubscribe if this trigger is in use
for trig in self.trigger_instances:
if trig.remove:
trig.remove()
trig.remove = None
async def _update_device(hass, config_entry, config):
"""Update device registry."""
device_registry = await hass.helpers.device_registry.async_get_registry()
config_entry_id = config_entry.entry_id
device_info = mqtt.device_info_from_config(config[CONF_DEVICE])
if config_entry_id is not None and device_info is not None:
device_info["config_entry_id"] = config_entry_id
device_registry.async_get_or_create(**device_info)
async def async_setup_trigger(hass, config, config_entry, discovery_data):
"""Set up the MQTT device trigger."""
config = TRIGGER_DISCOVERY_SCHEMA(config)
discovery_hash = discovery_data[ATTR_DISCOVERY_HASH]
discovery_id = discovery_hash[1]
remove_signal = None
async def discovery_update(payload):
"""Handle discovery update."""
_LOGGER.info(
"Got update for trigger with hash: %s '%s'", discovery_hash, payload
)
if not payload:
# Empty payload: Remove trigger
_LOGGER.info("Removing trigger: %s", discovery_hash)
debug_info.remove_trigger_discovery_data(hass, discovery_hash)
if discovery_id in hass.data[DEVICE_TRIGGERS]:
device_trigger = hass.data[DEVICE_TRIGGERS][discovery_id]
device_trigger.detach_trigger()
clear_discovery_hash(hass, discovery_hash)
remove_signal()
await cleanup_device_registry(hass, device.id)
else:
# Non-empty payload: Update trigger
_LOGGER.info("Updating trigger: %s", discovery_hash)
debug_info.update_trigger_discovery_data(hass, discovery_hash, payload)
config = TRIGGER_DISCOVERY_SCHEMA(payload)
await _update_device(hass, config_entry, config)
device_trigger = hass.data[DEVICE_TRIGGERS][discovery_id]
await device_trigger.update_trigger(config, discovery_hash, remove_signal)
remove_signal = async_dispatcher_connect(
hass, MQTT_DISCOVERY_UPDATED.format(discovery_hash), discovery_update
)
await _update_device(hass, config_entry, config)
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get_device(
{(DOMAIN, id_) for id_ in config[CONF_DEVICE][CONF_IDENTIFIERS]},
{tuple(x) for x in config[CONF_DEVICE][CONF_CONNECTIONS]},
)
if device is None:
return
if DEVICE_TRIGGERS not in hass.data:
hass.data[DEVICE_TRIGGERS] = {}
if discovery_id not in hass.data[DEVICE_TRIGGERS]:
hass.data[DEVICE_TRIGGERS][discovery_id] = Trigger(
hass=hass,
device_id=device.id,
discovery_data=discovery_data,
type=config[CONF_TYPE],
subtype=config[CONF_SUBTYPE],
topic=config[CONF_TOPIC],
payload=config[CONF_PAYLOAD],
qos=config[CONF_QOS],
remove_signal=remove_signal,
)
else:
await hass.data[DEVICE_TRIGGERS][discovery_id].update_trigger(
config, discovery_hash, remove_signal
)
debug_info.add_trigger_discovery_data(
hass, discovery_hash, discovery_data, device.id
)
async def async_device_removed(hass: HomeAssistant, device_id: str):
"""Handle the removal of a device."""
triggers = await async_get_triggers(hass, device_id)
for trig in triggers:
device_trigger = hass.data[DEVICE_TRIGGERS].pop(trig[CONF_DISCOVERY_ID])
if device_trigger:
discovery_hash = device_trigger.discovery_data[ATTR_DISCOVERY_HASH]
discovery_topic = device_trigger.discovery_data[ATTR_DISCOVERY_TOPIC]
debug_info.remove_trigger_discovery_data(hass, discovery_hash)
device_trigger.detach_trigger()
clear_discovery_hash(hass, discovery_hash)
device_trigger.remove_signal()
mqtt.publish(
hass,
discovery_topic,
"",
retain=True,
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for MQTT devices."""
triggers = []
if DEVICE_TRIGGERS not in hass.data:
return triggers
for discovery_id, trig in hass.data[DEVICE_TRIGGERS].items():
if trig.device_id != device_id or trig.topic is None:
continue
trigger = {
**MQTT_TRIGGER_BASE,
"device_id": device_id,
"type": trig.type,
"subtype": trig.subtype,
"discovery_id": discovery_id,
}
triggers.append(trigger)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
if DEVICE_TRIGGERS not in hass.data:
hass.data[DEVICE_TRIGGERS] = {}
config = TRIGGER_SCHEMA(config)
device_id = config[CONF_DEVICE_ID]
discovery_id = config[CONF_DISCOVERY_ID]
if discovery_id not in hass.data[DEVICE_TRIGGERS]:
hass.data[DEVICE_TRIGGERS][discovery_id] = Trigger(
hass=hass,
device_id=device_id,
discovery_data=None,
remove_signal=None,
type=config[CONF_TYPE],
subtype=config[CONF_SUBTYPE],
topic=None,
payload=None,
qos=None,
)
return await hass.data[DEVICE_TRIGGERS][discovery_id].add_trigger(
action, automation_info
)
|
import diamond.collector
import socket
import re
class ZookeeperCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(ZookeeperCollector, self).get_default_config_help()
config_help.update({
            'publish':
                "Which rows of 'mntr' output you would like to publish. " +
                "Telnet 'host port', type 'mntr' and hit enter to see the " +
                "list of possibilities. Leave unset to publish all.",
            'hosts':
                "List of hosts and ports to collect from. Set an alias by " +
                "prefixing the host:port with alias@",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ZookeeperCollector, self).get_default_config()
config.update({
'path': 'zookeeper',
            # Which rows of 'mntr' output you would like to publish.
# 'telnet host port' and type mntr and hit enter to see the list of
# possibilities.
# Leave unset to publish all
# 'publish': ''
# Connection settings
'hosts': ['localhost:2181']
})
return config
def get_raw_stats(self, host, port):
data = ''
# connect
try:
if port is None:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(host)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, int(port)))
# request stats
sock.send('mntr\n')
# something big enough to get whatever is sent back
data = sock.recv(4096)
except socket.error:
self.log.exception('Failed to get stats from %s:%s',
host, port)
return data
def get_stats(self, host, port):
        # keys that are always ignored; they aren't stats
ignored = ('zk_version', 'zk_server_state')
pid = None
stats = {}
data = self.get_raw_stats(host, port)
# parse stats
        for line in data.splitlines():
            pieces = line.split()
            # skip blank or malformed lines
            if len(pieces) < 2 or pieces[0] in ignored:
                continue
            stats[pieces[0]] = pieces[1]
# get max connection limit
self.log.debug('pid %s', pid)
        try:
            cmdline = "/proc/%s/cmdline" % pid
            with open(cmdline, 'r') as f:
                m = re.search("-c\x00(\d+)", f.readline())
            if m is not None:
                self.log.debug('limit connections %s', m.group(1))
                stats['limit_maxconn'] = m.group(1)
        except Exception:
            self.log.debug("Cannot parse command line options for zookeeper")
return stats
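    # Example (illustrative) 'mntr' response and the resulting stats dict:
    #   zk_version  3.4.5, built on 06/10/2013
    #   zk_avg_latency  0
    #   zk_num_alive_connections  1
    # -> {'zk_avg_latency': '0', 'zk_num_alive_connections': '1'}
    # ('zk_version' and 'zk_server_state' rows are dropped via `ignored`).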
def collect(self):
hosts = self.config.get('hosts')
# Convert a string config value to be an array
if isinstance(hosts, basestring):
hosts = [hosts]
for host in hosts:
matches = re.search('((.+)\@)?([^:]+)(:(\d+))?', host)
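            # e.g. 'zk1@10.0.0.5:2181' -> alias 'zk1', hostname '10.0.0.5',
            # port '2181'; plain 'localhost:2181' leaves the alias as None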
alias = matches.group(2)
hostname = matches.group(3)
port = matches.group(5)
stats = self.get_stats(hostname, port)
# figure out what we're configured to get, defaulting to everything
desired = self.config.get('publish', stats.keys())
# for everything we want
for stat in desired:
if stat in stats:
# we have it
if alias is not None:
self.publish(alias + "." + stat, stats[stat])
else:
self.publish(stat, stats[stat])
            else:
                # we don't have it; it must be something configured in
                # publish, so we should log an error about it
                self.log.error("No such key '%s' available, issue 'mntr' "
                               "for a full list", stat)
|
import re
from datetime import date
from uuid import uuid4
from django import template
from django.contrib.humanize.templatetags.humanize import intcomma
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import gettext, gettext_lazy, ngettext, pgettext
from siphashc import siphash
from weblate.accounts.avatar import get_user_display
from weblate.accounts.models import Profile
from weblate.auth.models import User
from weblate.checks.models import CHECKS
from weblate.checks.utils import highlight_string
from weblate.trans.filter import FILTERS, get_filter_choice
from weblate.trans.models import (
Announcement,
Component,
ContributorAgreement,
Project,
Translation,
)
from weblate.trans.models.translation import GhostTranslation
from weblate.trans.simplediff import html_diff
from weblate.trans.util import get_state_css, split_plural
from weblate.utils.docs import get_doc_url
from weblate.utils.hash import hash_to_checksum
from weblate.utils.markdown import render_markdown
from weblate.utils.stats import BaseStats, ProjectLanguage
from weblate.utils.views import SORT_CHOICES
register = template.Library()
HIGHLIGHT_SPACE = '<span class="hlspace">{}</span>{}'
SPACE_TEMPLATE = '<span class="{}"><span class="sr-only">{}</span></span>'
SPACE_SPACE = SPACE_TEMPLATE.format("space-space", " ")
SPACE_NL = HIGHLIGHT_SPACE.format(SPACE_TEMPLATE.format("space-nl", ""), "<br />")
SPACE_TAB = HIGHLIGHT_SPACE.format(SPACE_TEMPLATE.format("space-tab", "\t"), "")
HL_CHECK = (
'<span class="hlcheck">' '<span class="highlight-number"></span>' "{0}" "</span>"
)
WHITESPACE_RE = re.compile(r"(  +| $|^ )")
NEWLINES_RE = re.compile(r"\r\n|\r|\n")
TYPE_MAPPING = {True: "yes", False: "no", None: "unknown"}
# Mapping of status report flags to names
NAME_MAPPING = {
True: gettext_lazy("Good configuration"),
False: gettext_lazy("Bad configuration"),
None: gettext_lazy("Possible configuration"),
}
FLAG_TEMPLATE = '<span title="{0}" class="{1}">{2}</span>'
BADGE_TEMPLATE = '<span class="badge pull-right flip {1}">{0}</span>'
PERM_TEMPLATE = """
<td>
<input type="checkbox"
class="set-group"
data-placement="bottom"
data-username="{0}"
data-group="{1}"
data-name="{2}"
{3} />
</td>
"""
SOURCE_LINK = """
<a href="{0}" target="_blank" rel="noopener noreferrer"
class="wrap-text" dir="ltr">{1}</a>
"""
def replace_whitespace(match):
spaces = match.group(1).replace(" ", SPACE_SPACE)
    return HIGHLIGHT_SPACE.format(spaces, "")
def fmt_whitespace(value):
"""Format whitespace so that it is more visible."""
    # Highlight extra whitespace
value = WHITESPACE_RE.sub(replace_whitespace, value)
# Highlight tabs
value = value.replace("\t", SPACE_TAB.format(gettext("Tab character")))
# Highlight whitespace inside tags (ins/del)
value = value.replace("> <", f">{SPACE_SPACE}<")
return value
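# Example (illustrative): fmt_whitespace("a  b") wraps the run of two spaces
# in the "hlspace" markup (one sr-only labelled span per space), while
# fmt_whitespace("a\tb") swaps the tab for SPACE_TAB labelled "Tab character".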
def fmt_diff(value, diff, idx):
"""Format diff if there is any."""
if diff is None:
return escape(value)
    return html_diff(diff[idx], value)
def fmt_highlights(raw_value, value, unit):
"""Format check highlights."""
if unit is None:
return value
highlights = highlight_string(raw_value, unit)
start_search = 0
for highlight in highlights:
htext = escape(highlight[2])
find_highlight = value.find(htext, start_search)
if find_highlight >= 0:
newpart = HL_CHECK.format(htext)
next_part = value[(find_highlight + len(htext)) :]
value = value[:find_highlight] + newpart + next_part
start_search = find_highlight + len(newpart)
return value
def fmt_search(value, search_match, match):
"""Format search match."""
if search_match:
search_match = escape(search_match)
if match == "search":
# Since the search ignored case, we need to highlight any
# combination of upper and lower case we find.
return re.sub(
r"(" + re.escape(search_match) + ")",
r'<span class="hlmatch">\1</span>',
value,
flags=re.IGNORECASE,
)
if match in ("replacement", "replaced"):
return value.replace(
search_match, f'<span class="{match}">{search_match}</span>'
)
return value
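# Example (illustrative): fmt_search('Hello WORLD', 'world', 'search')
# -> 'Hello <span class="hlmatch">WORLD</span>'; the IGNORECASE regex
# highlights the match while keeping its original casing.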
@register.inclusion_tag("snippets/format-translation.html")
def format_translation(
value,
language,
plural=None,
diff=None,
search_match=None,
simple=False,
wrap=False,
num_plurals=2,
unit=None,
match="search",
):
"""Nicely formats translation text possibly handling plurals or diff."""
# Split plurals to separate strings
plurals = split_plural(value)
if plural is None:
plural = language.plural
# Show plurals?
if int(num_plurals) <= 1:
plurals = plurals[-1:]
# Newline concatenator
newline = SPACE_NL.format(gettext("New line"))
# Split diff plurals
if diff is not None:
diff = split_plural(diff)
# Previous message did not have to be a plural
while len(diff) < len(plurals):
diff.append(diff[0])
# We will collect part for each plural
parts = []
has_content = False
for idx, raw_value in enumerate(plurals):
        # Work on the raw value; escaping happens in fmt_diff below
        value = raw_value
# Content of the Copy to clipboard button
copy = escape(value)
# Format diff if there is any
value = fmt_diff(value, diff, idx)
# Create span for checks highlights
value = fmt_highlights(raw_value, value, unit)
# Format search term
value = fmt_search(value, search_match, match)
# Normalize newlines
value = NEWLINES_RE.sub("\n", value)
# Split string
paras = value.split("\n")
# Format whitespace in each paragraph
paras = [fmt_whitespace(p) for p in paras]
# Show label for plural (if there are any)
title = ""
if len(plurals) > 1:
title = plural.get_plural_name(idx)
# Join paragraphs
content = mark_safe(newline.join(paras))
parts.append({"title": title, "content": content, "copy": copy})
has_content |= bool(content)
return {
"simple": simple,
"wrap": wrap,
"items": parts,
"language": language,
"unit": unit,
"has_content": has_content,
}
@register.simple_tag
def search_name(query):
"""Returns name for a query string."""
return FILTERS.get_search_name(query)
@register.simple_tag
def check_name(check):
"""Return check name, or its id if check is not known."""
try:
return escape(CHECKS[check].name)
except KeyError:
return escape(check)
@register.simple_tag
def check_description(check):
"""Return check description, or its id if check is not known."""
try:
return escape(CHECKS[check].description)
except KeyError:
return escape(check)
@register.simple_tag(takes_context=True)
def documentation(context, page, anchor=""):
"""Return link to Weblate documentation."""
# Use object method get_doc_url if present
if hasattr(page, "get_doc_url"):
return page.get_doc_url(user=context["user"])
return get_doc_url(page, anchor, user=context["user"])
@register.inclusion_tag("documentation-icon.html", takes_context=True)
def documentation_icon(context, page, anchor="", right=False):
return {"right": right, "doc_url": documentation(context, page, anchor)}
@register.inclusion_tag("documentation-icon.html", takes_context=True)
def form_field_doc_link(context, form, field):
if hasattr(form, "get_field_doc"):
return {
"right": False,
"doc_url": get_doc_url(*form.get_field_doc(field), user=context["user"]),
}
return {}
@register.inclusion_tag("message.html")
def show_message(tags, message):
tags = tags.split()
final = []
task_id = None
for tag in tags:
if tag.startswith("task:"):
task_id = tag[5:]
else:
final.append(tag)
return {"tags": " ".join(final), "task_id": task_id, "message": message}
def naturaltime_past(value, now):
"""Handling of past dates for naturaltime."""
# this function is huge
# pylint: disable=too-many-branches,too-many-return-statements
delta = now - value
if delta.days >= 365:
count = delta.days // 365
if count == 1:
return gettext("a year ago")
return ngettext("%(count)s year ago", "%(count)s years ago", count) % {
"count": count
}
if delta.days >= 30:
count = delta.days // 30
if count == 1:
return gettext("a month ago")
return ngettext("%(count)s month ago", "%(count)s months ago", count) % {
"count": count
}
if delta.days >= 14:
count = delta.days // 7
return ngettext("%(count)s week ago", "%(count)s weeks ago", count) % {
"count": count
}
if delta.days > 0:
if delta.days == 7:
return gettext("a week ago")
if delta.days == 1:
return gettext("yesterday")
return ngettext("%(count)s day ago", "%(count)s days ago", delta.days) % {
"count": delta.days
}
if delta.seconds == 0:
return gettext("now")
if delta.seconds < 60:
if delta.seconds == 1:
return gettext("a second ago")
return ngettext(
"%(count)s second ago", "%(count)s seconds ago", delta.seconds
) % {"count": delta.seconds}
if delta.seconds // 60 < 60:
count = delta.seconds // 60
if count == 1:
return gettext("a minute ago")
return ngettext("%(count)s minute ago", "%(count)s minutes ago", count) % {
"count": count
}
count = delta.seconds // 60 // 60
if count == 1:
return gettext("an hour ago")
return ngettext("%(count)s hour ago", "%(count)s hours ago", count) % {
"count": count
}
def naturaltime_future(value, now):
"""Handling of future dates for naturaltime."""
# this function is huge
# pylint: disable=too-many-branches,too-many-return-statements
delta = value - now
if delta.days >= 365:
count = delta.days // 365
if count == 1:
return gettext("a year from now")
return ngettext(
"%(count)s year from now", "%(count)s years from now", count
) % {"count": count}
if delta.days >= 30:
count = delta.days // 30
if count == 1:
return gettext("a month from now")
return ngettext(
"%(count)s month from now", "%(count)s months from now", count
) % {"count": count}
if delta.days >= 14:
count = delta.days // 7
return ngettext(
"%(count)s week from now", "%(count)s weeks from now", count
) % {"count": count}
if delta.days > 0:
if delta.days == 1:
return gettext("tomorrow")
if delta.days == 7:
return gettext("a week from now")
return ngettext(
"%(count)s day from now", "%(count)s days from now", delta.days
) % {"count": delta.days}
if delta.seconds == 0:
return gettext("now")
if delta.seconds < 60:
if delta.seconds == 1:
return gettext("a second from now")
return ngettext(
"%(count)s second from now", "%(count)s seconds from now", delta.seconds
) % {"count": delta.seconds}
if delta.seconds // 60 < 60:
count = delta.seconds // 60
if count == 1:
return gettext("a minute from now")
return ngettext(
"%(count)s minute from now", "%(count)s minutes from now", count
) % {"count": count}
count = delta.seconds // 60 // 60
if count == 1:
return gettext("an hour from now")
return ngettext("%(count)s hour from now", "%(count)s hours from now", count) % {
"count": count
}
@register.filter
def naturaltime(value, now=None):
"""Heavily based on Django's django.contrib.humanize implementation of naturaltime.
For date and time values shows how many seconds, minutes or hours ago compared to
current timestamp returns representing string.
"""
# datetime is a subclass of date
if not isinstance(value, date):
return value
if now is None:
now = timezone.now()
if value < now:
text = naturaltime_past(value, now)
else:
text = naturaltime_future(value, now)
return mark_safe(
'<span title="{}">{}</span>'.format(
escape(value.replace(microsecond=0).isoformat()), escape(text)
)
)
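# Illustrative outputs (hypothetical values): one day in the past renders as
# "yesterday", 90 seconds in the past as "a minute ago", and 40 days in the
# future as "a month from now", each wrapped in a <span> whose title is the
# ISO timestamp.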
def get_stats(obj):
if isinstance(obj, BaseStats):
return obj
return obj.stats
def translation_progress_data(readonly, approved, translated, fuzzy, checks):
return {
"readonly": f"{readonly:.1f}",
"approved": f"{approved:.1f}",
"good": "{:.1f}".format(max(translated - checks - approved - readonly, 0)),
"checks": f"{checks:.1f}",
"fuzzy": f"{fuzzy:.1f}",
"percent": f"{translated:.1f}",
}
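# Worked example (illustrative numbers): translated=80.0, checks=5.0,
# approved=10.0, readonly=2.5 gives good = max(80.0 - 5.0 - 10.0 - 2.5, 0)
# = 62.5, so the bar segments sum to the translated percentage.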
@register.inclusion_tag("progress.html")
def translation_progress(obj):
stats = get_stats(obj)
return translation_progress_data(
stats.readonly_percent,
stats.approved_percent,
stats.translated_percent,
stats.fuzzy_percent,
stats.translated_checks_percent,
)
@register.inclusion_tag("progress.html")
def words_progress(obj):
stats = get_stats(obj)
return translation_progress_data(
stats.readonly_words_percent,
stats.approved_words_percent,
stats.translated_words_percent,
stats.fuzzy_words_percent,
stats.translated_checks_words_percent,
)
@register.simple_tag
def get_state_badge(unit):
"""Return state badge."""
flag = None
if unit.fuzzy:
flag = (pgettext("String state", "Needs editing"), "text-danger")
elif not unit.translated:
flag = (pgettext("String state", "Not translated"), "text-danger")
elif unit.approved:
flag = (pgettext("String state", "Approved"), "text-success")
elif unit.translated:
flag = (pgettext("String state", "Translated"), "text-primary")
if flag is None:
return ""
return mark_safe(BADGE_TEMPLATE.format(*flag))
@register.inclusion_tag("snippets/unit-state.html")
def get_state_flags(unit, detail=False):
"""Return state flags."""
return {
"state": " ".join(get_state_css(unit)),
"unit": unit,
"detail": detail,
}
@register.simple_tag
def get_location_links(profile, unit):
"""Generate links to source files where translation was used."""
ret = []
# Fallback to source unit if it has more information
if not unit.location and unit.source_unit.location:
unit = unit.source_unit
# Do we have any locations?
if not unit.location:
return ""
# Is it just an ID?
if unit.location.isdigit():
return gettext("string ID %s") % unit.location
# Go through all locations separated by comma
for location, filename, line in unit.get_locations():
link = unit.translation.component.get_repoweb_link(
filename, line, profile.editor_link
)
if link is None:
ret.append(escape(location))
else:
ret.append(SOURCE_LINK.format(escape(link), escape(location)))
return mark_safe("\n".join(ret))
@register.simple_tag(takes_context=True)
def announcements(context, project=None, component=None, language=None):
"""Display announcement messages for given context."""
ret = []
user = context["user"]
for announcement in Announcement.objects.context_filter(
project, component, language
):
can_delete = user.has_perm(
"component.edit", announcement.component
) or user.has_perm("project.edit", announcement.project)
ret.append(
render_to_string(
"message.html",
{
"tags": " ".join((announcement.category, "announcement")),
"message": render_markdown(announcement.message),
"announcement": announcement,
"can_delete": can_delete,
},
)
)
return mark_safe("\n".join(ret))
@register.simple_tag(takes_context=True)
def active_tab(context, slug):
active = "active" if slug == context["active_tab_slug"] else ""
return mark_safe(f'class="tab-pane {active}" id="{slug}"')
@register.simple_tag(takes_context=True)
def active_link(context, slug):
if slug == context["active_tab_slug"]:
return mark_safe('class="active"')
return ""
@register.simple_tag
def user_permissions(user, groups):
"""Render checksboxes for user permissions."""
result = []
for group in groups:
checked = ""
if user.groups.filter(pk=group.pk).exists():
checked = ' checked="checked"'
result.append(
PERM_TEMPLATE.format(
escape(user.username), group.pk, escape(group.short_name), checked
)
)
return mark_safe("".join(result))
@register.simple_tag(takes_context=True)
def show_contributor_agreement(context, component):
if not component.agreement:
return ""
if ContributorAgreement.objects.has_agreed(context["user"], component):
return ""
return render_to_string(
"snippets/component/contributor-agreement.html",
{"object": component, "next": context["request"].get_full_path()},
)
@register.simple_tag(takes_context=True)
def get_translate_url(context, obj):
"""Get translate URL based on user preference."""
if isinstance(obj, BaseStats) or not hasattr(obj, "get_translate_url"):
return ""
if context["user"].profile.translate_mode == Profile.TRANSLATE_ZEN:
name = "zen"
else:
name = "translate"
return reverse(name, kwargs=obj.get_reverse_url_kwargs())
@register.simple_tag(takes_context=True)
def get_browse_url(context, obj):
"""Get translate URL based on user preference."""
# Project listing on language page
if "language" in context and isinstance(obj, Project):
return reverse(
"project-language",
kwargs={"lang": context["language"].code, "project": obj.slug},
)
return obj.get_absolute_url()
@register.simple_tag(takes_context=True)
def init_unique_row_id(context):
context["row_uuid"] = uuid4().hex
return ""
@register.simple_tag(takes_context=True)
def get_unique_row_id(context, obj):
"""Get unique row ID for multiline tables."""
return "{}-{}".format(context["row_uuid"], obj.pk)
@register.simple_tag
def get_filter_name(name):
names = dict(get_filter_choice())
return names[name]
def translation_alerts(translation):
if translation.is_source:
yield (
"state/source.svg",
gettext("This translation is used for source strings."),
None,
)
def component_alerts(component):
if component.is_repo_link:
yield (
"state/link.svg",
gettext("This component is linked to the %(target)s repository.")
% {"target": component.linked_component},
None,
)
if component.all_alerts:
yield (
"state/alert.svg",
gettext("Fix this component to clear its alerts."),
component.get_absolute_url() + "#alerts",
)
if component.locked:
yield ("state/lock.svg", gettext("This translation is locked."), None)
if component.in_progress():
yield (
"state/update.svg",
gettext("Updating translation component…"),
reverse("component_progress", kwargs=component.get_reverse_url_kwargs())
+ "?info=1",
)
def project_alerts(project):
if project.has_alerts:
yield (
"state/alert.svg",
gettext("Some of the components within this project have alerts."),
None,
)
if project.locked:
yield ("state/lock.svg", gettext("This translation is locked."), None)
@register.inclusion_tag("trans/embed-alert.html", takes_context=True)
def indicate_alerts(context, obj):
result = []
translation = None
component = None
project = None
global_base = context.get("global_base")
if isinstance(obj, (Translation, GhostTranslation)):
translation = obj
component = obj.component
project = component.project
elif isinstance(obj, Component):
component = obj
project = component.project
elif isinstance(obj, Project):
project = obj
elif isinstance(obj, ProjectLanguage):
project = obj.project
if context["user"].has_perm("project.edit", project):
result.append(
("state/admin.svg", gettext("You administrate this project."), None)
)
if translation:
result.extend(translation_alerts(translation))
if component:
result.extend(component_alerts(component))
elif project:
result.extend(project_alerts(project))
if getattr(obj, "is_ghost", False):
result.append(
("state/ghost.svg", gettext("This translation does not yet exist."), None)
)
elif global_base:
if isinstance(global_base, str):
global_base = getattr(obj, global_base)
stats = get_stats(obj)
count = global_base.source_strings - stats.all
if count:
result.append(
(
"state/ghost.svg",
ngettext(
"%(count)s string is not being translated here.",
"%(count)s strings are not being translated here.",
count,
)
% {"count": count},
None,
)
)
return {"icons": result, "component": component, "project": project}
@register.filter
def markdown(text):
return render_markdown(text)
@register.filter
def choiceval(boundfield):
"""Get literal value from field's choices.
Empty value is returned if value is not selected or invalid.
"""
value = boundfield.value()
if value is None:
return ""
if value is True:
return gettext("enabled")
if not hasattr(boundfield.field, "choices"):
return value
choices = list(boundfield.field.choices)
if choices and hasattr(choices[0][0], "value"):
# Django 3.1+ yields ModelChoiceIteratorValue
choices = {choice.value: value for choice, value in choices}
else:
# Django 3.0
choices = dict(choices)
if isinstance(value, list):
return ", ".join(choices.get(val, val) for val in value)
return choices.get(value, value)
@register.filter
def format_commit_author(commit):
users = User.objects.filter(
social_auth__verifiedemail__email=commit["author_email"]
).distinct()
if len(users) == 1:
return get_user_display(users[0], True, True)
return commit["author_name"]
@register.filter
def percent_format(number):
return pgettext("Translated percents", "%(percent)s%%") % {
"percent": intcomma(int(number))
}
@register.filter
def hash_text(name):
"""Hash text for use in HTML id."""
return hash_to_checksum(siphash("Weblate URL hash", name.encode()))
@register.simple_tag
def sort_choices():
return SORT_CHOICES.items()
@register.simple_tag(takes_context=True)
def render_alert(context, alert):
return alert.render(user=context["user"])
|
import datetime
import logging
import sys
from builtins import str
from uuid import uuid4
from .client import OpenTSDBClient
from .decoder import Decoder
from ...common.interfaces import AbstractPlugin, \
MonitoringDataListener, AggregateResultListener
logger = logging.getLogger(__name__) # pylint: disable=C0103
def chop(data_list, chunk_size):
if sys.getsizeof(str(data_list)) <= chunk_size:
return [data_list]
elif len(data_list) == 1:
logger.warning(
"Too large piece of Telegraf data. Might experience upload problems."
)
return [data_list]
else:
        mid = len(data_list) // 2
return chop(data_list[:mid], chunk_size) + chop(
data_list[mid:], chunk_size)
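# Example (illustrative): chop keeps halving until each chunk's str()
# serialization fits under chunk_size, so chop(['x'] * 100, 300) returns
# several sublists; a single item that is still too large is returned as-is
# with a warning rather than split further.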
class Plugin(AbstractPlugin, AggregateResultListener, MonitoringDataListener):
SECTION = 'opentsdb'
def __init__(self, core, cfg, name):
AbstractPlugin.__init__(self, core, cfg, name)
self.tank_tag = self.get_option("tank_tag")
self.prefix_metric = self.get_option("prefix_metric")
self._client = None
self.start_time = None
self.end_time = None
self.decoder = Decoder(
self.tank_tag,
str(uuid4()),
self.get_option("custom_tags"),
self.get_option("labeled"),
self.get_option("histograms"),
)
@property
def client(self):
if not self._client:
self._client = OpenTSDBClient(
host=self.get_option("address"),
port=self.get_option("port"),
username=self.get_option("username"),
password=self.get_option("password"),
ssl=self.get_option("ssl"),
verify_ssl=self.get_option("verify_ssl"))
return self._client
def prepare_test(self):
self.core.job.subscribe_plugin(self)
def start_test(self):
self.start_time = datetime.datetime.now()
def end_test(self, retcode):
self.end_time = datetime.datetime.now() + datetime.timedelta(minutes=1)
return retcode
def on_aggregated_data(self, data, stats):
self.client.write(
self.decoder.decode_aggregates(data, stats, self.prefix_metric))
    def monitoring_data(self, data_list):
        if len(data_list) > 0:
            for chunk in chop(data_list, self.get_option("chunk_size")):
                self._send_monitoring(chunk)
def _send_monitoring(self, data):
self.client.write(self.decoder.decode_monitoring(data))
def set_uuid(self, id_):
self.decoder.tags['uuid'] = id_
|
import pytest
from unittest.mock import Mock
from kombu.utils.scheduling import FairCycle, cycle_by_name
class MyEmpty(Exception):
pass
def consume(fun, n):
r = []
for i in range(n):
r.append(fun(Mock(name='callback')))
return r
class test_FairCycle:
def test_cycle(self):
resources = ['a', 'b', 'c', 'd', 'e']
callback = Mock(name='callback')
def echo(r, timeout=None):
return r
# cycle should be ['a', 'b', 'c', 'd', 'e', ... repeat]
cycle = FairCycle(echo, resources, MyEmpty)
for i in range(len(resources)):
assert cycle.get(callback) == resources[i]
for i in range(len(resources)):
assert cycle.get(callback) == resources[i]
def test_cycle_breaks(self):
resources = ['a', 'b', 'c', 'd', 'e']
def echo(r, callback):
if r == 'c':
raise MyEmpty(r)
return r
cycle = FairCycle(echo, resources, MyEmpty)
assert consume(cycle.get, len(resources)) == [
'a', 'b', 'd', 'e', 'a',
]
assert consume(cycle.get, len(resources)) == [
'b', 'd', 'e', 'a', 'b',
]
cycle2 = FairCycle(echo, ['c', 'c'], MyEmpty)
with pytest.raises(MyEmpty):
consume(cycle2.get, 3)
def test_cycle_no_resources(self):
cycle = FairCycle(None, [], MyEmpty)
cycle.pos = 10
with pytest.raises(MyEmpty):
cycle._next()
def test__repr__(self):
assert repr(FairCycle(lambda x: x, [1, 2, 3], MyEmpty))
def test_round_robin_cycle():
it = cycle_by_name('round_robin')(['A', 'B', 'C'])
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('B')
assert it.consume(3) == ['A', 'C', 'B']
it.rotate('A')
assert it.consume(3) == ['C', 'B', 'A']
it.rotate('A')
assert it.consume(3) == ['C', 'B', 'A']
it.rotate('C')
assert it.consume(3) == ['B', 'A', 'C']
def test_priority_cycle():
it = cycle_by_name('priority')(['A', 'B', 'C'])
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('B')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('A')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('A')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('C')
assert it.consume(3) == ['A', 'B', 'C']
def test_sorted_cycle():
it = cycle_by_name('sorted')(['B', 'C', 'A'])
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('B')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('A')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('A')
assert it.consume(3) == ['A', 'B', 'C']
it.rotate('C')
assert it.consume(3) == ['A', 'B', 'C']
|
from flask import jsonify, make_response, request
from functools import wraps
def add_response_headers(headers=None, default_headers=None, cors=False):
"""
Adds a bunch of headers to the Flask responses
:param headers: a dictionary of headers and values to add to the response
:param default_headers: a bunch of default security headers that all websites should have
:return: decorator
"""
if not headers:
headers = {}
if not default_headers:
default_headers = {
'Content-Security-Policy': ("default-src 'none'; base-uri 'none'; "
"form-action 'none'; frame-ancestors 'none'"),
'Referrer-Policy': 'no-referrer',
'Strict-Transport-Security': 'max-age=63072000',
'X-Content-Type-Options': 'nosniff',
'X-Frame-Options': 'DENY',
'X-XSS-Protection': '1; mode=block',
}
headers.update(default_headers)
def decorator(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
# Don't call the underlying function if the method is OPTIONS
if request.method == 'OPTIONS':
resp = make_response()
else:
resp = make_response(fn(*args, **kwargs))
# Append the CORS headers
if cors:
headers.update({
'Access-Control-Allow-Origin': '*',
                    'Access-Control-Allow-Methods': ', '.join(sorted(request.url_rule.methods)),
'Access-Control-Max-Age': '86400',
})
# Append the headers to the response
for header, value in headers.items():
resp.headers[header] = value
return resp
return wrapper
return decorator
def sanitized_api_response(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
output = fn(*args, **kwargs)
SCAN_VALID_KEYS = ('algorithm_version', 'end_time', 'error', 'grade', 'hidden', 'likelihood_indicator',
'response_headers', 'scan_id', 'score', 'start_time', 'state', 'status_code',
'tests_completed', 'tests_failed', 'tests_passed', 'tests_quantity')
TEST_RESULT_VALID_KEYS = ('error', 'expectation', 'name', 'output', 'pass', 'result',
'score_description', 'score_modifier')
# Convert it to a dict (in case it's a DictRow)
output = dict(output)
if 'tests_quantity' in output: # autodetect that it's a scan
            # Rename 'id' to 'scan_id':
output['scan_id'] = output.pop('id')
# Remove 'error' if it's null
if output['error'] is None:
                del output['error']
# Delete any other things that might have made their way into the results
output = {k: output[k] for k in SCAN_VALID_KEYS if k in output}
elif 'content-security-policy' in output: # autodetect that it's a test result
for test in output:
# Delete unnecessary keys
output[test] = {k: output[test][k] for k in output[test] if k in TEST_RESULT_VALID_KEYS}
return jsonify(output)
return wrapper
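# Illustrative self-check (hypothetical app and route, not part of the
# original module): exercise add_response_headers with Flask's test client.
if __name__ == '__main__':
    from flask import Flask
    _app = Flask(__name__)
    @_app.route('/ping')
    @add_response_headers()
    def _ping():
        return jsonify(pong=True)
    with _app.test_client() as client:
        assert client.get('/ping').headers['X-Frame-Options'] == 'DENY'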
|
import os
import unittest
import mock
from perfkitbenchmarker.linux_benchmarks import glibc_benchmark
from perfkitbenchmarker.linux_packages import glibc
# Test metadata values
_TEST_GCC_VERSION = '7.4.0'
_TEST_NUM_VMS = 5
class GlibcTestCase(unittest.TestCase):
def setUp(self):
super(GlibcTestCase, self).setUp()
p = mock.patch(glibc_benchmark.__name__ + '.FLAGS')
p.start()
self.addCleanup(p.stop)
def testGetGccVersion(self):
"""Tests regex calls to parse gcc version."""
mock_vm = mock.Mock()
mock_vm.RemoteCommand.return_value = None, 'gcc version 7.4.0 20190909'
self.assertEqual(_TEST_GCC_VERSION, glibc.GetGccVersion(mock_vm))
def CallParseOutput(self, filename, upper_key, results, metadata):
"""Read sample outputs of glibc_benchmark and call ParseOutput function.
Args:
filename: The name of the sample output file
required to run the benchmark.
upper_key: The first dimension key of the glibc_output dict.
results:
A list to which the ParseOutput function will append new samples based
on the glibc output.
metadata: Common metadata to attach to samples.
"""
path = os.path.join(os.path.dirname(__file__), '../data',
filename)
with open(path) as fp:
self.contents = fp.read()
glibc_benchmark.ParseOutput(self.contents, upper_key, results, metadata)
def testParseOutputAttachesCorrectCommonMetadata(self):
"""Tests that a run of ParseOutput attaches the correct common metadata."""
metadata = {
'gcc': _TEST_GCC_VERSION,
'glibc_benchset': glibc_benchmark.glibc_default_benchset,
'glibc_version': glibc.GLIBC_VERSION,
'num_machines': _TEST_NUM_VMS,
}
results = []
upper_key = 'functions'
self.CallParseOutput(
'glibc_bench_output.txt', upper_key, results, metadata)
for sample in results:
result_metadata = sample.metadata
self.assertEqual(result_metadata['gcc'], _TEST_GCC_VERSION)
self.assertEqual(result_metadata['glibc_benchset'],
glibc_benchmark.glibc_default_benchset)
self.assertEqual(result_metadata['glibc_version'], glibc.GLIBC_VERSION)
self.assertEqual(result_metadata['num_machines'], _TEST_NUM_VMS)
def testParseGlibc(self):
results = []
upper_key = 'functions'
self.CallParseOutput(
'glibc_bench_output.txt', upper_key, results, {})
result = {i.metric: i.metadata for i in results}
metadata = result['pthread_once:']
self.assertEqual(63, len(results))
self.assertAlmostEqual(1.72198e+10, metadata['duration'])
self.assertAlmostEqual(3.20756e+09, metadata['iterations'])
self.assertAlmostEqual(9626.89, metadata['max'])
self.assertAlmostEqual(5.198, metadata['min'])
self.assertAlmostEqual(5.3685, metadata['mean'])
def testParseGlibc2(self):
results = []
upper_key = 'math-inlines'
self.CallParseOutput(
'glibc_benchset_output.txt', upper_key, results, {})
result = {i.metric: i.metadata for i in results}
metadata = result['__isnan:inf/nan']
self.assertEqual(42, len(results))
self.assertAlmostEqual(8.42329e+06, metadata['duration'])
self.assertAlmostEqual(500, metadata['iterations'])
self.assertAlmostEqual(16846, metadata['mean'])
def testParseGlibc3(self):
results = []
upper_key = 'functions'
self.CallParseOutput(
'glibc_malloc_output.txt', upper_key, results, {})
metadata = results[0].metadata
metric = results[0].metric
self.assertEqual(1, len(results))
self.assertEqual('malloc:', metric)
self.assertAlmostEqual(1.2e+11, metadata['duration'])
self.assertAlmostEqual(2.82979e+09, metadata['iterations'])
self.assertAlmostEqual(42.406, metadata['time_per_iteration'])
self.assertAlmostEqual(1800, metadata['max_rss'])
self.assertAlmostEqual(1, metadata['threads'])
self.assertAlmostEqual(4, metadata['min_size'])
self.assertAlmostEqual(32768, metadata['max_size'])
self.assertAlmostEqual(88, metadata['random_seed'])
if __name__ == '__main__':
unittest.main()
|
from unittest import mock
import pytest
from meld.const import ActionMode
from meld.matchers.myers import DiffChunk
def make_chunk(chunk_type):
return DiffChunk(chunk_type, 0, 1, 0, 1)
@pytest.mark.parametrize("mode, editable, chunk, expected_action", [
# Replace mode with replace chunks
(ActionMode.Replace, (True, True), make_chunk('replace'), ActionMode.Replace),
(ActionMode.Replace, (True, False), make_chunk('replace'), ActionMode.Delete),
(ActionMode.Replace, (False, True), make_chunk('replace'), ActionMode.Replace),
(ActionMode.Replace, (False, False), make_chunk('replace'), None),
# Replace mode with delete chunks
(ActionMode.Replace, (True, True), make_chunk('delete'), ActionMode.Replace),
(ActionMode.Replace, (True, False), make_chunk('delete'), ActionMode.Delete),
(ActionMode.Replace, (False, True), make_chunk('delete'), ActionMode.Replace),
(ActionMode.Replace, (False, False), make_chunk('delete'), None),
# Delete mode makes a slightly weird choice to remove non-delete
# actions while in delete mode; insert mode makes the opposite
# choice
#
# Delete mode with replace chunks
(ActionMode.Delete, (True, True), make_chunk('replace'), ActionMode.Delete),
(ActionMode.Delete, (True, False), make_chunk('replace'), ActionMode.Delete),
(ActionMode.Delete, (False, True), make_chunk('replace'), None),
(ActionMode.Delete, (False, False), make_chunk('replace'), None),
# Delete mode with delete chunks
(ActionMode.Delete, (True, True), make_chunk('delete'), ActionMode.Delete),
(ActionMode.Delete, (True, False), make_chunk('delete'), ActionMode.Delete),
(ActionMode.Delete, (False, True), make_chunk('delete'), None),
(ActionMode.Delete, (False, False), make_chunk('delete'), None),
# Insert mode with replace chunks
(ActionMode.Insert, (True, True), make_chunk('replace'), ActionMode.Insert),
(ActionMode.Insert, (True, False), make_chunk('replace'), ActionMode.Delete),
(ActionMode.Insert, (False, True), make_chunk('replace'), ActionMode.Insert),
(ActionMode.Insert, (False, False), make_chunk('replace'), None),
# Insert mode with delete chunks
(ActionMode.Insert, (True, True), make_chunk('delete'), ActionMode.Replace),
(ActionMode.Insert, (True, False), make_chunk('delete'), ActionMode.Delete),
(ActionMode.Insert, (False, True), make_chunk('delete'), ActionMode.Replace),
(ActionMode.Insert, (False, False), make_chunk('delete'), None),
# We should never have insert chunks here
(ActionMode.Replace, (True, True), make_chunk('insert'), None),
(ActionMode.Replace, (True, False), make_chunk('insert'), None),
(ActionMode.Replace, (False, True), make_chunk('insert'), None),
(ActionMode.Replace, (False, False), make_chunk('insert'), None),
# TODO: Add tests for conflict chunks
])
def test_classify_change_actions(mode, editable, chunk, expected_action):
    # These tests are disabled due to a segfault on the CI machines; skip
    # explicitly so the reason appears in test reports instead of a silent pass.
    pytest.skip("disabled due to a segfault on the CI machines")
from meld.actiongutter import ActionGutter
source_editable, target_editable = editable
with mock.patch.object(ActionGutter, 'icon_direction'):
renderer = ActionGutter()
renderer._source_view = mock.Mock()
renderer._source_view.get_editable.return_value = source_editable
renderer._target_view = mock.Mock()
renderer._target_view.get_editable.return_value = target_editable
renderer.action_mode = mode
action = renderer._classify_change_actions(chunk)
assert action == expected_action
|
import gc
from stash.tests.stashtest import StashTestCase
class GciTests(StashTestCase):
"""tests for the 'gci' command."""
def setUp(self):
"""setup the tests."""
StashTestCase.setUp(self)
gc.enable() # make sure gc is enabled.
def test_help(self):
"""test 'gci --help'"""
output = self.run_command("gci --help", exitcode=0)
self.assertIn("gci", output)
self.assertIn("-h", output)
self.assertIn("--help", output)
self.assertIn("usage", output)
self.assertIn("enable", output)
self.assertIn("disable", output)
self.assertIn("status", output)
self.assertIn("collect", output)
self.assertIn("threshold", output)
self.assertIn("break", output)
self.assertIn("debug", output)
def test_status(self):
"""test 'gci status'"""
output = self.run_command("gci status", exitcode=0)
self.assertIn("GC status:", output)
self.assertIn("Tracked objects:", output)
self.assertIn("Size of tracked objects:", output)
self.assertIn("Garbage:", output)
self.assertIn("Size of garbage:", output)
self.assertIn("Debug:", output)
def test_enabled_by_default(self):
"""test 'gci status' reporting the gc as enabled by default."""
output = self.run_command("gci status", exitcode=0)
self.assertIn("GC status:", output)
self.assertIn("Enabled", output)
self.assertNotIn("Disabled", output)
def test_disabled_status(self):
"""test 'gci status' reporting the gc as disabled."""
assert gc.isenabled(), "Internal error: gc not enabled at test start!"
gc.disable()
output = self.run_command("gci status", exitcode=0)
self.assertIn("GC status:", output)
self.assertNotIn("Enabled", output)
self.assertIn("Disabled", output)
gc.enable()
def test_disable(self):
"""test 'gci disable' reporting the gc as enabled by default."""
assert gc.isenabled(), "Internal error: gc not enabled at test start!"
self.run_command("gci disable", exitcode=0)
assert not gc.isenabled(), "'gci disable' did not work!"
gc.enable()
def test_enable(self):
"""test 'gci disable' reporting the gc as enabled by default."""
assert gc.isenabled(), "Internal error: gc not enabled at test start!"
gc.disable()
self.run_command("gci enable", exitcode=0)
assert gc.isenabled(), "'gci enable' did not work!"
def test_debug(self):
"""test 'gci debug'"""
output = self.run_command("gci debug", exitcode=0).replace("\n", "")
self.assertEqual(output, "Debug: 0")
self.run_command("gci debug 1", exitcode=0)
output = self.run_command("gci debug", exitcode=0).replace("\n", "")
self.assertEqual(output, "Debug: 1")
self.run_command("gci debug 0", exitcode=0)
output = self.run_command("gci debug", exitcode=0).replace("\n", "")
self.assertEqual(output, "Debug: 0")
def test_collect(self):
"""test 'gci collect'."""
# only check for exit code
# TODO: make a better test
output = self.run_command("gci collect", exitcode=0)
self.assertEqual(output.replace("\n", ""), "")
def test_break(self):
"""test 'gci break'."""
if len(gc.garbage) == 0:
eec = 1
eo = "Error: No Garbage found!"
else:
eec = 0
eo = ""
output = self.run_command("gci break", exitcode=eec)
self.assertEqual(output.replace("\n", ""), eo)
def test_threshold(self):
"""test 'gci threshold'."""
g1, g2, g3 = gc.get_threshold()
output = self.run_command("gci threshold", exitcode=0)
self.assertIn("G1: " + str(g1), output)
self.assertIn("G2: " + str(g2), output)
self.assertIn("G3: " + str(g3), output)
n1 = g1 + 1
n2 = g2 + 1
n3 = g3 + 1
output = self.run_command("gci threshold {} {} {}".format(n1, n2, n3), exitcode=0)
self.assertEqual(output.replace("\n", ""), "")
output = self.run_command("gci threshold", exitcode=0)
self.assertIn("G1: " + str(n1), output)
self.assertIn("G2: " + str(n2), output)
self.assertIn("G3: " + str(n3), output)
gc.set_threshold(g1, g2, g3)
output = self.run_command("gci threshold", exitcode=0)
self.assertIn("G1: " + str(g1), output)
self.assertIn("G2: " + str(g2), output)
self.assertIn("G3: " + str(g3), output)
|
import json
import httpretty
import pytest
from hangups import auth
# pylint: disable=redefined-outer-name
class FakeCredentialsPrompt(auth.CredentialsPrompt):
def __init__(self):
self.was_prompted = False
def get_email(self):
self.was_prompted = True
return '[email protected]'
def get_password(self):
self.was_prompted = True
return 'password'
def get_verification_code(self):
self.was_prompted = True
return '123456'
def get_authorization_code(self):
self.was_prompted = True
return 'auth_code'
@pytest.fixture
def credentials_prompt():
return FakeCredentialsPrompt()
class FakeRefreshTokenCache(auth.RefreshTokenCache):
def __init__(self):
super().__init__('fake_filename')
self._refresh_token = None
def get(self):
return self._refresh_token
def set(self, refresh_token):
self._refresh_token = refresh_token
@pytest.fixture
def refresh_token_cache():
return FakeRefreshTokenCache()
def get_form(form_id, action, input_id):
return '<form id="{}" action="{}"><input id="{}"></form>'.format(
form_id, action, input_id
)
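# e.g. (illustrative values) get_form('gaia_loginform', '/password_form', 'Email')
# -> '<form id="gaia_loginform" action="/password_form"><input id="Email"></form>'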
def mock_google(verification_input_id=None):
"""Set up httpretty to mock authentication requests.
This simplifies the sequence of redirects and doesn't make any assertions
about the requests.
"""
httpretty.HTTPretty.allow_net_connect = False
httpretty.register_uri(
httpretty.GET,
'https://accounts.google.com/o/oauth2/programmatic_auth',
body=get_form(
auth.FORM_SELECTOR[1:], '/password_form', auth.EMAIL_SELECTOR[1:]
), content_type='text/html'
)
next_action = (
'/verification' if verification_input_id is not None else '/finished'
)
httpretty.register_uri(
httpretty.GET, 'https://accounts.google.com/password_form',
body=get_form(
auth.FORM_SELECTOR[1:], next_action, auth.PASSWORD_SELECTOR[1:]
), content_type='text/html'
)
httpretty.register_uri(
httpretty.GET, 'https://accounts.google.com/verification',
body=get_form(
auth.VERIFICATION_FORM_SELECTOR[1:], '/finished',
verification_input_id
), content_type='text/html'
)
httpretty.register_uri(
httpretty.GET, 'https://accounts.google.com/finished',
body='success', content_type='text/html', set_cookie='oauth_code=foo'
)
httpretty.register_uri(
httpretty.POST, 'https://accounts.google.com/o/oauth2/token',
body=json.dumps(dict(access_token='access', refresh_token='refresh')),
content_type='application/json'
)
httpretty.register_uri(
httpretty.GET, 'https://accounts.google.com/accounts/OAuthLogin',
body='uberauth', content_type='text/html'
)
httpretty.register_uri(
httpretty.GET, 'https://accounts.google.com/MergeSession',
body='uberauth', content_type='text/html',
set_cookie='session=foo; Domain=.google.com'
)
@httpretty.activate
def test_login(credentials_prompt, refresh_token_cache):
mock_google()
cookies = auth.get_auth(credentials_prompt, refresh_token_cache)
assert credentials_prompt.was_prompted
assert refresh_token_cache.get() is not None
assert cookies['session'] == 'foo'
@httpretty.activate
def test_login_totp_verification(credentials_prompt, refresh_token_cache):
mock_google(verification_input_id=auth.TOTP_CODE_SELECTOR[1:])
cookies = auth.get_auth(credentials_prompt, refresh_token_cache)
assert credentials_prompt.was_prompted
assert refresh_token_cache.get() is not None
assert cookies['session'] == 'foo'
@httpretty.activate
def test_login_phone_verification(credentials_prompt, refresh_token_cache):
mock_google(verification_input_id=auth.PHONE_CODE_SELECTOR[1:])
cookies = auth.get_auth(credentials_prompt, refresh_token_cache)
assert credentials_prompt.was_prompted
assert refresh_token_cache.get() is not None
assert cookies['session'] == 'foo'
@httpretty.activate
def test_refresh_token(credentials_prompt, refresh_token_cache):
mock_google()
refresh_token_cache.set('foo')
cookies = auth.get_auth(credentials_prompt, refresh_token_cache)
assert not credentials_prompt.was_prompted
assert refresh_token_cache.get() is not None
assert cookies['session'] == 'foo'
@httpretty.activate
def test_manual_login(credentials_prompt, refresh_token_cache):
mock_google()
cookies = auth.get_auth(
credentials_prompt, refresh_token_cache, manual_login=True
)
assert credentials_prompt.was_prompted
assert refresh_token_cache.get() is not None
assert cookies['session'] == 'foo'
|
import logging
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
BaseNotificationService,
)
from homeassistant.const import ATTR_LATITUDE, ATTR_LOCATION, ATTR_LONGITUDE, ATTR_NAME
from . import DOMAIN as BMW_DOMAIN
ATTR_LAT = "lat"
ATTR_LOCATION_ATTRIBUTES = ["street", "city", "postal_code", "country"]
ATTR_LON = "lon"
ATTR_SUBJECT = "subject"
ATTR_TEXT = "text"
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the BMW notification service."""
accounts = hass.data[BMW_DOMAIN]
_LOGGER.debug("Found BMW accounts: %s", ", ".join([a.name for a in accounts]))
svc = BMWNotificationService()
svc.setup(accounts)
return svc
class BMWNotificationService(BaseNotificationService):
"""Send Notifications to BMW."""
def __init__(self):
"""Set up the notification service."""
self.targets = {}
def setup(self, accounts):
"""Get the BMW vehicle(s) for the account(s)."""
for account in accounts:
self.targets.update({v.name: v for v in account.account.vehicles})
def send_message(self, message="", **kwargs):
"""Send a message or POI to the car."""
for _vehicle in kwargs[ATTR_TARGET]:
_LOGGER.debug("Sending message to %s", _vehicle.name)
# Extract params from data dict
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA)
# Check if message is a POI
if data is not None and ATTR_LOCATION in data:
location_dict = {
ATTR_LAT: data[ATTR_LOCATION][ATTR_LATITUDE],
ATTR_LON: data[ATTR_LOCATION][ATTR_LONGITUDE],
ATTR_NAME: message,
}
# Update dictionary with additional attributes if available
location_dict.update(
{
k: v
for k, v in data[ATTR_LOCATION].items()
if k in ATTR_LOCATION_ATTRIBUTES
}
)
_vehicle.remote_services.trigger_send_poi(location_dict)
else:
_vehicle.remote_services.trigger_send_message(
{ATTR_TEXT: message, ATTR_SUBJECT: title}
)
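# Illustrative service-call data (hypothetical values) handled above: a plain
# message becomes trigger_send_message({'text': message, 'subject': title}),
# while data containing
#   {'location': {'latitude': 48.177, 'longitude': 11.559, 'street': '...'}}
# is forwarded as a POI via trigger_send_poi().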
|
from flexx.app._component2 import PyComponent, JsComponent
from flexx.app._component2 import BaseAppComponent, LocalComponent, ProxyComponent
from flexx.event import Component
from flexx import event, app
from flexx.util.testing import run_tests_if_main, raises, skip
class StubSession:
id = 'y'
status = 2
app = None
def _register_component(self, c, id=None):
id = id or 'x'
c._id = id
c._uid = self.id + '_' + id
def _unregister_component(self, c):
pass
def send_command(self, *command):
pass
def keep_alive(self, ob):
pass
class MyPComponent1(PyComponent):
CSS = "xx"
foo = event.IntProp()
foo2 = event.IntProp()
@event.action
def increase_foo(self):
self._mutate_foo(self.foo + 1)
@event.reaction('foo')
def track_foo(self, *events):
pass
class MyJComponent1(JsComponent):
CSS = "xx"
foo = event.IntProp()
foo2 = event.IntProp()
@event.action
def increase_foo(self):
self._mutate_foo(self.foo + 1)
@event.reaction('foo')
def track_foo(self, *events):
pass
class MyPComponent2(MyPComponent1):
pass
class MyJComponent2(MyJComponent1):
pass
all_classes = [MyPComponent2, MyJComponent2, MyPComponent2.JS, MyJComponent2.JS,
MyPComponent1, MyJComponent1, MyPComponent1.JS, MyJComponent1.JS,
PyComponent, JsComponent, PyComponent.JS, JsComponent.JS,
LocalComponent, ProxyComponent,
BaseAppComponent,
Component]
def test_pycomponent_heritage():
C = MyPComponent2
# Names and repr
assert C.__name__ == C.JS.__name__
assert 'PyComponent' in repr(C) and 'PyComponent' in repr(C.JS)
    assert 'proxy' not in repr(C) and 'proxy' in repr(C.JS)
    assert 'JS' not in repr(C) and 'for JS' in repr(C.JS)
mro = [MyPComponent2, MyPComponent1, PyComponent, LocalComponent, BaseAppComponent, Component, object]
# Validate inheritance of py class
assert C.mro() == mro
# Also check issubclass()
for cls in mro:
assert issubclass(C, cls)
for cls in all_classes:
if cls not in mro:
assert not issubclass(C, cls)
# Also check isinstance()
foo = C(flx_session=StubSession())
for cls in mro:
assert isinstance(foo, cls)
for cls in all_classes:
if cls not in mro:
assert not isinstance(foo, cls)
mro = [MyPComponent2.JS, MyPComponent1.JS, PyComponent.JS, ProxyComponent, BaseAppComponent, Component, object]
# Validate inheritance of JS class
assert C.JS.mro() == mro
# Also check issubclass()
for cls in mro:
assert issubclass(C.JS, cls)
for cls in all_classes:
if cls not in mro:
assert not issubclass(C.JS, cls)
def test_jscomponent_heritage():
session = app.manager.get_default_session()
if session is None:
session = app.manager.create_default_session()
C = MyJComponent2
# Names and repr
assert C.__name__ == C.JS.__name__
assert 'JsComponent' in repr(C) and 'JsComponent' in repr(C.JS)
assert 'proxy' in repr(C) and 'proxy' not in repr(C.JS)
    assert 'JS' not in repr(C) and 'for JS' in repr(C.JS)
mro = [MyJComponent2, MyJComponent1, JsComponent, ProxyComponent, BaseAppComponent, Component, object]
# Validate inheritance of py class
assert C.mro() == mro
# Also check issubclass()
for cls in mro:
assert issubclass(C, cls)
for cls in all_classes:
if cls not in mro:
assert not issubclass(C, cls)
# Also check isinstance()
foo = C(flx_session=session)
for cls in mro:
assert isinstance(foo, cls)
for cls in all_classes:
if cls not in mro:
assert not isinstance(foo, cls)
mro = [MyJComponent2.JS, MyJComponent1.JS, JsComponent.JS, LocalComponent, BaseAppComponent, Component, object]
# Validate inheritance of JS class
assert C.JS.mro() == mro
# Also check issubclass()
for cls in mro:
assert issubclass(C.JS, cls)
for cls in all_classes:
if cls not in mro:
assert not issubclass(C.JS, cls)
def test_properties():
assert MyPComponent2.__properties__ == ['foo', 'foo2']
assert MyPComponent2.JS.__properties__ == ['foo', 'foo2']
assert MyJComponent2.__properties__ == ['foo', 'foo2']
assert MyJComponent2.JS.__properties__ == ['foo', 'foo2']
assert MyPComponent2.__actions__ == ['increase_foo']
assert MyPComponent2.JS.__actions__ == ['_emit_at_proxy']
assert MyJComponent2.__actions__ == ['_emit_at_proxy']
assert MyJComponent2.JS.__actions__ == ['increase_foo']
assert MyPComponent2.__reactions__ == ['track_foo']
assert MyPComponent2.JS.__reactions__ == []
assert MyJComponent2.__reactions__ == []
assert MyJComponent2.JS.__reactions__ == ['track_foo']
def test_cannot_instantiate_without_session():
app.manager.remove_default_session()
with raises(RuntimeError) as err:
PyComponent()
assert 'needs a session!' in str(err)
with raises(RuntimeError) as err:
JsComponent()
assert 'needs a session!' in str(err)
def test_generated_js1():
m = app.assets.modules['flexx.app._component2']
js = m.get_js()
classes = []
for line in js.splitlines():
if '._base_class =' in line:
classes.append(line.split('.')[0])
assert classes == ['LocalProperty',
'BaseAppComponent',
'LocalComponent', 'ProxyComponent', 'StubComponent',
'JsComponent', 'PyComponent']
print(classes)
def test_generated_js2():
js = MyPComponent2.JS.CODE
assert '__properties__ = ["foo", "foo2"]' in js
assert js.count('foo2') == 1 # in __properties__
assert js.count('increase_foo') == 0
assert js.count('_mutate_') == 0
js = MyJComponent2.JS.CODE
assert '__properties__ = ["foo", "foo2"]' in js
assert js.count('foo2') == 2 # in __properties__ and __proxy_properties__
assert js.count('increase_foo') == 1
assert js.count('_mutate_') == 0
def test_generated_css1():
assert not hasattr(MyPComponent1.JS, 'CSS')
assert not hasattr(MyJComponent1.JS, 'CSS')
assert not hasattr(MyPComponent2.JS, 'CSS')
assert not hasattr(MyJComponent2.JS, 'CSS')
assert MyPComponent1.CSS == 'xx'
assert MyJComponent1.CSS == 'xx'
assert MyPComponent2.CSS == ''
assert MyJComponent2.CSS == ''
def test_misc():
clss = app.get_component_classes()
assert PyComponent in clss and JsComponent in clss
assert LocalComponent not in clss and ProxyComponent not in clss
assert BaseAppComponent not in clss
# Assert that the list is a copy
clss.remove(PyComponent)
assert PyComponent in app.get_component_classes()
run_tests_if_main()
|
from __future__ import absolute_import
import unittest
import lxml.html
from .common_imports import doctest, HelperTestCase, skipif
try:
import cssselect
except ImportError:
cssselect = None
HTML = '''
<div>
<a href="foo">link</a>
<a>anchor</a>
</div>
'''
class CSSTestCase(HelperTestCase):
pytestmark = skipif('cssselect is None')
def test_cssselect(self):
div, = lxml.html.fromstring(HTML).xpath('//div')
def count(selector, expected_count, **kwargs):
result = div.cssselect(selector, **kwargs)
self.assertEqual(len(result), expected_count)
count('div', 1)
count('a', 2)
count('em', 0)
# Element names are case-insensitive in HTML
count('DIV', 1)
# ... but not in XHTML or XML
count('DIV', 0, translator='xhtml')
count('DIV', 0, translator='xml')
# :contains() is case-insensitive in lxml
count(':contains("link")', 2) # div, a
count(':contains("LInk")', 2)
# Whatever the document language
count(':contains("LInk")', 2, translator='xhtml')
count(':contains("LInk")', 2, translator='xml')
# ... but not in upstream cssselect
import cssselect
count(':contains("link")', 2, translator=cssselect.HTMLTranslator())
count(':contains("LInk")', 0, translator=cssselect.HTMLTranslator())
def test_suite():
suite = unittest.TestSuite()
try:
import cssselect
except ImportError:
# no 'cssselect' installed
print("Skipping tests in lxml.cssselect - external cssselect package is not installed")
return suite
import lxml.cssselect
suite.addTests(doctest.DocTestSuite(lxml.cssselect))
suite.addTests([unittest.makeSuite(CSSTestCase)])
return suite
|
import numpy as np
from .utils import _ensure_int, verbose, logger
###############################################################################
# Class for interpolation between adjacent points
class _Interp2(object):
r"""Interpolate between two points.
Parameters
----------
control_points : array, shape (n_changes,)
The control points (indices) to use.
values : callable | array, shape (n_changes, ...)
Callable that takes the control point and returns a list of
arrays that must be interpolated.
interp : str
Can be 'zero', 'linear', 'hann', or 'cos2' (same as hann).
Notes
-----
This will process data using overlapping windows of potentially
different sizes to achieve a constant output value using different
2-point interpolation schemes. For example, for linear interpolation,
and window sizes of 6 and 17, this would look like::
1 _ _
|\ / '-. .-'
| \ / '-. .-'
| x |-.-|
| / \ .-' '-.
|/ \_.-' '-.
0 +----|----|----|----|---
0 5 10 15 20 25
"""
def __init__(self, control_points, values, interp='hann'):
# set up interpolation
self.control_points = np.array(control_points, int).ravel()
if not np.array_equal(np.unique(self.control_points),
self.control_points):
raise ValueError('Control points must be sorted and unique')
if len(self.control_points) == 0:
            raise ValueError('There must be at least one control point')
        if not (self.control_points >= 0).all():
            raise ValueError('All control points must be non-negative (got %s)'
                             % (self.control_points[:3],))
if isinstance(values, np.ndarray):
values = [values]
if isinstance(values, (list, tuple)):
for v in values:
if not (v is None or isinstance(v, np.ndarray)):
raise TypeError('All entries in "values" must be ndarray '
'or None, got %s' % (type(v),))
if v is not None and v.shape[0] != len(self.control_points):
raise ValueError('Values, if provided, must be the same '
'length as the number of control points '
'(%s), got %s'
% (len(self.control_points), v.shape[0]))
use_values = values
def val(pt):
idx = np.where(control_points == pt)[0][0]
return [v[idx] if v is not None else None for v in use_values]
values = val
self.values = values
self.n_last = None
self._position = 0 # start at zero
self._left_idx = 0
self._left = self._right = self._use_interp = None
known_types = ('cos2', 'linear', 'zero', 'hann')
if interp not in known_types:
raise ValueError('interp must be one of %s, got "%s"'
% (known_types, interp))
self._interp = interp
def feed_generator(self, n_pts):
"""Feed data and get interpolators as a generator."""
self.n_last = 0
n_pts = _ensure_int(n_pts, 'n_pts')
original_position = self._position
stop = self._position + n_pts
logger.debug('Feed %s (%s-%s)' % (n_pts, self._position, stop))
used = np.zeros(n_pts, bool)
if self._left is None: # first one
logger.debug(' Eval @ %s (%s)' % (0, self.control_points[0]))
self._left = self.values(self.control_points[0])
if len(self.control_points) == 1:
self._right = self._left
n_used = 0
# Left zero-order hold condition
if self._position < self.control_points[self._left_idx]:
n_use = min(self.control_points[self._left_idx] - self._position,
n_pts)
logger.debug(' Left ZOH %s' % n_use)
this_sl = slice(None, n_use)
assert used[this_sl].size == n_use
assert not used[this_sl].any()
used[this_sl] = True
yield [this_sl, self._left, None, None]
self._position += n_use
n_used += n_use
self.n_last += 1
# Standard interpolation condition
stop_right_idx = np.where(self.control_points >= stop)[0]
if len(stop_right_idx) == 0:
stop_right_idx = [len(self.control_points) - 1]
stop_right_idx = stop_right_idx[0]
left_idxs = np.arange(self._left_idx, stop_right_idx)
self.n_last += max(len(left_idxs) - 1, 0)
for bi, left_idx in enumerate(left_idxs):
if left_idx != self._left_idx or self._right is None:
if self._right is not None:
assert left_idx == self._left_idx + 1
self._left = self._right
self._left_idx += 1
self._use_interp = None # need to recreate it
eval_pt = self.control_points[self._left_idx + 1]
logger.debug(' Eval @ %s (%s)'
% (self._left_idx + 1, eval_pt))
self._right = self.values(eval_pt)
assert self._right is not None
left_point = self.control_points[self._left_idx]
right_point = self.control_points[self._left_idx + 1]
if self._use_interp is None:
interp_span = right_point - left_point
if self._interp == 'zero':
self._use_interp = None
elif self._interp == 'linear':
self._use_interp = np.linspace(1., 0., interp_span,
endpoint=False)
else: # self._interp in ('cos2', 'hann'):
self._use_interp = np.cos(
np.linspace(0, np.pi / 2., interp_span,
endpoint=False))
self._use_interp *= self._use_interp
n_use = min(stop, right_point) - self._position
if n_use > 0:
logger.debug(' Interp %s %s (%s-%s)' % (self._interp, n_use,
left_point, right_point))
interp_start = self._position - left_point
assert interp_start >= 0
if self._use_interp is None:
this_interp = None
else:
this_interp = \
self._use_interp[interp_start:interp_start + n_use]
assert this_interp.size == n_use
this_sl = slice(n_used, n_used + n_use)
assert used[this_sl].size == n_use
assert not used[this_sl].any()
used[this_sl] = True
yield [this_sl, self._left, self._right, this_interp]
self._position += n_use
n_used += n_use
# Right zero-order hold condition
if self.control_points[self._left_idx] <= self._position:
n_use = stop - self._position
if n_use > 0:
logger.debug(' Right ZOH %s' % n_use)
this_sl = slice(n_pts - n_use, None)
assert not used[this_sl].any()
used[this_sl] = True
assert self._right is not None
yield [this_sl, self._right, None, None]
self._position += n_use
n_used += n_use
self.n_last += 1
assert self._position == stop
assert n_used == n_pts
assert used.all()
assert self._position == original_position + n_pts
def feed(self, n_pts):
"""Feed data and get interpolated values."""
# Convenience function for assembly
out_arrays = None
for o in self.feed_generator(n_pts):
if out_arrays is None:
out_arrays = [np.empty(v.shape + (n_pts,))
if v is not None else None for v in o[1]]
for ai, arr in enumerate(out_arrays):
if arr is not None:
if o[3] is None:
arr[..., o[0]] = o[1][ai][..., np.newaxis]
else:
arr[..., o[0]] = (
o[1][ai][..., np.newaxis] * o[3] +
o[2][ai][..., np.newaxis] * (1. - o[3]))
assert out_arrays is not None
return out_arrays
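# Hedged usage sketch (not part of the original module): with two control
# points at samples 0 and 10 and values 0 and 1, a 'linear' _Interp2 ramps
# over the first ten samples and then holds the right value.
def _demo_interp2():
    interp = _Interp2(np.array([0, 10]), np.array([[0.], [1.]]),
                      interp='linear')
    out, = interp.feed(12)  # a single output array of shape (1, 12)
    assert out.shape == (1, 12)
    assert np.isclose(out[0, 0], 0.) and np.isclose(out[0, -1], 1.)
    return out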
###############################################################################
# Constant overlap-add processing class
def _check_store(store):
if isinstance(store, np.ndarray):
store = [store]
if isinstance(store, (list, tuple)) and all(isinstance(s, np.ndarray)
for s in store):
store = _Storer(*store)
if not callable(store):
raise TypeError('store must be callable, got type %s'
% (type(store),))
return store
class _COLA(object):
r"""Constant overlap-add processing helper.
Parameters
----------
process : callable
A function that takes a chunk of input data with shape
``(n_channels, n_samples)`` and processes it.
store : callable | ndarray
A function that takes a completed chunk of output data.
Can also be an ``ndarray``, in which case it is treated as the
output data in which to store the results.
n_total : int
The total number of samples.
n_samples : int
The number of samples per window.
n_overlap : int
The overlap between windows.
window : str
The window to use. Default is "hann".
tol : float
The tolerance for COLA checking.
Notes
-----
This will process data using overlapping windows to achieve a constant
output value. For example, for ``n_total=27``, ``n_samples=10``,
``n_overlap=5`` and ``window='triang'``::
1 _____ _______
| \ /\ /\ /
| \ / \ / \ /
| x x x
| / \ / \ / \
| / \/ \/ \
0 +----|----|----|----|----|-
0 5 10 15 20 25
This produces four windows: the first three are the requested length
(10 samples) and the last one is longer (12 samples). The first and last
window are asymmetric.
"""
@verbose
def __init__(self, process, store, n_total, n_samples, n_overlap,
sfreq, window='hann', tol=1e-10, verbose=None):
from scipy.signal import get_window
n_samples = _ensure_int(n_samples, 'n_samples')
n_overlap = _ensure_int(n_overlap, 'n_overlap')
n_total = _ensure_int(n_total, 'n_total')
if n_samples <= 0:
raise ValueError('n_samples must be > 0, got %s' % (n_samples,))
if n_overlap < 0:
raise ValueError('n_overlap must be >= 0, got %s' % (n_overlap,))
if n_total < 0:
raise ValueError('n_total must be >= 0, got %s' % (n_total,))
self._n_samples = int(n_samples)
self._n_overlap = int(n_overlap)
del n_samples, n_overlap
if n_total < self._n_samples:
raise ValueError('Number of samples per window (%d) must be at '
'most the total number of samples (%s)'
% (self._n_samples, n_total))
if not callable(process):
raise TypeError('process must be callable, got type %s'
% (type(process),))
self._process = process
self._step = self._n_samples - self._n_overlap
self._store = _check_store(store)
self._idx = 0
self._in_buffers = self._out_buffers = None
# Create our window boundaries
window_name = window if isinstance(window, str) else 'custom'
self._window = get_window(window, self._n_samples,
fftbins=(self._n_samples - 1) % 2)
self._window /= _check_cola(self._window, self._n_samples, self._step,
window_name, tol=tol)
self.starts = np.arange(0, n_total - self._n_samples + 1, self._step)
self.stops = self.starts + self._n_samples
delta = n_total - self.stops[-1]
self.stops[-1] = n_total
sfreq = float(sfreq)
pl = 's' if len(self.starts) != 1 else ''
logger.info(' Processing %4d data chunk%s of (at least) %0.1f sec '
'with %0.1f sec overlap and %s windowing'
% (len(self.starts), pl, self._n_samples / sfreq,
self._n_overlap / sfreq, window_name))
del window, window_name
if delta > 0:
logger.info(' The final %0.3f sec will be lumped into the '
'final window' % (delta / sfreq,))
self.verbose = verbose
@property
def _in_offset(self):
"""Compute from current processing window start and buffer len."""
return self.starts[self._idx] + self._in_buffers[0].shape[-1]
@verbose
def feed(self, *datas, **kwargs):
"""Pass in a chunk of data."""
# Append to our input buffer
if self._in_buffers is None:
self._in_buffers = [None] * len(datas)
if len(datas) != len(self._in_buffers):
raise ValueError('Got %d array(s), needed %d'
% (len(datas), len(self._in_buffers)))
for di, data in enumerate(datas):
if not isinstance(data, np.ndarray) or data.ndim < 1:
                raise TypeError('data entry %d must be at least a 1D ndarray, '
                                'got %s' % (di, type(data),))
if self._in_buffers[di] is None:
# In practice, users can give large chunks, so we use
# dynamic allocation of the in buffer. We could save some
# memory allocation by only ever processing max_len at once,
# but this would increase code complexity.
self._in_buffers[di] = np.empty(
data.shape[:-1] + (0,), data.dtype)
if data.shape[:-1] != self._in_buffers[di].shape[:-1] or \
self._in_buffers[di].dtype != data.dtype:
                raise TypeError('data must have dtype %s and shape[:-1]==%s, '
'got dtype %s shape[:-1]=%s'
% (self._in_buffers[di].dtype,
self._in_buffers[di].shape[:-1],
data.dtype, data.shape[:-1]))
logger.debug(' + Appending %d->%d'
% (self._in_offset, self._in_offset + data.shape[-1]))
self._in_buffers[di] = np.concatenate(
[self._in_buffers[di], data], -1)
if self._in_offset > self.stops[-1]:
raise ValueError('data (shape %s) exceeded expected total '
'buffer size (%s > %s)'
% (data.shape, self._in_offset,
self.stops[-1]))
# Check to see if we can process the next chunk and dump outputs
while self._idx < len(self.starts) and \
self._in_offset >= self.stops[self._idx]:
start, stop = self.starts[self._idx], self.stops[self._idx]
this_len = stop - start
this_window = self._window.copy()
if self._idx == len(self.starts) - 1:
this_window = np.pad(
self._window, (0, this_len - len(this_window)), 'constant')
for offset in range(self._step, len(this_window), self._step):
n_use = len(this_window) - offset
this_window[offset:] += self._window[:n_use]
if self._idx == 0:
for offset in range(self._n_samples - self._step, 0,
-self._step):
this_window[:offset] += self._window[-offset:]
logger.debug(' * Processing %d->%d' % (start, stop))
this_proc = [in_[..., :this_len].copy()
for in_ in self._in_buffers]
if not all(proc.shape[-1] == this_len == this_window.size
for proc in this_proc):
raise RuntimeError('internal indexing error')
outs = self._process(*this_proc, **kwargs)
if self._out_buffers is None:
max_len = np.max(self.stops - self.starts)
self._out_buffers = [np.zeros(o.shape[:-1] + (max_len,),
o.dtype) for o in outs]
for oi, out in enumerate(outs):
out *= this_window
self._out_buffers[oi][..., :stop - start] += out
self._idx += 1
if self._idx < len(self.starts):
next_start = self.starts[self._idx]
else:
next_start = self.stops[-1]
delta = next_start - self.starts[self._idx - 1]
for di in range(len(self._in_buffers)):
self._in_buffers[di] = self._in_buffers[di][..., delta:]
logger.debug(' - Shifting input/output buffers by %d samples'
% (delta,))
self._store(*[o[..., :delta] for o in self._out_buffers])
for ob in self._out_buffers:
ob[..., :-delta] = ob[..., delta:]
ob[..., -delta:] = 0.
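# Hedged sketch (not part of the original module): an identity `process`
# through _COLA should reconstruct its input, since the window is
# normalized to satisfy the COLA constraint; `_demo_cola_identity` and its
# parameters are illustrative only.
def _demo_cola_identity():
    rng = np.random.RandomState(0)
    data = rng.randn(2, 27)
    out = np.zeros_like(data)
    cola = _COLA(lambda x: (x,), out, n_total=27, n_samples=10,
                 n_overlap=5, sfreq=1.)
    cola.feed(data)  # smaller sequential chunks would work equally well
    np.testing.assert_allclose(out, data, atol=1e-7)
    return out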
def _check_cola(win, nperseg, step, window_name, tol=1e-10):
"""Check whether the Constant OverLap Add (COLA) constraint is met."""
# adapted from SciPy
binsums = np.sum([win[ii * step:(ii + 1) * step]
for ii in range(nperseg // step)], axis=0)
if nperseg % step != 0:
binsums[:nperseg % step] += win[-(nperseg % step):]
const = np.median(binsums)
deviation = np.max(np.abs(binsums - const))
if deviation > tol:
raise ValueError('segment length %d with step %d for %s window '
'type does not provide a constant output '
'(%g%% deviation)'
% (nperseg, step, window_name,
100 * deviation / const))
return const
class _Storer(object):
"""Store data in chunks."""
def __init__(self, *outs, picks=None):
for oi, out in enumerate(outs):
if not isinstance(out, np.ndarray) or out.ndim < 1:
                raise TypeError('outs[%d] must be at least a 1D ndarray, '
                                'got %s' % (oi, out))
self.outs = outs
self.idx = 0
self.picks = picks
def __call__(self, *outs):
if (len(outs) != len(self.outs) or
not all(out.shape[-1] == outs[0].shape[-1] for out in outs)):
raise ValueError('Bad outs')
idx = (Ellipsis,)
if self.picks is not None:
idx += (self.picks,)
stop = self.idx + outs[0].shape[-1]
idx += (slice(self.idx, stop),)
for o1, o2 in zip(self.outs, outs):
o1[idx] = o2
self.idx = stop
|
from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity
from homeassistant.exceptions import PlatformNotReady
from . import CONF_MONITORED_CONDITIONS, DATA_KEY, LTEEntity
from .sensor_types import BINARY_SENSOR_CLASSES
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up Netgear LTE binary sensor devices."""
if discovery_info is None:
return
modem_data = hass.data[DATA_KEY].get_modem_data(discovery_info)
if not modem_data or not modem_data.data:
raise PlatformNotReady
binary_sensor_conf = discovery_info[DOMAIN]
monitored_conditions = binary_sensor_conf[CONF_MONITORED_CONDITIONS]
binary_sensors = []
for sensor_type in monitored_conditions:
binary_sensors.append(LTEBinarySensor(modem_data, sensor_type))
async_add_entities(binary_sensors)
class LTEBinarySensor(LTEEntity, BinarySensorEntity):
"""Netgear LTE binary sensor entity."""
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return getattr(self.modem_data.data, self.sensor_type)
@property
def device_class(self):
"""Return the class of binary sensor."""
return BINARY_SENSOR_CLASSES[self.sensor_type]
|
import logging
import re
from hashlib import md5
import urllib.parse
import cherrypy
from cherrypy._cpcompat import text_or_bytes
from cherrypy.lib import httputil as _httputil
from cherrypy.lib import is_iterator
# Conditional HTTP request support #
def validate_etags(autotags=False, debug=False):
"""Validate the current ETag against If-Match, If-None-Match headers.
If autotags is True, an ETag response-header value will be provided
from an MD5 hash of the response body (unless some other code has
already provided an ETag header). If False (the default), the ETag
will not be automatic.
    WARNING: the autotags feature is not designed for URLs that allow
methods other than GET. For example, if a POST to the same URL returns
no content, the automatic ETag will be incorrect, breaking a fundamental
use for entity tags in a possibly destructive fashion. Likewise, if you
raise 304 Not Modified, the response body will be empty, the ETag hash
will be incorrect, and your application will break.
See :rfc:`2616` Section 14.24.
"""
response = cherrypy.serving.response
# Guard against being run twice.
if hasattr(response, 'ETag'):
return
status, reason, msg = _httputil.valid_status(response.status)
etag = response.headers.get('ETag')
# Automatic ETag generation. See warning in docstring.
if etag:
if debug:
cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS')
elif not autotags:
if debug:
cherrypy.log('Autotags off', 'TOOLS.ETAGS')
elif status != 200:
if debug:
cherrypy.log('Status not 200', 'TOOLS.ETAGS')
else:
etag = response.collapse_body()
etag = '"%s"' % md5(etag).hexdigest()
if debug:
cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS')
response.headers['ETag'] = etag
response.ETag = etag
# "If the request would, without the If-Match header field, result in
# anything other than a 2xx or 412 status, then the If-Match header
# MUST be ignored."
if debug:
cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS')
if status >= 200 and status <= 299:
request = cherrypy.serving.request
conditions = request.headers.elements('If-Match') or []
conditions = [str(x) for x in conditions]
if debug:
cherrypy.log('If-Match conditions: %s' % repr(conditions),
'TOOLS.ETAGS')
if conditions and not (conditions == ['*'] or etag in conditions):
raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
'not match %r' % (etag, conditions))
conditions = request.headers.elements('If-None-Match') or []
conditions = [str(x) for x in conditions]
if debug:
cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
'TOOLS.ETAGS')
if conditions == ['*'] or etag in conditions:
if debug:
cherrypy.log('request.method: %s' %
request.method, 'TOOLS.ETAGS')
if request.method in ('GET', 'HEAD'):
raise cherrypy.HTTPRedirect([], 304)
else:
raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
'matched %r' % (etag, conditions))
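# Hedged configuration sketch (illustrative, using the standard
# tools.<name>.<arg> config convention): enabling automatic ETag
# validation for an application.
def _demo_etags_config():
    return {'/': {'tools.etags.on': True, 'tools.etags.autotags': True}}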
def validate_since():
"""Validate the current Last-Modified against If-Modified-Since headers.
If no code has set the Last-Modified response header, then no validation
will be performed.
"""
response = cherrypy.serving.response
lastmod = response.headers.get('Last-Modified')
if lastmod:
status, reason, msg = _httputil.valid_status(response.status)
request = cherrypy.serving.request
since = request.headers.get('If-Unmodified-Since')
if since and since != lastmod:
if (status >= 200 and status <= 299) or status == 412:
raise cherrypy.HTTPError(412)
since = request.headers.get('If-Modified-Since')
if since and since == lastmod:
if (status >= 200 and status <= 299) or status == 304:
if request.method in ('GET', 'HEAD'):
raise cherrypy.HTTPRedirect([], 304)
else:
raise cherrypy.HTTPError(412)
# Tool code #
def allow(methods=None, debug=False):
"""Raise 405 if request.method not in methods (default ['GET', 'HEAD']).
The given methods are case-insensitive, and may be in any order.
If only one method is allowed, you may supply a single string;
if more than one, supply a list of strings.
Regardless of whether the current method is allowed or not, this
also emits an 'Allow' response header, containing the given methods.
"""
if not isinstance(methods, (tuple, list)):
methods = [methods]
methods = [m.upper() for m in methods if m]
if not methods:
methods = ['GET', 'HEAD']
elif 'GET' in methods and 'HEAD' not in methods:
methods.append('HEAD')
cherrypy.response.headers['Allow'] = ', '.join(methods)
if cherrypy.request.method not in methods:
if debug:
cherrypy.log('request.method %r not in methods %r' %
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
raise cherrypy.HTTPError(405)
else:
if debug:
cherrypy.log('request.method %r in methods %r' %
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
scheme='X-Forwarded-Proto', debug=False):
"""Change the base URL (scheme://host[:port][/path]).
For running a CP server behind Apache, lighttpd, or other HTTP server.
For Apache and lighttpd, you should leave the 'local' argument at the
default value of 'X-Forwarded-Host'. For Squid, you probably want to set
tools.proxy.local = 'Origin'.
If you want the new request.base to include path info (not just the host),
you must explicitly set base to the full base path, and ALSO set 'local'
to '', so that the X-Forwarded-Host request header (which never includes
path info) does not override it. Regardless, the value for 'base' MUST
NOT end in a slash.
cherrypy.request.remote.ip (the IP address of the client) will be
rewritten if the header specified by the 'remote' arg is valid.
By default, 'remote' is set to 'X-Forwarded-For'. If you do not
want to rewrite remote.ip, set the 'remote' arg to an empty string.
"""
request = cherrypy.serving.request
if scheme:
s = request.headers.get(scheme, None)
if debug:
cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY')
if s == 'on' and 'ssl' in scheme.lower():
# This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header
scheme = 'https'
else:
# This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
scheme = s
if not scheme:
scheme = request.base[:request.base.find('://')]
if local:
lbase = request.headers.get(local, None)
if debug:
cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY')
if lbase is not None:
base = lbase.split(',')[0]
if not base:
default = urllib.parse.urlparse(request.base).netloc
base = request.headers.get('Host', default)
if base.find('://') == -1:
# add http:// or https:// if needed
base = scheme + '://' + base
request.base = base
if remote:
xff = request.headers.get(remote)
if debug:
cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
if xff:
if remote == 'X-Forwarded-For':
# Grab the first IP in a comma-separated list. Ref #1268.
xff = next(ip.strip() for ip in xff.split(','))
request.remote.ip = xff
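# Hedged configuration sketch (illustrative): typical proxy tool settings
# for a CherryPy app behind a reverse proxy that sets X-Forwarded-* headers.
def _demo_proxy_config():
    return {
        'tools.proxy.on': True,
        'tools.proxy.local': 'X-Forwarded-Host',
        'tools.proxy.remote': 'X-Forwarded-For',
        'tools.proxy.scheme': 'X-Forwarded-Proto',
    }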
def ignore_headers(headers=('Range',), debug=False):
"""Delete request headers whose field names are included in 'headers'.
This is a useful tool for working behind certain HTTP servers;
for example, Apache duplicates the work that CP does for 'Range'
headers, and will doubly-truncate the response.
"""
request = cherrypy.serving.request
for name in headers:
if name in request.headers:
if debug:
cherrypy.log('Ignoring request header %r' % name,
'TOOLS.IGNORE_HEADERS')
del request.headers[name]
def response_headers(headers=None, debug=False):
"""Set headers on the response."""
if debug:
cherrypy.log('Setting response headers: %s' % repr(headers),
'TOOLS.RESPONSE_HEADERS')
for name, value in (headers or []):
cherrypy.serving.response.headers[name] = value
response_headers.failsafe = True
def referer(pattern, accept=True, accept_missing=False, error=403,
message='Forbidden Referer header.', debug=False):
"""Raise HTTPError if Referer header does/does not match the given pattern.
pattern
A regular expression pattern to test against the Referer.
accept
If True, the Referer must match the pattern; if False,
the Referer must NOT match the pattern.
accept_missing
If True, permit requests with no Referer header.
error
The HTTP error code to return to the client on failure.
message
A string to include in the response body on failure.
"""
try:
ref = cherrypy.serving.request.headers['Referer']
match = bool(re.match(pattern, ref))
if debug:
cherrypy.log('Referer %r matches %r' % (ref, pattern),
'TOOLS.REFERER')
if accept == match:
return
except KeyError:
if debug:
cherrypy.log('No Referer header', 'TOOLS.REFERER')
if accept_missing:
return
raise cherrypy.HTTPError(error, message)
class SessionAuth(object):
"""Assert that the user is logged in."""
session_key = 'username'
debug = False
def check_username_and_password(self, username, password):
pass
def anonymous(self):
"""Provide a temporary user name for anonymous users."""
pass
def on_login(self, username):
pass
def on_logout(self, username):
pass
def on_check(self, username):
pass
def login_screen(self, from_page='..', username='', error_msg='',
**kwargs):
return (str("""<html><body>
Message: %(error_msg)s
<form method="post" action="do_login">
Login: <input type="text" name="username" value="%(username)s" size="10" />
<br />
Password: <input type="password" name="password" size="10" />
<br />
<input type="hidden" name="from_page" value="%(from_page)s" />
<br />
<input type="submit" />
</form>
</body></html>""") % vars()).encode('utf-8')
def do_login(self, username, password, from_page='..', **kwargs):
"""Login. May raise redirect, or return True if request handled."""
response = cherrypy.serving.response
error_msg = self.check_username_and_password(username, password)
if error_msg:
body = self.login_screen(from_page, username, error_msg)
response.body = body
if 'Content-Length' in response.headers:
# Delete Content-Length header so finalize() recalcs it.
del response.headers['Content-Length']
return True
else:
cherrypy.serving.request.login = username
cherrypy.session[self.session_key] = username
self.on_login(username)
raise cherrypy.HTTPRedirect(from_page or '/')
def do_logout(self, from_page='..', **kwargs):
"""Logout. May raise redirect, or return True if request handled."""
sess = cherrypy.session
username = sess.get(self.session_key)
sess[self.session_key] = None
if username:
cherrypy.serving.request.login = None
self.on_logout(username)
raise cherrypy.HTTPRedirect(from_page)
def do_check(self):
"""Assert username. Raise redirect, or return True if request handled.
"""
sess = cherrypy.session
request = cherrypy.serving.request
response = cherrypy.serving.response
username = sess.get(self.session_key)
if not username:
sess[self.session_key] = username = self.anonymous()
self._debug_message('No session[username], trying anonymous')
if not username:
url = cherrypy.url(qs=request.query_string)
self._debug_message(
'No username, routing to login_screen with from_page %(url)r',
locals(),
)
response.body = self.login_screen(url)
if 'Content-Length' in response.headers:
# Delete Content-Length header so finalize() recalcs it.
del response.headers['Content-Length']
return True
self._debug_message('Setting request.login to %(username)r', locals())
request.login = username
self.on_check(username)
def _debug_message(self, template, context={}):
if not self.debug:
return
cherrypy.log(template % context, 'TOOLS.SESSAUTH')
def run(self):
request = cherrypy.serving.request
response = cherrypy.serving.response
path = request.path_info
if path.endswith('login_screen'):
self._debug_message('routing %(path)r to login_screen', locals())
response.body = self.login_screen()
return True
elif path.endswith('do_login'):
if request.method != 'POST':
response.headers['Allow'] = 'POST'
self._debug_message('do_login requires POST')
raise cherrypy.HTTPError(405)
self._debug_message('routing %(path)r to do_login', locals())
return self.do_login(**request.params)
elif path.endswith('do_logout'):
if request.method != 'POST':
response.headers['Allow'] = 'POST'
raise cherrypy.HTTPError(405)
self._debug_message('routing %(path)r to do_logout', locals())
return self.do_logout(**request.params)
else:
self._debug_message('No special path, running do_check')
return self.do_check()
def session_auth(**kwargs):
    """Session authentication hook.
    Any attribute of the SessionAuth class may be overridden
    via a keyword arg to this function:
    """
    sa = SessionAuth()
    for k, v in kwargs.items():
        setattr(sa, k, v)
    return sa.run()
# Document the overridable attributes on the function's docstring.
session_auth.__doc__ += '\n    ' + '\n    '.join(
    '{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
    for k in dir(SessionAuth)
    if not k.startswith('__')
)
def log_traceback(severity=logging.ERROR, debug=False):
"""Write the last error's traceback to the cherrypy error log."""
cherrypy.log('', 'HTTP', severity=severity, traceback=True)
def log_request_headers(debug=False):
"""Write request headers to the cherrypy error log."""
h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list]
cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP')
def log_hooks(debug=False):
"""Write request.hooks to the cherrypy error log."""
request = cherrypy.serving.request
msg = []
# Sort by the standard points if possible.
from cherrypy import _cprequest
points = _cprequest.hookpoints
for k in request.hooks.keys():
if k not in points:
points.append(k)
for k in points:
msg.append(' %s:' % k)
v = request.hooks.get(k, [])
v.sort()
for h in v:
msg.append(' %r' % h)
cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
':\n' + '\n'.join(msg), 'HTTP')
def redirect(url='', internal=True, debug=False):
"""Raise InternalRedirect or HTTPRedirect to the given url."""
if debug:
cherrypy.log('Redirecting %sto: %s' %
({True: 'internal ', False: ''}[internal], url),
'TOOLS.REDIRECT')
if internal:
raise cherrypy.InternalRedirect(url)
else:
raise cherrypy.HTTPRedirect(url)
def trailing_slash(missing=True, extra=False, status=None, debug=False):
"""Redirect if path_info has (missing|extra) trailing slash."""
request = cherrypy.serving.request
pi = request.path_info
if debug:
cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' %
(request.is_index, missing, extra, pi),
'TOOLS.TRAILING_SLASH')
if request.is_index is True:
if missing:
if not pi.endswith('/'):
new_url = cherrypy.url(pi + '/', request.query_string)
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
elif request.is_index is False:
if extra:
# If pi == '/', don't redirect to ''!
if pi.endswith('/') and pi != '/':
new_url = cherrypy.url(pi[:-1], request.query_string)
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
def flatten(debug=False):
"""Wrap response.body in a generator that recursively iterates over body.
This allows cherrypy.response.body to consist of 'nested generators';
that is, a set of generators that yield generators.
"""
def flattener(input):
numchunks = 0
for x in input:
if not is_iterator(x):
numchunks += 1
yield x
else:
for y in flattener(x):
numchunks += 1
yield y
if debug:
cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN')
response = cherrypy.serving.response
response.body = flattener(response.body)
def accept(media=None, debug=False):
"""Return the client's preferred media-type (from the given Content-Types).
If 'media' is None (the default), no test will be performed.
If 'media' is provided, it should be the Content-Type value (as a string)
or values (as a list or tuple of strings) which the current resource
can emit. The client's acceptable media ranges (as declared in the
Accept request header) will be matched in order to these Content-Type
values; the first such string is returned. That is, the return value
will always be one of the strings provided in the 'media' arg (or None
if 'media' is None).
If no match is found, then HTTPError 406 (Not Acceptable) is raised.
Note that most web browsers send */* as a (low-quality) acceptable
media range, which should match any Content-Type. In addition, "...if
no Accept header field is present, then it is assumed that the client
accepts all media types."
Matching types are checked in order of client preference first,
and then in the order of the given 'media' values.
Note that this function does not honor accept-params (other than "q").
"""
if not media:
return
if isinstance(media, text_or_bytes):
media = [media]
request = cherrypy.serving.request
# Parse the Accept request header, and try to match one
# of the requested media-ranges (in order of preference).
ranges = request.headers.elements('Accept')
if not ranges:
# Any media type is acceptable.
if debug:
cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT')
return media[0]
else:
# Note that 'ranges' is sorted in order of preference
for element in ranges:
if element.qvalue > 0:
if element.value == '*/*':
# Matches any type or subtype
if debug:
cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
return media[0]
elif element.value.endswith('/*'):
# Matches any subtype
mtype = element.value[:-1] # Keep the slash
for m in media:
if m.startswith(mtype):
if debug:
cherrypy.log('Match due to %s' % element.value,
'TOOLS.ACCEPT')
return m
else:
# Matches exact value
if element.value in media:
if debug:
cherrypy.log('Match due to %s' % element.value,
'TOOLS.ACCEPT')
return element.value
# No suitable media-range found.
ah = request.headers.get('Accept')
if ah is None:
msg = 'Your client did not send an Accept header.'
else:
msg = 'Your client sent this Accept header: %s.' % ah
msg += (' But this resource only emits these media types: %s.' %
', '.join(media))
raise cherrypy.HTTPError(406, msg)
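# Hedged configuration sketch (illustrative): declaring the media types a
# resource can emit; the accept tool then returns the client's preferred
# match or raises 406.
def _demo_accept_config():
    return {'tools.accept.on': True,
            'tools.accept.media': ['text/html', 'application/json']}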
class MonitoredHeaderMap(_httputil.HeaderMap):
def transform_key(self, key):
self.accessed_headers.add(key)
return super(MonitoredHeaderMap, self).transform_key(key)
def __init__(self):
self.accessed_headers = set()
super(MonitoredHeaderMap, self).__init__()
def autovary(ignore=None, debug=False):
"""Auto-populate the Vary response header based on request.header access.
"""
request = cherrypy.serving.request
req_h = request.headers
request.headers = MonitoredHeaderMap()
request.headers.update(req_h)
if ignore is None:
ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type'])
def set_response_header():
resp_h = cherrypy.serving.response.headers
v = set([e.value for e in resp_h.elements('Vary')])
if debug:
cherrypy.log(
'Accessed headers: %s' % request.headers.accessed_headers,
'TOOLS.AUTOVARY')
v = v.union(request.headers.accessed_headers)
v = v.difference(ignore)
v = list(v)
v.sort()
resp_h['Vary'] = ', '.join(v)
request.hooks.attach('before_finalize', set_response_header, 95)
def convert_params(exception=ValueError, error=400):
"""Convert request params based on function annotations, with error handling.
exception
Exception class to catch.
    error
The HTTP error code to return to the client on failure.
"""
request = cherrypy.serving.request
types = request.handler.callable.__annotations__
with cherrypy.HTTPError.handle(exception, error):
for key in set(types).intersection(request.params):
request.params[key] = types[key](request.params[key])
|
import hashlib
import os
import stat
import subprocess
from base64 import b64decode, b64encode
from distutils.spawn import find_executable
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from weblate.trans.util import get_clean_env
from weblate.utils import messages
from weblate.utils.data import data_dir
from weblate.utils.hash import calculate_checksum
# SSH key files
KNOWN_HOSTS = "known_hosts"
RSA_KEY = "id_rsa"
RSA_KEY_PUB = "id_rsa.pub"
def ssh_file(filename):
"""Generate full path to SSH configuration file."""
return os.path.join(data_dir("ssh"), filename)
def is_key_line(key):
"""Check whether this line looks like a valid known_hosts line."""
if not key:
return False
# Comment
if key[0] == "#":
return False
# Special entry like @cert-authority
if key[0] == "@":
return False
return (
" ssh-rsa " in key or " ecdsa-sha2-nistp256 " in key or " ssh-ed25519 " in key
)
def parse_hosts_line(line):
"""Parse single hosts line into tuple host, key fingerprint."""
host, keytype, key = line.strip().split(None, 3)[:3]
digest = hashlib.sha256(b64decode(key)).digest()
fingerprint = b64encode(digest).rstrip(b"=").decode()
if host.startswith("|1|"):
# Translators: placeholder SSH hashed hostname
host = _("[hostname hashed]")
return host, keytype, fingerprint
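# Hedged sketch (not part of the original module): parsing a made-up
# known_hosts line; the base64 payload is dummy data, not a real key.
def _demo_parse_hosts_line():
    line = "example.com ssh-ed25519 " + b64encode(b"dummy-key-bytes").decode()
    host, keytype, fingerprint = parse_hosts_line(line)
    assert (host, keytype) == ("example.com", "ssh-ed25519")
    return fingerprint  # SHA256 digest, base64-encoded without padding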
def get_host_keys():
"""Return list of host keys."""
try:
result = []
with open(ssh_file(KNOWN_HOSTS)) as handle:
for line in handle:
line = line.strip()
if is_key_line(line):
result.append(parse_hosts_line(line))
except OSError:
return []
return result
def get_key_data():
"""Parse host key and returns it."""
# Read key data if it exists
if os.path.exists(ssh_file(RSA_KEY_PUB)):
with open(ssh_file(RSA_KEY_PUB)) as handle:
key_data = handle.read()
key_type, key_fingerprint, key_id = key_data.strip().split(None, 2)
return {
"key": key_data,
"type": key_type,
"fingerprint": key_fingerprint,
"id": key_id,
}
return None
def generate_ssh_key(request):
"""Generate SSH key."""
keyfile = ssh_file(RSA_KEY)
pubkeyfile = ssh_file(RSA_KEY_PUB)
try:
# Actually generate the key
subprocess.run(
[
"ssh-keygen",
"-q",
"-b",
"4096",
"-N",
"",
"-C",
"Weblate",
"-t",
"rsa",
"-f",
keyfile,
],
universal_newlines=True,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=get_clean_env(),
)
except (subprocess.CalledProcessError, OSError) as exc:
messages.error(
request, _("Failed to generate key: %s") % getattr(exc, "output", str(exc))
)
return
# Fix key permissions
os.chmod(keyfile, stat.S_IWUSR | stat.S_IRUSR)
os.chmod(pubkeyfile, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
messages.success(request, _("Created new SSH key."))
def add_host_key(request, host, port=""):
"""Add host key for a host."""
if not host:
messages.error(request, _("Invalid host name given!"))
else:
cmdline = ["ssh-keyscan"]
if port:
cmdline.extend(["-p", str(port)])
cmdline.append(host)
try:
result = subprocess.run(
cmdline,
env=get_clean_env(),
check=True,
universal_newlines=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
keys = []
for key in result.stdout.splitlines():
key = key.strip()
if not is_key_line(key):
continue
keys.append(key)
host, keytype, fingerprint = parse_hosts_line(key)
messages.warning(
request,
_(
"Added host key for %(host)s with fingerprint "
"%(fingerprint)s (%(keytype)s), "
"please verify that it is correct."
)
% {"host": host, "fingerprint": fingerprint, "keytype": keytype},
)
if not keys:
messages.error(request, _("Failed to fetch public key for a host!"))
with open(ssh_file(KNOWN_HOSTS), "a") as handle:
for key in keys:
handle.write(f"{key}\n")
except subprocess.CalledProcessError as exc:
messages.error(
request, _("Failed to get host key: %s") % exc.stderr or exc.stdout
)
except OSError as exc:
messages.error(request, _("Failed to get host key: %s") % str(exc))
def can_generate_key():
"""Check whether we can generate key."""
return find_executable("ssh-keygen") is not None
class SSHWrapper:
# Custom ssh wrapper
# - use custom location for known hosts and key
# - do not hash it
    # - strict host key checking
# - force not using system configuration (to avoid evil things as SendEnv)
SSH_WRAPPER_TEMPLATE = r"""#!/bin/sh
exec ssh \
-o "UserKnownHostsFile={known_hosts}" \
-o "IdentityFile={identity}" \
-o StrictHostKeyChecking=yes \
-o HashKnownHosts=no \
-F /dev/null \
"$@"
"""
@cached_property
def filename(self):
"""Calculates unique wrapper filename.
It is based on template and DATA_DIR settings.
"""
digest = calculate_checksum(self.SSH_WRAPPER_TEMPLATE, data_dir("ssh"))
return ssh_file(f"ssh-weblate-wrapper-{digest}")
def create(self):
"""Create wrapper for SSH to pass custom known hosts and key."""
if os.path.exists(self.filename):
return
with open(self.filename, "w") as handle:
handle.write(
self.SSH_WRAPPER_TEMPLATE.format(
known_hosts=ssh_file(KNOWN_HOSTS), identity=ssh_file(RSA_KEY)
)
)
os.chmod(self.filename, 0o755) # nosec
SSH_WRAPPER = SSHWrapper()
|
import logging
import os
import inspect
import imp
import sys
import re
import six
logging.basicConfig()
logger = logging.getLogger("kalliope")
def pipe_print(line):
line = Utils.encode_text_utf8(line)
print(line)
class KalliopeModuleNotFoundError(Exception):
"""
    The module could not be found.
    .. notes: Check the case: the module name must be in lower case.
"""
pass
class Utils(object):
color_list = dict(
        HEADER='\033[95m',  # required by print_header below
        PURPLE='\033[95m',
BLUE='\033[94m',
GREEN='\033[92m',
YELLOW='\033[93m',
RED='\033[91m',
ENDLINE='\033[0m',
BOLD='\033[1m',
UNDERLINE='\033[4m'
)
##################
#
# Shell properly displayed
#
#########
@classmethod
def print_info(cls, text_to_print):
pipe_print(cls.color_list["BLUE"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_success(cls, text_to_print):
pipe_print(cls.color_list["GREEN"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_warning(cls, text_to_print):
pipe_print(cls.color_list["YELLOW"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_danger(cls, text_to_print):
pipe_print(cls.color_list["RED"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_header(cls, text_to_print):
pipe_print(cls.color_list["HEADER"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_purple(cls, text_to_print):
pipe_print(cls.color_list["PURPLE"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_bold(cls, text_to_print):
pipe_print(cls.color_list["BOLD"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@classmethod
def print_underline(cls, text_to_print):
pipe_print(cls.color_list["UNDERLINE"] + text_to_print + cls.color_list["ENDLINE"])
logger.debug(text_to_print)
@staticmethod
def print_yaml_nicely(to_print):
"""
Used for debug
:param to_print: Dict to print nicely
:return:
"""
import json
line = json.dumps(to_print, indent=2)
return line.encode('utf-8')
##################
#
# Dynamic loading
#
#########
@classmethod
def get_dynamic_class_instantiation(cls, package_name, module_name, parameters=None, resources_dir=None):
"""
Load a python class dynamically
from my_package.my_module import my_class
mod = __import__('my_package.my_module', fromlist=['my_class'])
klass = getattr(mod, 'my_class')
:param package_name: name of the package where we will find the module to load (neurons, tts, stt, trigger)
:param module_name: name of the module from the package_name to load. This one is capitalized. Eg: Snowboy
:param parameters: dict parameters to send as argument to the module
:param resources_dir: the resource directory to check for external resources
:return:
"""
package_path = "kalliope." + package_name + "." + module_name.lower() + "." + module_name.lower()
logger.debug("[Utils]-> get_dynamic_class_instantiation : package path : %s" % (package_path))
if resources_dir is not None:
neuron_resource_path = resources_dir + os.sep + module_name.lower() \
+ os.sep + module_name.lower() + ".py"
if os.path.exists(neuron_resource_path):
imp.load_source(module_name.capitalize(), neuron_resource_path)
package_path = module_name.capitalize()
logger.debug("[Utils]-> get_dynamic_class_instantiation : loading path : %s, as package %s" % (
neuron_resource_path, package_path))
mod = __import__(package_path, fromlist=[module_name.capitalize()])
try:
klass = getattr(mod, module_name.capitalize())
except AttributeError:
logger.debug("Error: No module named %s " % module_name.capitalize())
raise KalliopeModuleNotFoundError(
"The module %s does not exist in package %s" % (module_name.capitalize(), package_name))
if klass is not None:
# run the plugin
if not parameters:
return klass()
elif isinstance(parameters, dict):
return klass(**parameters)
else:
return klass(parameters)
return None
##################
#
# Paths management
#
#########
@staticmethod
def get_current_file_parent_parent_path(current_script_path):
parent_parent_path = os.path.normpath(current_script_path + os.sep + os.pardir + os.sep + os.pardir)
return parent_parent_path
@staticmethod
def get_current_file_parent_path(current_script_path):
parent_path = os.path.normpath(current_script_path + os.sep + os.pardir)
return parent_path
@classmethod
def get_real_file_path(cls, file_path_to_test):
"""
        Try to return a full path from a given <file_path_to_test>.
        If the path is an absolute one, we return it directly.
        If the path is relative, we try to get the full path in this order:
        - from the current directory where kalliope has been called + the file_path_to_test.
        Eg: /home/me/Documents/kalliope_config
        - from /etc/kalliope + file_path_to_test
        - from the default file passed as <file_name> at the root of the project
        :param file_path_to_test: file path to test
        :type file_path_to_test: str
        :return: absolute path to the file file_path_to_test or None if it doesn't exist
"""
if not os.path.isabs(file_path_to_test):
current_script_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
path_order = {
1: os.getcwd() + os.sep + file_path_to_test,
2: "/etc/kalliope" + os.sep + file_path_to_test,
# In this case 'get_current_file_parent_parent_path' is corresponding to kalliope root path
# from /an/unknown/path/kalliope/kalliope/core/Utils to /an/unknown/path/kalliope/kalliope
3: cls.get_current_file_parent_parent_path(current_script_path) + os.sep + file_path_to_test
}
for key in sorted(path_order):
new_file_path_to_test = path_order[key]
logger.debug("Try to load file from %s: %s" % (key, new_file_path_to_test))
if os.path.isfile(new_file_path_to_test):
logger.debug("File found in %s" % new_file_path_to_test)
return new_file_path_to_test
else:
if os.path.isfile(file_path_to_test):
return file_path_to_test
else:
return None
@staticmethod
def query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is True for "yes" or False for "no".
"""
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
Utils.print_warning(question + prompt)
choice = input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
Utils.print_warning("Please respond with 'yes' or 'no' or 'y' or 'n').\n")
##################
#
# Brackets management
#
#########
@staticmethod
def is_containing_bracket(sentence):
"""
Return True if the text in <sentence> contains brackets
:param sentence:
:return:
"""
# print "sentence to test %s" % sentence
pattern = r"{{|}}"
# prog = re.compile(pattern)
if not isinstance(sentence, six.text_type):
sentence = str(sentence)
check_bool = re.search(pattern, sentence)
if check_bool is not None:
return True
return False
@staticmethod
def find_all_matching_brackets(sentence):
"""
Find all the bracket matches from a given sentence
:param sentence: the sentence to check
:return: the list with all the matches
"""
pattern = r"((?:{{\s*)[\w\.]+(?:\s*}}))"
# find everything like {{ word }}
if not isinstance(sentence, six.text_type):
sentence = str(sentence)
return re.findall(pattern, sentence)
@staticmethod
def remove_spaces_in_brackets(sentence):
"""
        If the sentence has brackets, remove the spaces inside them
:param sentence: the sentence to work on
:return: the sentence without any spaces in brackets
"""
        pattern = r'(?<=\{\{)\s+|\s+(?=\}\})'
# Remove white spaces (if any) between the variable and the double brace then split
if not isinstance(sentence, six.text_type):
sentence = str(sentence)
return re.sub(pattern, '', sentence)
##################
#
# Lists management
#
#########
@staticmethod
def get_next_value_list(list_to_check):
ite = list_to_check.__iter__()
next(ite, None)
return next(ite, None)
##################
#
# Encoding
#
#########
@staticmethod
def encode_text_utf8(text):
if sys.version_info[0] < 3:
if isinstance(text, str):
text = text.encode("utf-8")
return text
@staticmethod
def str_to_bool(s):
if isinstance(s, bool): # do not convert if already a boolean
return s
else:
if s == 'True' \
or s == 'true' \
or s == '1' \
or s == 1 \
or s == True:
return True
elif s == 'False' \
or s == 'false' \
or s == '0' \
or s == 0 \
or s == False:
return False
else:
return False
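# Hedged sketch (not part of the original module) exercising the bracket
# helpers above on a Jinja-style sentence.
def _demo_brackets():
    sentence = "hello {{ name }}"
    assert Utils.is_containing_bracket(sentence)
    assert Utils.find_all_matching_brackets(sentence) == ["{{ name }}"]
    assert Utils.remove_spaces_in_brackets(sentence) == "hello {{name}}"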
|
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
from perfkitbenchmarker import regex_util
FLAGS = flags.FLAGS
flags.DEFINE_string('mofed_version', '4.7-3.2.9.0', 'Mellanox OFED version')
# TODO(tohaowu) Add DEBIAN9, CENTOS7, RHEL
MOFED_OS_MAPPING = {
os_types.UBUNTU1604: 'ubuntu16.04',
os_types.UBUNTU1604_CUDA9: 'ubuntu16.04',
os_types.UBUNTU1710: 'ubuntu17.10',
os_types.UBUNTU1804: 'ubuntu18.04',
}
# Mellanox OpenFabrics drivers
MOFED_DRIVER = ('https://www.mellanox.com/downloads/ofed/MLNX_OFED-{version}/'
'MLNX_OFED_LINUX-{version}-{os}-x86_64.tgz')
def _Install(vm):
"""Installs the OpenMPI package on the VM."""
if vm.OS_TYPE not in MOFED_OS_MAPPING:
raise ValueError('OS type {} not in {}'.format(vm.OS_TYPE,
sorted(MOFED_OS_MAPPING)))
driver = MOFED_DRIVER.format(version=FLAGS.mofed_version,
os=MOFED_OS_MAPPING[vm.OS_TYPE])
vm.InstallPackages('libdapl2 libmlx4-1')
try:
vm.RemoteCommand('curl -fSsL {} | tar -zxpf -'.format(driver))
  except Exception:
raise errors.Setup.InvalidSetupError('Failed to download {}'.format(driver))
stdout, _ = vm.RemoteCommand('cd MLNX_OFED_LINUX-* && sudo ./mlnxofedinstall '
'--force')
if not regex_util.ExtractExactlyOneMatch(r'Installation passed successfully',
stdout):
raise errors.Benchmarks.PrepareException(
        'Mellanox OpenFabrics driver was not installed successfully.')
vm.RemoteCommand('sudo /etc/init.d/openibd restart')
vm.RemoteCommand("sudo sed -i -e 's/# OS.EnableRDMA=y/"
"OS.EnableRDMA=y/g' /etc/waagent.conf")
vm.RemoteCommand("sudo sed -i -e 's/# OS.UpdateRdmaDriver=y/"
"OS.UpdateRdmaDriver=y/g' /etc/waagent.conf")
# https://docs.microsoft.com/en-us/azure/virtual-machines/linux/sizes-hpc#rdma-capable-instances
vm.RemoteCommand('cat << EOF | sudo tee -a /etc/security/limits.conf\n'
'* hard memlock unlimited\n'
'* soft memlock unlimited\n'
'* hard nofile 65535\n'
'* soft nofile 65535\n'
'EOF')
def YumInstall(vm):
"""Installs the OpenMPI package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the OpenMPI package on the VM."""
_Install(vm)
|
import unittest
import numpy as np
import numpy.testing as np_test
from scipy.special import beta
from scipy.stats import multivariate_normal
from pgmpy.factors.continuous import ContinuousFactor
class TestContinuousFactor(unittest.TestCase):
def pdf1(self, x, y):
return np.power(x, 1) * np.power(y, 2) / beta(x, y)
def pdf2(self, *args):
return multivariate_normal.pdf(args, [0, 0], [[1, 0], [0, 1]])
def pdf3(self, x, y, z):
return z * (np.power(x, 1) * np.power(y, 2)) / beta(x, y)
def test_class_init(self):
phi1 = ContinuousFactor(["x", "y"], self.pdf1)
self.assertEqual(phi1.scope(), ["x", "y"])
self.assertEqual(phi1.pdf, self.pdf1)
phi2 = ContinuousFactor(["x1", "x2"], self.pdf2)
self.assertEqual(phi2.scope(), ["x1", "x2"])
self.assertEqual(phi2.pdf, self.pdf2)
phi3 = ContinuousFactor(["x", "y", "z"], self.pdf3)
self.assertEqual(phi3.scope(), ["x", "y", "z"])
self.assertEqual(phi3.pdf, self.pdf3)
def test_class_init_typeerror(self):
self.assertRaises(TypeError, ContinuousFactor, "x y", self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, "x", self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, "x1 x2", self.pdf2)
self.assertRaises(TypeError, ContinuousFactor, "x1", self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, "x y z", self.pdf3)
self.assertRaises(TypeError, ContinuousFactor, "x", self.pdf3)
self.assertRaises(TypeError, ContinuousFactor, set(["x", "y"]), self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, {"x": 1, "y": 2}, self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, set(["x1", "x2"]), self.pdf2)
self.assertRaises(TypeError, ContinuousFactor, {"x1": 1, "x2": 2}, self.pdf1)
self.assertRaises(TypeError, ContinuousFactor, set(["x", "y", "z"]), self.pdf3)
self.assertRaises(
TypeError, ContinuousFactor, {"x": 1, "y": 2, "z": 3}, self.pdf3
)
def test_class_init_valueerror(self):
self.assertRaises(ValueError, ContinuousFactor, ["x", "x"], self.pdf1)
self.assertRaises(ValueError, ContinuousFactor, ["x", "y", "y"], self.pdf1)
self.assertRaises(ValueError, ContinuousFactor, ["x1", "x1"], self.pdf2)
self.assertRaises(ValueError, ContinuousFactor, ["x1", "x2", "x2"], self.pdf2)
self.assertRaises(ValueError, ContinuousFactor, ["x", "x"], self.pdf1)
self.assertRaises(
ValueError, ContinuousFactor, ["x", "y", "y", "z", "z"], self.pdf1
)
class TestContinuousFactorMethods(unittest.TestCase):
def pdf1(self, x, y):
return np.power(x, 1) * np.power(y, 2) / beta(x, y)
def pdf2(self, x1, x2):
return multivariate_normal.pdf([x1, x2], [0, 0], [[1, 0], [0, 1]])
def pdf3(self, x, y, z):
return z * (np.power(x, 1) * np.power(y, 2)) / beta(x, y)
def pdf4(self, x1, x2, x3):
return multivariate_normal.pdf(
[x1, x2, x3], [0, 0, 0], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
)
def setUp(self):
self.phi1 = ContinuousFactor(["x", "y"], self.pdf1)
self.phi2 = ContinuousFactor(["x1", "x2"], self.pdf2)
self.phi3 = ContinuousFactor(["x", "y", "z"], self.pdf3)
self.phi4 = ContinuousFactor(["x1", "x2", "x3"], self.pdf4)
def test_scope(self):
        self.assertEqual(self.phi1.scope(), ["x", "y"])
        self.assertEqual(self.phi2.scope(), ["x1", "x2"])
        self.assertEqual(self.phi3.scope(), ["x", "y", "z"])
def test_assignment(self):
self.assertEqual(self.phi1.assignment(1.212, 2), self.pdf1(1.212, 2))
self.assertEqual(self.phi2.assignment(1, -2.231), self.pdf2(1, -2.231))
self.assertEqual(
self.phi3.assignment(1.212, 2.213, -3), self.pdf3(1.212, 2.213, -3)
)
def test_reduce(self):
phi1 = self.phi1.copy()
phi1.reduce([("x", 1)])
def reduced_pdf1(y):
return (np.power(1, 1) * np.power(y, 2)) / beta(1, y)
self.assertEqual(phi1.scope(), ["y"])
for inp in np.random.rand(4):
self.assertEqual(phi1.pdf(inp), reduced_pdf1(inp))
self.assertEqual(phi1.pdf(y=inp), reduced_pdf1(inp))
phi1 = self.phi1.reduce([("x", 1)], inplace=False)
self.assertEqual(phi1.scope(), ["y"])
for inp in np.random.rand(4):
self.assertEqual(phi1.pdf(inp), reduced_pdf1(inp))
self.assertEqual(phi1.pdf(y=inp), reduced_pdf1(inp))
phi2 = self.phi2.copy()
phi2.reduce([("x2", 7.213)])
def reduced_pdf2(x1):
return multivariate_normal.pdf([x1, 7.213], [0, 0], [[1, 0], [0, 1]])
self.assertEqual(phi2.scope(), ["x1"])
for inp in np.random.rand(4):
self.assertEqual(phi2.pdf(inp), reduced_pdf2(inp))
self.assertEqual(phi2.pdf(x1=inp), reduced_pdf2(inp))
phi2 = self.phi2.reduce([("x2", 7.213)], inplace=False)
self.assertEqual(phi2.scope(), ["x1"])
for inp in np.random.rand(4):
self.assertEqual(phi2.pdf(inp), reduced_pdf2(inp))
self.assertEqual(phi2.pdf(x1=inp), reduced_pdf2(inp))
phi3 = self.phi3.copy()
phi3.reduce([("y", 0.112), ("z", 23)])
def reduced_pdf4(x):
return 23 * (np.power(x, 1) * np.power(0.112, 2)) / beta(x, 0.112)
self.assertEqual(phi3.scope(), ["x"])
for inp in np.random.rand(4):
self.assertEqual(phi3.pdf(inp), reduced_pdf4(inp))
self.assertEqual(phi3.pdf(x=inp), reduced_pdf4(inp))
phi3 = self.phi3.copy()
phi3.reduce([("y", 0.112)])
def reduced_pdf3(x, z):
return z * (np.power(x, 1) * np.power(0.112, 2)) / beta(x, 0.112)
self.assertEqual(phi3.scope(), ["x", "z"])
for inp in np.random.rand(4, 2):
self.assertEqual(phi3.pdf(inp[0], inp[1]), reduced_pdf3(inp[0], inp[1]))
self.assertEqual(phi3.pdf(x=inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1]))
phi3 = self.phi3.reduce([("y", 0.112)], inplace=False)
self.assertEqual(phi3.scope(), ["x", "z"])
for inp in np.random.rand(4, 2):
self.assertEqual(phi3.pdf(inp[0], inp[1]), reduced_pdf3(inp[0], inp[1]))
self.assertEqual(phi3.pdf(x=inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1]))
self.assertEqual(phi3.pdf(inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1]))
phi3 = self.phi3.reduce([("y", 0.112), ("z", 23)], inplace=False)
self.assertEqual(phi3.scope(), ["x"])
for inp in np.random.rand(4):
self.assertEqual(phi3.pdf(inp), reduced_pdf4(inp))
self.assertEqual(phi3.pdf(x=inp), reduced_pdf4(inp))
def test_reduce_error(self):
self.assertRaises(TypeError, self.phi1.reduce, "x1")
self.assertRaises(TypeError, self.phi1.reduce, set(["x", "y"]))
self.assertRaises(TypeError, self.phi1.reduce, {"x": 1, "y": 1})
self.assertRaises(TypeError, self.phi4.reduce, "x4")
self.assertRaises(TypeError, self.phi4.reduce, set(["x1", "x2", "x3"]))
self.assertRaises(TypeError, self.phi4.reduce, {"x1": 1, "x2": 1, "x3": 1})
self.assertRaises(ValueError, self.phi1.reduce, [("z", 3)])
self.assertRaises(ValueError, self.phi1.reduce, [("x", 0), ("y", 1), ("z", 4)])
self.assertRaises(ValueError, self.phi4.reduce, [("x4", 7)])
self.assertRaises(
ValueError, self.phi4.reduce, [("x1", 1), ("x2", 2), ("x3", 3), ("x4", 4)]
)
def test_marginalize(self):
phi2 = self.phi2.copy()
phi2.marginalize(["x2"])
self.assertEqual(phi2.scope(), ["x1"])
for inp in np.random.rand(4):
np_test.assert_almost_equal(
phi2.pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi2 = self.phi2.marginalize(["x2"], inplace=False)
self.assertEqual(phi2.scope(), ["x1"])
for inp in np.random.rand(4):
np_test.assert_almost_equal(
phi2.pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi4 = self.phi4.copy()
phi4.marginalize(["x2"])
self.assertEqual(phi4.scope(), ["x1", "x3"])
for inp in np.random.rand(4, 2):
np_test.assert_almost_equal(
phi4.pdf(inp[0], inp[1]),
multivariate_normal.pdf([inp[0], inp[1]], [0, 0], [[1, 0], [0, 1]]),
)
phi4.marginalize(["x3"])
self.assertEqual(phi4.scope(), ["x1"])
for inp in np.random.rand(1):
np_test.assert_almost_equal(
phi4.pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi4 = self.phi4.marginalize(["x2"], inplace=False)
self.assertEqual(phi4.scope(), ["x1", "x3"])
for inp in np.random.rand(4, 2):
np_test.assert_almost_equal(
phi4.pdf(inp[0], inp[1]),
multivariate_normal.pdf([inp[0], inp[1]], [0, 0], [[1, 0], [0, 1]]),
)
phi4 = phi4.marginalize(["x3"], inplace=False)
self.assertEqual(phi4.scope(), ["x1"])
for inp in np.random.rand(1):
np_test.assert_almost_equal(
phi4.pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
def test_marginalize_error(self):
self.assertRaises(TypeError, self.phi1.marginalize, "x1")
self.assertRaises(TypeError, self.phi1.marginalize, set(["x", "y"]))
self.assertRaises(TypeError, self.phi1.marginalize, {"x": 1, "y": 1})
self.assertRaises(TypeError, self.phi4.marginalize, "x4")
self.assertRaises(TypeError, self.phi4.marginalize, set(["x1", "x2", "x3"]))
self.assertRaises(TypeError, self.phi4.marginalize, {"x1": 1, "x2": 1, "x3": 1})
self.assertRaises(ValueError, self.phi1.marginalize, ["z"])
self.assertRaises(ValueError, self.phi1.marginalize, ["x", "y", "z"])
self.assertRaises(ValueError, self.phi4.marginalize, ["x4"])
self.assertRaises(ValueError, self.phi4.marginalize, ["x1", "x2", "x3", "x4"])
def test_normalize(self):
def pdf2(x1, x2):
return 2 * self.pdf2(x1, x2)
phi2 = ContinuousFactor(["x1", "x2"], pdf2)
phi4 = phi2.copy()
phi4.normalize()
self.assertEqual(phi4.scope(), phi2.scope())
for inp in np.random.rand(1, 2):
np_test.assert_almost_equal(
phi4.pdf(inp[0], inp[1]), self.pdf2(inp[0], inp[1])
)
phi2.normalize()
self.assertEqual(phi4.scope(), phi2.scope())
for inp in np.random.rand(1, 2):
np_test.assert_almost_equal(
phi2.pdf(inp[0], inp[1]), phi4.pdf(inp[0], inp[1])
)
def test_operate(self):
phi1 = self.phi1.copy()
phi1._operate(self.phi2, "product")
self.assertEqual(phi1.scope(), ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1.pdf(*inp),
self.phi1.pdf(inp[0], inp[1]) * self.phi2.pdf(inp[2], inp[3]),
)
phi1 = self.phi1._operate(self.phi2, "product", inplace=False)
self.assertEqual(phi1.scope(), ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1.pdf(*inp),
self.phi1.pdf(inp[0], inp[1]) * self.phi2.pdf(inp[2], inp[3]),
)
phi1 = self.phi1 * self.phi2
self.assertEqual(phi1.scope(), ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1.pdf(*inp),
self.phi1.pdf(inp[0], inp[1]) * self.phi2.pdf(inp[2], inp[3]),
)
phi3 = self.phi3.copy()
phi3._operate(self.phi1, "product")
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) * self.phi1.pdf(inp[0], inp[1])
)
phi3 = self.phi3._operate(self.phi1, "product", inplace=False)
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) * self.phi1.pdf(inp[0], inp[1])
)
phi3 = self.phi3 * self.phi1
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) * self.phi1.pdf(inp[0], inp[1])
)
phi3 = self.phi3.copy()
phi3._operate(self.phi1, "divide")
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) / self.phi1.pdf(inp[0], inp[1])
)
phi3 = self.phi3._operate(self.phi1, "divide", inplace=False)
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) / self.phi1.pdf(inp[0], inp[1])
)
phi3 = self.phi3 / self.phi1
self.assertEqual(phi3.scope(), ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3.pdf(*inp), self.phi3.pdf(*inp) / self.phi1.pdf(inp[0], inp[1])
)
phi4 = self.phi4.copy()
phi4._operate(self.phi2, "product")
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) * self.phi2.pdf(inp[0], inp[1])
)
phi4 = self.phi4._operate(self.phi2, "product", inplace=False)
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) * self.phi2.pdf(inp[0], inp[1])
)
phi4 = self.phi4 * self.phi2
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) * self.phi2.pdf(inp[0], inp[1])
)
phi4 = self.phi4.copy()
phi4._operate(self.phi2, "divide")
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) / self.phi2.pdf(inp[0], inp[1])
)
phi4 = self.phi4._operate(self.phi2, "divide", inplace=False)
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) / self.phi2.pdf(inp[0], inp[1])
)
phi4 = self.phi4 / self.phi2
self.assertEqual(phi4.scope(), ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4.pdf(*inp), self.phi4.pdf(*inp) / self.phi2.pdf(inp[0], inp[1])
)
def test_operate_error(self):
self.assertRaises(TypeError, self.phi1._operate, 1, "product")
self.assertRaises(TypeError, self.phi1._operate, 1, "divide")
self.assertRaises(TypeError, self.phi1._operate, "1", "product")
self.assertRaises(TypeError, self.phi1._operate, "1", "divide")
self.assertRaises(TypeError, self.phi1._operate, self.phi2.pdf, "product")
self.assertRaises(TypeError, self.phi1._operate, self.phi2.pdf, "divide")
self.assertRaises(TypeError, self.phi1._operate, [1], "product")
self.assertRaises(TypeError, self.phi1._operate, [1], "divide")
self.assertRaises(TypeError, self.phi4._operate, 1, "product")
self.assertRaises(TypeError, self.phi4._operate, 1, "divide")
self.assertRaises(TypeError, self.phi4._operate, "1", "product")
self.assertRaises(TypeError, self.phi4._operate, "1", "divide")
self.assertRaises(TypeError, self.phi4._operate, self.phi2.pdf, "product")
self.assertRaises(TypeError, self.phi4._operate, self.phi2.pdf, "divide")
self.assertRaises(TypeError, self.phi4._operate, [1], "product")
self.assertRaises(TypeError, self.phi4._operate, [1], "divide")
self.assertRaises(TypeError, self.phi1._operate, 1, "product", False)
self.assertRaises(TypeError, self.phi1._operate, 1, "divide", False)
self.assertRaises(TypeError, self.phi1._operate, "1", "product", False)
self.assertRaises(TypeError, self.phi1._operate, "1", "divide", False)
self.assertRaises(
TypeError, self.phi1._operate, self.phi2.pdf, "product", False
)
self.assertRaises(TypeError, self.phi1._operate, self.phi2.pdf, "divide", False)
self.assertRaises(TypeError, self.phi1._operate, [1], "product", False)
self.assertRaises(TypeError, self.phi1._operate, [1], "divide", False)
self.assertRaises(TypeError, self.phi4._operate, 1, "product", False)
self.assertRaises(TypeError, self.phi4._operate, 1, "divide", False)
self.assertRaises(TypeError, self.phi4._operate, "1", "product", False)
self.assertRaises(TypeError, self.phi4._operate, "1", "divide", False)
self.assertRaises(
TypeError, self.phi4._operate, self.phi2.pdf, "product", False
)
self.assertRaises(TypeError, self.phi4._operate, self.phi2.pdf, "divide", False)
self.assertRaises(TypeError, self.phi4._operate, [1], "product", False)
self.assertRaises(TypeError, self.phi4._operate, [1], "divide", False)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi2)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi3)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi4)
self.assertRaises(ValueError, self.phi2.__truediv__, self.phi3)
self.assertRaises(ValueError, self.phi2.__truediv__, self.phi4)
def test_copy(self):
copy1 = self.phi1.copy()
copy2 = self.phi3.copy()
copy4 = copy1.copy()
copy5 = copy2.copy()
self.assertEqual(copy1.scope(), copy4.scope())
self.assertEqual(copy1.pdf, copy4.pdf)
self.assertEqual(copy2.scope(), copy5.scope())
self.assertEqual(copy2.pdf, copy5.pdf)
# TODO: Fix these
# copy1.variables = ['A', 'B']
# self.assertEqual(copy4.scope(), self.phi1.scope())
# def pdf(a, b):
# return (a + b) / (a * a + b * b)
# copy1._pdf = pdf
# copy1_pdf = pdf
# self.assertEqual(copy4.pdf, self.phi1.pdf)
# copy4.variables = ['X', 'Y']
# self.assertEqual(copy1.scope(), ['x', 'y'])
# copy4._pdf = lambda a, b: a + b
# for inp in np.random.rand(4, 2):
# self.assertEqual(copy1.pdf(inp[0], inp[1]), copy1_pdf(inp[0], inp[1]))
# copy2.reduce([('x', 7.7)])
# def reduced_pdf(y, z):
# return z*(np.power(7.7, 1) * np.power(y, 2)) / beta(7.7, y)
# self.assertEqual(copy5.scope(), self.phi3.scope())
# self.assertEqual(copy5.pdf, self.phi3.pdf)
# copy5.reduce([('x', 11), ('z', 13)])
# self.assertEqual(copy2.scope(), ['y', 'z'])
# for inp in np.random.rand(4, 2):
# self.assertEqual(copy2.pdf(inp[0], inp[1]), reduced_pdf(inp[0], inp[1]))
def tearDown(self):
del self.phi1
del self.phi2
del self.phi3
del self.phi4
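# Illustrative sketch (assumed API, mirroring the tests above): reduce() fixes
# a variable to a value, while marginalize() integrates a variable out.
def _example_reduce_marginalize():  # pragma: no cover
    phi = ContinuousFactor(["x1", "x2"],
                           lambda x1, x2: multivariate_normal.pdf(
                               [x1, x2], [0, 0], [[1, 0], [0, 1]]))
    phi_reduced = phi.reduce([("x2", 0.0)], inplace=False)  # scope: ["x1"]
    phi_marginal = phi.marginalize(["x2"], inplace=False)   # scope: ["x1"]
    return phi_reduced, phi_marginal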
|
import pytest
from homeassistant.components import updater
from homeassistant.helpers.update_coordinator import UpdateFailed
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import mock_component
NEW_VERSION = "10000.0"
MOCK_VERSION = "10.0"
MOCK_DEV_VERSION = "10.0.dev0"
MOCK_HUUID = "abcdefg"
MOCK_RESPONSE = {"version": "0.15", "release-notes": "https://home-assistant.io"}
MOCK_CONFIG = {updater.DOMAIN: {"reporting": True}}
RELEASE_NOTES = "test release notes"
@pytest.fixture(autouse=True)
def mock_version():
"""Mock current version."""
with patch("homeassistant.components.updater.current_version", MOCK_VERSION):
yield
@pytest.fixture(name="mock_get_newest_version")
def mock_get_newest_version_fixture():
"""Fixture to mock get_newest_version."""
with patch(
"homeassistant.components.updater.get_newest_version",
return_value=(NEW_VERSION, RELEASE_NOTES),
) as mock:
yield mock
@pytest.fixture(name="mock_get_uuid", autouse=True)
def mock_get_uuid_fixture():
"""Fixture to mock get_uuid."""
with patch("homeassistant.helpers.instance_id.async_get") as mock:
yield mock
async def test_new_version_shows_entity_true(
hass, mock_get_uuid, mock_get_newest_version
):
"""Test if sensor is true if new version is available."""
mock_get_uuid.return_value = MOCK_HUUID
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}})
await hass.async_block_till_done()
assert hass.states.is_state("binary_sensor.updater", "on")
assert (
hass.states.get("binary_sensor.updater").attributes["newest_version"]
== NEW_VERSION
)
assert (
hass.states.get("binary_sensor.updater").attributes["release_notes"]
== RELEASE_NOTES
)
async def test_same_version_shows_entity_false(
hass, mock_get_uuid, mock_get_newest_version
):
"""Test if sensor is false if no new version is available."""
mock_get_uuid.return_value = MOCK_HUUID
mock_get_newest_version.return_value = (MOCK_VERSION, "")
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}})
await hass.async_block_till_done()
assert hass.states.is_state("binary_sensor.updater", "off")
assert (
hass.states.get("binary_sensor.updater").attributes["newest_version"]
== MOCK_VERSION
)
assert "release_notes" not in hass.states.get("binary_sensor.updater").attributes
async def test_disable_reporting(hass, mock_get_uuid, mock_get_newest_version):
"""Test we do not gather analytics when disable reporting is active."""
mock_get_uuid.return_value = MOCK_HUUID
mock_get_newest_version.return_value = (MOCK_VERSION, "")
assert await async_setup_component(
hass, updater.DOMAIN, {updater.DOMAIN: {"reporting": False}}
)
await hass.async_block_till_done()
assert hass.states.is_state("binary_sensor.updater", "off")
await updater.get_newest_version(hass, MOCK_HUUID, MOCK_CONFIG)
call = mock_get_newest_version.mock_calls[0][1]
assert call[0] is hass
assert call[1] is None
async def test_get_newest_version_no_analytics_when_no_huuid(hass, aioclient_mock):
"""Test we do not gather analytics when no huuid is passed in."""
aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE)
with patch(
"homeassistant.helpers.system_info.async_get_system_info", side_effect=Exception
):
res = await updater.get_newest_version(hass, None, False)
assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"])
async def test_get_newest_version_analytics_when_huuid(hass, aioclient_mock):
"""Test we gather analytics when huuid is passed in."""
aioclient_mock.post(updater.UPDATER_URL, json=MOCK_RESPONSE)
with patch(
"homeassistant.helpers.system_info.async_get_system_info",
return_value={"fake": "bla"},
):
res = await updater.get_newest_version(hass, MOCK_HUUID, False)
assert res == (MOCK_RESPONSE["version"], MOCK_RESPONSE["release-notes"])
async def test_error_fetching_new_version_bad_json(hass, aioclient_mock):
"""Test we handle json error while fetching new version."""
aioclient_mock.post(updater.UPDATER_URL, text="not json")
with patch(
"homeassistant.helpers.system_info.async_get_system_info",
return_value={"fake": "bla"},
), pytest.raises(UpdateFailed):
await updater.get_newest_version(hass, MOCK_HUUID, False)
async def test_error_fetching_new_version_invalid_response(hass, aioclient_mock):
"""Test we handle response error while fetching new version."""
aioclient_mock.post(
updater.UPDATER_URL,
json={
"version": "0.15"
# 'release-notes' is missing
},
)
with patch(
"homeassistant.helpers.system_info.async_get_system_info",
return_value={"fake": "bla"},
), pytest.raises(UpdateFailed):
await updater.get_newest_version(hass, MOCK_HUUID, False)
async def test_new_version_shows_entity_after_hour_hassio(
hass, mock_get_uuid, mock_get_newest_version
):
"""Test if binary sensor gets updated if new version is available / Hass.io."""
mock_get_uuid.return_value = MOCK_HUUID
mock_component(hass, "hassio")
hass.data["hassio_core_info"] = {"version_latest": "999.0"}
assert await async_setup_component(hass, updater.DOMAIN, {updater.DOMAIN: {}})
await hass.async_block_till_done()
assert hass.states.is_state("binary_sensor.updater", "on")
assert (
hass.states.get("binary_sensor.updater").attributes["newest_version"] == "999.0"
)
assert (
hass.states.get("binary_sensor.updater").attributes["release_notes"]
== RELEASE_NOTES
)
|
import voluptuous as vol
from homeassistant import config_entries, util
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from .binding import EmulatedRoku
from .config_flow import configured_servers
from .const import (
CONF_ADVERTISE_IP,
CONF_ADVERTISE_PORT,
CONF_HOST_IP,
CONF_LISTEN_PORT,
CONF_SERVERS,
CONF_UPNP_BIND_MULTICAST,
DOMAIN,
)
SERVER_CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_LISTEN_PORT): cv.port,
vol.Optional(CONF_HOST_IP): cv.string,
vol.Optional(CONF_ADVERTISE_IP): cv.string,
vol.Optional(CONF_ADVERTISE_PORT): cv.port,
vol.Optional(CONF_UPNP_BIND_MULTICAST): cv.boolean,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_SERVERS): vol.All(
cv.ensure_list, [SERVER_CONFIG_SCHEMA]
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
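# Illustrative configuration.yaml snippet accepted by CONFIG_SCHEMA above
# (key names follow the CONF_* constants; values are hypothetical):
#
#   emulated_roku:
#     servers:
#       - name: Living Room Roku
#         listen_port: 8060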
async def async_setup(hass, config):
"""Set up the emulated roku component."""
conf = config.get(DOMAIN)
if conf is None:
return True
existing_servers = configured_servers(hass)
for entry in conf[CONF_SERVERS]:
if entry[CONF_NAME] not in existing_servers:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up an emulated roku server from a config entry."""
config = config_entry.data
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
name = config[CONF_NAME]
listen_port = config[CONF_LISTEN_PORT]
host_ip = config.get(CONF_HOST_IP) or util.get_local_ip()
advertise_ip = config.get(CONF_ADVERTISE_IP)
advertise_port = config.get(CONF_ADVERTISE_PORT)
upnp_bind_multicast = config.get(CONF_UPNP_BIND_MULTICAST)
server = EmulatedRoku(
hass,
name,
host_ip,
listen_port,
advertise_ip,
advertise_port,
upnp_bind_multicast,
)
hass.data[DOMAIN][name] = server
return await server.setup()
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
name = entry.data[CONF_NAME]
server = hass.data[DOMAIN].pop(name)
return await server.unload()
|
import collections
import json
import yaml
from molecule import logger
from molecule import util
from molecule.model import schema_v1
from molecule.model import schema_v2
LOG = logger.get_logger(__name__)
class MyDumper(yaml.Dumper):
def increase_indent(self, flow=False, indentless=False):
return super(MyDumper, self).increase_indent(flow, False)
class Migrate(object):
def __init__(self, molecule_file):
"""
Initialize a new Migrate object and return None.
:param molecule_file: A string containing an absolute path to the
molecule v1 config to parse.
:return: None
"""
self._molecule_file = molecule_file
self._v2 = self._get_config()
self._v1 = self._get_v1_config()
def dump(self):
od = self._convert()
yaml.add_representer(collections.OrderedDict,
self._get_dict_representer)
return yaml.dump(
od,
Dumper=MyDumper,
default_flow_style=False,
explicit_start=True,
line_break=1)
def _convert(self):
if self._v1.get('vagrant'):
msg = 'Vagrant style v1 config found'
LOG.info(msg)
self._set_vagrant_platforms()
self._set_vagrant_provider()
else:
msg = 'Only Vagrant migrations are supported. Exiting.'
util.sysexit_with_message(msg)
self._set_provisioner()
self._set_verifier()
od = collections.OrderedDict(
sorted(self._v2.items(), key=lambda t: t[0]))
errors = schema_v2.validate(self._to_dict(od))
self._check_errors(errors)
return od
def _to_dict(self, od):
return json.loads(json.dumps(od))
def _set_vagrant_provider(self):
provider = self._v1['vagrant']['providers'][0]
self._v2['driver']['provider']['name'] = provider['name']
def _set_vagrant_platforms(self):
platforms = self._v1['vagrant']['platforms'][0]
provider = self._v1['vagrant']['providers'][0]
platforms_list = []
instances = self._v1['vagrant']['instances']
for instance in instances:
i = collections.OrderedDict({})
i['name'] = instance['name']
if platforms.get('box'):
i['box'] = platforms['box']
if platforms.get('box_version'):
i['box_version'] = platforms['box_version']
if platforms.get('box_url'):
i['box_url'] = platforms['box_url']
if provider.get('options', {}).get('memory'):
i['memory'] = provider['options']['memory']
if provider.get('options', {}).get('cpus'):
i['cpus'] = provider['options']['cpus']
if instance.get('ansible_groups'):
i['groups'] = instance['ansible_groups']
if instance.get('interfaces'):
i['interfaces'] = instance['interfaces']
if instance.get('raw_config_args'):
i['raw_config_args'] = instance['raw_config_args']
platforms_list.append(i)
self._v2['platforms'] = platforms_list
def _set_provisioner(self):
ansible = self._v1.get('ansible', collections.OrderedDict({}))
self._v2['provisioner']['name'] = 'ansible'
self._v2['provisioner']['env'] = collections.OrderedDict({})
if ansible.get('raw_env_vars'):
self._v2['provisioner']['env'] = self._v1['ansible'][
'raw_env_vars']
self._v2['provisioner']['options'] = collections.OrderedDict({})
self._v2['provisioner']['lint'] = collections.OrderedDict({})
self._v2['provisioner']['lint']['name'] = 'ansible-lint'
if ansible.get('extra_vars'):
self._v2['provisioner']['options']['extra-vars'] = ansible[
'extra_vars']
if ansible.get('verbose'):
self._v2['provisioner']['options']['verbose'] = ansible['verbose']
if ansible.get('become'):
self._v2['provisioner']['options']['become'] = ansible['become']
if ansible.get('tags'):
self._v2['provisioner']['options']['tags'] = ansible['tags']
def _set_verifier(self):
verifier = self._v1['verifier']
self._v2['verifier']['name'] = 'testinfra'
self._v2['verifier']['options'] = collections.OrderedDict({})
self._v2['verifier']['lint'] = collections.OrderedDict({})
self._v2['verifier']['lint']['name'] = 'flake8'
if verifier.get('options', {}).get('sudo'):
self._v2['verifier']['options']['sudo'] = verifier['options'][
'sudo']
def _get_dict_representer(self, dumper, data):
return dumper.represent_dict(data.items())
def _get_v1_config(self):
with open(self._molecule_file) as f:
    d = util.safe_load(f)
errors = schema_v1.validate(d)
self._check_errors(errors)
return d
def _get_config(self):
d = collections.OrderedDict({
'dependency': {
'name': 'galaxy',
},
'driver': {
'name': 'vagrant',
'provider': collections.OrderedDict({}),
},
'lint': {
'name': 'yamllint',
},
'provisioner': collections.OrderedDict({}),
'platforms': [],
'scenario': {
'name': 'default',
},
'verifier': collections.OrderedDict({}),
})
return d
def _check_errors(self, errors):
if errors:
msg = "Failed to validate.\n\n{}".format(errors)
util.sysexit_with_message(msg)
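# Illustrative usage sketch (assumed, not part of the module): convert a v1
# molecule.yml and print the generated v2 YAML.
def _example_migration():  # pragma: no cover
    migrate = Migrate('/path/to/molecule.yml')  # hypothetical path
    print(migrate.dump())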
|
import logging
import posixpath
from absl import flags
from perfkitbenchmarker import vm_util
# Github URL for HPC tools used with flag --gce_hpc_tools
_HPC_URL = 'https://github.com/GoogleCloudPlatform/hpc-tools.git'
# Remote git checkout directory
_HPC_REMOTE_DIR = posixpath.join(vm_util.VM_TMP_DIR, 'hpc-tools')
# HPC tools tuning script
_HPC_SCRIPT = 'mpi-tuning.sh'
flags.DEFINE_string('gce_hpc_tools_tag', None,
'Github tag of hpc-tools to use. Default is latest.')
flags.DEFINE_list('gce_hpc_tools_tuning', [
'tcpmem', 'networklatency', 'limits', 'nosmt', 'nofirewalld', 'noselinux',
'nomitigation', 'reboot'
], 'List of HPC tunings. Run `bash mpi-tuning.sh` for descriptions.')
FLAGS = flags.FLAGS
def YumInstall(vm):
"""Applies the hpc-tools environment script.
Optionally reboots.
Args:
vm: Virtual machine to apply HPC tools on.
"""
tools_version = _CloneRepo(vm, FLAGS.gce_hpc_tools_tag)
vm.metadata.update({
'hpc_tools': True,
'hpc_tools_tag': FLAGS.gce_hpc_tools_tag or 'head',
'hpc_tools_version': tools_version,
'hpc_tools_tuning': ','.join(sorted(FLAGS.gce_hpc_tools_tuning)),
})
logging.info('Applying hpc-tools to %s', vm)
apply_command = f'cd {_HPC_REMOTE_DIR}; sudo bash {_HPC_SCRIPT}'
for tuning in sorted(FLAGS.gce_hpc_tools_tuning):
apply_command += f' --{tuning}'
if 'reboot' in FLAGS.gce_hpc_tools_tuning:
# Script will call reboot which makes a normal RemoteCommand fail.
vm.RemoteCommand(apply_command, ignore_failure=True)
vm.WaitForBootCompletion()
else:
vm.RemoteCommand(apply_command)
def _CloneRepo(vm, hpc_tools_tag):
"""Clones git repo, switches to tag, and returns current commit."""
vm.InstallPackages('git')
vm.RemoteCommand(f'rm -rf {_HPC_REMOTE_DIR}; '
f'git clone {_HPC_URL} {_HPC_REMOTE_DIR}')
if hpc_tools_tag:
vm.RemoteCommand(f'cd {_HPC_REMOTE_DIR}; git checkout {hpc_tools_tag}')
stdout, _ = vm.RemoteCommand(
f'cd {_HPC_REMOTE_DIR}; git log --pretty=format:"%h" -n 1')
return stdout.splitlines()[-1]
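# Illustrative example (assumed flag defaults): with the default tunings the
# command assembled in YumInstall runs roughly
#   cd <vm_tmp>/hpc-tools; sudo bash mpi-tuning.sh --limits --networklatency \
#     --nofirewalld --nomitigation --noselinux --nosmt --reboot --tcpmem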
|
import pytest
from homeassistant.components.input_text import (
ATTR_MAX,
ATTR_MIN,
ATTR_MODE,
ATTR_VALUE,
CONF_INITIAL,
CONF_MAX_VALUE,
CONF_MIN_VALUE,
DOMAIN,
MODE_TEXT,
SERVICE_SET_VALUE,
)
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
SERVICE_RELOAD,
)
from homeassistant.core import Context, CoreState, State
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import entity_registry
from homeassistant.loader import bind_hass
from homeassistant.setup import async_setup_component
# pylint: disable=protected-access
from tests.async_mock import patch
from tests.common import mock_restore_cache
TEST_VAL_MIN = 2
TEST_VAL_MAX = 22
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
"id": "from_storage",
"name": "from storage",
"initial": "loaded from storage",
ATTR_MAX: TEST_VAL_MAX,
ATTR_MIN: TEST_VAL_MIN,
ATTR_MODE: MODE_TEXT,
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
@bind_hass
def set_value(hass, entity_id, value):
"""Set input_text to value.
This is a legacy helper method. Do not use it for new tests.
"""
hass.async_create_task(
hass.services.async_call(
DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: value}
)
)
async def test_config(hass):
"""Test config."""
invalid_configs = [
None,
{},
{"name with space": None},
{"test_1": {"min": 50, "max": 50}},
]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_set_value(hass):
"""Test set_value method."""
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_1": {"initial": "test", "min": 3, "max": 10}}}
)
entity_id = "input_text.test_1"
state = hass.states.get(entity_id)
assert str(state.state) == "test"
set_value(hass, entity_id, "testing")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert str(state.state) == "testing"
set_value(hass, entity_id, "testing too long")
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert str(state.state) == "testing"
async def test_mode(hass):
"""Test mode settings."""
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_default_text": {"initial": "test", "min": 3, "max": 10},
"test_explicit_text": {
"initial": "test",
"min": 3,
"max": 10,
"mode": "text",
},
"test_explicit_password": {
"initial": "test",
"min": 3,
"max": 10,
"mode": "password",
},
}
},
)
state = hass.states.get("input_text.test_default_text")
assert state
assert state.attributes["mode"] == "text"
state = hass.states.get("input_text.test_explicit_text")
assert state
assert state.attributes["mode"] == "text"
state = hass.states.get("input_text.test_explicit_password")
assert state
assert state.attributes["mode"] == "password"
async def test_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass,
(State("input_text.b1", "test"), State("input_text.b2", "testing too long")),
)
hass.state = CoreState.starting
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {"b1": None, "b2": {"min": 0, "max": 10}}}
)
state = hass.states.get("input_text.b1")
assert state
assert str(state.state) == "test"
state = hass.states.get("input_text.b2")
assert state
assert str(state.state) == "unknown"
async def test_initial_state_overrules_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass,
(State("input_text.b1", "testing"), State("input_text.b2", "testing too long")),
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"b1": {"initial": "test", "min": 0, "max": 10},
"b2": {"initial": "test", "min": 0, "max": 10},
}
},
)
state = hass.states.get("input_text.b1")
assert state
assert str(state.state) == "test"
state = hass.states.get("input_text.b2")
assert state
assert str(state.state) == "test"
async def test_no_initial_state_and_no_restore_state(hass):
"""Ensure that entity is create without initial and restore feature."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"b1": {"min": 0, "max": 100}}})
state = hass.states.get("input_text.b1")
assert state
assert str(state.state) == "unknown"
async def test_input_text_context(hass, hass_admin_user):
"""Test that input_text context works."""
assert await async_setup_component(
hass, "input_text", {"input_text": {"t1": {"initial": "bla"}}}
)
state = hass.states.get("input_text.t1")
assert state is not None
await hass.services.async_call(
"input_text",
"set_value",
{"entity_id": state.entity_id, "value": "new_value"},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("input_text.t1")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
async def test_config_none(hass):
"""Set up input_text without any config."""
await async_setup_component(hass, DOMAIN, {DOMAIN: {"b1": None}})
state = hass.states.get("input_text.b1")
assert state
assert str(state.state) == "unknown"
# with an empty config we should still get the defaults
assert state.attributes[ATTR_MODE] == MODE_TEXT
assert state.attributes[ATTR_MAX] == CONF_MAX_VALUE
assert state.attributes[ATTR_MIN] == CONF_MIN_VALUE
async def test_reload(hass, hass_admin_user, hass_read_only_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {"test_1": {"initial": "test 1"}, "test_2": {"initial": "test 2"}}},
)
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_text.test_1")
state_2 = hass.states.get("input_text.test_2")
state_3 = hass.states.get("input_text.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is None
assert state_1.state == "test 1"
assert state_2.state == "test 2"
assert state_1.attributes[ATTR_MIN] == 0
assert state_2.attributes[ATTR_MAX] == 100
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: {
"test_2": {"initial": "test reloaded", ATTR_MIN: 12},
"test_3": {"initial": "test 3", ATTR_MAX: 21},
}
},
):
with pytest.raises(Unauthorized):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_read_only_user.id),
)
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_text.test_1")
state_2 = hass.states.get("input_text.test_2")
state_3 = hass.states.get("input_text.test_3")
assert state_1 is None
assert state_2 is not None
assert state_3 is not None
assert state_2.attributes[ATTR_MIN] == 12
assert state_3.attributes[ATTR_MAX] == 21
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == "loaded from storage"
assert state.attributes.get(ATTR_EDITABLE)
assert state.attributes[ATTR_MAX] == TEST_VAL_MAX
assert state.attributes[ATTR_MIN] == TEST_VAL_MIN
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"initial": "yaml initial value",
ATTR_MODE: MODE_TEXT,
ATTR_MAX: 33,
ATTR_MIN: 3,
ATTR_NAME: "yaml friendly name",
}
}
}
)
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == "loaded from storage"
assert state.attributes.get(ATTR_EDITABLE)
assert state.attributes[ATTR_MAX] == TEST_VAL_MAX
assert state.attributes[ATTR_MIN] == TEST_VAL_MIN
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert state.state == "yaml initial value"
assert not state.attributes[ATTR_EDITABLE]
assert state.attributes[ATTR_MAX] == 33
assert state.attributes[ATTR_MIN] == 3
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"initial": "yaml initial value",
ATTR_MODE: MODE_TEXT,
ATTR_MAX: 33,
ATTR_MIN: 3,
ATTR_NAME: "yaml friendly name",
}
}
}
)
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_update(hass, hass_ws_client, storage_setup):
"""Test updating min/max updates the state."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state.attributes[ATTR_FRIENDLY_NAME] == "from storage"
assert state.attributes[ATTR_MODE] == MODE_TEXT
assert state.state == "loaded from storage"
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
ATTR_NAME: "even newer name",
CONF_INITIAL: "newer option",
ATTR_MIN: 6,
ATTR_MODE: "password",
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.state == "loaded from storage"
assert state.attributes[ATTR_FRIENDLY_NAME] == "even newer name"
assert state.attributes[ATTR_MODE] == "password"
assert state.attributes[ATTR_MIN] == 6
assert state.attributes[ATTR_MAX] == TEST_VAL_MAX
async def test_ws_create(hass, hass_ws_client, storage_setup):
"""Test create WS."""
assert await storage_setup(items=[])
input_id = "new_input"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/create",
"name": "New Input",
"initial": "even newer option",
ATTR_MAX: 44,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.state == "even newer option"
assert state.attributes[ATTR_FRIENDLY_NAME] == "New Input"
assert state.attributes[ATTR_EDITABLE]
assert state.attributes[ATTR_MAX] == 44
assert state.attributes[ATTR_MIN] == 0
async def test_setup_no_config(hass, hass_admin_user):
"""Test component setup with no config."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(hass, DOMAIN, {})
with patch(
"homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start == len(hass.states.async_entity_ids())
|
import typing
import tensorflow as tf
from keras.engine import Layer
class DynamicPoolingLayer(Layer):
"""
Layer that computes dynamic pooling of one tensor.
:param psize1: pooling size of dimension 1
:param psize2: pooling size of dimension 2
:param kwargs: Standard layer keyword arguments.
Examples:
>>> import matchzoo as mz
>>> layer = mz.layers.DynamicPoolingLayer(3, 2)
>>> num_batch, left_len, right_len, num_dim = 5, 3, 2, 10
>>> layer.build([[num_batch, left_len, right_len, num_dim],
... [num_batch, left_len, right_len, 3]])
"""
def __init__(self,
psize1: int,
psize2: int,
**kwargs):
""":class:`DynamicPoolingLayer` constructor."""
super().__init__(**kwargs)
self._psize1 = psize1
self._psize2 = psize2
def build(self, input_shape: typing.List[int]):
"""
Build the layer.
:param input_shape: the shapes of the input tensors,
for DynamicPoolingLayer we need two input tensors.
"""
super().build(input_shape)
input_shape_one = input_shape[0]
self._msize1 = input_shape_one[1]
self._msize2 = input_shape_one[2]
def call(self, inputs: list, **kwargs) -> typing.Any:
"""
The computation logic of DynamicPoolingLayer.
:param inputs: two input tensors.
"""
self._validate_dpool_size()
x, dpool_index = inputs
dpool_shape = tf.shape(dpool_index)
batch_index_one = tf.expand_dims(
tf.expand_dims(
tf.range(dpool_shape[0]), axis=-1),
axis=-1)
batch_index = tf.expand_dims(
tf.tile(batch_index_one, [1, self._msize1, self._msize2]),
axis=-1)
dpool_index_ex = tf.concat([batch_index, dpool_index], axis=3)
x_expand = tf.gather_nd(x, dpool_index_ex)
stride1 = self._msize1 // self._psize1
stride2 = self._msize2 // self._psize2
x_pool = tf.nn.max_pool(x_expand,
[1, stride1, stride2, 1],
[1, stride1, stride2, 1],
"VALID")
return x_pool
def compute_output_shape(self, input_shape: list) -> tuple:
"""
Calculate the layer output shape.
:param input_shape: the shapes of the input tensors,
for DynamicPoolingLayer we need two input tensors.
"""
input_shape_one = input_shape[0]
return (None, self._psize1, self._psize2, input_shape_one[3])
def get_config(self) -> dict:
"""Get the config dict of DynamicPoolingLayer."""
config = {
'psize1': self._psize1,
'psize2': self._psize2
}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
def _validate_dpool_size(self):
suggestion = self.get_size_suggestion(
self._msize1, self._msize2, self._psize1, self._psize2
)
if suggestion != (self._psize1, self._psize2):
raise ValueError(
"DynamicPooling Layer can not "
f"generate ({self._psize1} x {self._psize2}) output "
f"feature map, please use ({suggestion[0]} x {suggestion[1]})"
f" instead. `model.params['dpool_size'] = {suggestion}` "
)
@classmethod
def get_size_suggestion(
cls,
msize1: int,
msize2: int,
psize1: int,
psize2: int
) -> typing.Tuple[int, int]:
"""
Get `dpool_size` suggestion for a given shape.
Returns the nearest legal `dpool_size` for the given combination of
`(psize1, psize2)`.
:param msize1: size of the left text.
:param msize2: size of the right text.
:param psize1: base size of the pool.
:param psize2: base size of the pool.
:return: the suggested `(psize1, psize2)` pair.
"""
stride1 = msize1 // psize1
stride2 = msize2 // psize2
suggestion1 = msize1 // stride1
suggestion2 = msize2 // stride2
return (suggestion1, suggestion2)
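# Illustrative usage sketch (shapes are hypothetical): pool a matching tensor
# of shape (batch, 6, 4, 10) down to (batch, 3, 2, 10), given a precomputed
# dpool_index whose trailing axis holds the source (row, col) positions that
# call() combines with the batch index before tf.gather_nd.
#
#   layer = DynamicPoolingLayer(psize1=3, psize2=2)
#   pooled = layer([embed_cross, dpool_index])  # -> (batch, 3, 2, 10)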
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.sensor import DOMAIN
from homeassistant.components.sensor.device_trigger import ENTITY_TRIGGERS
from homeassistant.const import CONF_PLATFORM, PERCENTAGE, STATE_UNKNOWN
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.testing_config.custom_components.test.sensor import DEVICE_CLASSES
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_triggers(hass, device_reg, entity_reg):
"""Test we get the expected triggers from a sensor."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for device_class in DEVICE_CLASSES:
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES[device_class].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_triggers = [
{
"platform": "device",
"domain": DOMAIN,
"type": trigger["type"],
"device_id": device_entry.id,
"entity_id": platform.ENTITIES[device_class].entity_id,
}
for device_class in DEVICE_CLASSES
for trigger in ENTITY_TRIGGERS[device_class]
if device_class != "none"
]
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 12
assert triggers == expected_triggers
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor trigger."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["battery"].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"extra_fields": [
{
"description": {"suffix": PERCENTAGE},
"name": "above",
"optional": True,
"type": "float",
},
{
"description": {"suffix": PERCENTAGE},
"name": "below",
"optional": True,
"type": "float",
},
{"name": "for", "optional": True, "type": "positive_time_period_dict"},
]
}
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 1
for trigger in triggers:
capabilities = await async_get_device_automation_capabilities(
hass, "trigger", trigger
)
assert capabilities == expected_capabilities
async def test_get_trigger_capabilities_none(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor trigger."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
triggers = [
{
"platform": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": "sensor.beer",
"type": "is_battery_level",
},
{
"platform": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": platform.ENTITIES["none"].entity_id,
"type": "is_battery_level",
},
]
expected_capabilities = {}
for trigger in triggers:
capabilities = await async_get_device_automation_capabilities(
hass, "trigger", trigger
)
assert capabilities == expected_capabilities
async def test_if_fires_not_on_above_below(hass, calls, caplog):
"""Test for value triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "battery_level",
},
"action": {"service": "test.automation"},
}
]
},
)
assert "must contain at least one of below, above" in caplog.text
async def test_if_fires_on_state_above(hass, calls):
"""Test for value triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "battery_level",
"above": 10,
},
"action": {
"service": "test.automation",
"data_template": {
"some": "bat_low {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "bat_low device - {} - 9 - 11 - None".format(
sensor1.entity_id
)
async def test_if_fires_on_state_below(hass, calls):
"""Test for value triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "battery_level",
"below": 10,
},
"action": {
"service": "test.automation",
"data_template": {
"some": "bat_low {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "bat_low device - {} - 11 - 9 - None".format(
sensor1.entity_id
)
async def test_if_fires_on_state_between(hass, calls):
"""Test for value triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "battery_level",
"above": 10,
"below": 20,
},
"action": {
"service": "test.automation",
"data_template": {
"some": "bat_low {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "bat_low device - {} - 9 - 11 - None".format(
sensor1.entity_id
)
hass.states.async_set(sensor1.entity_id, 21)
await hass.async_block_till_done()
assert len(calls) == 1
hass.states.async_set(sensor1.entity_id, 19)
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "bat_low device - {} - 21 - 19 - None".format(
sensor1.entity_id
)
async def test_if_fires_on_state_change_with_for(hass, calls):
"""Test for triggers firing with delay."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "battery_level",
"above": 10,
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
await hass.async_block_till_done()
assert len(calls) == 0
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
assert len(calls) == 1
await hass.async_block_till_done()
assert (
calls[0].data["some"]
== f"turn_off device - {sensor1.entity_id} - unknown - 11 - 0:00:05"
)
|
import unittest
import pydegensac
import numpy as np
class TestPydegensac(unittest.TestCase):
def test_find_homography(self):
src_pts = np.float32([ [0,0],[0,1],[1,1],[1,0] ]).reshape(-1,2)
dst_pts = np.float32([ [0,0],[0,-1],[-1,-1],[-1,0] ]).reshape(-1,2)
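# The correspondences map the unit square onto its point reflection through
# the origin, so the estimated H should be close to a 180-degree rotation
# (diag(-1, -1, 1) up to scale).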
H, mask = pydegensac.findHomography(src_pts, dst_pts, 4, 1)
self.assertEqual(3, len(H))
self.assertEqual(4, len(mask))
|
import logging
import RFXtrx as rfxtrxmod
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import CONF_DEVICES, STATE_ON
from homeassistant.core import callback
from . import (
CONF_AUTOMATIC_ADD,
CONF_DATA_BITS,
CONF_SIGNAL_REPETITIONS,
DEFAULT_SIGNAL_REPETITIONS,
DOMAIN,
SIGNAL_EVENT,
RfxtrxCommandEntity,
get_device_id,
get_rfx_object,
)
from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST
DATA_SWITCH = f"{DOMAIN}_switch"
_LOGGER = logging.getLogger(__name__)
def supported(event):
"""Return whether an event supports switch."""
return (
isinstance(event.device, rfxtrxmod.LightingDevice)
and not event.device.known_to_be_dimmable
and not event.device.known_to_be_rollershutter
or isinstance(event.device, rfxtrxmod.RfyDevice)
)
async def async_setup_entry(
hass,
config_entry,
async_add_entities,
):
"""Set up config entry."""
discovery_info = config_entry.data
device_ids = set()
# Add switch from config file
entities = []
for packet_id, entity_info in discovery_info[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
if not supported(event):
continue
device_id = get_device_id(
event.device, data_bits=entity_info.get(CONF_DATA_BITS)
)
if device_id in device_ids:
continue
device_ids.add(device_id)
entity = RfxtrxSwitch(
event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS]
)
entities.append(entity)
async_add_entities(entities)
@callback
def switch_update(event, device_id):
"""Handle sensor updates from the RFXtrx gateway."""
if not supported(event):
return
if device_id in device_ids:
return
device_ids.add(device_id)
_LOGGER.info(
"Added switch (Device ID: %s Class: %s Sub: %s, Event: %s)",
event.device.id_string.lower(),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
entity = RfxtrxSwitch(
event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event
)
async_add_entities([entity])
# Subscribe to main RFXtrx events
if discovery_info[CONF_AUTOMATIC_ADD]:
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, switch_update)
class RfxtrxSwitch(RfxtrxCommandEntity, SwitchEntity):
"""Representation of a RFXtrx switch."""
async def async_added_to_hass(self):
"""Restore device state."""
await super().async_added_to_hass()
if self._event is None:
old_state = await self.async_get_last_state()
if old_state is not None:
self._state = old_state.state == STATE_ON
def _apply_event(self, event):
"""Apply command from rfxtrx."""
super()._apply_event(event)
if event.values["Command"] in COMMAND_ON_LIST:
self._state = True
elif event.values["Command"] in COMMAND_OFF_LIST:
self._state = False
@callback
def _handle_event(self, event, device_id):
"""Check if event applies to me and update."""
if device_id != self._device_id:
return
self._apply_event(event)
self.async_write_ha_state()
@property
def is_on(self):
"""Return true if device is on."""
return self._state
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._async_send(self._device.send_on)
self._state = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._async_send(self._device.send_off)
self._state = False
self.async_write_ha_state()
|
import os
import os.path
import cherrypy
from cherrypy.lib import static
localDir = os.path.dirname(__file__)
absDir = os.path.join(os.getcwd(), localDir)
class FileDemo(object):
@cherrypy.expose
def index(self):
return """
<html><body>
<h2>Upload a file</h2>
<form action="upload" method="post" enctype="multipart/form-data">
filename: <input type="file" name="myFile" /><br />
<input type="submit" />
</form>
<h2>Download a file</h2>
<a href='download'>This one</a>
</body></html>
"""
@cherrypy.expose
def upload(self, myFile):
out = """<html>
<body>
myFile length: %s<br />
myFile filename: %s<br />
myFile mime-type: %s
</body>
</html>"""
# Although this just counts the file length, it demonstrates
# how to read large files in chunks instead of all at once.
# CherryPy reads the uploaded file into a temporary file;
# myFile.file.read reads from that.
size = 0
while True:
data = myFile.file.read(8192)
if not data:
break
size += len(data)
return out % (size, myFile.filename, myFile.content_type)
@cherrypy.expose
def download(self):
path = os.path.join(absDir, 'pdf_file.pdf')
return static.serve_file(path, 'application/x-download',
'attachment', os.path.basename(path))
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
    # CherryPy always starts with app.root when trying to map request URIs
    # to objects, so we need to mount a request handler root. A request
    # to '/' will be mapped to FileDemo().index().
cherrypy.quickstart(FileDemo(), config=tutconf)
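# A standalone sketch of the same chunked-read pattern used in upload() above;
# it works with any binary file object, and the 8192-byte chunk size is just a
# common default, not a CherryPy requirement.
def stream_length(fileobj, chunk_size=8192):
    """Return the total byte count of fileobj, reading it chunk by chunk."""
    total = 0
    while True:
        chunk = fileobj.read(chunk_size)
        if not chunk:
            break
        total += len(chunk)
    return total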
|
import os
import warnings
__licence__ = "BSD (3 clause)"
def get_github_url(app, view, path):
return (
f"https://github.com/{app.config.edit_on_github_project}/"
f"{view}/{app.config.edit_on_github_branch}/"
f"{app.config.edit_on_github_src_path}{path}"
)
def html_page_context(app, pagename, templatename, context, doctree):
if templatename != "page.html":
return
if not app.config.edit_on_github_project:
warnings.warn("edit_on_github_project not specified")
return
if not doctree:
warnings.warn("doctree is None")
return
path = os.path.relpath(doctree.get("source"), app.builder.srcdir)
show_url = get_github_url(app, "blob", path)
edit_url = get_github_url(app, "edit", path)
context["show_on_github_url"] = show_url
context["edit_on_github_url"] = edit_url
def setup(app):
app.add_config_value("edit_on_github_project", "", True)
app.add_config_value("edit_on_github_branch", "master", True)
app.add_config_value("edit_on_github_src_path", "", True) # 'eg' "docs/"
app.connect("html-page-context", html_page_context)
|
import logging
from .canopy_index import CanopyIndex
from .index import Index
from .core import Enumerator
logger = logging.getLogger(__name__)
class TfIdfIndex(Index):
def __init__(self):
self._index = CanopyIndex()
self._doc_to_id = Enumerator(start=1)
self._parseTerms = self._index.lexicon.parseTerms
def index(self, doc):
if doc not in self._doc_to_id:
i = self._doc_to_id[doc]
self._index.index_doc(i, doc)
def unindex(self, doc):
i = self._doc_to_id.pop(doc)
self._index.unindex_doc(i)
self.initSearch()
def initSearch(self):
self._index.initSearch()
def search(self, doc, threshold=0):
query_list = self._parseTerms(doc)
if query_list:
results = [center for score, center
in self._index.apply(query_list, threshold)]
else:
results = []
return results
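# A rough usage sketch, assuming `doc` is a token sequence the underlying
# CanopyIndex lexicon can parse (dedupe passes tuples of strings here):
#
#   idx = TfIdfIndex()
#   idx.index(('123', 'main', 'st'))
#   idx.index(('123', 'main', 'street'))
#   idx.initSearch()
#   candidates = idx.search(('main', 'st'), threshold=0.5)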
|
import asyncio
import pytest
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.somfy import DOMAIN, config_flow
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers import config_entry_oauth2_flow
from tests.async_mock import patch
from tests.common import MockConfigEntry
CLIENT_ID_VALUE = "1234"
CLIENT_SECRET_VALUE = "5678"
@pytest.fixture()
async def mock_impl(hass):
"""Mock implementation."""
await setup.async_setup_component(hass, "http", {})
impl = config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
CLIENT_ID_VALUE,
CLIENT_SECRET_VALUE,
"https://accounts.somfy.com/oauth/oauth/v2/auth",
"https://accounts.somfy.com/oauth/oauth/v2/token",
)
config_flow.SomfyFlowHandler.async_register_implementation(hass, impl)
return impl
async def test_abort_if_no_configuration(hass):
"""Check flow abort when no configuration."""
flow = config_flow.SomfyFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "missing_configuration"
async def test_abort_if_existing_entry(hass):
"""Check flow abort when an entry already exist."""
flow = config_flow.SomfyFlowHandler()
flow.hass = hass
MockConfigEntry(domain=DOMAIN).add_to_hass(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_full_flow(hass, aiohttp_client, aioclient_mock, current_request):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_CLIENT_ID: CLIENT_ID_VALUE,
CONF_CLIENT_SECRET: CLIENT_SECRET_VALUE,
},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
"https://accounts.somfy.com/oauth/oauth/v2/auth"
f"?response_type=code&client_id={CLIENT_ID_VALUE}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
"https://accounts.somfy.com/oauth/oauth/v2/token",
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch("homeassistant.components.somfy.api.ConfigEntrySomfyApi"):
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["data"]["auth_implementation"] == DOMAIN
result["data"]["token"].pop("expires_at")
assert result["data"]["token"] == {
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
}
assert DOMAIN in hass.config.components
entry = hass.config_entries.async_entries(DOMAIN)[0]
assert entry.state == config_entries.ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
async def test_abort_if_authorization_timeout(hass, mock_impl):
"""Check Somfy authorization timeout."""
flow = config_flow.SomfyFlowHandler()
flow.hass = hass
with patch.object(
mock_impl, "async_generate_authorize_url", side_effect=asyncio.TimeoutError
):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "authorize_url_timeout"
|
from __future__ import division
import mock
import numpy as np
import unittest
import chainer
from chainer import backends
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.fpn import BboxHead
from chainercv.links.model.fpn import FasterRCNN
from chainercv.links.model.fpn import MaskHead
from chainercv.links.model.fpn import RPN
from chainercv.utils import assert_is_bbox
from chainercv.utils import assert_is_detection_link
from chainercv.utils import assert_is_instance_segmentation_link
def _random_array(xp, shape):
return xp.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
class DummyExtractor(chainer.Link):
scales = (1 / 2, 1 / 4, 1 / 8)
mean = _random_array(np, (3, 1, 1))
def forward(self, x):
n, _, h, w = x.shape
return [chainer.Variable(_random_array(
self.xp, (n, 16, int(h * scale), int(w * scale))))
for scale in self.scales]
class DummyFasterRCNN(FasterRCNN):
def __init__(self, n_fg_class, return_values, min_size, max_size):
extractor = DummyExtractor()
super(DummyFasterRCNN, self).__init__(
extractor=extractor,
rpn=RPN(extractor.scales),
bbox_head=BboxHead(n_fg_class + 1, extractor.scales),
mask_head=MaskHead(n_fg_class + 1, extractor.scales),
return_values=return_values,
min_size=min_size, max_size=max_size,
)
def dummy_roi_average_align_2d(
x, rois, roi_indices, outsize, spatial_scale, sampling_ratio=None):
if not isinstance(outsize, chainer.utils.collections_abc.Iterable):
outsize = outsize, outsize
xp = backends.cuda.get_array_module(x.array)
y = _random_array(xp, (len(rois), x.shape[1], outsize[0], outsize[1]))
return chainer.Variable(y)
@testing.parameterize(*testing.product_dict(
[
{'return_values': 'detection'},
{'return_values': 'instance_segmentation'},
{'return_values': 'rpn'}
],
[
{'n_fg_class': 1},
{'n_fg_class': 5},
{'n_fg_class': 20},
],
[
{
'in_sizes': [(480, 640), (320, 320)],
'min_size': 800, 'max_size': 1333,
'expected_shape': (800, 1088),
},
{
'in_sizes': [(200, 50), (400, 100)],
'min_size': 200, 'max_size': 320,
'expected_shape': (320, 96),
},
],
))
class TestFasterRCNN(unittest.TestCase):
def setUp(self):
if self.return_values == 'detection':
return_values = ['bboxes', 'labels', 'scores']
elif self.return_values == 'instance_segmentation':
return_values = ['masks', 'labels', 'scores']
elif self.return_values == 'rpn':
return_values = ['rois']
self.link = DummyFasterRCNN(n_fg_class=self.n_fg_class,
return_values=return_values,
min_size=self.min_size,
max_size=self.max_size)
def test_use_preset(self):
self.link.nms_thresh = 0
self.link.score_thresh = 0
self.link.use_preset('visualize')
self.assertEqual(self.link.nms_thresh, 0.5)
self.assertEqual(self.link.score_thresh, 0.7)
self.link.nms_thresh = 0
self.link.score_thresh = 0
self.link.use_preset('evaluate')
self.assertEqual(self.link.nms_thresh, 0.5)
self.assertEqual(self.link.score_thresh, 0.05)
with self.assertRaises(ValueError):
self.link.use_preset('unknown')
def _check_call(self):
x = _random_array(self.link.xp, (2, 3, 32, 32))
with chainer.using_config('train', False):
hs, rois, roi_indices = self.link(x)
self.assertEqual(len(hs), len(self.link.extractor.scales))
for l in range(len(self.link.extractor.scales)):
self.assertIsInstance(hs[l], chainer.Variable)
self.assertIsInstance(hs[l].data, self.link.xp.ndarray)
self.assertIsInstance(rois, self.link.xp.ndarray)
self.assertEqual(rois.shape[1:], (4,))
self.assertIsInstance(roi_indices, self.link.xp.ndarray)
self.assertEqual(roi_indices.shape[1:], ())
self.assertEqual(rois.shape[0], roi_indices.shape[0])
@attr.slow
def test_call_cpu(self):
with mock.patch('chainer.functions.roi_average_align_2d',
dummy_roi_average_align_2d):
self._check_call()
@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self._check_call()
def test_call_train_mode(self):
x = _random_array(self.link.xp, (2, 3, 32, 32))
with self.assertRaises(AssertionError):
with chainer.using_config('train', True):
self.link(x)
def _check_predict(self):
if self.return_values == 'detection':
assert_is_detection_link(self.link, self.n_fg_class)
elif self.return_values == 'instance_segmentation':
assert_is_instance_segmentation_link(self.link, self.n_fg_class)
elif self.return_values == 'rpn':
imgs = [
np.random.randint(
0, 256, size=(3, 480, 320)).astype(np.float32),
np.random.randint(
0, 256, size=(3, 480, 320)).astype(np.float32)]
result = self.link.predict(imgs)
assert len(result) == 1
assert len(result[0]) == len(imgs)
for i in range(len(result[0])):
roi = result[0][i]
assert_is_bbox(roi)
@attr.slow
def test_predict_cpu(self):
with mock.patch('chainer.functions.roi_average_align_2d',
dummy_roi_average_align_2d):
self._check_predict()
@attr.gpu
def test_predict_gpu(self):
self.link.to_gpu()
self._check_predict()
def test_prepare(self):
imgs = [_random_array(np, (3, s[0], s[1])) for s in self.in_sizes]
out, scales = self.link.prepare(imgs)
self.assertIsInstance(out, np.ndarray)
full_expected_shape = (len(self.in_sizes), 3,
self.expected_shape[0],
self.expected_shape[1])
self.assertEqual(out.shape, full_expected_shape)
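# Where the expected_shape values above come from: prepare() appears to scale
# each image by min(min_size / short_side, max_size / long_side) and pad the
# batch up to a multiple of the FPN stride (32). E.g. (200, 50) with
# min_size=200 and max_size=320 gives scale min(4.0, 1.6) = 1.6 -> (320, 80),
# padded to (320, 96).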
testing.run_module(__name__, __file__)
|
import math
import pytest
from jinja2.exceptions import UndefinedError
from jinja2.nativetypes import NativeEnvironment
from jinja2.nativetypes import NativeTemplate
from jinja2.runtime import Undefined
@pytest.fixture
def env():
return NativeEnvironment()
def test_is_defined_native_return(env):
t = env.from_string("{{ missing is defined }}")
assert not t.render()
def test_undefined_native_return(env):
t = env.from_string("{{ missing }}")
assert isinstance(t.render(), Undefined)
def test_adding_undefined_native_return(env):
t = env.from_string("{{ 3 + missing }}")
with pytest.raises(UndefinedError):
t.render()
def test_cast_int(env):
t = env.from_string("{{ value|int }}")
result = t.render(value="3")
assert isinstance(result, int)
assert result == 3
def test_list_add(env):
t = env.from_string("{{ a + b }}")
result = t.render(a=["a", "b"], b=["c", "d"])
assert isinstance(result, list)
assert result == ["a", "b", "c", "d"]
def test_multi_expression_add(env):
t = env.from_string("{{ a }} + {{ b }}")
result = t.render(a=["a", "b"], b=["c", "d"])
assert not isinstance(result, list)
assert result == "['a', 'b'] + ['c', 'd']"
def test_loops(env):
t = env.from_string("{% for x in value %}{{ x }}{% endfor %}")
result = t.render(value=["a", "b", "c", "d"])
assert isinstance(result, str)
assert result == "abcd"
def test_loops_with_ints(env):
t = env.from_string("{% for x in value %}{{ x }}{% endfor %}")
result = t.render(value=[1, 2, 3, 4])
assert isinstance(result, int)
assert result == 1234
def test_loop_look_alike(env):
t = env.from_string("{% for x in value %}{{ x }}{% endfor %}")
result = t.render(value=[1])
assert isinstance(result, int)
assert result == 1
@pytest.mark.parametrize(
("source", "expect"),
(
("{{ value }}", True),
("{{ value }}", False),
("{{ 1 == 1 }}", True),
("{{ 2 + 2 == 5 }}", False),
("{{ None is none }}", True),
("{{ '' == None }}", False),
),
)
def test_booleans(env, source, expect):
t = env.from_string(source)
result = t.render(value=expect)
assert isinstance(result, bool)
assert result is expect
def test_variable_dunder(env):
t = env.from_string("{{ x.__class__ }}")
result = t.render(x=True)
assert isinstance(result, type)
def test_constant_dunder(env):
t = env.from_string("{{ true.__class__ }}")
result = t.render()
assert isinstance(result, type)
def test_constant_dunder_to_string(env):
t = env.from_string("{{ true.__class__|string }}")
result = t.render()
assert not isinstance(result, type)
assert result in {"<type 'bool'>", "<class 'bool'>"}
def test_string_literal_var(env):
t = env.from_string("[{{ 'all' }}]")
result = t.render()
assert isinstance(result, str)
assert result == "[all]"
def test_string_top_level(env):
t = env.from_string("'Jinja'")
result = t.render()
assert result == "Jinja"
def test_tuple_of_variable_strings(env):
t = env.from_string("'{{ a }}', 'data', '{{ b }}', b'{{ c }}'")
result = t.render(a=1, b=2, c="bytes")
assert isinstance(result, tuple)
assert result == ("1", "data", "2", b"bytes")
def test_concat_strings_with_quotes(env):
t = env.from_string("--host='{{ host }}' --user \"{{ user }}\"")
result = t.render(host="localhost", user="Jinja")
assert result == "--host='localhost' --user \"Jinja\""
def test_no_intermediate_eval(env):
t = env.from_string("0.000{{ a }}")
result = t.render(a=7)
assert isinstance(result, float)
# If intermediate eval happened, 0.000 would render 0.0, then 7
# would be appended, resulting in 0.07.
assert math.isclose(result, 0.0007)
def test_spontaneous_env():
t = NativeTemplate("{{ true }}")
assert isinstance(t.environment, NativeEnvironment)
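# A compact, self-contained illustration of the native-return behavior the
# tests above exercise; the template strings are arbitrary examples.
def _native_demo():
    env = NativeEnvironment()
    # A single expression comes back as the evaluated Python object...
    assert env.from_string("{{ [1, 2] + [3] }}").render() == [1, 2, 3]
    # ...so numeric results keep their numeric type instead of becoming str.
    assert isinstance(env.from_string("{{ 2 ** 10 }}").render(), int)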
|
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from . import DOMAIN
SUPPORT_FLAGS = 0
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo climate devices."""
async_add_entities(
[
DemoClimate(
unique_id="climate_1",
name="HeatPump",
target_temperature=68,
unit_of_measurement=TEMP_FAHRENHEIT,
preset=None,
current_temperature=77,
fan_mode=None,
target_humidity=None,
current_humidity=None,
swing_mode=None,
hvac_mode=HVAC_MODE_HEAT,
hvac_action=CURRENT_HVAC_HEAT,
aux=None,
target_temp_high=None,
target_temp_low=None,
hvac_modes=[HVAC_MODE_HEAT, HVAC_MODE_OFF],
),
DemoClimate(
unique_id="climate_2",
name="Hvac",
target_temperature=21,
unit_of_measurement=TEMP_CELSIUS,
preset=None,
current_temperature=22,
fan_mode="On High",
target_humidity=67,
current_humidity=54,
swing_mode="Off",
hvac_mode=HVAC_MODE_COOL,
hvac_action=CURRENT_HVAC_COOL,
aux=False,
target_temp_high=None,
target_temp_low=None,
hvac_modes=[mode for mode in HVAC_MODES if mode != HVAC_MODE_HEAT_COOL],
),
DemoClimate(
unique_id="climate_3",
name="Ecobee",
target_temperature=None,
unit_of_measurement=TEMP_CELSIUS,
preset="home",
preset_modes=["home", "eco"],
current_temperature=23,
fan_mode="Auto Low",
target_humidity=None,
current_humidity=None,
swing_mode="Auto",
hvac_mode=HVAC_MODE_HEAT_COOL,
hvac_action=None,
aux=None,
target_temp_high=24,
target_temp_low=21,
hvac_modes=[HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT],
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo climate devices config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoClimate(ClimateEntity):
"""Representation of a demo climate device."""
def __init__(
self,
unique_id,
name,
target_temperature,
unit_of_measurement,
preset,
current_temperature,
fan_mode,
target_humidity,
current_humidity,
swing_mode,
hvac_mode,
hvac_action,
aux,
target_temp_high,
target_temp_low,
hvac_modes,
preset_modes=None,
):
"""Initialize the climate device."""
self._unique_id = unique_id
self._name = name
self._support_flags = SUPPORT_FLAGS
if target_temperature is not None:
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE
if preset is not None:
self._support_flags = self._support_flags | SUPPORT_PRESET_MODE
if fan_mode is not None:
self._support_flags = self._support_flags | SUPPORT_FAN_MODE
if target_humidity is not None:
self._support_flags = self._support_flags | SUPPORT_TARGET_HUMIDITY
if swing_mode is not None:
self._support_flags = self._support_flags | SUPPORT_SWING_MODE
if aux is not None:
self._support_flags = self._support_flags | SUPPORT_AUX_HEAT
if HVAC_MODE_HEAT_COOL in hvac_modes or HVAC_MODE_AUTO in hvac_modes:
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE_RANGE
self._target_temperature = target_temperature
self._target_humidity = target_humidity
self._unit_of_measurement = unit_of_measurement
self._preset = preset
self._preset_modes = preset_modes
self._current_temperature = current_temperature
self._current_humidity = current_humidity
self._current_fan_mode = fan_mode
self._hvac_action = hvac_action
self._hvac_mode = hvac_mode
self._aux = aux
self._current_swing_mode = swing_mode
self._fan_modes = ["On Low", "On High", "Auto Low", "Auto High", "Off"]
self._hvac_modes = hvac_modes
self._swing_modes = ["Auto", "1", "2", "3", "Off"]
self._target_temperature_high = target_temp_high
self._target_temperature_low = target_temp_low
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the climate device."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_high(self):
"""Return the highbound target temperature we try to reach."""
return self._target_temperature_high
@property
def target_temperature_low(self):
"""Return the lowbound target temperature we try to reach."""
return self._target_temperature_low
@property
def current_humidity(self):
"""Return the current humidity."""
return self._current_humidity
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
return self._target_humidity
@property
def hvac_action(self):
"""Return current operation ie. heat, cool, idle."""
return self._hvac_action
@property
def hvac_mode(self):
"""Return hvac target hvac state."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return self._hvac_modes
@property
def preset_mode(self):
"""Return preset mode."""
return self._preset
@property
def preset_modes(self):
"""Return preset modes."""
return self._preset_modes
@property
def is_aux_heat(self):
"""Return true if aux heat is on."""
return self._aux
@property
def fan_mode(self):
"""Return the fan setting."""
return self._current_fan_mode
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return self._fan_modes
@property
def swing_mode(self):
"""Return the swing setting."""
return self._current_swing_mode
@property
def swing_modes(self):
"""List of available swing modes."""
return self._swing_modes
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
if kwargs.get(ATTR_TEMPERATURE) is not None:
self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
if (
kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None
and kwargs.get(ATTR_TARGET_TEMP_LOW) is not None
):
self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
self.async_write_ha_state()
async def async_set_humidity(self, humidity):
"""Set new humidity level."""
self._target_humidity = humidity
self.async_write_ha_state()
async def async_set_swing_mode(self, swing_mode):
"""Set new swing mode."""
self._current_swing_mode = swing_mode
self.async_write_ha_state()
async def async_set_fan_mode(self, fan_mode):
"""Set new fan mode."""
self._current_fan_mode = fan_mode
self.async_write_ha_state()
async def async_set_hvac_mode(self, hvac_mode):
"""Set new operation mode."""
self._hvac_mode = hvac_mode
self.async_write_ha_state()
async def async_set_preset_mode(self, preset_mode):
"""Update preset_mode on."""
self._preset = preset_mode
self.async_write_ha_state()
async def async_turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
self._aux = True
self.async_write_ha_state()
async def async_turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
self._aux = False
self.async_write_ha_state()
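# The SUPPORT_* constants combined in __init__ are independent bits: OR-ing
# accumulates capabilities and AND-ing tests for one. A tiny helper showing
# the check side of that pattern (illustrative, not used by the platform):
def _supports(flags, feature):
    """Return True if `feature`'s bit is set in a support-flag bitmask."""
    return bool(flags & feature)
# e.g. _supports(entity.supported_features, SUPPORT_FAN_MODE)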
|
import argparse
import os
import subprocess
import sys
import shlex
import shutil
from gitless import core
from . import pprint
def get_branch(branch_name, repo):
return _get_ref("branch", branch_name, repo)
def get_tag(tag_name, repo):
return _get_ref("tag", tag_name, repo)
def _get_ref(ref_type, ref_name, repo):
ref_type_cap = ref_type.capitalize()
r = getattr(repo, "lookup_" + ref_type)(ref_name)
if not r:
if '/' not in ref_name:
raise ValueError(
'{0} "{1}" doesn\'t exist'.format(ref_type_cap, ref_name))
# It might be a remote ref
remote, remote_ref = ref_name.split('/', 1)
try:
remote_repo = repo.remotes[remote]
except KeyError:
raise ValueError(
'Remote "{0}" doesn\'t exist, and there is no local '
'{1} "{2}"'.format(remote, ref_type_cap, ref_name))
r = getattr(remote_repo, "lookup_" + ref_type)(remote_ref)
if not r:
raise ValueError('{0} "{1}" doesn\'t exist in remote "{2}"'.format(
ref_type_cap, remote_ref, remote))
return r
def get_branch_or_use_upstream(branch_name, arg, repo):
if not branch_name: # use upstream branch
current_b = repo.current_branch
upstream_b = current_b.upstream
if not upstream_b:
raise ValueError(
'No {0} branch specified and the current branch has no upstream '
'branch set'.format(arg))
    ret = upstream_b
else:
ret = get_branch(branch_name, repo)
return ret
def page(fp, repo):
if not sys.stdout.isatty(): # we are being piped or redirected
if sys.platform != 'win32':
# Prevent Python from throwing exceptions on SIGPIPE
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
# memory-friendly way to output contents of file to stdout
with open(fp, 'r') as f:
shutil.copyfileobj(f, sys.stdout)
return
# On Windows, we need to call 'more' through cmd.exe (with 'cmd'). The /C is
# so that the command window gets closed after 'more' finishes
default_pager = 'less' if sys.platform != 'win32' else 'cmd /C more'
try:
pager = repo.config['core.pager']
except KeyError:
pager = '' # empty string will evaluate to False below
pager = pager or os.environ.get('PAGER', None) or default_pager
cmd = shlex.split(pager) # split into constituents
if os.path.basename(cmd[0]) == 'less':
    cmd.extend(['-r', '-f'])  # -r shows raw control chars, -f forces open
cmd.append(fp) # add file name to page command
try:
ret = subprocess.call(cmd, stdin=sys.stdin, stdout=sys.stdout)
if ret != 0:
pprint.err('Call to pager {0} failed'.format(pager))
except OSError:
pprint.err('Couldn\'t launch pager {0}'.format(pager))
pprint.err_exp('change the value of git\'s core.pager setting')
class PathProcessor(argparse.Action):
def __init__(
self, option_strings, dest, repo=None, skip_dir_test=None,
skip_dir_cb=None, recursive=True, **kwargs):
self.repo = repo
self.skip_dir_test = skip_dir_test
self.skip_dir_cb = skip_dir_cb
self.recursive = recursive
super(PathProcessor, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, paths, option_string=None):
root = self.repo.root if self.repo else ''
repo_path = self.repo.path if self.repo else ''
# We add the sep so that we can use `startswith` to determine if a file
# is inside the .git folder
# `normpath` is important because libgit2 returns the repo_path with forward
# slashes on Windows
normalized_repo_path = os.path.normpath(repo_path) + os.path.sep
def process_paths():
for path in paths:
path = os.path.abspath(path)
# Treat symlinks as normal files, even if the link points to a
# directory. The directory could be outside of the repo, then things
# get weird... This is standard git behavior.
if self.recursive and os.path.isdir(path) and not os.path.islink(path):
for curr_dir, dirs, fps in os.walk(path, topdown=True):
if curr_dir.startswith(normalized_repo_path):
dirs[:] = []
continue
curr_dir_rel = os.path.relpath(curr_dir, root)
if (curr_dir_rel != "." and self.skip_dir_test and
self.skip_dir_test(curr_dir_rel)):
if self.skip_dir_cb:
self.skip_dir_cb(curr_dir_rel)
dirs[:] = []
continue
for fp in fps:
yield fp if curr_dir_rel == '.' else os.path.join(curr_dir_rel, fp)
else:
if not path.startswith(normalized_repo_path):
yield os.path.relpath(path, root)
setattr(namespace, self.dest, process_paths())
class CommitIdProcessor(argparse.Action):
def __init__(self, option_strings, dest, repo=None, **kwargs):
self.repo = repo
super(CommitIdProcessor, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, revs, option_string=None):
cids = (self.repo.revparse_single(rev).id for rev in revs)
setattr(namespace, self.dest, cids)
def oei_flags(subparsers, repo):
subparsers.add_argument(
'only', nargs='*',
help='use only files given (tracked modified or untracked)',
action=PathProcessor, repo=repo, metavar='file')
subparsers.add_argument(
'-e', '--exclude', nargs='+',
help='exclude files given (files must be tracked modified)',
action=PathProcessor, repo=repo, metavar='file')
subparsers.add_argument(
'-i', '--include', nargs='+',
help='include files given (files must be untracked)',
action=PathProcessor, repo=repo, metavar='file')
def oei_fs(args, repo):
"""Compute the final fileset per oei flags."""
only = frozenset(args.only if args.only else [])
exclude = frozenset(args.exclude if args.exclude else [])
include = frozenset(args.include if args.include else [])
curr_b = repo.current_branch
if not _oei_validate(only, exclude, include, curr_b):
raise ValueError('Invalid input')
if only:
ret = only
else:
# Tracked modified files
    ret = frozenset(
        f.fp for f in curr_b.status()
        if f.type == core.GL_STATUS_TRACKED and f.modified)
# We get the files from status with forward slashes. On Windows, these
# won't match the paths provided by the user, which are normalized by
# PathProcessor
if sys.platform == 'win32':
ret = frozenset(p.replace('/', '\\') for p in ret)
ret -= exclude
ret |= include
  ret = sorted(ret)
return ret
def _oei_validate(only, exclude, include, curr_b):
"""Validates user input per oei flags.
This function will print to stderr in case user-provided values are invalid
(and return False).
Returns:
True if the input is valid, False if otherwise.
"""
if only and (exclude or include):
pprint.err(
'You provided a list of filenames to be committed but also '
'provided a list of files to be excluded (-e) or included (-i)')
return False
err = []
def validate(fps, check_fn, msg):
    '''Check files against a predicate, collecting error messages.
    fps: iterable of file paths to check
    check_fn: predicate taking a status entry and returning a boolean
    msg: callable that formats an error message for the given file path
    '''
ret = True
if not fps:
return ret
for fp in fps:
try:
f = curr_b.status_file(fp)
except KeyError:
err.append('File {0} doesn\'t exist'.format(fp))
ret = False # set error flag, but keep assessing other files
      else:  # no exception raised: the file exists, now check its status
        if not check_fn(f):
          err.append(msg(fp))  # msg is a str.format bound to a message template
          ret = False
return ret
only_valid = validate(
only, lambda f: f.type == core.GL_STATUS_UNTRACKED or (
f.type == core.GL_STATUS_TRACKED and f.modified),
'File {0} is not a tracked modified or untracked file'.format)
exclude_valid = validate(
exclude, lambda f: f.type == core.GL_STATUS_TRACKED and f.modified,
'File {0} is not a tracked modified file'.format)
include_valid = validate(
include, lambda f: f.type == core.GL_STATUS_UNTRACKED,
'File {0} is not an untracked file'.format)
if only_valid and exclude_valid and include_valid:
return True
for e in err:
pprint.err(e)
return False
"""Aliases for argparse positional arguments."""
class AliasedSubParsersAction(argparse._SubParsersAction):
class _AliasedPseudoAction(argparse.Action):
def __init__(self, name, aliases, help):
dest = name
if aliases:
dest += ' (%s)' % ','.join(aliases)
sup = super(AliasedSubParsersAction._AliasedPseudoAction, self)
sup.__init__(option_strings=[], dest=dest, help=help)
def add_parser(self, name, **kwargs):
if 'aliases' in kwargs:
aliases = kwargs['aliases']
del kwargs['aliases']
else:
aliases = []
parser = super(AliasedSubParsersAction, self).add_parser(name, **kwargs)
# Make the aliases work.
for alias in aliases:
self._name_parser_map[alias] = parser
# Make the help text reflect them, first removing old help entry.
if 'help' in kwargs:
help = kwargs.pop('help')
self._choices_actions.pop()
pseudo_action = self._AliasedPseudoAction(name, aliases, help)
self._choices_actions.append(pseudo_action)
return parser
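# A minimal wiring sketch for the action above: registering it under the
# 'parsers' key is the standard argparse hook for custom subparser actions.
# The command name and alias are illustrative only.
def _aliased_parser_demo():
  parser = argparse.ArgumentParser(prog='gl-demo')
  parser.register('action', 'parsers', AliasedSubParsersAction)
  subparsers = parser.add_subparsers(dest='subcmd')
  subparsers.add_parser('commit', aliases=['ci'], help='record changes')
  return parser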
|
import logging
from numato_gpio import NumatoGpioError
from homeassistant.const import CONF_ID, CONF_NAME, CONF_SENSORS
from homeassistant.helpers.entity import Entity
from . import (
CONF_DEVICES,
CONF_DST_RANGE,
CONF_DST_UNIT,
CONF_PORTS,
CONF_SRC_RANGE,
DATA_API,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:gauge"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the configured Numato USB GPIO ADC sensor ports."""
if discovery_info is None:
return
api = hass.data[DOMAIN][DATA_API]
sensors = []
devices = hass.data[DOMAIN][CONF_DEVICES]
for device in [d for d in devices if CONF_SENSORS in d]:
device_id = device[CONF_ID]
ports = device[CONF_SENSORS][CONF_PORTS]
for port, adc_def in ports.items():
try:
api.setup_input(device_id, port)
except NumatoGpioError as err:
_LOGGER.error(
"Failed to initialize sensor '%s' on Numato device %s port %s: %s",
adc_def[CONF_NAME],
device_id,
port,
err,
)
continue
sensors.append(
NumatoGpioAdc(
adc_def[CONF_NAME],
device_id,
port,
adc_def[CONF_SRC_RANGE],
adc_def[CONF_DST_RANGE],
adc_def[CONF_DST_UNIT],
api,
)
)
add_entities(sensors, True)
class NumatoGpioAdc(Entity):
"""Represents an ADC port of a Numato USB GPIO expander."""
def __init__(self, name, device_id, port, src_range, dst_range, dst_unit, api):
"""Initialize the sensor."""
self._name = name
self._device_id = device_id
self._port = port
self._src_range = src_range
self._dst_range = dst_range
self._state = None
self._unit_of_measurement = dst_unit
self._api = api
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the state."""
try:
adc_val = self._api.read_adc_input(self._device_id, self._port)
adc_val = self._clamp_to_source_range(adc_val)
self._state = self._linear_scale_to_dest_range(adc_val)
except NumatoGpioError as err:
self._state = None
_LOGGER.error(
"Failed to update Numato device %s ADC-port %s: %s",
self._device_id,
self._port,
err,
)
def _clamp_to_source_range(self, val):
# clamp to source range
val = max(val, self._src_range[0])
val = min(val, self._src_range[1])
return val
def _linear_scale_to_dest_range(self, val):
# linear scale to dest range
src_len = self._src_range[1] - self._src_range[0]
adc_val_rel = val - self._src_range[0]
ratio = float(adc_val_rel) / float(src_len)
dst_len = self._dst_range[1] - self._dst_range[0]
dest_val = self._dst_range[0] + ratio * dst_len
return dest_val
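# Worked example of the two helpers above (ranges illustrative): with
# src_range (0, 1024) and dst_range (0.0, 100.0), a raw reading of 1200 is
# clamped to 1024 and maps to 100.0, while 512 maps to
# 0.0 + (512 - 0) / 1024 * (100.0 - 0.0) = 50.0.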
|
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from . import AVAILABLE_SENSORS, DATA_ECOAL_BOILER
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ecoal sensors."""
if discovery_info is None:
return
devices = []
ecoal_contr = hass.data[DATA_ECOAL_BOILER]
for sensor_id in discovery_info:
name = AVAILABLE_SENSORS[sensor_id]
devices.append(EcoalTempSensor(ecoal_contr, name, sensor_id))
add_entities(devices, True)
class EcoalTempSensor(Entity):
"""Representation of a temperature sensor using ecoal status data."""
def __init__(self, ecoal_contr, name, status_attr):
"""Initialize the sensor."""
self._ecoal_contr = ecoal_contr
self._name = name
self._status_attr = status_attr
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
def update(self):
"""Fetch new state data for the sensor.
This is the only method that should fetch new data for Home Assistant.
"""
        # Values read up to 0.5 s ago are still fresh enough to use
status = self._ecoal_contr.get_cached_status()
self._state = getattr(status, self._status_attr)
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.remote import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_triggers(hass, device_reg, entity_reg):
"""Test we get the expected triggers from a remote."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_triggers = [
{
"platform": "device",
"domain": DOMAIN,
"type": "turned_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"platform": "device",
"domain": DOMAIN,
"type": "turned_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert triggers == expected_triggers
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a remote trigger."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
for trigger in triggers:
capabilities = await async_get_device_automation_capabilities(
hass, "trigger", trigger
)
assert capabilities == expected_capabilities
async def test_if_fires_on_state_change(hass, calls):
"""Test for turn_on and turn_off triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_on",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
},
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_off",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "turn_off device - {} - on - off - None".format(
ent1.entity_id
)
hass.states.async_set(ent1.entity_id, STATE_ON)
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "turn_on device - {} - off - on - None".format(
ent1.entity_id
)
async def test_if_fires_on_state_change_with_for(hass, calls):
"""Test for triggers firing with delay."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_off",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
await hass.async_block_till_done()
assert len(calls) == 0
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
assert len(calls) == 1
await hass.async_block_till_done()
assert calls[0].data["some"] == "turn_off device - {} - on - off - 0:00:05".format(
ent1.entity_id
)
|
import datetime
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import HTTP_BAD_REQUEST, HTTP_INTERNAL_SERVER_ERROR
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.util.json import load_json, save_json
from .const import (
CONF_ACTION_BACKGROUND_COLOR,
CONF_ACTION_ICON,
CONF_ACTION_ICON_COLOR,
CONF_ACTION_ICON_ICON,
CONF_ACTION_LABEL,
CONF_ACTION_LABEL_COLOR,
CONF_ACTION_LABEL_TEXT,
CONF_ACTION_NAME,
CONF_ACTIONS,
DOMAIN,
)
CONF_PUSH = "push"
CONF_PUSH_CATEGORIES = "categories"
CONF_PUSH_CATEGORIES_NAME = "name"
CONF_PUSH_CATEGORIES_IDENTIFIER = "identifier"
CONF_PUSH_CATEGORIES_ACTIONS = "actions"
CONF_PUSH_ACTIONS_IDENTIFIER = "identifier"
CONF_PUSH_ACTIONS_TITLE = "title"
CONF_PUSH_ACTIONS_ACTIVATION_MODE = "activationMode"
CONF_PUSH_ACTIONS_AUTHENTICATION_REQUIRED = "authenticationRequired"
CONF_PUSH_ACTIONS_DESTRUCTIVE = "destructive"
CONF_PUSH_ACTIONS_BEHAVIOR = "behavior"
CONF_PUSH_ACTIONS_CONTEXT = "context"
CONF_PUSH_ACTIONS_TEXT_INPUT_BUTTON_TITLE = "textInputButtonTitle"
CONF_PUSH_ACTIONS_TEXT_INPUT_PLACEHOLDER = "textInputPlaceholder"
CONF_USER = "user"
ATTR_FOREGROUND = "foreground"
ATTR_BACKGROUND = "background"
ACTIVATION_MODES = [ATTR_FOREGROUND, ATTR_BACKGROUND]
ATTR_DEFAULT_BEHAVIOR = "default"
ATTR_TEXT_INPUT_BEHAVIOR = "textInput"
BEHAVIORS = [ATTR_DEFAULT_BEHAVIOR, ATTR_TEXT_INPUT_BEHAVIOR]
ATTR_LAST_SEEN_AT = "lastSeenAt"
ATTR_DEVICE = "device"
ATTR_PUSH_TOKEN = "pushToken"
ATTR_APP = "app"
ATTR_PERMISSIONS = "permissions"
ATTR_PUSH_ID = "pushId"
ATTR_DEVICE_ID = "deviceId"
ATTR_PUSH_SOUNDS = "pushSounds"
ATTR_BATTERY = "battery"
ATTR_DEVICE_NAME = "name"
ATTR_DEVICE_LOCALIZED_MODEL = "localizedModel"
ATTR_DEVICE_MODEL = "model"
ATTR_DEVICE_PERMANENT_ID = "permanentID"
ATTR_DEVICE_SYSTEM_VERSION = "systemVersion"
ATTR_DEVICE_TYPE = "type"
ATTR_DEVICE_SYSTEM_NAME = "systemName"
ATTR_APP_BUNDLE_IDENTIFIER = "bundleIdentifier"
ATTR_APP_BUILD_NUMBER = "buildNumber"
ATTR_APP_VERSION_NUMBER = "versionNumber"
ATTR_LOCATION_PERMISSION = "location"
ATTR_NOTIFICATIONS_PERMISSION = "notifications"
PERMISSIONS = [ATTR_LOCATION_PERMISSION, ATTR_NOTIFICATIONS_PERMISSION]
ATTR_BATTERY_STATE = "state"
ATTR_BATTERY_LEVEL = "level"
ATTR_BATTERY_STATE_UNPLUGGED = "Not Charging"
ATTR_BATTERY_STATE_CHARGING = "Charging"
ATTR_BATTERY_STATE_FULL = "Full"
ATTR_BATTERY_STATE_UNKNOWN = "Unknown"
BATTERY_STATES = [
ATTR_BATTERY_STATE_UNPLUGGED,
ATTR_BATTERY_STATE_CHARGING,
ATTR_BATTERY_STATE_FULL,
ATTR_BATTERY_STATE_UNKNOWN,
]
ATTR_DEVICES = "devices"
PUSH_ACTION_SCHEMA = vol.Schema(
{
vol.Required(CONF_PUSH_ACTIONS_IDENTIFIER): vol.Upper,
vol.Required(CONF_PUSH_ACTIONS_TITLE): cv.string,
vol.Optional(
CONF_PUSH_ACTIONS_ACTIVATION_MODE, default=ATTR_BACKGROUND
): vol.In(ACTIVATION_MODES),
vol.Optional(
CONF_PUSH_ACTIONS_AUTHENTICATION_REQUIRED, default=False
): cv.boolean,
vol.Optional(CONF_PUSH_ACTIONS_DESTRUCTIVE, default=False): cv.boolean,
vol.Optional(CONF_PUSH_ACTIONS_BEHAVIOR, default=ATTR_DEFAULT_BEHAVIOR): vol.In(
BEHAVIORS
),
vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_BUTTON_TITLE): cv.string,
vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_PLACEHOLDER): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
PUSH_ACTION_LIST_SCHEMA = vol.All(cv.ensure_list, [PUSH_ACTION_SCHEMA])
PUSH_CATEGORY_SCHEMA = vol.Schema(
{
vol.Required(CONF_PUSH_CATEGORIES_NAME): cv.string,
vol.Required(CONF_PUSH_CATEGORIES_IDENTIFIER): vol.Lower,
vol.Required(CONF_PUSH_CATEGORIES_ACTIONS): PUSH_ACTION_LIST_SCHEMA,
}
)
PUSH_CATEGORY_LIST_SCHEMA = vol.All(cv.ensure_list, [PUSH_CATEGORY_SCHEMA])
ACTION_SCHEMA = vol.Schema(
{
vol.Required(CONF_ACTION_NAME): cv.string,
vol.Optional(CONF_ACTION_BACKGROUND_COLOR): cv.string,
vol.Optional(CONF_ACTION_LABEL): {
vol.Optional(CONF_ACTION_LABEL_TEXT): cv.string,
vol.Optional(CONF_ACTION_LABEL_COLOR): cv.string,
},
vol.Optional(CONF_ACTION_ICON): {
vol.Optional(CONF_ACTION_ICON_ICON): cv.string,
vol.Optional(CONF_ACTION_ICON_COLOR): cv.string,
},
},
)
ACTION_LIST_SCHEMA = vol.All(cv.ensure_list, [ACTION_SCHEMA])
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
CONF_PUSH: {CONF_PUSH_CATEGORIES: PUSH_CATEGORY_LIST_SCHEMA},
CONF_ACTIONS: ACTION_LIST_SCHEMA,
}
},
extra=vol.ALLOW_EXTRA,
)
IDENTIFY_DEVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_NAME): cv.string,
vol.Required(ATTR_DEVICE_LOCALIZED_MODEL): cv.string,
vol.Required(ATTR_DEVICE_MODEL): cv.string,
vol.Required(ATTR_DEVICE_PERMANENT_ID): cv.string,
vol.Required(ATTR_DEVICE_SYSTEM_VERSION): cv.string,
vol.Required(ATTR_DEVICE_TYPE): cv.string,
vol.Required(ATTR_DEVICE_SYSTEM_NAME): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
IDENTIFY_DEVICE_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_DEVICE_SCHEMA)
IDENTIFY_APP_SCHEMA = vol.Schema(
{
vol.Required(ATTR_APP_BUNDLE_IDENTIFIER): cv.string,
vol.Required(ATTR_APP_BUILD_NUMBER): cv.positive_int,
vol.Optional(ATTR_APP_VERSION_NUMBER): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
IDENTIFY_APP_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_APP_SCHEMA)
IDENTIFY_BATTERY_SCHEMA = vol.Schema(
{
vol.Required(ATTR_BATTERY_LEVEL): cv.positive_int,
vol.Required(ATTR_BATTERY_STATE): vol.In(BATTERY_STATES),
},
extra=vol.ALLOW_EXTRA,
)
IDENTIFY_BATTERY_SCHEMA_CONTAINER = vol.All(dict, IDENTIFY_BATTERY_SCHEMA)
IDENTIFY_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE): IDENTIFY_DEVICE_SCHEMA_CONTAINER,
vol.Required(ATTR_BATTERY): IDENTIFY_BATTERY_SCHEMA_CONTAINER,
vol.Required(ATTR_PUSH_TOKEN): cv.string,
vol.Required(ATTR_APP): IDENTIFY_APP_SCHEMA_CONTAINER,
vol.Required(ATTR_PERMISSIONS): vol.All(cv.ensure_list, [vol.In(PERMISSIONS)]),
vol.Required(ATTR_PUSH_ID): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.string,
vol.Optional(ATTR_PUSH_SOUNDS): list,
},
extra=vol.ALLOW_EXTRA,
)
CONFIGURATION_FILE = ".ios.conf"
def devices_with_push(hass):
"""Return a dictionary of push enabled targets."""
targets = {}
for device_name, device in hass.data[DOMAIN][ATTR_DEVICES].items():
if device.get(ATTR_PUSH_ID) is not None:
targets[device_name] = device.get(ATTR_PUSH_ID)
return targets
def enabled_push_ids(hass):
"""Return a list of push enabled target push IDs."""
push_ids = []
for device in hass.data[DOMAIN][ATTR_DEVICES].values():
if device.get(ATTR_PUSH_ID) is not None:
push_ids.append(device.get(ATTR_PUSH_ID))
return push_ids
def devices(hass):
"""Return a dictionary of all identified devices."""
return hass.data[DOMAIN][ATTR_DEVICES]
def device_name_for_push_id(hass, push_id):
"""Return the device name for the push ID."""
for device_name, device in hass.data[DOMAIN][ATTR_DEVICES].items():
        if device.get(ATTR_PUSH_ID) == push_id:
return device_name
return None
async def async_setup(hass, config):
"""Set up the iOS component."""
conf = config.get(DOMAIN)
ios_config = await hass.async_add_executor_job(
load_json, hass.config.path(CONFIGURATION_FILE)
)
if ios_config == {}:
ios_config[ATTR_DEVICES] = {}
ios_config[CONF_USER] = conf or {}
if CONF_PUSH not in ios_config[CONF_USER]:
ios_config[CONF_USER][CONF_PUSH] = {}
hass.data[DOMAIN] = ios_config
# No entry support for notify component yet
discovery.load_platform(hass, "notify", DOMAIN, {}, config)
if conf is not None:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up an iOS entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "sensor")
)
hass.http.register_view(iOSIdentifyDeviceView(hass.config.path(CONFIGURATION_FILE)))
hass.http.register_view(iOSPushConfigView(hass.data[DOMAIN][CONF_USER][CONF_PUSH]))
hass.http.register_view(iOSConfigView(hass.data[DOMAIN][CONF_USER]))
return True
# pylint: disable=invalid-name
class iOSPushConfigView(HomeAssistantView):
"""A view that provides the push categories configuration."""
url = "/api/ios/push"
name = "api:ios:push"
def __init__(self, push_config):
"""Init the view."""
self.push_config = push_config
@callback
def get(self, request):
"""Handle the GET request for the push configuration."""
return self.json(self.push_config)
class iOSConfigView(HomeAssistantView):
"""A view that provides the whole user-defined configuration."""
url = "/api/ios/config"
name = "api:ios:config"
def __init__(self, config):
"""Init the view."""
self.config = config
@callback
def get(self, request):
"""Handle the GET request for the user-defined configuration."""
return self.json(self.config)
class iOSIdentifyDeviceView(HomeAssistantView):
"""A view that accepts device identification requests."""
url = "/api/ios/identify"
name = "api:ios:identify"
def __init__(self, config_path):
"""Initialize the view."""
self._config_path = config_path
async def post(self, request):
"""Handle the POST request for device identification."""
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
hass = request.app["hass"]
# Commented for now while iOS app is getting frequent updates
# try:
# data = IDENTIFY_SCHEMA(req_data)
# except vol.Invalid as ex:
# return self.json_message(
# vol.humanize.humanize_error(request.json, ex),
# HTTP_BAD_REQUEST)
data[ATTR_LAST_SEEN_AT] = datetime.datetime.now().isoformat()
name = data.get(ATTR_DEVICE_ID)
hass.data[DOMAIN][ATTR_DEVICES][name] = data
try:
save_json(self._config_path, hass.data[DOMAIN])
except HomeAssistantError:
return self.json_message("Error saving device.", HTTP_INTERNAL_SERVER_ERROR)
return self.json({"status": "registered"})
|
from collections import namedtuple
import json
import threading
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import network
from perfkitbenchmarker import providers
from perfkitbenchmarker.providers.openstack import utils
OSC_FLOATING_IP_CMD = 'floating ip'
OSC_SEC_GROUP_CMD = 'security group'
OSC_SEC_GROUP_RULE_CMD = 'security group rule'
SC_GROUP_NAME = 'perfkit_sc_group'
ICMP = 'icmp'
TCP = 'tcp'
UDP = 'udp'
FLOATING_IP_ADDRESS = 'floating_ip_address'
FLOATING_IP_ID = 'id'
FLOATING_NETWORK_ID = 'floating_network_id'
FLOATING_IP_KEYS = (FLOATING_IP_ADDRESS, FLOATING_IP_ID, FLOATING_NETWORK_ID,)
FLAGS = flags.FLAGS
MAX_PORT = 65535
MIN_PORT = 1
OpenStackFloatingIP = namedtuple('OpenStackFloatingIP', FLOATING_IP_KEYS)
class OpenStackFirewall(network.BaseFirewall):
"""
An object representing OpenStack Firewall based on Secure Groups.
"""
CLOUD = providers.OPENSTACK
def __init__(self):
self._lock = threading.Lock() # Guards security-group rule set
self.sec_group_rules_set = set()
with self._lock:
cmd = utils.OpenStackCLICommand(self, OSC_SEC_GROUP_CMD, 'show',
SC_GROUP_NAME)
stdout, stderr, _ = cmd.Issue(suppress_warning=True)
if stderr:
cmd = utils.OpenStackCLICommand(self, OSC_SEC_GROUP_CMD, 'create',
SC_GROUP_NAME)
del cmd.flags['format'] # Command does not support json output
cmd.Issue()
def AllowICMP(self, vm, icmp_type=-1, icmp_code=-1, source_range=None):
"""Creates a Security Group Rule on the Firewall to allow/disallow
ICMP traffic.
Args:
vm: The BaseVirtualMachine object to allow ICMP traffic to.
icmp_type: ICMP type to allow. If none given then allows all types.
icmp_code: ICMP code to allow. If none given then allows all codes.
source_range: The source IP range to allow ICMP traffic.
"""
if vm.is_static:
return
sec_group_rule = (ICMP, icmp_type, icmp_code, vm.group_id)
with self._lock:
if sec_group_rule in self.sec_group_rules_set:
return
cmd = utils.OpenStackCLICommand(vm, OSC_SEC_GROUP_RULE_CMD, 'create',
vm.group_id)
if source_range:
cmd.flags['src-ip'] = source_range
cmd.flags['dst-port'] = str(icmp_type)
cmd.flags['proto'] = ICMP
cmd.Issue(suppress_warning=True)
self.sec_group_rules_set.add(sec_group_rule)
def AllowPort(self, vm, start_port, end_port=None, source_range=None):
"""Creates a Security Group Rule on the Firewall to allow for both TCP
and UDP network traffic on given port, or port range.
Args:
vm: The BaseVirtualMachine object to open the port for.
start_port: The first local port to open in a range.
end_port: The last local port to open in a range. If None, only start_port
will be opened.
source_range: The source IP range to allow traffic for these ports.
"""
if vm.is_static:
return
if end_port is None:
end_port = start_port
sec_group_rule = (start_port, end_port, vm.group_id)
with self._lock:
if sec_group_rule in self.sec_group_rules_set:
return
cmd = utils.OpenStackCLICommand(vm, OSC_SEC_GROUP_RULE_CMD, 'create',
vm.group_id)
if source_range:
cmd.flags['src-ip'] = source_range
cmd.flags['dst-port'] = '%d:%d' % (start_port, end_port)
for prot in (TCP, UDP,):
cmd.flags['proto'] = prot
cmd.Issue(suppress_warning=True)
self.sec_group_rules_set.add(sec_group_rule)
def DisallowAllPorts(self):
"""Closes all ports on the firewall."""
pass
class OpenStackFloatingIPPool(object):
_floating_ip_lock = threading.Lock() # Guards floating IP allocation/release
def __init__(self, floating_network_id):
self.floating_network_id = floating_network_id
def associate(self, vm):
with self._floating_ip_lock:
floating_ip_obj = self._get_or_create(vm)
cmd = utils.OpenStackCLICommand(vm, 'server add floating ip', vm.id,
floating_ip_obj.floating_ip_address)
del cmd.flags['format'] # Command does not support json output format
_, stderr, _ = cmd.Issue()
if stderr:
raise errors.Error(stderr)
return floating_ip_obj
def _get_or_create(self, vm):
list_cmd = utils.OpenStackCLICommand(vm, OSC_FLOATING_IP_CMD, 'list')
stdout, stderr, _ = list_cmd.Issue()
if stderr:
raise errors.Error(stderr)
floating_ip_dict_list = json.loads(stdout)
for floating_ip_dict in floating_ip_dict_list:
if (floating_ip_dict['Floating Network'] == self.floating_network_id
and floating_ip_dict['Port'] is None):
        # The 'list' output uses different field names than 'create', so remap them
floating_ip_obj = OpenStackFloatingIP(
floating_ip_address=floating_ip_dict['Floating IP Address'],
floating_network_id=floating_ip_dict['Floating Network'],
id=floating_ip_dict['ID']
)
return floating_ip_obj
return self._allocate(vm)
def _allocate(self, vm):
cmd = utils.OpenStackCLICommand(vm, OSC_FLOATING_IP_CMD, 'create',
self.floating_network_id)
stdout, stderr, _ = cmd.Issue()
    if stderr.strip():  # treat whitespace-only stderr as empty
raise errors.Config.InvalidValue(
'Could not allocate a floating ip from the floating network "%s".'
% self.floating_network_id)
floating_ip_dict = json.loads(stdout)
# Extract subset of returned keys
floating_ip_obj = OpenStackFloatingIP(
floating_ip_address=floating_ip_dict['floating_ip_address'],
floating_network_id=floating_ip_dict['floating_network_id'],
id=floating_ip_dict['id']
)
return floating_ip_obj
def release(self, vm, floating_ip_obj):
cmd = utils.OpenStackCLICommand(vm, OSC_FLOATING_IP_CMD, 'show',
floating_ip_obj.id)
stdout, stderr, _ = cmd.Issue(suppress_warning=True)
if stderr:
return # Not found, moving on
floating_ip_dict = json.loads(stdout)
with self._floating_ip_lock:
delete_cmd = utils.OpenStackCLICommand(vm, OSC_FLOATING_IP_CMD, 'delete',
floating_ip_dict['id'])
      del delete_cmd.flags['format']  # Command does not support json output format
stdout, stderr, _ = delete_cmd.Issue(suppress_warning=True)
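# A rough usage sketch, assuming `vm` is the BaseVirtualMachine the CLI helper
# expects (its `id` is used by 'server add floating ip' above):
#
#   pool = OpenStackFloatingIPPool(floating_network_id='net-1234')
#   fip = pool.associate(vm)   # reuses a free floating IP or allocates one
#   ...
#   pool.release(vm, fip)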
|
from datetime import timedelta
import logging
from haphilipsjs import PhilipsTV
import voluptuous as vol
from homeassistant.components.media_player import (
PLATFORM_SCHEMA,
BrowseMedia,
MediaPlayerEntity,
)
from homeassistant.components.media_player.const import (
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_CHANNELS,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import call_later, track_time_interval
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
SUPPORT_PHILIPS_JS = (
SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY_MEDIA
| SUPPORT_BROWSE_MEDIA
)
CONF_ON_ACTION = "turn_on_action"
DEFAULT_NAME = "Philips TV"
DEFAULT_API_VERSION = "1"
DEFAULT_SCAN_INTERVAL = 30
DELAY_ACTION_DEFAULT = 2.0
DELAY_ACTION_ON = 10.0
PREFIX_SEPARATOR = ": "
PREFIX_SOURCE = "Input"
PREFIX_CHANNEL = "Channel"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): cv.string,
vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
}
)
def _inverted(data):
return {v: k for k, v in data.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Philips TV platform."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
api_version = config.get(CONF_API_VERSION)
turn_on_action = config.get(CONF_ON_ACTION)
tvapi = PhilipsTV(host, api_version)
domain = __name__.split(".")[-2]
on_script = Script(hass, turn_on_action, name, domain) if turn_on_action else None
add_entities([PhilipsTVMediaPlayer(tvapi, name, on_script)])
class PhilipsTVMediaPlayer(MediaPlayerEntity):
"""Representation of a Philips TV exposing the JointSpace API."""
def __init__(self, tv: PhilipsTV, name: str, on_script: Script):
"""Initialize the Philips TV."""
self._tv = tv
self._name = name
self._sources = {}
self._channels = {}
self._on_script = on_script
self._supports = SUPPORT_PHILIPS_JS
if self._on_script:
self._supports |= SUPPORT_TURN_ON
self._update_task = None
def _update_soon(self, delay):
"""Reschedule update task."""
if self._update_task:
self._update_task()
self._update_task = None
self.schedule_update_ha_state(force_refresh=False)
def update_forced(event_time):
self.schedule_update_ha_state(force_refresh=True)
def update_and_restart(event_time):
update_forced(event_time)
self._update_task = track_time_interval(
self.hass, update_forced, timedelta(seconds=DEFAULT_SCAN_INTERVAL)
)
call_later(self.hass, delay, update_and_restart)
async def async_added_to_hass(self):
"""Start running updates once we are added to hass."""
await self.hass.async_add_executor_job(self._update_soon, 0)
@property
def name(self):
"""Return the device name."""
return self._name
@property
def should_poll(self):
"""Device should be polled."""
return False
@property
def supported_features(self):
"""Flag media player features that are supported."""
return self._supports
@property
def state(self):
"""Get the device state. An exception means OFF state."""
if self._tv.on:
return STATE_ON
return STATE_OFF
@property
def source(self):
"""Return the current input source."""
return self._sources.get(self._tv.source_id)
@property
def source_list(self):
"""List of available input sources."""
return list(self._sources.values())
def select_source(self, source):
"""Set the input source."""
data = source.split(PREFIX_SEPARATOR, 1)
if data[0] == PREFIX_SOURCE: # Legacy way to set source
source_id = _inverted(self._sources).get(data[1])
if source_id:
self._tv.setSource(source_id)
elif data[0] == PREFIX_CHANNEL: # Legacy way to set channel
channel_id = _inverted(self._channels).get(data[1])
if channel_id:
self._tv.setChannel(channel_id)
else:
source_id = _inverted(self._sources).get(source)
if source_id:
self._tv.setSource(source_id)
self._update_soon(DELAY_ACTION_DEFAULT)
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._tv.volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._tv.muted
def turn_on(self):
"""Turn on the device."""
if self._on_script:
self._on_script.run(context=self._context)
self._update_soon(DELAY_ACTION_ON)
def turn_off(self):
"""Turn off the device."""
self._tv.sendKey("Standby")
self._tv.on = False
self._update_soon(DELAY_ACTION_DEFAULT)
def volume_up(self):
"""Send volume up command."""
self._tv.sendKey("VolumeUp")
self._update_soon(DELAY_ACTION_DEFAULT)
def volume_down(self):
"""Send volume down command."""
self._tv.sendKey("VolumeDown")
self._update_soon(DELAY_ACTION_DEFAULT)
def mute_volume(self, mute):
"""Send mute command."""
self._tv.setVolume(None, mute)
self._update_soon(DELAY_ACTION_DEFAULT)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._tv.setVolume(volume, self._tv.muted)
self._update_soon(DELAY_ACTION_DEFAULT)
def media_previous_track(self):
"""Send rewind command."""
self._tv.sendKey("Previous")
self._update_soon(DELAY_ACTION_DEFAULT)
def media_next_track(self):
"""Send fast forward command."""
self._tv.sendKey("Next")
self._update_soon(DELAY_ACTION_DEFAULT)
@property
def media_channel(self):
"""Get current channel if it's a channel."""
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return None
@property
def media_title(self):
"""Title of current playing media."""
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return self._sources.get(self._tv.source_id)
@property
def media_content_type(self):
"""Return content type of playing media."""
if self._tv.source_id == "tv" or self._tv.source_id == "11":
return MEDIA_TYPE_CHANNEL
if self._tv.source_id is None and self._tv.channels:
return MEDIA_TYPE_CHANNEL
return None
@property
def media_content_id(self):
"""Content type of current playing media."""
if self.media_content_type == MEDIA_TYPE_CHANNEL:
return self._channels.get(self._tv.channel_id)
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"channel_list": list(self._channels.values())}
def play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
_LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
if media_type == MEDIA_TYPE_CHANNEL:
channel_id = _inverted(self._channels).get(media_id)
if channel_id:
self._tv.setChannel(channel_id)
self._update_soon(DELAY_ACTION_DEFAULT)
else:
_LOGGER.error("Unable to find channel <%s>", media_id)
else:
_LOGGER.error("Unsupported media type <%s>", media_type)
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if media_content_id not in (None, ""):
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
)
return BrowseMedia(
title="Channels",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="",
media_content_type=MEDIA_TYPE_CHANNELS,
can_play=False,
can_expand=True,
children=[
BrowseMedia(
title=channel,
media_class=MEDIA_CLASS_CHANNEL,
media_content_id=channel,
media_content_type=MEDIA_TYPE_CHANNEL,
can_play=True,
can_expand=False,
)
for channel in self._channels.values()
],
)
def update(self):
"""Get the latest data and update device state."""
self._tv.update()
self._sources = {
srcid: source["name"] or f"Source {srcid}"
for srcid, source in (self._tv.sources or {}).items()
}
self._channels = {
chid: channel["name"] for chid, channel in (self._tv.channels or {}).items()
}
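# A minimal YAML configuration for this platform might look like the
# following sketch (the host and turn_on_action values are illustrative
# assumptions, not taken from this file):
#
# media_player:
#   - platform: philips_js
#     host: 192.168.1.100
#     turn_on_action:
#       - service: wake_on_lan.send_magic_packet
#         data:
#           mac: aa:bb:cc:dd:ee:ff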
|
from datetime import datetime, timedelta
import json
from homeassistant.components import recorder
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import Events, RecorderRuns, States
from homeassistant.components.recorder.purge import purge_old_data
from homeassistant.components.recorder.util import session_scope
from homeassistant.util import dt as dt_util
from .common import wait_recording_done
from tests.async_mock import patch
def test_purge_old_states(hass, hass_recorder):
"""Test deleting old states."""
hass = hass_recorder()
_add_test_states(hass)
# make sure we start with 6 states
with session_scope(hass=hass) as session:
states = session.query(States)
assert states.count() == 6
# run purge_old_data()
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert not finished
assert states.count() == 4
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert not finished
assert states.count() == 2
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert finished
assert states.count() == 2
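# purge_old_data deletes expired rows in bounded batches: each call above
# removes part of the backlog and returns False while rows older than
# keep_days remain, which is why a third call is needed before it reports
# finished.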
def test_purge_old_events(hass, hass_recorder):
"""Test deleting old events."""
hass = hass_recorder()
_add_test_events(hass)
with session_scope(hass=hass) as session:
events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
assert events.count() == 6
# run purge_old_data()
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert not finished
assert events.count() == 4
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert not finished
assert events.count() == 2
# we should only have 2 events left
finished = purge_old_data(hass.data[DATA_INSTANCE], 4, repack=False)
assert finished
assert events.count() == 2
def test_purge_method(hass, hass_recorder):
"""Test purge method."""
hass = hass_recorder()
service_data = {"keep_days": 4}
_add_test_events(hass)
_add_test_states(hass)
_add_test_recorder_runs(hass)
# make sure we start with 6 states
with session_scope(hass=hass) as session:
states = session.query(States)
assert states.count() == 6
events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%"))
assert events.count() == 6
recorder_runs = session.query(RecorderRuns)
assert recorder_runs.count() == 7
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
# run purge method - no service data, use defaults
hass.services.call("recorder", "purge")
hass.block_till_done()
# Small wait for recorder thread
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
# only purged old events
assert states.count() == 4
assert events.count() == 4
# run purge method - correct service data
hass.services.call("recorder", "purge", service_data=service_data)
hass.block_till_done()
# Small wait for recorder thread
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
# we should only have 2 states left after purging
assert states.count() == 2
# now we should only have 2 events left
assert events.count() == 2
# now we should only have 3 recorder runs left
assert recorder_runs.count() == 3
assert not ("EVENT_TEST_PURGE" in (event.event_type for event in events.all()))
# run purge method - correct service data, with repack
with patch("homeassistant.components.recorder.purge._LOGGER") as mock_logger:
service_data["repack"] = True
hass.services.call("recorder", "purge", service_data=service_data)
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
assert (
mock_logger.debug.mock_calls[5][1][0]
== "Vacuuming SQL DB to free space"
)
def _add_test_states(hass):
"""Add multiple states to the db for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
eleven_days_ago = now - timedelta(days=11)
attributes = {"test_attr": 5, "test_attr_10": "nice"}
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
with recorder.session_scope(hass=hass) as session:
for event_id in range(6):
if event_id < 2:
timestamp = eleven_days_ago
state = "autopurgeme"
elif event_id < 4:
timestamp = five_days_ago
state = "purgeme"
else:
timestamp = now
state = "dontpurgeme"
session.add(
States(
entity_id="test.recorder2",
domain="sensor",
state=state,
attributes=json.dumps(attributes),
last_changed=timestamp,
last_updated=timestamp,
created=timestamp,
event_id=event_id + 1000,
)
)
def _add_test_events(hass):
"""Add a few events for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
eleven_days_ago = now - timedelta(days=11)
event_data = {"test_attr": 5, "test_attr_10": "nice"}
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
with recorder.session_scope(hass=hass) as session:
for event_id in range(6):
if event_id < 2:
timestamp = eleven_days_ago
event_type = "EVENT_TEST_AUTOPURGE"
elif event_id < 4:
timestamp = five_days_ago
event_type = "EVENT_TEST_PURGE"
else:
timestamp = now
event_type = "EVENT_TEST"
session.add(
Events(
event_type=event_type,
event_data=json.dumps(event_data),
origin="LOCAL",
created=timestamp,
time_fired=timestamp,
)
)
def _add_test_recorder_runs(hass):
"""Add a few recorder_runs for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
eleven_days_ago = now - timedelta(days=11)
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
wait_recording_done(hass)
with recorder.session_scope(hass=hass) as session:
for rec_id in range(6):
if rec_id < 2:
timestamp = eleven_days_ago
elif rec_id < 4:
timestamp = five_days_ago
else:
timestamp = now
session.add(
RecorderRuns(
start=timestamp,
created=dt_util.utcnow(),
end=timestamp + timedelta(days=1),
)
)
|
from flask import Flask, jsonify
from flasgger import Swagger, swag_from
app = Flask(__name__)
swagger_config = {
"headers": [],
"specs": [
{
"endpoint": "swagger",
"route": "/characteristics/swagger.json",
"rule_filter": lambda rule: True, # all in
"model_filter": lambda tag: True, # all in
}
],
"title": "Product Characteristics APIs",
"version": '',
"termsOfService": "",
"static_url_path": "/characteristics/static",
"swagger_ui": True,
"specs_route": "/characteristics/swagger/",
"description": "",
"securityDefinitions": {
"oAuthSample": {
"type": "oauth2",
"flow": "application",
"tokenUrl": "https://api.pgsmartshopassistant.com/o/token/",
}
}
}
colors_spec = {
"tags": [
"colors"
],
"parameters": [
{
"name": "palette",
"in": "path",
"type": "string",
"enum": [
"all",
"rgb",
"cmyk"
],
"required": True,
"default": "all",
"description": "Which palette to filter?"
}
],
"operationId": "get_colors",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"security": {
"colors_oauth": {
"$ref": "#/securityDefinitions/oAuthSample"
}
},
"schemes": [
"http",
"https"
],
"externalDocs": {
"description": "Project repository",
"url": "http://github.com/rochacbruno/flasgger"
},
"definitions": {
"Palette": {
"type": "object",
"properties": {
"palette_name": {
"type": "array",
"items": {
"$ref": "#/definitions/Color"
}
}
}
},
"Color": {
"type": "string"
}
},
"responses": {
"200": {
"description": "A list of colors (may be filtered by palette)",
"schema": {
"$ref": "#/definitions/Palette"
},
"examples": {
"rgb": [
"red",
"green",
"blue"
]
}
}
}
}
@app.route('/colors/<palette>/')
@swag_from(colors_spec)
def colors(palette):
"""
Example using a dictionary as specification
This is the description
You can also set 'summary' and 'description' in
specs_dict
---
# values here override the specs dict
"""
all_colors = {
'cmyk': ['cyan', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
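# Example (assuming the default Flask development server on localhost:5000):
# the merged specification is served at the route configured above,
#   curl http://localhost:5000/characteristics/swagger.json
# and the endpoint itself can be exercised with
#   curl http://localhost:5000/colors/rgb/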
swag = Swagger(app, config=swagger_config)
def test_swag(client, specs_data):
"""
This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
for spec in specs_data.values():
assert 'securityDefinitions' in spec
assert 'oAuthSample' in spec['securityDefinitions']
if __name__ == '__main__':
app.run(debug=True)
|
from datetime import timedelta
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_RADIUS,
LENGTH_KILOMETERS,
LENGTH_METERS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import distance as util_distance, location as util_location
CONF_ALTITUDE = "altitude"
ATTR_CALLSIGN = "callsign"
ATTR_ALTITUDE = "altitude"
ATTR_ON_GROUND = "on_ground"
ATTR_SENSOR = "sensor"
ATTR_STATES = "states"
DOMAIN = "opensky"
DEFAULT_ALTITUDE = 0
EVENT_OPENSKY_ENTRY = f"{DOMAIN}_entry"
EVENT_OPENSKY_EXIT = f"{DOMAIN}_exit"
SCAN_INTERVAL = timedelta(seconds=12) # opensky public limit is 10 seconds
OPENSKY_ATTRIBUTION = (
"Information provided by the OpenSky Network (https://opensky-network.org)"
)
OPENSKY_API_URL = "https://opensky-network.org/api/states/all"
OPENSKY_API_FIELDS = [
"icao24",
ATTR_CALLSIGN,
"origin_country",
"time_position",
"time_velocity",
ATTR_LONGITUDE,
ATTR_LATITUDE,
ATTR_ALTITUDE,
ATTR_ON_GROUND,
"velocity",
"heading",
"vertical_rate",
"sensors",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RADIUS): vol.Coerce(float),
vol.Optional(CONF_NAME): cv.string,
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_ALTITUDE, default=DEFAULT_ALTITUDE): vol.Coerce(float),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Open Sky platform."""
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
add_entities(
[
OpenSkySensor(
hass,
config.get(CONF_NAME, DOMAIN),
latitude,
longitude,
config.get(CONF_RADIUS),
config.get(CONF_ALTITUDE),
)
],
True,
)
class OpenSkySensor(Entity):
"""Open Sky Network Sensor."""
def __init__(self, hass, name, latitude, longitude, radius, altitude):
"""Initialize the sensor."""
self._session = requests.Session()
self._latitude = latitude
self._longitude = longitude
self._radius = util_distance.convert(radius, LENGTH_KILOMETERS, LENGTH_METERS)
self._altitude = altitude
self._state = 0
self._hass = hass
self._name = name
self._previously_tracked = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def _handle_boundary(self, flights, event, metadata):
"""Handle flights crossing region boundary."""
for flight in flights:
if flight in metadata:
altitude = metadata[flight].get(ATTR_ALTITUDE)
else:
# Assume the flight has landed if altitude data is missing.
altitude = 0
data = {
ATTR_CALLSIGN: flight,
ATTR_ALTITUDE: altitude,
ATTR_SENSOR: self._name,
}
self._hass.bus.fire(event, data)
def update(self):
"""Update device state."""
currently_tracked = set()
flight_metadata = {}
states = self._session.get(OPENSKY_API_URL).json().get(ATTR_STATES)
for state in states:
flight = dict(zip(OPENSKY_API_FIELDS, state))
callsign = flight[ATTR_CALLSIGN].strip()
if callsign != "":
flight_metadata[callsign] = flight
else:
continue
missing_location = (
flight.get(ATTR_LONGITUDE) is None or flight.get(ATTR_LATITUDE) is None
)
if missing_location:
continue
if flight.get(ATTR_ON_GROUND):
continue
distance = util_location.distance(
self._latitude,
self._longitude,
flight.get(ATTR_LATITUDE),
flight.get(ATTR_LONGITUDE),
)
if distance is None or distance > self._radius:
continue
altitude = flight.get(ATTR_ALTITUDE)
if altitude > self._altitude and self._altitude != 0:
continue
currently_tracked.add(callsign)
if self._previously_tracked is not None:
entries = currently_tracked - self._previously_tracked
exits = self._previously_tracked - currently_tracked
self._handle_boundary(entries, EVENT_OPENSKY_ENTRY, flight_metadata)
self._handle_boundary(exits, EVENT_OPENSKY_EXIT, flight_metadata)
self._state = len(currently_tracked)
self._previously_tracked = currently_tracked
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_ATTRIBUTION: OPENSKY_ATTRIBUTION}
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "flights"
@property
def icon(self):
"""Return the icon."""
return "mdi:airplane"
|
from flexx import flx
class AppLayoutExample(flx.Widget):
def init(self):
with flx.VBox():
flx.Label(style='background:#cfc;', wrap=1,
text='Here is some content at the top for which we want to '
'use minimal size. Thus the use of a VBox. '
'Below is a splitter, with a box layout on the left '
'and a fix layout on the right.')
with flx.HSplit(flex=1):
with flx.VBox(style='border:1px solid #777;'):
flx.Label(text='Flex 0 0 0')
with flx.HBox(flex=0):
self.b1 = flx.Button(text='Hi')
self.b2 = flx.Button(text='Helloooo world!')
self.b3 = flx.Button(text='Foo bar')
flx.Label(text='Flex 1 1 1')
with flx.HBox(flex=0):
self.b1 = flx.Button(flex=1, text='Hi')
self.b2 = flx.Button(flex=1, text='Helloooo world!')
self.b3 = flx.Button(flex=1, text='Foo bar')
flx.Label(text='Flex 1 0 3')
with flx.HBox(flex=0):
self.b1 = flx.Button(flex=1, text='Hi')
self.b2 = flx.Button(flex=0, text='Helloooo world!')
self.b3 = flx.Button(flex=3, text='Foo bar')
# flx.Widget(flex=1) # spacer widget
with flx.VFix(style='border:1px solid #777;'):
flx.Label(text='Flex 0 0 0 (space divided equally)', style='')
with flx.HFix():
self.b1 = flx.Button(text='Hi')
self.b2 = flx.Button(text='Helloooo world!')
self.b3 = flx.Button(text='Foo bar')
flx.Label(text='Flex 1 1 1', style='')
with flx.HFix():
self.b1 = flx.Button(flex=1, text='Hi')
self.b2 = flx.Button(flex=1, text='Helloooo world!')
self.b3 = flx.Button(flex=1, text='Foo bar')
flx.Label(text='Flex 1 0 3 (the widget with zero collapses)')
with flx.HFix():
self.b1 = flx.Button(flex=1, text='Hi')
self.b2 = flx.Button(flex=0, text='Helloooo world!')
self.b3 = flx.Button(flex=3, text='Foo bar')
# If we would put a spacer widget with flex 1 here, the
# above widgets would collapse due to their zero flex value.
if __name__ == '__main__':
m = flx.launch(AppLayoutExample)
flx.run()
|
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from xiaomi_gateway import XiaomiGateway, XiaomiGatewayDiscovery
from homeassistant import config_entries, core
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_VOLTAGE,
CONF_HOST,
CONF_MAC,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import device_registry as dr
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from .const import (
CONF_INTERFACE,
CONF_KEY,
CONF_PROTOCOL,
CONF_SID,
DEFAULT_DISCOVERY_RETRY,
DOMAIN,
GATEWAYS_KEY,
LISTENER_KEY,
)
_LOGGER = logging.getLogger(__name__)
GATEWAY_PLATFORMS = ["binary_sensor", "sensor", "switch", "light", "cover", "lock"]
GATEWAY_PLATFORMS_NO_KEY = ["binary_sensor", "sensor"]
ATTR_GW_MAC = "gw_mac"
ATTR_RINGTONE_ID = "ringtone_id"
ATTR_RINGTONE_VOL = "ringtone_vol"
ATTR_DEVICE_ID = "device_id"
TIME_TILL_UNAVAILABLE = timedelta(minutes=150)
SERVICE_PLAY_RINGTONE = "play_ringtone"
SERVICE_STOP_RINGTONE = "stop_ringtone"
SERVICE_ADD_DEVICE = "add_device"
SERVICE_REMOVE_DEVICE = "remove_device"
SERVICE_SCHEMA_PLAY_RINGTONE = vol.Schema(
{
vol.Required(ATTR_RINGTONE_ID): vol.All(
vol.Coerce(int), vol.NotIn([9, 14, 15, 16, 17, 18, 19])
),
vol.Optional(ATTR_RINGTONE_VOL): vol.All(
vol.Coerce(int), vol.Clamp(min=0, max=100)
),
}
)
SERVICE_SCHEMA_REMOVE_DEVICE = vol.Schema(
{vol.Required(ATTR_DEVICE_ID): vol.All(cv.string, vol.Length(min=14, max=14))}
)
def setup(hass, config):
"""Set up the Xiaomi component."""
def play_ringtone_service(call):
"""Service to play ringtone through Gateway."""
ring_id = call.data.get(ATTR_RINGTONE_ID)
gateway = call.data.get(ATTR_GW_MAC)
kwargs = {"mid": ring_id}
ring_vol = call.data.get(ATTR_RINGTONE_VOL)
if ring_vol is not None:
kwargs["vol"] = ring_vol
gateway.write_to_hub(gateway.sid, **kwargs)
def stop_ringtone_service(call):
"""Service to stop playing ringtone on Gateway."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, mid=10000)
def add_device_service(call):
"""Service to add a new sub-device within the next 30 seconds."""
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, join_permission="yes")
hass.components.persistent_notification.async_create(
"Join permission enabled for 30 seconds! "
"Please press the pairing button of the new device once.",
title="Xiaomi Aqara Gateway",
)
def remove_device_service(call):
"""Service to remove a sub-device from the gateway."""
device_id = call.data.get(ATTR_DEVICE_ID)
gateway = call.data.get(ATTR_GW_MAC)
gateway.write_to_hub(gateway.sid, remove_device=device_id)
gateway_only_schema = _add_gateway_to_schema(hass, vol.Schema({}))
hass.services.register(
DOMAIN,
SERVICE_PLAY_RINGTONE,
play_ringtone_service,
schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_PLAY_RINGTONE),
)
hass.services.register(
DOMAIN, SERVICE_STOP_RINGTONE, stop_ringtone_service, schema=gateway_only_schema
)
hass.services.register(
DOMAIN, SERVICE_ADD_DEVICE, add_device_service, schema=gateway_only_schema
)
hass.services.register(
DOMAIN,
SERVICE_REMOVE_DEVICE,
remove_device_service,
schema=_add_gateway_to_schema(hass, SERVICE_SCHEMA_REMOVE_DEVICE),
)
return True
async def async_setup_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the xiaomi aqara components from a config entry."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(GATEWAYS_KEY, {})
# Connect to Xiaomi Aqara Gateway
xiaomi_gateway = await hass.async_add_executor_job(
XiaomiGateway,
entry.data[CONF_HOST],
entry.data[CONF_SID],
entry.data[CONF_KEY],
DEFAULT_DISCOVERY_RETRY,
entry.data[CONF_INTERFACE],
entry.data[CONF_PORT],
entry.data[CONF_PROTOCOL],
)
hass.data[DOMAIN][GATEWAYS_KEY][entry.entry_id] = xiaomi_gateway
gateway_discovery = hass.data[DOMAIN].setdefault(
LISTENER_KEY,
XiaomiGatewayDiscovery(hass.add_job, [], entry.data[CONF_INTERFACE]),
)
if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 1:
# start listening for local pushes (only once)
await hass.async_add_executor_job(gateway_discovery.listen)
# register stop callback to shut down listening for local pushes
def stop_xiaomi(event):
"""Stop Xiaomi Socket."""
_LOGGER.debug("Shutting down Xiaomi Gateway Listener")
gateway_discovery.stop_listen()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_xiaomi)
gateway_discovery.gateways[entry.data[CONF_HOST]] = xiaomi_gateway
_LOGGER.debug(
"Gateway with host '%s' connected, listening for broadcasts",
entry.data[CONF_HOST],
)
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, entry.unique_id)},
manufacturer="Xiaomi Aqara",
name=entry.title,
sw_version=entry.data[CONF_PROTOCOL],
)
if entry.data[CONF_KEY] is not None:
platforms = GATEWAY_PLATFORMS
else:
platforms = GATEWAY_PLATFORMS_NO_KEY
for component in platforms:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Unload a config entry."""
if entry.data[CONF_KEY] is not None:
platforms = GATEWAY_PLATFORMS
else:
platforms = GATEWAY_PLATFORMS_NO_KEY
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in platforms
]
)
)
if unload_ok:
hass.data[DOMAIN][GATEWAYS_KEY].pop(entry.entry_id)
if len(hass.data[DOMAIN][GATEWAYS_KEY]) == 0:
# No gateways left, stop Xiaomi socket
hass.data[DOMAIN].pop(GATEWAYS_KEY)
_LOGGER.debug("Shutting down Xiaomi Gateway Listener")
gateway_discovery = hass.data[DOMAIN].pop(LISTENER_KEY)
await hass.async_add_executor_job(gateway_discovery.stop_listen)
return unload_ok
class XiaomiDevice(Entity):
"""Representation a base Xiaomi device."""
def __init__(self, device, device_type, xiaomi_hub, config_entry):
"""Initialize the Xiaomi device."""
self._state = None
self._is_available = True
self._sid = device["sid"]
self._model = device["model"]
self._protocol = device["proto"]
self._name = f"{device_type}_{self._sid}"
self._device_name = f"{self._model}_{self._sid}"
self._type = device_type
self._write_to_hub = xiaomi_hub.write_to_hub
self._get_from_hub = xiaomi_hub.get_from_hub
self._device_state_attributes = {}
self._remove_unavailability_tracker = None
self._xiaomi_hub = xiaomi_hub
self.parse_data(device["data"], device["raw_data"])
self.parse_voltage(device["data"])
if hasattr(self, "_data_key") and self._data_key: # pylint: disable=no-member
self._unique_id = (
f"{self._data_key}{self._sid}" # pylint: disable=no-member
)
else:
self._unique_id = f"{self._type}{self._sid}"
self._gateway_id = config_entry.unique_id
if config_entry.data[CONF_MAC] == format_mac(self._sid):
# this entity belongs to the gateway itself
self._is_gateway = True
self._device_id = config_entry.unique_id
else:
# this entity is connected through zigbee
self._is_gateway = False
self._device_id = self._sid
def _add_push_data_job(self, *args):
self.hass.add_job(self.push_data, *args)
async def async_added_to_hass(self):
"""Start unavailability tracking."""
self._xiaomi_hub.callbacks[self._sid].append(self._add_push_data_job)
self._async_track_unavailable()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def device_id(self):
"""Return the device id of the Xiaomi Aqara device."""
return self._device_id
@property
def device_info(self):
"""Return the device info of the Xiaomi Aqara device."""
if self._is_gateway:
device_info = {
"identifiers": {(DOMAIN, self._device_id)},
"model": self._model,
}
else:
device_info = {
"connections": {(dr.CONNECTION_ZIGBEE, self._device_id)},
"identifiers": {(DOMAIN, self._device_id)},
"manufacturer": "Xiaomi Aqara",
"model": self._model,
"name": self._device_name,
"sw_version": self._protocol,
"via_device": (DOMAIN, self._gateway_id),
}
return device_info
@property
def available(self):
"""Return True if entity is available."""
return self._is_available
@property
def should_poll(self):
"""Return the polling state. No polling needed."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._device_state_attributes
@callback
def _async_set_unavailable(self, now):
"""Set state to UNAVAILABLE."""
self._remove_unavailability_tracker = None
self._is_available = False
self.async_write_ha_state()
@callback
def _async_track_unavailable(self):
if self._remove_unavailability_tracker:
self._remove_unavailability_tracker()
self._remove_unavailability_tracker = async_track_point_in_utc_time(
self.hass, self._async_set_unavailable, utcnow() + TIME_TILL_UNAVAILABLE
)
if not self._is_available:
self._is_available = True
return True
return False
@callback
def push_data(self, data, raw_data):
"""Push from Hub."""
_LOGGER.debug("PUSH >> %s: %s", self, data)
was_unavailable = self._async_track_unavailable()
is_data = self.parse_data(data, raw_data)
is_voltage = self.parse_voltage(data)
if is_data or is_voltage or was_unavailable:
self.async_write_ha_state()
def parse_voltage(self, data):
"""Parse battery level data sent by gateway."""
if "voltage" in data:
voltage_key = "voltage"
elif "battery_voltage" in data:
voltage_key = "battery_voltage"
else:
return False
max_volt = 3300
min_volt = 2800
voltage = data[voltage_key]
self._device_state_attributes[ATTR_VOLTAGE] = round(voltage / 1000.0, 2)
voltage = min(voltage, max_volt)
voltage = max(voltage, min_volt)
percent = ((voltage - min_volt) / (max_volt - min_volt)) * 100
self._device_state_attributes[ATTR_BATTERY_LEVEL] = round(percent, 1)
return True
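# Worked example for parse_voltage: a reported voltage of 3050 mV yields
# ATTR_VOLTAGE = 3.05 and a battery level of
# ((3050 - 2800) / (3300 - 2800)) * 100 = 50.0 percent; readings outside the
# 2800-3300 mV window are clamped before the percentage is computed.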
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
raise NotImplementedError()
def _add_gateway_to_schema(hass, schema):
"""Extend a voluptuous schema with a gateway validator."""
def gateway(sid):
"""Convert sid to a gateway."""
sid = str(sid).replace(":", "").lower()
for gateway in hass.data[DOMAIN][GATEWAYS_KEY].values():
if gateway.sid == sid:
return gateway
raise vol.Invalid(f"Unknown gateway sid {sid}")
kwargs = {}
xiaomi_data = hass.data.get(DOMAIN)
if xiaomi_data is not None:
gateways = list(xiaomi_data[GATEWAYS_KEY].values())
# If the user has only 1 gateway, make it the default for services.
if len(gateways) == 1:
kwargs["default"] = gateways[0].sid
return schema.extend({vol.Required(ATTR_GW_MAC, **kwargs): gateway})
|
from typing import Sequence, Union
_InnerJsArgType = Union[None, str, bool, int, float]
_JsArgType = Union[_InnerJsArgType, Sequence[_InnerJsArgType]]
def string_escape(text: str) -> str:
"""Escape values special to javascript in strings.
With this we should be able to use something like:
elem.evaluateJavaScript("this.value='{}'".format(string_escape(...)))
And all values should work.
"""
# This is a list of tuples because order matters, and using OrderedDict
# makes no sense because we don't actually need dict-like properties.
replacements = (
('\\', r'\\'), # First escape all literal \ signs as \\.
("'", r"\'"), # Then escape ' and " as \' and \".
('"', r'\"'), # (note it won't hurt when we escape the wrong one).
('\n', r'\n'), # Raw newlines would terminate the JS string literal.
('\r', r'\r'),
('\x00', r'\x00'),
('\ufeff', r'\ufeff'),
# http://stackoverflow.com/questions/2965293/
('\u2028', r'\u2028'),
('\u2029', r'\u2029'),
)
for orig, repl in replacements:
text = text.replace(orig, repl)
return text
def to_js(arg: _JsArgType) -> str:
"""Convert the given argument so it's the equivalent in JS."""
if arg is None:
return 'undefined'
elif isinstance(arg, str):
return '"{}"'.format(string_escape(arg))
elif isinstance(arg, bool):
return str(arg).lower()
elif isinstance(arg, (int, float)):
return str(arg)
elif isinstance(arg, list):
return '[{}]'.format(', '.join(to_js(e) for e in arg))
else:
raise TypeError("Don't know how to handle {!r} of type {}!".format(
arg, type(arg).__name__))
def assemble(module: str, function: str, *args: _JsArgType) -> str:
"""Assemble a javascript file and a function call."""
js_args = ', '.join(to_js(arg) for arg in args)
if module == 'window':
parts = ['window', function]
else:
parts = ['window', '_qutebrowser', module, function]
code = '"use strict";\n{}({});'.format('.'.join(parts), js_args)
return code
def wrap_global(name: str, *sources: str) -> str:
"""Wrap a script using window._qutebrowser."""
from qutebrowser.utils import jinja # circular import
template = jinja.js_environment.get_template('global_wrapper.js')
return template.render(code='\n'.join(sources), name=name)
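if __name__ == '__main__':
    # A quick, self-contained sanity check of the helpers above; the
    # expected output is shown in the trailing comments.
    print(to_js(["O'Reilly", None, 1.5, True]))
    # ["O\'Reilly", undefined, 1.5, true]
    print(assemble('scroll', 'toPerc', 50))
    # "use strict";
    # window._qutebrowser.scroll.toPerc(50);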
|
from textwrap import wrap
from weblate.addons.models import ADDONS
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "List installed addons"
def handle(self, *args, **options):
"""List installed addons."""
for _unused, obj in sorted(ADDONS.items()):
self.stdout.write(f".. _addon-{obj.name}:")
self.stdout.write("\n")
self.stdout.write(obj.verbose)
self.stdout.write("-" * len(obj.verbose))
self.stdout.write("\n")
self.stdout.write("\n".join(wrap(obj.description, 79)))
self.stdout.write("\n")
|
from datetime import timedelta
import pytest
from homeassistant.components.google_assistant import helpers
from homeassistant.components.google_assistant.const import ( # noqa: F401
EVENT_COMMAND_RECEIVED,
NOT_EXPOSE_LOCAL,
)
from homeassistant.config import async_process_ha_core_config
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from homeassistant.util import dt
from . import MockConfig
from tests.async_mock import Mock, call, patch
from tests.common import (
async_capture_events,
async_fire_time_changed,
async_mock_service,
)
async def test_google_entity_sync_serialize_with_local_sdk(hass):
"""Test sync serialize attributes of a GoogleEntity."""
hass.states.async_set("light.ceiling_lights", "off")
hass.config.api = Mock(port=1234, use_ssl=True)
await async_process_ha_core_config(
hass,
{"external_url": "https://hostname:1234"},
)
hass.http = Mock(server_port=1234)
config = MockConfig(
hass=hass,
local_sdk_webhook_id="mock-webhook-id",
local_sdk_user_id="mock-user-id",
)
entity = helpers.GoogleEntity(hass, config, hass.states.get("light.ceiling_lights"))
serialized = await entity.sync_serialize(None)
assert "otherDeviceIds" not in serialized
assert "customData" not in serialized
config.async_enable_local_sdk()
with patch("homeassistant.helpers.instance_id.async_get", return_value="abcdef"):
serialized = await entity.sync_serialize(None)
assert serialized["otherDeviceIds"] == [{"deviceId": "light.ceiling_lights"}]
assert serialized["customData"] == {
"httpPort": 1234,
"httpSSL": True,
"proxyDeviceId": None,
"webhookId": "mock-webhook-id",
"baseUrl": "https://hostname:1234",
"uuid": "abcdef",
}
for device_type in NOT_EXPOSE_LOCAL:
with patch(
"homeassistant.components.google_assistant.helpers.get_google_type",
return_value=device_type,
):
serialized = await entity.sync_serialize(None)
assert "otherDeviceIds" not in serialized
assert "customData" not in serialized
async def test_config_local_sdk(hass, hass_client):
"""Test the local SDK."""
command_events = async_capture_events(hass, EVENT_COMMAND_RECEIVED)
turn_on_calls = async_mock_service(hass, "light", "turn_on")
hass.states.async_set("light.ceiling_lights", "off")
assert await async_setup_component(hass, "webhook", {})
config = MockConfig(
hass=hass,
local_sdk_webhook_id="mock-webhook-id",
local_sdk_user_id="mock-user-id",
)
client = await hass_client()
config.async_enable_local_sdk()
resp = await client.post(
"/api/webhook/mock-webhook-id",
json={
"inputs": [
{
"context": {"locale_country": "US", "locale_language": "en"},
"intent": "action.devices.EXECUTE",
"payload": {
"commands": [
{
"devices": [{"id": "light.ceiling_lights"}],
"execution": [
{
"command": "action.devices.commands.OnOff",
"params": {"on": True},
}
],
}
],
"structureData": {},
},
}
],
"requestId": "mock-req-id",
},
)
assert resp.status == 200
result = await resp.json()
assert result["requestId"] == "mock-req-id"
assert len(command_events) == 1
assert command_events[0].context.user_id == config.local_sdk_user_id
assert len(turn_on_calls) == 1
assert turn_on_calls[0].context is command_events[0].context
config.async_disable_local_sdk()
# Webhook is no longer active
resp = await client.post("/api/webhook/mock-webhook-id")
assert resp.status == 200
assert await resp.read() == b""
async def test_config_local_sdk_if_disabled(hass, hass_client):
"""Test the local SDK."""
assert await async_setup_component(hass, "webhook", {})
config = MockConfig(
hass=hass,
local_sdk_webhook_id="mock-webhook-id",
local_sdk_user_id="mock-user-id",
enabled=False,
)
client = await hass_client()
config.async_enable_local_sdk()
resp = await client.post(
"/api/webhook/mock-webhook-id", json={"requestId": "mock-req-id"}
)
assert resp.status == 200
result = await resp.json()
assert result == {
"payload": {"errorCode": "deviceTurnedOff"},
"requestId": "mock-req-id",
}
config.async_disable_local_sdk()
# Webhook is no longer active
resp = await client.post("/api/webhook/mock-webhook-id")
assert resp.status == 200
assert await resp.read() == b""
async def test_agent_user_id_storage(hass, hass_storage):
"""Test a disconnect message."""
hass_storage["google_assistant"] = {
"version": 1,
"key": "google_assistant",
"data": {"agent_user_ids": {"agent_1": {}}},
}
store = helpers.GoogleConfigStore(hass)
await store.async_load()
assert hass_storage["google_assistant"] == {
"version": 1,
"key": "google_assistant",
"data": {"agent_user_ids": {"agent_1": {}}},
}
async def _check_after_delay(data):
async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=2))
await hass.async_block_till_done()
assert hass_storage["google_assistant"] == {
"version": 1,
"key": "google_assistant",
"data": data,
}
store.add_agent_user_id("agent_2")
await _check_after_delay({"agent_user_ids": {"agent_1": {}, "agent_2": {}}})
store.pop_agent_user_id("agent_1")
await _check_after_delay({"agent_user_ids": {"agent_2": {}}})
async def test_agent_user_id_connect():
"""Test the connection and disconnection of users."""
config = MockConfig()
store = config._store
await config.async_connect_agent_user("agent_2")
assert store.add_agent_user_id.call_args == call("agent_2")
await config.async_connect_agent_user("agent_1")
assert store.add_agent_user_id.call_args == call("agent_1")
await config.async_disconnect_agent_user("agent_2")
assert store.pop_agent_user_id.call_args == call("agent_2")
await config.async_disconnect_agent_user("agent_1")
assert store.pop_agent_user_id.call_args == call("agent_1")
@pytest.mark.parametrize("agents", [{}, {"1"}, {"1", "2"}])
async def test_report_state_all(agents):
"""Test a disconnect message."""
config = MockConfig(agent_user_ids=agents)
data = {}
with patch.object(config, "async_report_state") as mock:
await config.async_report_state_all(data)
assert sorted(mock.mock_calls) == sorted(
[call(data, agent) for agent in agents]
)
@pytest.mark.parametrize(
"agents, result",
[({}, 204), ({"1": 200}, 200), ({"1": 200, "2": 300}, 300)],
)
async def test_sync_entities_all(agents, result):
"""Test sync entities ."""
config = MockConfig(agent_user_ids=set(agents.keys()))
with patch.object(
config,
"async_sync_entities",
side_effect=lambda agent_user_id: agents[agent_user_id],
) as mock:
res = await config.async_sync_entities_all()
assert sorted(mock.mock_calls) == sorted([call(agent) for agent in agents])
assert res == result
def test_supported_features_string(caplog):
"""Test bad supported features."""
entity = helpers.GoogleEntity(
None, None, State("test.entity_id", "on", {"supported_features": "invalid"})
)
assert entity.is_supported() is False
assert "Entity test.entity_id contains invalid supported_features value invalid"
|
import posixpath
from absl import flags
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker.linux_packages import cuda_toolkit
PACKAGE_NAME = 'hpcg'
HPCG_BASE_URL = 'https://www.hpcg-benchmark.org/downloads'
HPCG_CUDA_11_BINARY = 'xhpcg-3.1_cuda-11_ompi-4.0_sm_60_sm70_sm80'
HPCG_CUDA_10_TAR = 'hpcg-3.1_cuda-10_ompi-3.1_gcc485_sm_35_sm_50_sm_60_sm_70_sm75_ver_10_9_18.tgz'
HPCG_CUDA_9_TAR = (
'hpcg-3.1_cuda9_ompi1.10.2_gcc485_sm_35_sm_50_sm_60_sm_70_ver_10_8_17.tgz')
HPCG_CUDA_8_TAR = (
'hpcg-3.1_cuda8_ompi1.10.2_gcc485_sm_35_sm_50_sm_60_ver_3_28_17.tgz')
HPCG_CUDA_11 = posixpath.join(HPCG_BASE_URL, HPCG_CUDA_11_BINARY)
HPCG_CUDA_10 = posixpath.join(HPCG_BASE_URL, HPCG_CUDA_10_TAR)
HPCG_CUDA_9 = posixpath.join(HPCG_BASE_URL, HPCG_CUDA_9_TAR)
HPCG_CUDA_8 = posixpath.join(HPCG_BASE_URL, HPCG_CUDA_8_TAR)
HPCG_DIR = '%s/%s' % (linux_packages.INSTALL_DIR, 'hpcg')
PREPROVISIONED_DATA = {
HPCG_CUDA_11_BINARY:
'580b609dd4ab866f17346babecbddd5fa2364eca05a802b3f42ac55522fd6be0',
HPCG_CUDA_10_TAR:
'e0a7e6cbac9a4e1cfe0fe584de05f3740fface83ef3ba88313160b5da658fbb7',
HPCG_CUDA_9_TAR:
'384345954c20fbcd03b9d2dcfef9854a16ab942032763eab77c8f8b43a832204',
HPCG_CUDA_8_TAR:
'd63b8653ab3d04d09874c7bc5251d3083ee3d8cfb3105654a367d2aa619571ec'
}
PACKAGE_DATA_URL = {
HPCG_CUDA_11_BINARY: HPCG_CUDA_11,
HPCG_CUDA_10_TAR: HPCG_CUDA_10,
HPCG_CUDA_9_TAR: HPCG_CUDA_9,
HPCG_CUDA_8_TAR: HPCG_CUDA_8
}
HPCG_CUDA_10_BINARY = 'xhpcg-3.1_gcc_485_cuda-10.0.130_ompi-3.1.0_sm_35_sm_50_sm_60_sm_70_sm_75_ver_10_9_18'
HPCG_CUDA_8_BINARY = 'xhpcg-3.1_gcc_485_cuda8061_ompi_1_10_2_sm_35_sm_50_sm_60_ver_3_28_17'
HPCG_CUDA_9_BINARY = 'xhpcg-3.1_gcc_485_cuda90176_ompi_1_10_2_sm_35_sm_50_sm_60_sm_70_ver_10_8_17'
CUDA_FILES = {
'8.0': (HPCG_CUDA_8_TAR, HPCG_CUDA_8_BINARY),
'9.0': (HPCG_CUDA_9_TAR, HPCG_CUDA_9_BINARY),
'10.0': (HPCG_CUDA_10_TAR, HPCG_CUDA_10_BINARY),
# HPCG only releases a standalone binary for CUDA 11, so reuse the data
# files from the HPCG CUDA 10 package.
'11.0': (HPCG_CUDA_10_TAR, HPCG_CUDA_11_BINARY),
}
FLAGS = flags.FLAGS
def AptInstall(vm):
"""Install the HPCG package on the VM.
Args:
vm: vm to target
"""
vm.Install('wget')
vm.InstallPackages('numactl')
vm.Install('cuda_toolkit')
# HPCG CUDA 10 requires Open MPI 3.1 and HPCG CUDA 11 requires Open MPI 4.0
vm.Install('openmpi')
if FLAGS.cuda_toolkit_version not in CUDA_FILES:
raise cuda_toolkit.UnsupportedCudaVersionException(
f'HPCG only supports CUDA {sorted(CUDA_FILES)}')
hpcg_tar, hpcg_binary = CUDA_FILES[FLAGS.cuda_toolkit_version]
vm.InstallPreprovisionedPackageData(PACKAGE_NAME, [hpcg_tar],
linux_packages.INSTALL_DIR)
vm.RemoteCommand('rm -rf %s' % HPCG_DIR)
vm.RemoteCommand('mkdir %s' % HPCG_DIR)
vm.RemoteCommand(
'cd %s && tar xvf %s --directory=%s --strip-components=1' %
(linux_packages.INSTALL_DIR, hpcg_tar, HPCG_DIR))
# Create a symlink from the hpcg binary to 'hpcg'
if FLAGS.cuda_toolkit_version == '11.0':
# HPCG only releases a standalone binary for CUDA 11, so reuse the data
# files from the HPCG CUDA 10 package.
vm.InstallPreprovisionedPackageData(PACKAGE_NAME, [hpcg_binary], HPCG_DIR)
vm.RemoteCommand(f'chmod +x {posixpath.join(HPCG_DIR, hpcg_binary)}')
vm.RemoteCommand('cd %s && ln -s %s %s' % (HPCG_DIR, hpcg_binary, 'hpcg'))
def YumInstall(_):
"""Install the HPCG package on the VM using yum."""
raise NotImplementedError(
'Installation of HPCG is only supported on Ubuntu')
|
import os
import unittest
import roslib.rosenv
class EnvTest(unittest.TestCase):
def test_get_ros_root(self):
from roslib.rosenv import get_ros_root
self.assertEquals(None, get_ros_root(required=False, env={}))
self.assertEquals(None, get_ros_root(False, {}))
try:
get_ros_root(required=True, env={})
self.fail('get_ros_root should have failed')
except Exception:
pass
env = {'ROS_ROOT': '/fake/path'}
self.assertEquals('/fake/path', get_ros_root(required=False, env=env))
try:
get_ros_root(required=True, env=env)
self.fail('get_ros_root should have failed')
except Exception:
pass
def test_get_ros_package_path(self):
from roslib.rosenv import get_ros_package_path
self.assertEquals(None, get_ros_package_path(required=False, env={}))
self.assertEquals(None, get_ros_package_path(False, {}))
try:
get_ros_package_path(required=True, env={})
self.fail('get_ros_package_path should have raised')
except Exception:
pass
env = {'ROS_PACKAGE_PATH': ':'}
self.assertEquals(':', get_ros_package_path(True, env=env))
self.assertEquals(':', get_ros_package_path(False, env=env))
# trip-wire tests. Cannot guarantee that ROS_PACKAGE_PATH is set
# to valid value on test machine, just make sure logic doesn't crash
self.assertEquals(os.environ.get('ROS_PACKAGE_PATH', None), get_ros_package_path(required=False))
def test_get_ros_master_uri(self):
from roslib.rosenv import get_master_uri
self.assertEquals(None, get_master_uri(required=False, env={}))
self.assertEquals(None, get_master_uri(False, {}))
try:
get_master_uri(required=True, env={})
self.fail('get_master_uri should have raised')
except Exception:
pass
env = {'ROS_MASTER_URI': 'http://localhost:1234'}
self.assertEquals('http://localhost:1234', get_master_uri(True, env=env))
self.assertEquals('http://localhost:1234', get_master_uri(False, env=env))
argv = ['__master:=http://localhost:5678']
self.assertEquals('http://localhost:5678', get_master_uri(False, env=env, argv=argv))
try:
argv = ['__master:=http://localhost:5678:=http://localhost:1234']
get_master_uri(required=False, env=env, argv=argv)
self.fail('should have thrown')
except roslib.rosenv.ROSEnvException:
pass
try:
argv = ['__master:=']
get_master_uri(False, env=env, argv=argv)
self.fail('should have thrown')
except roslib.rosenv.ROSEnvException:
pass
# make sure test works with os.environ
self.assertEquals(os.environ.get('ROS_MASTER_URI', None), get_master_uri(required=False))
|
from unittest import mock
import pytest
@pytest.fixture
def timer(stubs):
return stubs.FakeTimer()
def test_timeout(timer):
"""Test whether timeout calls the functions."""
func = mock.Mock()
func2 = mock.Mock()
timer.timeout.connect(func)
timer.timeout.connect(func2)
func.assert_not_called()
func2.assert_not_called()
timer.timeout.emit()
func.assert_called_once_with()
func2.assert_called_once_with()
def test_disconnect_all(timer):
"""Test disconnect without arguments."""
func = mock.Mock()
timer.timeout.connect(func)
timer.timeout.disconnect()
timer.timeout.emit()
func.assert_not_called()
def test_disconnect_one(timer):
"""Test disconnect with a single argument."""
func = mock.Mock()
timer.timeout.connect(func)
timer.timeout.disconnect(func)
timer.timeout.emit()
func.assert_not_called()
def test_disconnect_all_invalid(timer):
"""Test disconnecting with no connections."""
with pytest.raises(TypeError):
timer.timeout.disconnect()
def test_disconnect_one_invalid(timer):
"""Test disconnecting with an invalid connection."""
func1 = mock.Mock()
func2 = mock.Mock()
timer.timeout.connect(func1)
with pytest.raises(TypeError):
timer.timeout.disconnect(func2)
func1.assert_not_called()
func2.assert_not_called()
timer.timeout.emit()
func1.assert_called_once_with()
def test_singleshot(timer):
"""Test setting singleShot."""
assert not timer.isSingleShot()
timer.setSingleShot(True)
assert timer.isSingleShot()
timer.start()
assert timer.isActive()
timer.timeout.emit()
assert not timer.isActive()
def test_active(timer):
"""Test isActive."""
assert not timer.isActive()
timer.start()
assert timer.isActive()
timer.stop()
assert not timer.isActive()
def test_interval(timer):
"""Test setting an interval."""
assert timer.interval() == 0
timer.setInterval(1000)
assert timer.interval() == 1000
|
from datetime import timedelta
from homeassistant.components import recorder
from homeassistant.util import dt as dt_util
from tests.common import fire_time_changed
def wait_recording_done(hass):
"""Block till recording is done."""
trigger_db_commit(hass)
hass.block_till_done()
hass.data[recorder.DATA_INSTANCE].block_till_done()
hass.block_till_done()
def trigger_db_commit(hass):
"""Force the recorder to commit."""
for _ in range(recorder.DEFAULT_COMMIT_INTERVAL):
# We only commit on time change
fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
|
import unicodedata
def is_whitespace(char):
"""Checks whether `chars` is a whitespace character."""
# \t, \n, and \r are technically contorl characters but we treat them
# as whitespace since they are generally considered as such.
return (char == " ") or \
(char == "\t") or \
(char == "\n") or \
(char == "\r") or \
(unicodedata.category(char) == "Zs")
def is_control(char):
"""Checks whether `chars` is a control character."""
# These are technically control characters but we count them as whitespace
# characters.
if char == "\t" or char == "\n" or char == "\r":
return False
cat = unicodedata.category(char)
if cat in ["Cc", "Cf"]:
return True
return False
def is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
# We treat all non-letter/number ASCII as punctuation.
# Characters such as "^", "$", and "`" are not in the Unicode
# Punctuation class but we treat them as punctuation anyways, for
# consistency.
condition = (33 <= cp <= 47) or (58 <= cp <= 64) or \
(91 <= cp <= 96) or (123 <= cp <= 126)
cat = unicodedata.category(char)
if condition or cat.startswith("P"):
return True
return False
def is_chinese_char(cp):
"""Checks whether CP is the codepoint of a CJK character."""
# This defines a "chinese character" as anything in the CJK Unicode block:
# https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
#
# Note that the CJK Unicode block is NOT all Japanese and Korean
# characters, despite its name. The modern Korean Hangul alphabet is a
# different block, as is Japanese Hiragana and Katakana. Those alphabets
# are used to write space-separated words, so they are not treated
# specially and are handled like all of the other languages.
return (0x4E00 <= cp <= 0x9FFF) or \
(0x3400 <= cp <= 0x4DBF) or \
(0x20000 <= cp <= 0x2A6DF) or \
(0x2A700 <= cp <= 0x2B73F) or \
(0x2B740 <= cp <= 0x2B81F) or \
(0x2B820 <= cp <= 0x2CEAF) or \
(0xF900 <= cp <= 0xFAFF) or \
(0x2F800 <= cp <= 0x2FA1F)
def run_strip_accents(text):
"""Strips accents from a piece of text."""
text = unicodedata.normalize("NFD", text)
output = [char for char in text if not unicodedata.category(char) == 'Mn']
return "".join(output)
def run_split_on_punc(text):
"""Splits punctuation on a piece of text."""
chars = list(text)
i = 0
start_new_word = True
output = []
while i < len(chars):
char = chars[i]
if is_punctuation(char):
output.append([char])
start_new_word = True
else:
if start_new_word:
output.append([])
start_new_word = False
output[-1].append(char)
i += 1
return ["".join(x) for x in output]
def whitespace_tokenize(text):
"""Runs basic whitespace cleaning and splitting on a piece of text."""
text = text.strip()
tokens = text.split()
return tokens
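if __name__ == "__main__":
    # A quick, self-contained demonstration of the helpers above; the
    # expected output is shown in the trailing comments.
    print(whitespace_tokenize(" hello   world "))  # ['hello', 'world']
    print(run_split_on_punc("hello, world!"))  # ['hello', ',', ' world', '!']
    print(run_strip_accents(u"caf\u00e9"))  # cafe
    print(is_chinese_char(0x4E2D), is_chinese_char(ord("a")))  # True False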
|
import io
import logging
from operator import itemgetter
import bson
import six
from bson.binary import Binary
from bson.errors import InvalidDocument
from six.moves import cPickle, xrange
from ._version_store_utils import checksum, pickle_compat_load, version_base_or_id
from .._compression import decompress, compress_array
from ..exceptions import UnsupportedPickleStoreVersion
from .._config import SKIP_BSON_ENCODE_PICKLE_STORE, MAX_BSON_ENCODE
# new versions of chunked pickled objects MUST begin with __chunked__
_MAGIC_CHUNKED = '__chunked__'
_MAGIC_CHUNKEDV2 = '__chunked__V2'
_CHUNK_SIZE = 15 * 1024 * 1024 # 15MB
_HARD_MAX_BSON_ENCODE = 10 * 1024 * 1024 # 10MB
logger = logging.getLogger(__name__)
class PickleStore(object):
@classmethod
def initialize_library(cls, *args, **kwargs):
pass
def get_info(self, _version):
return {
'type': 'blob',
'handler': self.__class__.__name__,
}
def read(self, mongoose_lib, version, symbol, **kwargs):
blob = version.get("blob")
if blob is not None:
if blob == _MAGIC_CHUNKEDV2:
collection = mongoose_lib.get_top_level_collection()
data = b''.join(decompress(x['data']) for x in sorted(
collection.find({'symbol': symbol, 'parent': version_base_or_id(version)}),
key=itemgetter('segment')))
elif blob == _MAGIC_CHUNKED:
collection = mongoose_lib.get_top_level_collection()
data = b''.join(x['data'] for x in sorted(
collection.find({'symbol': symbol, 'parent': version_base_or_id(version)}),
key=itemgetter('segment')))
data = decompress(data)
else:
if blob[:len(_MAGIC_CHUNKED)] == _MAGIC_CHUNKED:
logger.error("Data was written by unsupported version of pickle store for symbol %s. Upgrade Arctic and try again" % symbol)
raise UnsupportedPickleStoreVersion("Data was written by unsupported version of pickle store")
try:
data = decompress(blob)
except Exception:
logger.error("Failed to read symbol %s" % symbol)
raise
if six.PY2:
# Providing encoding is not possible on PY2
return pickle_compat_load(io.BytesIO(data))
else:
try:
# The default encoding is ascii.
return pickle_compat_load(io.BytesIO(data))
except UnicodeDecodeError as ue:
# Using encoding='latin1' is required for unpickling NumPy arrays and instances of datetime, date
# and time pickled by Python 2: https://docs.python.org/3/library/pickle.html#pickle.load
logger.info("Could not Unpickle with ascii, Using latin1.")
encoding = kwargs.get('encoding', 'latin_1') # Check if someone has manually specified encoding.
return pickle_compat_load(io.BytesIO(data), encoding=encoding)
return version['data']
@staticmethod
def read_options():
return []
def write(self, arctic_lib, version, symbol, item, _previous_version):
# Currently we try to bson encode if the data is less than a given size and store it in
# the version collection, but pickling might be preferable if we have characters that don't
# play well with the bson encoder or if you always want your data in the data collection.
if not SKIP_BSON_ENCODE_PICKLE_STORE:
try:
# If it's encodeable, then ship it
b = bson.BSON.encode({'data': item})
if len(b) < min(MAX_BSON_ENCODE, _HARD_MAX_BSON_ENCODE):
version['data'] = item
return
except InvalidDocument:
pass
# Pickle, chunk and store the data
collection = arctic_lib.get_top_level_collection()
# Try to pickle it. This is best effort
version['blob'] = _MAGIC_CHUNKEDV2
pickled = cPickle.dumps(item, protocol=cPickle.HIGHEST_PROTOCOL)
data = compress_array([pickled[i * _CHUNK_SIZE: (i + 1) * _CHUNK_SIZE] for i in xrange(int(len(pickled) / _CHUNK_SIZE + 1))])
for seg, d in enumerate(data):
segment = {'data': Binary(d)}
segment['segment'] = seg
sha = checksum(symbol, segment)
collection.update_one({'symbol': symbol, 'sha': sha},
{'$set': segment, '$addToSet': {'parent': version['_id']}},
upsert=True)
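# Chunking note: for a pickled payload of n bytes, the write path above
# produces int(n / _CHUNK_SIZE + 1) segments, e.g. a 40 MB pickle is split
# into three chunks of at most 15 MB, each compressed and upserted with its
# own checksum and segment index.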
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ntpath
import os
import re
from absl import flags
import numpy as np
from perfkitbenchmarker import data
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
import six
from six.moves import range
FLAGS = flags.FLAGS
flags.DEFINE_list('hammerdb_tpcc_virtual_user_list', [1],
'The list of numbers of virtual users making '
'transaction at the same time. '
'Default: [1]')
flags.DEFINE_integer('hammerdb_tpcc_warehouse', 1,
'The number of warehouse used in tpcc benchmarking. '
'Default: 1')
flags.DEFINE_integer('hammerdb_tpcc_schema_virtual_user', 1,
'The number of virtual user used when '
'building the schema. '
'Default: 1')
flags.DEFINE_integer('hammerdb_tpcc_runtime', 60,
'The running time for the tpcc benchmark test. '
'Default: 60. Unit: second')
flags.DEFINE_bool('hammerdb_run_tpcc', True,
'tpcc is a sql benchmark to measure transaction speed. '
'Default: True')
flags.DEFINE_bool('hammerdb_run_tpch', True,
'tpch is a sql benchmark to calculate the query-per-hour '
'performance metrics. '
'Default: True')
flags.DEFINE_integer('hammerdb_tpch_scale_fact', 1,
'The scale factor used for the tpch benchmark. '
'Default: 1.')
flags.DEFINE_integer('hammerdb_tpch_virtual_user', 4,
'The virtual user number to run tpch test. '
'Default: 4.')
HAMMERDB_RETRIES = 10
HAMMERDB_DIR = 'HammerDB-3.1-Win'
HAMMERDB_ZIP = HAMMERDB_DIR + '.zip'
HAMMERDB_URL = ('https://versaweb.dl.sourceforge.net/project/'
'hammerdb/HammerDB/HammerDB-3.1/' + HAMMERDB_ZIP)
HAMMERDB_LOGFILE = 'hammerdb.log'
# the files that are dynamically generated on the vm
# to be run by the hammerdb test
HAMMERDB_SCHEMA_FILE = 'schema.tcl'
HAMMERDB_SQLRUN_FILE = 'sqlrun.tcl'
HAMMERDB_SCHEMA_FILE_TPCH = 'schema-tpch.tcl'
HAMMERDB_SQLRUN_FILE_TPCH_POWER = 'sqlrun-tpch-power.tcl'
HAMMERDB_SQLRUN_FILE_TPCH_THROUGHPUT = 'sqlrun-tpch-throughput.tcl'
HAMMERDB_CLI_FILE = 'hammerdbcli'
HAMMERDB_CLI_FILE_TPCH = 'hammerdbclitpch'
HAMMERDB_CLI_BAT_FILE = 'hammerdbcli.bat'
HAMMERDB_CLI_BAT_FILE_TPCH = 'hammerdbclitpch.bat'
HAMMERDB_TEST_TIMEOUT_MULTIPLIER = 2
HAMMERDB_SCHEMA_WAITTIME = 5000
HAMMERDB_SQLRUN_WAITIME_ADDON = 80
HAMMERDB_CREATE_FILE_TIMEOUT = 10
HAMMERDB_DB_CONFIG_TIMEOUT = 200
HAMMERDB_SCALE_TO_STREAMS = {
'1': 2,
'10': 3,
'30': 4,
'100': 5,
'300': 6,
'1000': 7,
'3000': 8,
'10000': 9,
'30000': 10,
'100000': 11
}
def _GetDataContents(file_name):
"""Cet the files in the data folder."""
path = data.ResourcePath('hammerdb/' + file_name)
with open(path) as fp:
contents = fp.read()
return contents
def Install(vm):
"""Installs the HammerDB package on the VM."""
zip_path = ntpath.join(vm.temp_dir, HAMMERDB_DIR)
vm.DownloadFile(HAMMERDB_URL, zip_path)
vm.UnzipFile(zip_path, vm.temp_dir)
def _CreateSingleScript(vm, contents, filename):
"""Create a single file named as <filename> with <contents> as contents."""
hammerdb_exe_dir = ntpath.join(vm.temp_dir, 'HammerDB-3.1')
command = ('cd {hammerdb_exe_dir}; echo \"{contents}\" > .\\tmp.txt; '
'cat tmp.txt | Out-File -FilePath {filename}'
' -Encoding ascii').format(
hammerdb_exe_dir=hammerdb_exe_dir,
contents=contents,
filename=filename)
vm.RemoteCommand(command, timeout=HAMMERDB_CREATE_FILE_TIMEOUT)
def _CreateFiles(vm):
"""Create the file dynamically used by the hammerdb.
This function creates the following files:
- schema.tcl and schema-tpch.tcl, the files for hammerdb to
build the schema of tpcc and tpch benchmarks
- sqlrun.tcl, sqlrun-tpch-power.tcl and sqlrun-tpch-throughput.tcl, the
benchmark test script that does the actual measurements.
- hammerdbcli, hammerdbclitpch, hammerdbcli.bat and hammerdbclitpch.bat, the
cli tool and batch file for starting the hammerdb and run the different
scripts of each benchmarking stage.
"""
# create the content for the schema building file of tpcc
schema = _GetDataContents('hammerdb_schema_tpcc.txt').replace(
'*ware_house_num*', str(FLAGS.hammerdb_tpcc_warehouse)).replace(
'*virtual_user_num*', str(FLAGS.hammerdb_tpcc_schema_virtual_user))
# create the content for the tpcc benchmark run time file.
virtual_user_seq = ''
for virtual_user_num in FLAGS.hammerdb_tpcc_virtual_user_list:
virtual_user_seq += str(virtual_user_num)
virtual_user_seq += ' '
sqlrun = _GetDataContents('hammerdb_run_tpcc.txt').replace(
'*virtual_user_seq*', virtual_user_seq).replace(
'*timer*', str(FLAGS.hammerdb_tpcc_runtime + 60)).replace(
'*duration*', str(FLAGS.hammerdb_tpcc_runtime / 60))
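  # The substitutions above assume HammerDB's script semantics: *timer* gets
  # a 60 second cushion on top of the configured runtime, and *duration* is
  # the same runtime expressed in minutes.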
whether_run_tpcc = 'true' if FLAGS.hammerdb_run_tpcc else 'false'
whether_run_tpch = 'true' if FLAGS.hammerdb_run_tpch else 'false'
  # create the content for the tpcc cli tool at run time.
cli = _GetDataContents('hammerdb_cli_tpcc.txt').replace(
'*schema_file_name*', HAMMERDB_SCHEMA_FILE).replace(
'*sqlrun_file_name*', HAMMERDB_SQLRUN_FILE).replace(
'*whether_run_tpcc*', whether_run_tpcc)
  # create the content for the tpch cli tool at run time.
cli_tpch = _GetDataContents('hammerdb_cli_tpch.txt').replace(
'*whether_run_tpch*', whether_run_tpch)
cli_tpch = cli_tpch.replace('*schema_file_name_tpch*',
HAMMERDB_SCHEMA_FILE_TPCH)
cli_tpch = cli_tpch.replace('*sqlrun_power_file_name*',
HAMMERDB_SQLRUN_FILE_TPCH_POWER)
cli_tpch = cli_tpch.replace('*sqlrun_throughput_file_name*',
HAMMERDB_SQLRUN_FILE_TPCH_THROUGHPUT)
cli_bat = _GetDataContents('hammerdb_cli_bat_tpcc.txt')
cli_bat_tpch = _GetDataContents('hammerdb_cli_bat_tpch.txt')
schema_tpch = _GetDataContents('hammerdb_schema_tpch.txt')
sqlrun_tpch_power = _GetDataContents('hammerdb_run_tpch.txt').replace(
'*virtual_user*', str(1)).replace(
'*test_sequence_complete_sentence*', '\"TPCH POWER COMPLETE\"')
sqlrun_tpch_throughput = _GetDataContents('hammerdb_run_tpch.txt').replace(
'*virtual_user*', str(FLAGS.hammerdb_tpch_virtual_user)).replace(
'*test_sequence_complete_sentence*', '\"TPCH THROUGHPUT COMPLETE\"')
schema = schema.replace('\"', '`\"')
sqlrun = sqlrun.replace('\"', '`\"')
schema_tpch = schema_tpch.replace('\"', '`\"')
sqlrun_tpch_power = sqlrun_tpch_power.replace('\"', '`\"')
sqlrun_tpch_throughput = sqlrun_tpch_throughput.replace('\"', '`\"')
cli = cli.replace('\"', '`\"')
cli_tpch = cli_tpch.replace('\"', '`\"')
cli_bat = cli_bat.replace('\"', '`\"')
cli_bat_tpch = cli_bat_tpch.replace('\"', '`\"')
# create the necessary files of running hammerdb
_CreateSingleScript(vm, schema, HAMMERDB_SCHEMA_FILE)
_CreateSingleScript(vm, sqlrun, HAMMERDB_SQLRUN_FILE)
_CreateSingleScript(vm, cli, HAMMERDB_CLI_FILE)
_CreateSingleScript(vm, cli_bat, HAMMERDB_CLI_BAT_FILE)
_CreateSingleScript(vm, cli_tpch, HAMMERDB_CLI_FILE_TPCH)
_CreateSingleScript(vm, cli_bat_tpch, HAMMERDB_CLI_BAT_FILE_TPCH)
_CreateSingleScript(vm, schema_tpch, HAMMERDB_SCHEMA_FILE_TPCH)
_CreateSingleScript(vm, sqlrun_tpch_power, HAMMERDB_SQLRUN_FILE_TPCH_POWER)
_CreateSingleScript(vm, sqlrun_tpch_throughput,
HAMMERDB_SQLRUN_FILE_TPCH_THROUGHPUT)
def _CatFile(vm, filename):
"""Cat out the content of a file."""
hammerdb_exe_dir = ntpath.join(vm.temp_dir, 'HammerDB-3.1')
command = 'cd {hammerdb_exe_dir}; cat {filename}'.format(
hammerdb_exe_dir=hammerdb_exe_dir, filename=filename)
cat_output, _ = vm.RemoteCommand(command)
return cat_output
def _RunHammerDbTPCC(vm):
"""Run the tpcc benchmark by starting the batch script."""
hammerdb_exe_dir = ntpath.join(vm.temp_dir, 'HammerDB-3.1')
command = 'cd {hammerdb_exe_dir}; .\\hammerdbcli.bat'.format(
hammerdb_exe_dir=hammerdb_exe_dir)
total_time_out = ((HAMMERDB_SCHEMA_WAITTIME + HAMMERDB_SQLRUN_WAITIME_ADDON +
FLAGS.hammerdb_tpcc_runtime) *
HAMMERDB_TEST_TIMEOUT_MULTIPLIER)
vm.RemoteCommand(command, timeout=total_time_out)
def _RunHammerDbTPCH(vm):
"""Run the tpch benchmark by starting the batch script."""
hammerdb_exe_dir = ntpath.join(vm.temp_dir, 'HammerDB-3.1')
command = 'cd {hammerdb_exe_dir}; .\\hammerdbclitpch.bat'.format(
hammerdb_exe_dir=hammerdb_exe_dir)
total_time_out = ((HAMMERDB_SCHEMA_WAITTIME + HAMMERDB_SQLRUN_WAITIME_ADDON +
FLAGS.hammerdb_tpcc_runtime) *
HAMMERDB_TEST_TIMEOUT_MULTIPLIER)
vm.RemoteCommand(command, timeout=total_time_out)
@vm_util.Retry(max_retries=HAMMERDB_RETRIES)
def RunHammerDB(vm):
"""Run HammerDB and return the samples collected from the run."""
_CreateFiles(vm)
if FLAGS.hammerdb_run_tpcc:
_RunHammerDbTPCC(vm)
if FLAGS.hammerdb_run_tpch:
_RunHammerDbTPCH(vm)
hammer_result = _CatFile(vm, 'C://hammerdb.log')
metadata = {}
for k, v in six.iteritems(vm.GetResourceMetadata()):
metadata[k] = v
metadata['hammerdb_tpcc_warehouse'] = FLAGS.hammerdb_tpcc_warehouse
metadata['hammerdb_tpcc_runtime'] = FLAGS.hammerdb_tpcc_runtime
metadata['hammerdb_run_tpcc'] = FLAGS.hammerdb_run_tpcc
metadata['hammerdb_run_tpch'] = FLAGS.hammerdb_run_tpch
return _ParseHammerDBResults(hammer_result, metadata,
FLAGS.hammerdb_tpcc_virtual_user_list)
def _ParseHammerDBResults(result, metadata, virtual_user_list):
samples = []
if FLAGS.hammerdb_run_tpcc:
samples.extend(ParseHammerDBResultTPCC(result, metadata, virtual_user_list))
if FLAGS.hammerdb_run_tpch:
samples.extend(ParseHammerDBResultTPCH(result, metadata,
FLAGS.hammerdb_tpch_scale_fact))
return samples
def ParseHammerDBResultTPCC(result, metadata, virtual_user_list):
"""Parses the text log file from TPCC benchmark and returns a list of samples.
each list of sample only have one sample with read speed as value
all the other information is stored in the meta data
Args:
result: HammerDB output
metadata: the running info of vm
virtual_user_list: the list of virtual user number
Returns:
list of samples from the results of the HammerDB tests.
"""
samples = []
result_prefix = 'TEST RESULT : System achieved '
result_suffix = ' SQL Server TPM at'
start_list = [m.start() for m in re.finditer(result_prefix, result)]
end_list = [m.start() for m in re.finditer(result_suffix, result)]
for i, virtual_user_num in enumerate(virtual_user_list):
metadata['hammerdb_tpcc_virtual_user'] = virtual_user_num
start_pos = start_list[i] + len(result_prefix)
end_pos = end_list[i]
result_tpm = int(result[start_pos: end_pos])
samples.append(
sample.Sample('TPM', result_tpm, 'times/minutes',
metadata.copy()))
return samples
def ParseHammerDBResultTPCH(result, metadata, scale_fact):
"""Parses the text log file from TPCH benchmark and returns a list of samples.
each list of sample only have one sample with read speed as value
all the other information is stored in the meta data, this uses the equation:
https://www.hammerdb.com/docs/ch09s02.html
Args:
result: HammerDB output
metadata: the running info of vm
scale_fact: the scale factor of running tpch
Returns:
list of samples from the results of the HammerDB tests.
"""
samples = []
query_time_list = []
refresh_time_list = []
for i in range(22):
result_prefix = 'query {0} completed in '.format(str(i + 1))
result_suffix = ' seconds'
start_pos = result.find(result_prefix) + len(result_prefix)
end_pos = result.find(result_suffix, start_pos)
query_time_list.append(float(result[start_pos: end_pos]))
result_prefix = 'New Sales refresh complete in '
result_suffix = ' seconds'
start_pos = result.find(result_prefix) + len(result_prefix)
end_pos = result.find(result_suffix, start_pos)
refresh_time_list.append(float(result[start_pos: end_pos]))
result_prefix = 'Old Sales refresh complete in '
result_suffix = ' seconds'
start_pos = result.find(result_prefix) + len(result_prefix)
end_pos = result.find(result_suffix, start_pos)
refresh_time_list.append(float(result[start_pos: end_pos]))
  result_prefix = ' query set(s) in '
result_suffix = ' seconds'
throughput_time = 0
start_list = [m.start() for m in re.finditer(result_prefix, result)]
for index in start_list[1:]:
start_pos = index + len(result_prefix)
end_pos = result.find(result_suffix, start_pos)
throughput_time = max(throughput_time, int(result[start_pos: end_pos]))
tpch_power = _CalculateTPCHPower(query_time_list, refresh_time_list,
scale_fact)
stream_num = HAMMERDB_SCALE_TO_STREAMS[str(scale_fact)]
tpch_throughput = stream_num * 22.0 * 3600 * scale_fact / throughput_time
qphh = np.sqrt(tpch_power * tpch_throughput)
samples.append(
sample.Sample('qphh', qphh, 'N/A',
metadata.copy()))
return samples
def _CalculateTPCHPower(query_time_list, refresh_time_list, scale_fact):
"""helper function for calculating tpch power test result.
This uses the equation given by:
https://www.hammerdb.com/docs/ch09s02.html
"""
maxi = np.amax(query_time_list)
mini = np.amin(query_time_list)
  if mini < maxi / 1000:
    # The power equation raises query timings smaller than max/1000 up to
    # max/1000 rather than dropping them from the list.
    query_time_list = [max(x, maxi / 1000) for x in query_time_list]
query_time_sum = np.sum([np.log(x) for x in query_time_list])
refresh_time_sum = np.sum([np.log(x) for x in refresh_time_list])
norm_factor = -1 / float((len(query_time_list) + len(refresh_time_list)))
return 3600 * np.exp(norm_factor * (query_time_sum + refresh_time_sum)) * \
scale_fact
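# Worked example of the power equation (illustrative numbers, not real
# HammerDB output): for query times [2.0, 8.0], refresh times [4.0, 4.0] and
# scale factor 1, the geometric mean of the four timings is
# (2 * 8 * 4 * 4) ** 0.25 == 4.0, so _CalculateTPCHPower returns
# 3600 / 4.0 * 1 == 900.0.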
|
from django.db.models import Count, Sum
from django.utils.translation import gettext_lazy as _
from django.views.generic import TemplateView
from weblate.accounts.models import Profile
from weblate.checks.models import Check
from weblate.trans.models import Component, Project
from weblate.utils.requirements import get_versions_list
from weblate.utils.stats import GlobalStats
from weblate.vcs.gpg import get_gpg_public_key, get_gpg_sign_key
from weblate.vcs.ssh import get_key_data
MENU = (
("index", "about", _("About Weblate")),
("stats", "stats", _("Statistics")),
("keys", "keys", _("Keys")),
)
class AboutView(TemplateView):
page = "index"
def page_context(self, context):
context.update(
{
"title": _("About Weblate"),
"versions": get_versions_list(),
"allow_index": True,
}
)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["menu_items"] = MENU
context["menu_page"] = self.page
self.page_context(context)
return context
def get_template_names(self):
return [f"about/{self.page}.html"]
class StatsView(AboutView):
page = "stats"
def page_context(self, context):
context["title"] = _("Weblate statistics")
stats = GlobalStats()
totals = Profile.objects.aggregate(
Sum("translated"), Sum("suggested"), Count("id")
)
context["total_translations"] = totals["translated__sum"]
context["total_suggestions"] = totals["suggested__sum"]
context["total_users"] = totals["id__count"]
context["stats"] = stats
context["total_checks"] = Check.objects.count()
context["total_projects"] = Project.objects.count()
context["total_components"] = Component.objects.count()
context["dismissed_checks"] = Check.objects.filter(dismissed=True).count()
top_translations = Profile.objects.order_by("-translated")[:10]
top_suggestions = Profile.objects.order_by("-suggested")[:10]
top_uploads = Profile.objects.order_by("-uploaded")[:10]
top_comments = Profile.objects.order_by("-commented")[:10]
context["top_translations"] = top_translations.select_related("user")
context["top_suggestions"] = top_suggestions.select_related("user")
context["top_uploads"] = top_uploads.select_related("user")
context["top_comments"] = top_comments.select_related("user")
class KeysView(AboutView):
page = "keys"
def page_context(self, context):
context.update(
{
"title": _("Weblate keys"),
"gpg_key_id": get_gpg_sign_key(),
"gpg_key": get_gpg_public_key(),
"ssh_key": get_key_data(),
"allow_index": True,
}
)
|
from flexx import flx
class ThemedForm(flx.Widget):
CSS = """
.flx-Button {
background: #9d9;
}
.flx-LineEdit {
border: 2px solid #9d9;
}
"""
def init(self):
with flx.HFix():
with flx.FormLayout() as self.form:
self.b1 = flx.LineEdit(title='Name:', text='Hola')
self.b2 = flx.LineEdit(title='Age:', text='Hello world')
self.b3 = flx.LineEdit(title='Favorite color:', text='Foo bar')
flx.Button(text='Submit')
with flx.FormLayout() as self.form:
self.b4 = flx.LineEdit(title='Name:', text='Hola')
self.b5 = flx.LineEdit(title='Age:', text='Hello world')
self.b6 = flx.LineEdit(title='Favorite color:', text='Foo bar')
flx.Button(text='Submit')
flx.Widget(flex=1) # Add a spacer
if __name__ == '__main__':
m = flx.launch(ThemedForm, 'app')
flx.run()
|
from pscript import window
from flexx import flx
SPLINES = ['linear', 'basis', 'cardinal', 'catmullrom', 'lagrange', 'lanczos']
GENERAL_TEXT = """
The splines in this exampe are used to interpolate a line between
control points. The the range of influence is shown when a control point
is clicked. Move the control points by dragging them. Points can be
added and deleted by holding shift and clicking.
"""
LINEAR_TEXT = """
This is not really a spline, but its included for reference. Linear
interpolation is C0 continuous, and relatively easy to implement.
"""
BASIS_TEXT = """
A B-spline is a C2 continuous non-interpolating spline, used extensively
in (3D) modeling.
"""
CARDINAL_TEXT = """
A Cardinal spline is a specific type of cubic Hermite spline, and is
C1 continous. Its tension parameter makes it very versatile.
"""
CATMULLROM_TEXT = """
The Catmull–Rom spline is a Cardinal spline with a tension of 0. It is
commonly used in computer graphics to interpolate motion between key frames.
"""
LAGRANGE_TEXT = """
The Lagrange polynomials result in (C0 continous) interpolation
equivalent to Newton a polynomial. It is, however, know to suffer from
Runge's phenomenon (oscilating).
"""
LANCZOS_TEXT = """
Lanczos interpolation (C1 continous) is based on a windowed sinc
function and is usually considered to produced the best result from the
perspective of the fourier domain. It's mainly used in applications
related audio.
"""
class SplineWidget(flx.CanvasWidget):
spline_type = flx.EnumProp(SPLINES, 'cardinal', settable=True, doc="""
"The type of spline
""")
closed = flx.BoolProp(False, settable=True, doc="""
Whether the spline is closed
""")
tension = flx.FloatProp(0.5, settable=True, doc="""
The tension parameter for the Cardinal spline.
""")
_current_node = flx.Property(None, settable=True)
def init(self):
self.ctx = self.node.getContext('2d')
self.xx = [0.90, 0.80, 0.70, 0.60, 0.50, 0.40, 0.10, 0.23, 0.61, 0.88]
self.yy = [0.90, 0.60, 0.90, 0.60, 0.90, 0.70, 0.55, 0.19, 0.11, 0.38]
def factors_linear(self, t):
        return [0, 1 - t, t, 0]
def factors_basis(self, t):
f0 = (1 - t)**3 / 6.0
f1 = (3 * t**3 - 6 * t**2 + 4) / 6.0
f2 = (-3 * t**3 + 3 * t**2 + 3 * t + 1) / 6.0
f3 = t**3 / 6.0
return f0, f1, f2, f3
def factors_cardinal(self, t):
tension = self.tension
tau = 0.5 * (1 - tension)
f0 = - tau * (t**3 - 2 * t**2 + t)
f3 = + tau * (t**3 - 1 * t**2)
f1 = 2 * t**3 - 3 * t**2 + 1 - f3
f2 = - 2 * t**3 + 3 * t**2 - f0
return f0, f1, f2, f3
def factors_catmullrom(self, t):
f0 = - 0.5 * t**3 + 1.0 * t**2 - 0.5 * t
f1 = + 1.5 * t**3 - 2.5 * t**2 + 1
f2 = - 1.5 * t**3 + 2.0 * t**2 + 0.5 * t
f3 = + 0.5 * t**3 - 0.5 * t**2
return f0, f1, f2, f3
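        # Sanity check: at t=0 these factors are (0, 1, 0, 0) and at t=1 they
        # are (0, 0, 1, 0), so each segment starts at p1 and ends at p2 and
        # the spline interpolates its control points.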
def factors_lagrange(self, t):
k = -1.0
f0 = t / k * (t-1) / (k-1) * (t-2) / (k-2)
k = 0
f1 = (t+1) / (k+1) * (t-1) / (k-1) * (t-2) / (k-2)
        k = 1
f2 = (t+1) / (k+1) * t / k * (t-2) / (k-2)
k = 2
f3 = (t + 1) / (k+1) * t / k * (t-1) / (k-1)
return f0, f1, f2, f3
def factors_lanczos(self, t):
sin = window.Math.sin
pi = window.Math.PI
tt = (1+t)
f0 = 2*sin(pi*tt)*sin(pi*tt/2) / (pi*pi*tt*tt)
tt = (2-t)
f3 = 2*sin(pi*tt)*sin(pi*tt/2) / (pi*pi*tt*tt)
if t != 0:
tt = t
f1 = 2*sin(pi*tt)*sin(pi*tt/2) / (pi*pi*tt*tt)
else:
            f1 = 1
if t != 1:
tt = (1-t)
f2 = 2*sin(pi*tt)*sin(pi*tt/2) / (pi*pi*tt*tt)
else:
f2 = 1
return f0, f1, f2, f3
@flx.reaction('pointer_down')
def _on_pointer_down(self, *events):
for ev in events:
w, h = self.size
# Get closest point
closest, dist = -1, 999999
for i in range(len(self.xx)):
x, y = self.xx[i] * w, self.yy[i] * h
d = ((x - ev.pos[0]) ** 2 + (y - ev.pos[1]) ** 2) ** 0.5
if d < dist:
closest, dist = i, d
# Did we touch it or not
if dist < 9:
i = closest
if 'Shift' in ev.modifiers: # Remove point
self.xx.pop(i)
self.yy.pop(i)
self._set_current_node(None)
self.update()
else:
self._set_current_node(i)
else:
if 'Shift' in ev.modifiers:
# Add point
if not self.xx:
i = 0 # There were no points
else:
                        # Add in between two points. Compose the vectors
                        # from the closest point to its neighbour points and
                        # to the clicked point. Check with which vector the
                        # latter aligns best by comparing their angles.
#
# Get the three points
p0 = self.xx[closest+0] * w, self.yy[closest+0] * h
if closest == 0:
p2 = self.xx[closest+1] * w, self.yy[closest+1] * h
p1 = p0[0] - (p2[0] - p0[0]), p0[1] - (p2[1] - p0[1])
elif closest == len(self.xx) - 1:
p1 = self.xx[closest-1] * w, self.yy[closest-1] * h
p2 = p0[0] - (p1[0] - p0[0]), p0[1] - (p1[1] - p0[1])
else:
p1 = self.xx[closest-1] * w, self.yy[closest-1] * h
p2 = self.xx[closest+1] * w, self.yy[closest+1] * h
# Calculate vectors, and normalize
v1 = p1[0] - p0[0], p1[1] - p0[1]
v2 = p2[0] - p0[0], p2[1] - p0[1]
v3 = ev.pos[0] - p0[0], ev.pos[1] - p0[1]
m1 = (v1[0]**2 + v1[1]**2)**0.5
m2 = (v2[0]**2 + v2[1]**2)**0.5
m3 = (v3[0]**2 + v3[1]**2)**0.5
v1 = v1[0] / m1, v1[1] / m1
v2 = v2[0] / m2, v2[1] / m2
v3 = v3[0] / m3, v3[1] / m3
# Calculate angle
a1 = window.Math.acos(v1[0] * v3[0] + v1[1] * v3[1])
a2 = window.Math.acos(v2[0] * v3[0] + v2[1] * v3[1])
i = closest if a1 < a2 else closest + 1
self.xx.insert(i, ev.pos[0] / w)
self.yy.insert(i, ev.pos[1] / h)
self._set_current_node(i)
@flx.reaction('pointer_up')
def _on_pointer_up(self, *events):
self._set_current_node(None)
@flx.reaction('pointer_move')
def _on_pointer_move(self, *events):
ev = events[-1]
if self._current_node is not None:
i = self._current_node
w, h = self.size
self.xx[i] = ev.pos[0] / w
self.yy[i] = ev.pos[1] / h
self.update()
@flx.reaction('size', 'spline_type', 'tension', 'closed', '_current_node')
def update(self, *events):
# Init
ctx = self.ctx
w, h = self.size
ctx.clearRect(0, 0, w, h)
# Get coordinates
xx = [x * w for x in self.xx]
yy = [y * h for y in self.yy]
#
if self.closed:
xx = xx[-1:] + xx + xx[:2]
yy = yy[-1:] + yy + yy[:2]
else:
xx = [xx[0] - (xx[1] - xx[0])] + xx + [xx[-1] - (xx[-2] - xx[-1])]
yy = [yy[0] - (yy[1] - yy[0])] + yy + [yy[-1] - (yy[-2] - yy[-1])]
# Draw grid
ctx.strokeStyle = '#eee'
ctx.lineWidth = 1
for y in range(0, h, 20):
ctx.beginPath()
ctx.moveTo(0, y)
ctx.lineTo(w, y)
ctx.stroke()
for x in range(0, w, 20):
ctx.beginPath()
ctx.moveTo(x, 0)
ctx.lineTo(x, h)
ctx.stroke()
# Draw nodes
ctx.fillStyle = '#acf'
ctx.strokeStyle = '#000'
ctx.lineWidth = 2
for i in range(1, len(xx)-1):
ctx.beginPath()
ctx.arc(xx[i], yy[i], 9, 0, 6.2831)
ctx.fill()
ctx.stroke()
# Select interpolation function
fun = self['factors_' + self.spline_type.lower()]
if not fun:
            fun = lambda t: (0, 1, 0, 0)
# Draw lines
for i in range(1, len(xx)-2):
ctx.lineCap = "round"
ctx.lineWidth = 3
ctx.strokeStyle = '#008'
support = 1 if self.spline_type == 'LINEAR' else 2
if self._current_node is not None:
if i - (support + 1) < self._current_node < i + support:
ctx.strokeStyle = '#08F'
ctx.lineWidth = 5
# Get coordinates of the four points
x0, y0 = xx[i-1], yy[i-1]
x1, y1 = xx[i+0], yy[i+0]
x2, y2 = xx[i+1], yy[i+1]
x3, y3 = xx[i+2], yy[i+2]
# Interpolate
ctx.beginPath()
            lineto = ctx.moveTo.bind(ctx)
n = 30
for t in [i/n for i in range(n+1)]:
f0, f1, f2, f3 = fun(t)
x = x0 * f0 + x1 * f1 + x2 * f2 + x3 * f3
y = y0 * f0 + y1 * f1 + y2 * f2 + y3 * f3
lineto(x, y)
lineto = ctx.lineTo.bind(ctx)
ctx.stroke()
class Splines(flx.Widget):
def init(self):
with flx.HBox():
with flx.VBox(flex=0, minsize=150):
self.b1 = flx.RadioButton(text='Linear')
self.b2 = flx.RadioButton(text='Basis')
self.b3 = flx.RadioButton(text='Cardinal', checked=True)
self.b4 = flx.RadioButton(text='Catmull Rom')
self.b5 = flx.RadioButton(text='Lagrange')
self.b6 = flx.RadioButton(text='Lanczos')
flx.Widget(minsize=10)
closed = flx.CheckBox(text='Closed')
flx.Widget(minsize=10)
self.tension = flx.Slider(min=-0.5, max=1, value=0.5,
text=lambda: 'Tension: {value}')
flx.Widget(flex=1)
with flx.VBox(flex=1):
flx.Label(text=GENERAL_TEXT, wrap=True, style='font-size: 12px;')
self.explanation = flx.Label(text=CARDINAL_TEXT, wrap=True,
style='font-size: 12px;')
self.spline = SplineWidget(flex=1,
closed=lambda: closed.checked,
tension=lambda: self.tension.value)
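    # Expose the module-level text constants on the class so the generated
    # JS side can look them up via self[...] in _set_spline_type below.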
LINEAR_TEXT = LINEAR_TEXT
BASIS_TEXT = BASIS_TEXT
CARDINAL_TEXT = CARDINAL_TEXT
CATMULLROM_TEXT = CATMULLROM_TEXT
LAGRANGE_TEXT = LAGRANGE_TEXT
LANCZOS_TEXT = LANCZOS_TEXT
@flx.reaction('b1.checked', 'b2.checked', 'b3.checked', 'b4.checked',
'b5.checked', 'b6.checked')
def _set_spline_type(self, *events):
ev = events[-1]
if not ev.new_value:
return # init event
type = ev.source.text.replace(' ', '')
self.spline.set_spline_type(type)
self.explanation.set_text(self[type.upper() + '_TEXT'])
@flx.reaction
def __show_hide_tension_slider(self):
if self.spline.spline_type == 'CARDINAL':
self.tension.apply_style('visibility: visible')
else:
self.tension.apply_style('visibility: hidden')
if __name__ == '__main__':
a = flx.App(Splines)
a.launch('firefox-browser')
flx.run()
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.media_player import DOMAIN
from homeassistant.const import (
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a media_player."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_idle",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_paused",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_playing",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions)
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set("media_player.entity", STATE_ON)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "media_player.entity",
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "media_player.entity",
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event3"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "media_player.entity",
"type": "is_idle",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_idle - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event4"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "media_player.entity",
"type": "is_paused",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_paused - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event5"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "media_player.entity",
"type": "is_playing",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_playing - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
hass.bus.async_fire("test_event4")
hass.bus.async_fire("test_event5")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on - event - test_event1"
hass.states.async_set("media_player.entity", STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
hass.bus.async_fire("test_event4")
hass.bus.async_fire("test_event5")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off - event - test_event2"
hass.states.async_set("media_player.entity", STATE_IDLE)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
hass.bus.async_fire("test_event4")
hass.bus.async_fire("test_event5")
await hass.async_block_till_done()
assert len(calls) == 3
assert calls[2].data["some"] == "is_idle - event - test_event3"
hass.states.async_set("media_player.entity", STATE_PAUSED)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
hass.bus.async_fire("test_event4")
hass.bus.async_fire("test_event5")
await hass.async_block_till_done()
assert len(calls) == 4
assert calls[3].data["some"] == "is_paused - event - test_event4"
hass.states.async_set("media_player.entity", STATE_PLAYING)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
hass.bus.async_fire("test_event4")
hass.bus.async_fire("test_event5")
await hass.async_block_till_done()
assert len(calls) == 5
assert calls[4].data["some"] == "is_playing - event - test_event5"
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONCENTRATION_PARTS_PER_MILLION,
CONF_MONITORED_CONDITIONS,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from . import ATTRIBUTION, DATA_ARLO, DEFAULT_BRAND, SIGNAL_UPDATE_ARLO
_LOGGER = logging.getLogger(__name__)
# sensor_type [ description, unit, icon ]
SENSOR_TYPES = {
"last_capture": ["Last", None, "run-fast"],
"total_cameras": ["Arlo Cameras", None, "video"],
"captured_today": ["Captured Today", None, "file-video"],
"battery_level": ["Battery Level", PERCENTAGE, "battery-50"],
"signal_strength": ["Signal Strength", None, "signal"],
"temperature": ["Temperature", TEMP_CELSIUS, "thermometer"],
"humidity": ["Humidity", PERCENTAGE, "water-percent"],
"air_quality": ["Air Quality", CONCENTRATION_PARTS_PER_MILLION, "biohazard"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an Arlo IP sensor."""
arlo = hass.data.get(DATA_ARLO)
if not arlo:
return
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if sensor_type == "total_cameras":
sensors.append(ArloSensor(SENSOR_TYPES[sensor_type][0], arlo, sensor_type))
else:
for camera in arlo.cameras:
if sensor_type in ("temperature", "humidity", "air_quality"):
continue
name = f"{SENSOR_TYPES[sensor_type][0]} {camera.name}"
sensors.append(ArloSensor(name, camera, sensor_type))
for base_station in arlo.base_stations:
if (
sensor_type in ("temperature", "humidity", "air_quality")
and base_station.model_id == "ABC1000"
):
name = f"{SENSOR_TYPES[sensor_type][0]} {base_station.name}"
sensors.append(ArloSensor(name, base_station, sensor_type))
add_entities(sensors, True)
class ArloSensor(Entity):
"""An implementation of a Netgear Arlo IP sensor."""
def __init__(self, name, device, sensor_type):
"""Initialize an Arlo sensor."""
_LOGGER.debug("ArloSensor created for %s", name)
self._name = name
self._data = device
self._sensor_type = sensor_type
self._state = None
self._icon = f"mdi:{SENSOR_TYPES.get(self._sensor_type)[2]}"
@property
def name(self):
"""Return the name of this camera."""
return self._name
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_ARLO, self._update_callback
)
)
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == "battery_level" and self._state is not None:
return icon_for_battery_level(
battery_level=int(self._state), charging=False
)
return self._icon
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return SENSOR_TYPES.get(self._sensor_type)[1]
@property
def device_class(self):
"""Return the device class of the sensor."""
if self._sensor_type == "temperature":
return DEVICE_CLASS_TEMPERATURE
if self._sensor_type == "humidity":
return DEVICE_CLASS_HUMIDITY
return None
def update(self):
"""Get the latest data and updates the state."""
_LOGGER.debug("Updating Arlo sensor %s", self.name)
if self._sensor_type == "total_cameras":
self._state = len(self._data.cameras)
elif self._sensor_type == "captured_today":
self._state = len(self._data.captured_today)
elif self._sensor_type == "last_capture":
try:
video = self._data.last_video
self._state = video.created_at_pretty("%m-%d-%Y %H:%M:%S")
except (AttributeError, IndexError):
error_msg = (
f"Video not found for {self.name}. "
f"Older than {self._data.min_days_vdo_cache} days?"
)
_LOGGER.debug(error_msg)
self._state = None
elif self._sensor_type == "battery_level":
try:
self._state = self._data.battery_level
except TypeError:
self._state = None
elif self._sensor_type == "signal_strength":
try:
self._state = self._data.signal_strength
except TypeError:
self._state = None
elif self._sensor_type == "temperature":
try:
self._state = self._data.ambient_temperature
except TypeError:
self._state = None
elif self._sensor_type == "humidity":
try:
self._state = self._data.ambient_humidity
except TypeError:
self._state = None
elif self._sensor_type == "air_quality":
try:
self._state = self._data.ambient_air_quality
except TypeError:
self._state = None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attrs = {}
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
attrs["brand"] = DEFAULT_BRAND
if self._sensor_type != "total_cameras":
attrs["model"] = self._data.model_id
return attrs
|
import astroid
from pylint import interfaces, checkers
from pylint.checkers import utils
class OpenEncodingChecker(checkers.BaseChecker):
"""Checker to check open() has an encoding set."""
__implements__ = interfaces.IAstroidChecker
name = 'open-encoding'
msgs = {
'W9400': ('open() called without encoding', 'open-without-encoding',
None),
}
@utils.check_messages('open-without-encoding')
def visit_call(self, node):
"""Visit a Call node."""
if hasattr(node, 'func'):
infer = utils.safe_infer(node.func)
if infer and infer.root().name == '_io':
if getattr(node.func, 'name', None) in ['open', 'file']:
self._check_open_encoding(node)
def _check_open_encoding(self, node):
"""Check that an open() call always has an encoding set."""
try:
mode_arg = utils.get_argument_from_call(node, position=1,
keyword='mode')
except utils.NoSuchArgumentError:
mode_arg = None
_encoding = None
try:
_encoding = utils.get_argument_from_call(node, position=2)
except utils.NoSuchArgumentError:
try:
_encoding = utils.get_argument_from_call(node,
keyword='encoding')
except utils.NoSuchArgumentError:
pass
if _encoding is None:
if mode_arg is None:
mode = None
else:
mode = utils.safe_infer(mode_arg)
if mode is not None and not isinstance(mode, astroid.Const):
# We can't say what mode is exactly.
return
if mode is None:
self.add_message('open-without-encoding', node=node)
elif 'b' in getattr(mode, 'value', ''):
# Files opened as binary don't need an encoding.
return
else:
self.add_message('open-without-encoding', node=node)
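    # For example, open('data.txt') and open('data.txt', 'r') are flagged,
    # while open('data.bin', 'rb') and open('data.txt', encoding='utf-8')
    # are not.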
def register(linter):
"""Register this checker."""
linter.register_checker(OpenEncodingChecker(linter))
|
from pcal9535a import PCAL9535A
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
CONF_INVERT_LOGIC = "invert_logic"
CONF_I2C_ADDRESS = "i2c_address"
CONF_I2C_BUS = "i2c_bus"
CONF_PINS = "pins"
CONF_PULL_MODE = "pull_mode"
MODE_UP = "UP"
MODE_DOWN = "DOWN"
MODE_DISABLED = "DISABLED"
DEFAULT_INVERT_LOGIC = False
DEFAULT_I2C_ADDRESS = 0x20
DEFAULT_I2C_BUS = 1
DEFAULT_PULL_MODE = MODE_DISABLED
_SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PINS): _SENSORS_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.All(
vol.Upper, vol.In([MODE_UP, MODE_DOWN, MODE_DISABLED])
),
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int),
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PCAL9535A binary sensors."""
pull_mode = config[CONF_PULL_MODE]
invert_logic = config[CONF_INVERT_LOGIC]
i2c_address = config[CONF_I2C_ADDRESS]
bus = config[CONF_I2C_BUS]
pcal = PCAL9535A(bus, i2c_address)
binary_sensors = []
pins = config[CONF_PINS]
for pin_num, pin_name in pins.items():
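        # The PCAL9535A exposes two 8-pin ports, so a flat pin number maps to
        # (port, pin): e.g. pin 10 -> port 1, pin 2.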
pin = pcal.get_pin(pin_num // 8, pin_num % 8)
binary_sensors.append(
PCAL9535ABinarySensor(pin_name, pin, pull_mode, invert_logic)
)
add_entities(binary_sensors, True)
class PCAL9535ABinarySensor(BinarySensorEntity):
"""Represent a binary sensor that uses PCAL9535A."""
def __init__(self, name, pin, pull_mode, invert_logic):
"""Initialize the PCAL9535A binary sensor."""
self._name = name or DEVICE_DEFAULT_NAME
self._pin = pin
self._pin.input = True
self._pin.inverted = invert_logic
if pull_mode == "DISABLED":
self._pin.pullup = 0
elif pull_mode == "DOWN":
self._pin.pullup = -1
else:
self._pin.pullup = 1
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the cached state of the entity."""
return self._state
def update(self):
"""Update the GPIO state."""
self._state = self._pin.level
|
import json
import urllib.request
import urllib.error
from http import HTTPStatus
import pytest
@pytest.mark.parametrize('path, content, expected', [
('/', 'qutebrowser test webserver', True),
# https://github.com/Runscope/server/issues/245
('/', 'www.google-analytics.com', False),
('/data/hello.txt', 'Hello World!', True),
])
def test_server(server, qtbot, path, content, expected):
with qtbot.waitSignal(server.new_request, timeout=100):
url = 'http://localhost:{}{}'.format(server.port, path)
try:
response = urllib.request.urlopen(url)
except urllib.error.HTTPError as e:
# "Though being an exception (a subclass of URLError), an HTTPError
# can also function as a non-exceptional file-like return value
# (the same thing that urlopen() returns)."
# ...wat
print(e.read().decode('utf-8'))
raise
data = response.read().decode('utf-8')
assert server.get_requests() == [server.ExpectedRequest('GET', path)]
assert (content in data) == expected
@pytest.mark.parametrize('line, verb, path, equal', [
({'verb': 'GET', 'path': '/', 'status': HTTPStatus.OK}, 'GET', '/', True),
({'verb': 'GET', 'path': '/foo/', 'status': HTTPStatus.OK},
'GET', '/foo', True),
({'verb': 'GET', 'path': '/relative-redirect', 'status': HTTPStatus.FOUND},
'GET', '/relative-redirect', True),
({'verb': 'GET', 'path': '/absolute-redirect', 'status': HTTPStatus.FOUND},
'GET', '/absolute-redirect', True),
({'verb': 'GET', 'path': '/redirect-to', 'status': HTTPStatus.FOUND},
'GET', '/redirect-to', True),
({'verb': 'GET', 'path': '/redirect-self', 'status': HTTPStatus.FOUND},
'GET', '/redirect-self', True),
({'verb': 'GET', 'path': '/content-size', 'status': HTTPStatus.OK},
'GET', '/content-size', True),
({'verb': 'GET', 'path': '/twenty-mb', 'status': HTTPStatus.OK},
'GET', '/twenty-mb', True),
({'verb': 'GET', 'path': '/500-inline',
'status': HTTPStatus.INTERNAL_SERVER_ERROR}, 'GET', '/500-inline', True),
({'verb': 'GET', 'path': '/basic-auth/user1/password1',
'status': HTTPStatus.UNAUTHORIZED},
'GET', '/basic-auth/user1/password1', True),
({'verb': 'GET', 'path': '/drip', 'status': HTTPStatus.OK},
'GET', '/drip', True),
({'verb': 'GET', 'path': '/404', 'status': HTTPStatus.NOT_FOUND},
'GET', '/404', True),
({'verb': 'GET', 'path': '/', 'status': HTTPStatus.OK},
'GET', '/foo', False),
({'verb': 'POST', 'path': '/', 'status': HTTPStatus.OK},
'GET', '/', False),
({'verb': 'GET', 'path': '/basic-auth/user/password',
'status': HTTPStatus.UNAUTHORIZED},
'GET', '/basic-auth/user/passwd', False),
])
def test_expected_request(server, line, verb, path, equal):
expected = server.ExpectedRequest(verb, path)
request = server.Request(json.dumps(line))
assert (expected == request) == equal
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import context
from perfkitbenchmarker import pkb # pylint: disable=unused-import
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
_BENCHMARK_NAME = 'name'
_BENCHMARK_UID = 'uid'
class _DiskTypeRenamingTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(_DiskTypeRenamingTestCase, self).setUp()
get_tmp_dir_mock = mock.patch(
vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir')
get_tmp_dir_mock.start()
self.addCleanup(get_tmp_dir_mock.stop)
self.addCleanup(context.SetThreadBenchmarkSpec, None)
def _CreateBenchmarkSpec(self, config_dict):
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
_BENCHMARK_NAME, flag_values=FLAGS, **config_dict)
spec = benchmark_spec.BenchmarkSpec(mock.MagicMock(), config_spec,
_BENCHMARK_UID)
spec.ConstructVirtualMachines()
return spec
class GcpDiskTypeRenamingTest(_DiskTypeRenamingTestCase):
"""Test that the disk type renaming works for GCP.
"""
def testPDStandard(self):
config = {
'vm_groups': {
'vm_group_1': {
'cloud': 'GCP',
'vm_spec': {
'GCP': {
'machine_type': 'test_machine_type',
}
},
'disk_spec': {
'GCP': {
'disk_type': 'standard',
'disk_size': 2,
}
}
}
}
}
spec = self._CreateBenchmarkSpec(config)
self.assertEqual(spec.vms[0].disk_specs[0].disk_type, 'pd-standard')
def testPDSSD(self):
config = {
'vm_groups': {
'vm_group_1': {
'cloud': 'GCP',
'vm_spec': {
'GCP': {
'machine_type': 'test_machine_type',
}
},
'disk_spec': {
'GCP': {
'disk_type': 'remote_ssd',
'disk_size': 2,
}
}
}
}
}
spec = self._CreateBenchmarkSpec(config)
self.assertEqual(spec.vms[0].disk_specs[0].disk_type, 'pd-ssd')
class AwsDiskTypeRenamingTest(_DiskTypeRenamingTestCase):
def testEBSStandard(self):
config = {
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'test_machine_type',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'standard',
'disk_size': 2
}
}
}
}
}
spec = self._CreateBenchmarkSpec(config)
self.assertEqual(spec.vms[0].disk_specs[0].disk_type, 'standard')
def testEBSGP(self):
config = {
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'test_machine_type',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'remote_ssd',
'disk_size': 2
}
}
}
}
}
spec = self._CreateBenchmarkSpec(config)
self.assertEqual(spec.vms[0].disk_specs[0].disk_type, 'gp2')
def testEBSPIOPS(self):
config = {
'vm_groups': {
'vm_group_1': {
'cloud': 'AWS',
'vm_spec': {
'AWS': {
'machine_type': 'test_machine_type',
'zone': 'us-east-1a'
}
},
'disk_spec': {
'AWS': {
'disk_type': 'piops',
'disk_size': 2
}
}
}
}
}
spec = self._CreateBenchmarkSpec(config)
self.assertEqual(spec.vms[0].disk_specs[0].disk_type, 'io1')
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, unicode_literals
import atexit
import functools
import sys
import threading
import time
import cursor
from log_symbols.symbols import LogSymbols
from spinners.spinners import Spinners
from halo._utils import (colored_frame, decode_utf_8_text, get_environment,
get_terminal_columns, is_supported, is_text_type,
encode_utf_8_text)
class Halo(object):
"""Halo library.
Attributes
----------
CLEAR_LINE : str
Code to clear the line
"""
CLEAR_LINE = '\033[K'
SPINNER_PLACEMENTS = ('left', 'right',)
def __init__(self, text='', color='cyan', text_color=None, spinner=None,
animation=None, placement='left', interval=-1, enabled=True, stream=sys.stdout):
"""Constructs the Halo object.
Parameters
----------
text : str, optional
Text to display.
text_color : str, optional
Color of the text.
color : str, optional
            Color of the spinner.
spinner : str|dict, optional
String or dictionary representing spinner. String can be one of 60+ spinners
supported.
animation: str, optional
Animation to apply if text is too large. Can be one of `bounce`, `marquee`.
Defaults to ellipses.
placement: str, optional
Side of the text to place the spinner on. Can be `left` or `right`.
Defaults to `left`.
interval : integer, optional
            Interval between each frame of the spinner in milliseconds.
            Defaults to the selected spinner's own interval when not positive.
enabled : boolean, optional
Spinner enabled or not.
stream : io, optional
Output.
"""
self._color = color
self._animation = animation
self.spinner = spinner
self.text = text
self._text_color = text_color
self._interval = int(interval) if int(interval) > 0 else self._spinner['interval']
self._stream = stream
self.placement = placement
self._frame_index = 0
self._text_index = 0
self._spinner_thread = None
self._stop_spinner = None
self._spinner_id = None
self.enabled = enabled
environment = get_environment()
def clean_up():
"""Handle cell execution"""
self.stop()
if environment in ('ipython', 'jupyter'):
from IPython import get_ipython
ip = get_ipython()
ip.events.register('post_run_cell', clean_up)
else: # default terminal
atexit.register(clean_up)
def __enter__(self):
"""Starts the spinner on a separate thread. For use in context managers.
Returns
-------
self
"""
return self.start()
def __exit__(self, type, value, traceback):
"""Stops the spinner. For use in context managers."""
self.stop()
def __call__(self, f):
"""Allow the Halo object to be used as a regular function decorator."""
@functools.wraps(f)
def wrapped(*args, **kwargs):
with self:
return f(*args, **kwargs)
return wrapped
@property
def spinner(self):
"""Getter for spinner property.
Returns
-------
dict
spinner value
"""
return self._spinner
@spinner.setter
def spinner(self, spinner=None):
"""Setter for spinner property.
Parameters
----------
spinner : dict, str
Defines the spinner value with frame and interval
"""
self._spinner = self._get_spinner(spinner)
self._frame_index = 0
self._text_index = 0
@property
def text(self):
"""Getter for text property.
Returns
-------
str
text value
"""
return self._text['original']
@text.setter
def text(self, text):
"""Setter for text property.
Parameters
----------
text : str
Defines the text value for spinner
"""
self._text = self._get_text(text)
@property
def text_color(self):
"""Getter for text color property.
Returns
-------
str
text color value
"""
return self._text_color
@text_color.setter
def text_color(self, text_color):
"""Setter for text color property.
Parameters
----------
text_color : str
Defines the text color value for spinner
"""
self._text_color = text_color
@property
def color(self):
"""Getter for color property.
Returns
-------
str
color value
"""
return self._color
@color.setter
def color(self, color):
"""Setter for color property.
Parameters
----------
color : str
Defines the color value for spinner
"""
self._color = color
@property
def placement(self):
"""Getter for placement property.
Returns
-------
str
spinner placement
"""
return self._placement
@placement.setter
def placement(self, placement):
"""Setter for placement property.
Parameters
----------
placement: str
Defines the placement of the spinner
"""
if placement not in self.SPINNER_PLACEMENTS:
raise ValueError(
"Unknown spinner placement '{0}', available are {1}".format(placement, self.SPINNER_PLACEMENTS))
self._placement = placement
@property
def spinner_id(self):
"""Getter for spinner id
Returns
-------
str
Spinner id value
"""
return self._spinner_id
@property
def animation(self):
"""Getter for animation property.
Returns
-------
str
Spinner animation
"""
return self._animation
@animation.setter
def animation(self, animation):
"""Setter for animation property.
Parameters
----------
animation: str
Defines the animation of the spinner
"""
self._animation = animation
self._text = self._get_text(self._text['original'])
def _check_stream(self):
"""Returns whether the stream is open, and if applicable, writable
Returns
-------
bool
Whether the stream is open
"""
if self._stream.closed:
return False
try:
# Attribute access kept separate from invocation, to avoid
# swallowing AttributeErrors from the call which should bubble up.
check_stream_writable = self._stream.writable
except AttributeError:
pass
else:
return check_stream_writable()
return True
def _write(self, s):
"""Write to the stream, if writable
Parameters
----------
s : str
Characters to write to the stream
"""
if self._check_stream():
self._stream.write(s)
def _hide_cursor(self):
"""Disable the user's blinking cursor
"""
if self._check_stream() and self._stream.isatty():
cursor.hide(stream=self._stream)
def _show_cursor(self):
"""Re-enable the user's blinking cursor
"""
if self._check_stream() and self._stream.isatty():
cursor.show(stream=self._stream)
def _get_spinner(self, spinner):
"""Extracts spinner value from options and returns value
containing spinner frames and interval, defaults to 'dots' spinner.
Parameters
----------
spinner : dict, str
Contains spinner value or type of spinner to be used
Returns
-------
dict
Contains frames and interval defining spinner
"""
default_spinner = Spinners['dots'].value
        if spinner and isinstance(spinner, dict):
return spinner
if is_supported():
if all([is_text_type(spinner), spinner in Spinners.__members__]):
return Spinners[spinner].value
else:
return default_spinner
else:
return Spinners['line'].value
def _get_text(self, text):
"""Creates frames based on the selected animation
Returns
-------
        dict
            The original text plus the animation frames derived from it
"""
animation = self._animation
stripped_text = text.strip()
# Check which frame of the animation is the widest
max_spinner_length = max([len(i) for i in self._spinner['frames']])
# Subtract to the current terminal size the max spinner length
# (-1 to leave room for the extra space between spinner and text)
terminal_width = get_terminal_columns() - max_spinner_length - 1
text_length = len(stripped_text)
frames = []
if terminal_width < text_length and animation:
if animation == 'bounce':
"""
Make the text bounce back and forth
"""
for x in range(0, text_length - terminal_width + 1):
frames.append(stripped_text[x:terminal_width + x])
frames.extend(list(reversed(frames)))
            elif animation == 'marquee':
"""
Make the text scroll like a marquee
"""
stripped_text = stripped_text + ' ' + stripped_text[:terminal_width]
for x in range(0, text_length + 1):
frames.append(stripped_text[x:terminal_width + x])
elif terminal_width < text_length and not animation:
# Add ellipsis if text is larger than terminal width and no animation was specified
frames = [stripped_text[:terminal_width - 6] + ' (...)']
else:
frames = [stripped_text]
return {
'original': text,
'frames': frames
}
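    # Illustration of the bounce animation above (hypothetical 8-column text
    # budget): for the text 'hello world' the frames are 'hello wo',
    # 'ello wor', 'llo worl', 'lo world', followed by the same frames in
    # reverse order.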
def clear(self):
"""Clears the line and returns cursor to the start.
of line
Returns
-------
self
"""
self._write('\r')
self._write(self.CLEAR_LINE)
return self
def _render_frame(self):
"""Renders the frame on the line after clearing it.
"""
if not self.enabled:
            # In case we're disabled or the stream is closed, bail out before
            # writing anything; the frame index is left untouched so rendering
            # resumes from the proper frame if we're re-enabled again.
return
self.clear()
frame = self.frame()
output = '\r{}'.format(frame)
try:
self._write(output)
except UnicodeEncodeError:
self._write(encode_utf_8_text(output))
def render(self):
"""Runs the render until thread flag is set.
Returns
-------
self
"""
while not self._stop_spinner.is_set():
self._render_frame()
time.sleep(0.001 * self._interval)
return self
def frame(self):
"""Builds and returns the frame to be rendered
Returns
-------
        str
            The frame to render, spinner symbol plus text
"""
frames = self._spinner['frames']
frame = frames[self._frame_index]
if self._color:
frame = colored_frame(frame, self._color)
self._frame_index += 1
self._frame_index = self._frame_index % len(frames)
text_frame = self.text_frame()
return u'{0} {1}'.format(*[
(text_frame, frame)
if self._placement == 'right' else
(frame, text_frame)
][0])
def text_frame(self):
"""Builds and returns the text frame to be rendered
Returns
-------
        str
            The text frame to render
"""
if len(self._text['frames']) == 1:
if self._text_color:
return colored_frame(self._text['frames'][0], self._text_color)
# Return first frame (can't return original text because at this point it might be ellipsed)
return self._text['frames'][0]
frames = self._text['frames']
frame = frames[self._text_index]
self._text_index += 1
self._text_index = self._text_index % len(frames)
if self._text_color:
return colored_frame(frame, self._text_color)
return frame
def start(self, text=None):
"""Starts the spinner on a separate thread.
Parameters
----------
text : None, optional
Text to be used alongside spinner
Returns
-------
self
"""
if text is not None:
self.text = text
if self._spinner_id is not None:
return self
if not (self.enabled and self._check_stream()):
return self
self._hide_cursor()
self._stop_spinner = threading.Event()
self._spinner_thread = threading.Thread(target=self.render)
        self._spinner_thread.daemon = True
self._render_frame()
self._spinner_id = self._spinner_thread.name
self._spinner_thread.start()
return self
def stop(self):
"""Stops the spinner and clears the line.
Returns
-------
self
"""
if self._spinner_thread and self._spinner_thread.is_alive():
self._stop_spinner.set()
self._spinner_thread.join()
if self.enabled:
self.clear()
self._frame_index = 0
self._spinner_id = None
self._show_cursor()
return self
def succeed(self, text=None):
"""Shows and persists success symbol and text and exits.
Parameters
----------
text : None, optional
Text to be shown alongside success symbol.
Returns
-------
self
"""
return self.stop_and_persist(symbol=LogSymbols.SUCCESS.value, text=text)
def fail(self, text=None):
"""Shows and persists fail symbol and text and exits.
Parameters
----------
text : None, optional
Text to be shown alongside fail symbol.
Returns
-------
self
"""
return self.stop_and_persist(symbol=LogSymbols.ERROR.value, text=text)
def warn(self, text=None):
"""Shows and persists warn symbol and text and exits.
Parameters
----------
text : None, optional
Text to be shown alongside warn symbol.
Returns
-------
self
"""
return self.stop_and_persist(symbol=LogSymbols.WARNING.value, text=text)
def info(self, text=None):
"""Shows and persists info symbol and text and exits.
Parameters
----------
text : None, optional
Text to be shown alongside info symbol.
Returns
-------
self
"""
return self.stop_and_persist(symbol=LogSymbols.INFO.value, text=text)
def stop_and_persist(self, symbol=' ', text=None):
"""Stops the spinner and persists the final frame to be shown.
Parameters
----------
symbol : str, optional
Symbol to be shown in final frame
text: str, optional
Text to be shown in final frame
Returns
-------
self
"""
if not self.enabled:
return self
symbol = decode_utf_8_text(symbol)
if text is not None:
text = decode_utf_8_text(text)
else:
text = self._text['original']
text = text.strip()
if self._text_color:
text = colored_frame(text, self._text_color)
self.stop()
output = u'{0} {1}\n'.format(*[
(text, symbol)
if self._placement == 'right' else
(symbol, text)
][0])
try:
self._write(output)
except UnicodeEncodeError:
self._write(encode_utf_8_text(output))
return self
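# Typical usage sketch (illustrative, assuming this module is imported as
# `halo`):
#     spinner = Halo(text='Loading', spinner='dots')
#     spinner.start()
#     ...  # do some work
#     spinner.succeed('Done')
# The same object also works as a context manager (via __enter__/__exit__)
# or as a decorator (via __call__).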
|
import diamond.collector
class NetstatCollector(diamond.collector.Collector):
PROC_TCP = "/proc/net/tcp"
STATE = {
'01': 'ESTABLISHED',
'02': 'SYN_SENT',
'03': 'SYN_RECV',
'04': 'FIN_WAIT1',
'05': 'FIN_WAIT2',
'06': 'TIME_WAIT',
'07': 'CLOSE',
'08': 'CLOSE_WAIT',
'09': 'LAST_ACK',
'0A': 'LISTEN',
'0B': 'CLOSING'
}
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NetstatCollector, self).get_default_config()
config.update({
'path': 'netstat',
})
return config
def collect(self):
"""
Overrides the Collector.collect method
"""
content = self._load()
result = dict((self.STATE[num], 0) for num in self.STATE)
for line in content:
line_array = self._remove_empty(line.split(' '))
state = self.STATE[line_array[3]]
result[state] += 1
for state in result:
self.publish(state, result[state])
@staticmethod
def _load():
""" Read the table of tcp connections & remove header """
with open(NetstatCollector.PROC_TCP, 'r') as f:
content = f.readlines()
content.pop(0)
return content
@staticmethod
def _hex2dec(s):
return str(int(s, 16))
@staticmethod
def _ip(s):
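        # /proc/net/tcp stores IPv4 addresses as 8 hex digits in little-endian
        # byte order, so the dotted quad is rebuilt starting from the last byte pair.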
ip = [(NetstatCollector._hex2dec(s[6:8])),
(NetstatCollector._hex2dec(s[4:6])),
(NetstatCollector._hex2dec(s[2:4])),
(NetstatCollector._hex2dec(s[0:2]))]
return '.'.join(ip)
@staticmethod
def _remove_empty(array):
return [x for x in array if x != '']
@staticmethod
def _convert_ip_port(array):
host, port = array.split(':')
return NetstatCollector._ip(host), NetstatCollector._hex2dec(port)
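
# Worked example (illustrative): the address field '0100007F:0050' from
# /proc/net/tcp converts via _convert_ip_port to ('127.0.0.1', '80').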
|
import random
import time
from hashlib import sha1
from django.conf import settings
from weblate.machinery.base import (
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
NETEASE_API_ROOT = "https://jianwai.netease.com/api/text/trans"
class NeteaseSightTranslation(MachineTranslation):
"""Netease Sight API machine translation support."""
name = "Netease Sight"
max_score = 90
# Map codes used by Netease Sight to codes used by Weblate
language_map = {"zh_Hans": "zh"}
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_NETEASE_KEY is None:
raise MissingConfiguration("Netease Sight Translate requires app key")
if settings.MT_NETEASE_SECRET is None:
raise MissingConfiguration("Netease Sight Translate requires app secret")
def download_languages(self):
"""List of supported languages."""
return ["zh", "en"]
def get_authentication(self):
"""Hook for backends to allow add authentication headers to request."""
nonce = str(random.randint(1000, 99999999))
timestamp = str(int(1000 * time.time()))
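        # Request signing (as implemented below): the SHA1 hex digest of
        # secret + nonce + timestamp is sent alongside the app key headers.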
sign = settings.MT_NETEASE_SECRET + nonce + timestamp
sign = sign.encode()
sign = sha1(sign).hexdigest() # nosec
return {
"Content-Type": "application/json",
"appkey": settings.MT_NETEASE_KEY,
"nonce": nonce,
"timestamp": timestamp,
"signature": sign,
}
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
response = self.request(
"post", NETEASE_API_ROOT, json={"lang": source, "content": text}
)
payload = response.json()
if not payload["success"]:
raise MachineTranslationError(payload["message"])
translation = payload["relatedObject"]["content"][0]["transContent"]
yield {
"text": translation,
"quality": self.max_score,
"service": self.name,
"source": text,
}
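
# Response sketch (illustrative, inferred from the handling above): a successful
# reply looks roughly like
#   {"success": true, "relatedObject": {"content": [{"transContent": "..."}]}}
# while failures carry {"success": false, "message": "..."}.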
|
from __future__ import unicode_literals
import os
import itertools
import traceback
from lib.data.data import pyoptions
from lib.fun.fun import finishprinter, cool, finalsavepath, fun_name
def hybrider_magic(*args):
"""[file1] [file2] ..."""
args = list(args[0])
filepaths = []
hybrid_list = []
if len(args) >= 2:
for count in range(1, len(args)):
directory = os.path.abspath(args[count])
            if not os.path.isfile(directory):
                exit(pyoptions.CRLF + cool.red("[-] file: {} doesn't exist".format(directory)))
else:
filepaths.append(directory)
else:
exit(pyoptions.CRLF + cool.fuchsia("[!] Usage: {} {}".format(args[0], pyoptions.tools_info.get(args[0]))))
storepath = finalsavepath(fun_name())
try:
for fp in filepaths:
tmp = set()
with open(fp, "r") as f:
for line in f.readlines():
tmp.add(line.strip())
hybrid_list.append(tmp)
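        # itertools.product walks every cross-file combination, so the output is
        # the Cartesian product of the deduplicated lines from each input file.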
with open(storepath, "a") as f:
for item in itertools.product(*hybrid_list):
f.write(pyoptions.operator.get(pyoptions.encode)(pyoptions.head + "".join(item) + pyoptions.tail) +
pyoptions.CRLF)
finishprinter(storepath)
    except Exception:
        print(pyoptions.CRLF + cool.red("[-] Hybrid files failed, traceback:"))
        exit(pyoptions.CRLF + traceback.format_exc())
|
revision = "5ae0ecefb01f"
down_revision = "1db4f82bc780"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
table_name="pending_certs", column_name="status", nullable=True, type_=sa.TEXT()
)
def downgrade():
op.alter_column(
table_name="pending_certs",
column_name="status",
nullable=True,
type_=sa.VARCHAR(128),
)
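
# Roughly equivalent SQL on PostgreSQL (illustrative; actual statements are
# dialect-dependent and generated by Alembic):
#   upgrade:   ALTER TABLE pending_certs ALTER COLUMN status TYPE TEXT;
#   downgrade: ALTER TABLE pending_certs ALTER COLUMN status TYPE VARCHAR(128);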
|
import logging
from streamlabswater import streamlabswater
import voluptuous as vol
from homeassistant.const import CONF_API_KEY
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
DOMAIN = "streamlabswater"
_LOGGER = logging.getLogger(__name__)
ATTR_AWAY_MODE = "away_mode"
SERVICE_SET_AWAY_MODE = "set_away_mode"
AWAY_MODE_AWAY = "away"
AWAY_MODE_HOME = "home"
STREAMLABSWATER_COMPONENTS = ["sensor", "binary_sensor"]
CONF_LOCATION_ID = "location_id"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_LOCATION_ID): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SET_AWAY_MODE_SCHEMA = vol.Schema(
{vol.Required(ATTR_AWAY_MODE): vol.In([AWAY_MODE_AWAY, AWAY_MODE_HOME])}
)
def setup(hass, config):
"""Set up the streamlabs water component."""
conf = config[DOMAIN]
api_key = conf.get(CONF_API_KEY)
location_id = conf.get(CONF_LOCATION_ID)
client = streamlabswater.StreamlabsClient(api_key)
locations = client.get_locations().get("locations")
if locations is None:
_LOGGER.error("Unable to retrieve locations. Verify API key")
return False
if location_id is None:
location = locations[0]
location_id = location["locationId"]
_LOGGER.info(
"Streamlabs Water Monitor auto-detected location_id=%s", location_id
)
else:
location = next(
(loc for loc in locations if location_id == loc["locationId"]), None
)
if location is None:
_LOGGER.error("Supplied location_id is invalid")
return False
location_name = location["name"]
hass.data[DOMAIN] = {
"client": client,
"location_id": location_id,
"location_name": location_name,
}
for component in STREAMLABSWATER_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
def set_away_mode(service):
"""Set the StreamLabsWater Away Mode."""
away_mode = service.data.get(ATTR_AWAY_MODE)
client.update_location(location_id, away_mode)
hass.services.register(
DOMAIN, SERVICE_SET_AWAY_MODE, set_away_mode, schema=SET_AWAY_MODE_SCHEMA
)
return True
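
# Example configuration.yaml entry accepted by CONFIG_SCHEMA above
# (values are placeholders):
#
#   streamlabswater:
#     api_key: YOUR_API_KEY
#     location_id: OPTIONAL_LOCATION_ID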
|
from . import helpers, pprint
def parser(subparsers, _):
"""Adds the publish parser to the given subparsers object."""
desc = 'publish commits upstream'
publish_parser = subparsers.add_parser(
'publish', help=desc, description=desc.capitalize(), aliases=['pb'])
publish_parser.add_argument(
'dst', nargs='?', help='the branch where to publish commits')
publish_parser.set_defaults(func=main)
def main(args, repo):
current_b = repo.current_branch
dst_b = helpers.get_branch_or_use_upstream(args.dst, 'dst', repo)
current_b.publish(dst_b)
pprint.ok(
'Publish of commits from branch {0} to branch {1} succeeded'.format(
current_b, dst_b))
return True
|
import mock
import pytest
from kubernetes.client import V1DeleteOptions
from kubernetes.client.rest import ApiException
from paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes import does_instance_exist
from paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes import main
from paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes import nodes_for_cleanup
from paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes import terminate_nodes
def test_nodes_for_cleanup():
with mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.terminated_nodes",
autospec=True,
) as mock_terminated_nodes:
m1, m2, m3 = mock.MagicMock(), mock.MagicMock(), mock.MagicMock()
m4 = mock.MagicMock()
m4.metadata.labels = {"node-role.kubernetes.io/master": ""}
mock_ec2_client = mock.Mock()
mock_terminated_nodes.return_value = [m2, m3]
for_cleanup = nodes_for_cleanup(mock_ec2_client, [m1, m2, m3, m4])
assert for_cleanup == [m2, m3]
def test_terminate_nodes():
mock_client = mock.MagicMock()
mock_client.core.delete_node.side_effect = [None, ApiException(404), None]
m1, m2, m3 = mock.Mock(), mock.Mock(), mock.Mock()
success, errors = terminate_nodes(client=mock_client, nodes=[m1, m2, m3])
expected_calls = [
mock.call.core.delete_node(
node, body=V1DeleteOptions(), propagation_policy="foreground"
)
for node in [m1, m2, m3]
]
assert mock_client.mock_calls == expected_calls
assert success == [m1, m3]
assert errors[0][0] == m2
assert isinstance(errors[0][1], ApiException)
mock_client.reset_mock()
mock_client.core.delete_node.side_effect = [None, ApiException(404), None]
success, errors = terminate_nodes(client=mock_client, nodes=[m1, m2, m3])
expected_calls = [
mock.call.core.delete_node(
node, body=V1DeleteOptions(), propagation_policy="foreground"
)
for node in [m1, m2, m3]
]
assert mock_client.mock_calls == expected_calls
assert success == [m1, m3]
assert errors[0][0] == m2
assert isinstance(errors[0][1], ApiException)
def test_does_instance_exist():
# if the node doesn't exist at all, then the client will raise an exception
with mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.ClientError",
autospec=True,
) as mock_error:
mock_error.response = {"Error": {"Code": 404}}
mock_client = mock.MagicMock()
mock_client.side_effect = mock_error
assert does_instance_exist(mock_client, "i-12345") is False
statuses = [
"pending",
"running",
"shutting-down",
"terminated",
"stopping",
"stopped",
]
    expected_running = [True, True, False, False, False, False]
    for status, running in zip(statuses, expected_running):
mock_client.reset_mock()
mock_client.describe_instance_status.return_value = {
"InstanceStatuses": [{"InstanceState": {"Name": status}}]
}
assert does_instance_exist(mock_client, "i-12345") is running
# finally, there have been instances where the client doesn't 404, but there
# isn't a status attached to the instance
mock_client.reset_mock()
mock_client.describe_instance_status.return_value = {"InstanceStatuses": []}
assert does_instance_exist(mock_client, "i-12345") is False
def test_main():
with mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.get_all_nodes",
autospec=True,
) as mock_get_all_nodes, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.KubeClient",
autospec=True,
) as mock_kube_client, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.nodes_for_cleanup",
autospec=True,
) as mock_nodes_for_cleanup, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.terminate_nodes",
autospec=True,
) as mock_terminate_nodes, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.parse_args",
autospec=True,
) as mock_parse_args, mock.patch(
"boto3.client", autospec=True
):
mock_args = mock.MagicMock()
mock_args.dry_run = False
mock_parse_args.return_value = mock_args
m1 = mock.MagicMock(metadata=mock.Mock())
m2 = mock.Mock(metadata=mock.Mock())
m3 = mock.Mock(metadata=mock.Mock())
m4 = mock.Mock(metadata=mock.Mock())
for i, m in enumerate([m1, m2, m3, m4]):
m.metadata.name = f"m{i+1}"
if i < 3:
m.metadata.labels = {
"failure-domain.beta.kubernetes.io/region": "us-west-1"
}
else:
m.metadata.labels = {
"failure-domain.beta.kubernetes.io/region": "us-west-1",
"node-role.kubernetes.io/master": "",
}
mock_get_all_nodes.return_value = [m1, m2, m3, m4]
mock_nodes_for_cleanup.return_value = [m2, m3]
mock_terminate_nodes.return_value = (["m2"], [("m3", mock.MagicMock())])
with pytest.raises(SystemExit) as e:
main()
mock_terminate_nodes.assert_called_once_with(mock_kube_client(), ["m2", "m3"])
assert e.value.code == 1
mock_terminate_nodes.reset_mock()
mock_terminate_nodes.return_value = (["m2", "m3"], [])
main()
mock_terminate_nodes.assert_called_once_with(mock_kube_client(), ["m2", "m3"])
def test_main_dry_run():
with mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.get_all_nodes",
autospec=True,
) as mock_get_all_nodes, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.KubeClient",
autospec=True,
), mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.is_node_ready",
autospec=True,
) as mock_is_node_ready, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.terminate_nodes",
autospec=True,
) as mock_terminate_nodes, mock.patch(
"paasta_tools.kubernetes.bin.paasta_cleanup_stale_nodes.parse_args",
autospec=True,
) as mock_parse_args, mock.patch(
"boto3.client", autospec=True
):
mock_args = mock.MagicMock()
mock_args.dry_run = True
mock_parse_args.return_value = mock_args
m1, m2, m3 = mock.MagicMock(), mock.MagicMock(), mock.MagicMock()
mock_get_all_nodes.return_value = [m1, m2, m3]
mock_is_node_ready.side_effect = [True, False, False]
print(mock_terminate_nodes)
main()
# https://bugs.python.org/issue28380
# we can't just use assert_not_called() here,
# so inspect the list of calls instead
assert len(mock_terminate_nodes.mock_calls) == 0
|
from unittest import mock
import pytest
from homeassistant.components.modbus.const import (
CALL_TYPE_COIL,
CALL_TYPE_DISCRETE,
CALL_TYPE_REGISTER_INPUT,
DEFAULT_HUB,
MODBUS_DOMAIN as DOMAIN,
)
from homeassistant.const import CONF_PLATFORM, CONF_SCAN_INTERVAL
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
@pytest.fixture()
def mock_hub(hass):
"""Mock hub."""
with patch("homeassistant.components.modbus.setup", return_value=True):
hub = mock.MagicMock()
hub.name = "hub"
hass.data[DOMAIN] = {DEFAULT_HUB: hub}
yield hub
class ReadResult:
"""Storage class for register read results."""
def __init__(self, register_words):
"""Init."""
self.registers = register_words
self.bits = register_words
async def setup_base_test(
sensor_name,
hass,
use_mock_hub,
data_array,
entity_domain,
scan_interval,
):
"""Run setup device for given config."""
# Full sensor configuration
config = {
entity_domain: {
CONF_PLATFORM: "modbus",
CONF_SCAN_INTERVAL: scan_interval,
**data_array,
}
}
# Initialize sensor
now = dt_util.utcnow()
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
assert await async_setup_component(hass, entity_domain, config)
await hass.async_block_till_done()
entity_id = f"{entity_domain}.{sensor_name}"
device = hass.states.get(entity_id)
return entity_id, now, device
async def run_base_read_test(
entity_id,
hass,
use_mock_hub,
register_type,
register_words,
expected,
now,
):
"""Run test for given config."""
# Setup inputs for the sensor
read_result = ReadResult(register_words)
if register_type == CALL_TYPE_COIL:
use_mock_hub.read_coils.return_value = read_result
elif register_type == CALL_TYPE_DISCRETE:
use_mock_hub.read_discrete_inputs.return_value = read_result
elif register_type == CALL_TYPE_REGISTER_INPUT:
use_mock_hub.read_input_registers.return_value = read_result
else: # CALL_TYPE_REGISTER_HOLDING
use_mock_hub.read_holding_registers.return_value = read_result
# Trigger update call with time_changed event
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Check state
state = hass.states.get(entity_id).state
assert state == expected
|
from pydeconz.sensor import Switch
from homeassistant.const import CONF_EVENT, CONF_ID, CONF_UNIQUE_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import slugify
from .const import CONF_ANGLE, CONF_GESTURE, CONF_XY, LOGGER, NEW_SENSOR
from .deconz_device import DeconzBase
CONF_DECONZ_EVENT = "deconz_event"
async def async_setup_events(gateway) -> None:
"""Set up the deCONZ events."""
@callback
def async_add_sensor(sensors):
"""Create DeconzEvent."""
for sensor in sensors:
if not gateway.option_allow_clip_sensor and sensor.type.startswith("CLIP"):
continue
if sensor.type not in Switch.ZHATYPE or sensor.uniqueid in {
event.unique_id for event in gateway.events
}:
continue
new_event = DeconzEvent(sensor, gateway)
gateway.hass.async_create_task(new_event.async_update_device_registry())
gateway.events.append(new_event)
gateway.listeners.append(
async_dispatcher_connect(
gateway.hass, gateway.async_signal_new_device(NEW_SENSOR), async_add_sensor
)
)
async_add_sensor(
[gateway.api.sensors[key] for key in sorted(gateway.api.sensors, key=int)]
)
@callback
def async_unload_events(gateway) -> None:
"""Unload all deCONZ events."""
for event in gateway.events:
event.async_will_remove_from_hass()
gateway.events.clear()
class DeconzEvent(DeconzBase):
"""When you want signals instead of entities.
Stateless sensors such as remotes are expected to generate an event
instead of a sensor entity in hass.
"""
def __init__(self, device, gateway):
"""Register callback that will be used for signals."""
super().__init__(device, gateway)
self._device.register_callback(self.async_update_callback)
self.device_id = None
self.event_id = slugify(self._device.name)
LOGGER.debug("deCONZ event created: %s", self.event_id)
@property
def device(self):
"""Return Event device."""
return self._device
@callback
def async_will_remove_from_hass(self) -> None:
"""Disconnect event object when removed."""
self._device.remove_callback(self.async_update_callback)
@callback
def async_update_callback(self, force_update=False):
"""Fire the event if reason is that state is updated."""
if (
self.gateway.ignore_state_updates
or "state" not in self._device.changed_keys
):
return
data = {
CONF_ID: self.event_id,
CONF_UNIQUE_ID: self.serial,
CONF_EVENT: self._device.state,
}
if self._device.gesture is not None:
data[CONF_GESTURE] = self._device.gesture
if self._device.angle is not None:
data[CONF_ANGLE] = self._device.angle
if self._device.xy is not None:
data[CONF_XY] = self._device.xy
self.gateway.hass.bus.async_fire(CONF_DECONZ_EVENT, data)
async def async_update_device_registry(self):
"""Update device registry."""
device_registry = (
await self.gateway.hass.helpers.device_registry.async_get_registry()
)
entry = device_registry.async_get_or_create(
config_entry_id=self.gateway.config_entry.entry_id, **self.device_info
)
self.device_id = entry.id
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
from absl import flags
from absl.testing import absltest
FLAGS = flags.FLAGS
flags.DEFINE_boolean('set_up_module_error', False,
'Cause setupModule to error.')
flags.DEFINE_boolean('tear_down_module_error', False,
'Cause tearDownModule to error.')
flags.DEFINE_boolean('set_up_class_error', False, 'Cause setUpClass to error.')
flags.DEFINE_boolean('tear_down_class_error', False,
'Cause tearDownClass to error.')
flags.DEFINE_boolean('set_up_error', False, 'Cause setUp to error.')
flags.DEFINE_boolean('tear_down_error', False, 'Cause tearDown to error.')
flags.DEFINE_boolean('test_error', False, 'Cause the test to error.')
flags.DEFINE_boolean('set_up_fail', False, 'Cause setUp to fail.')
flags.DEFINE_boolean('tear_down_fail', False, 'Cause tearDown to fail.')
flags.DEFINE_boolean('test_fail', False, 'Cause the test to fail.')
flags.DEFINE_float('random_error', 0.0,
'0 - 1.0: fraction of a random failure at any step',
lower_bound=0.0, upper_bound=1.0)
def _random_error():
return random.random() < FLAGS.random_error
def setUpModule():
if FLAGS.set_up_module_error or _random_error():
raise Exception('setUpModule Errored!')
def tearDownModule():
if FLAGS.tear_down_module_error or _random_error():
raise Exception('tearDownModule Errored!')
class FailableTest(absltest.TestCase):
@classmethod
def setUpClass(cls):
if FLAGS.set_up_class_error or _random_error():
raise Exception('setUpClass Errored!')
@classmethod
def tearDownClass(cls):
if FLAGS.tear_down_class_error or _random_error():
raise Exception('tearDownClass Errored!')
def setUp(self):
if FLAGS.set_up_error or _random_error():
raise Exception('setUp Errored!')
if FLAGS.set_up_fail:
self.fail('setUp Failed!')
def tearDown(self):
if FLAGS.tear_down_error or _random_error():
raise Exception('tearDown Errored!')
if FLAGS.tear_down_fail:
self.fail('tearDown Failed!')
def test(self):
if FLAGS.test_error or _random_error():
raise Exception('test Errored!')
if FLAGS.test_fail:
self.fail('test Failed!')
if __name__ == '__main__':
absltest.main()
|
import argparse
import concurrent.futures
import json
import logging
import os
import re
from typing import Any
from typing import Dict
from typing import List
from typing import Tuple
from mypy_extensions import TypedDict
from paasta_tools import remote_git
from paasta_tools.cli.utils import get_instance_configs_for_service
from paasta_tools.utils import atomic_file_write
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_git_url
log = logging.getLogger(__name__)
TARGET_FILE = "deployments.json"
V1_Mapping = TypedDict(
"V1_Mapping", {"docker_image": str, "desired_state": str, "force_bounce": str}
)
V2_Deployment = TypedDict("V2_Deployment", {"docker_image": str, "git_sha": str})
V2_Control = TypedDict("V2_Control", {"desired_state": str, "force_bounce": str})
V2_Mappings = TypedDict(
"V2_Mappings",
{"deployments": Dict[str, V2_Deployment], "controls": Dict[str, V2_Control]},
)
DeploymentsDict = TypedDict(
"DeploymentsDict", {"v1": Dict[str, V1_Mapping], "v2": V2_Mappings}
)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Creates marathon jobs.")
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False
)
parser.add_argument(
"-s",
"--service",
required=True,
help="Service name to make the deployments.json for",
)
args = parser.parse_args()
return args
def get_latest_deployment_tag(
refs: Dict[str, str], deploy_group: str
) -> Tuple[str, str]:
"""Gets the latest deployment tag and sha for the specified deploy_group
:param refs: A dictionary mapping git refs to shas
:param deploy_group: The deployment group to return a deploy tag for
:returns: A tuple of the form (ref, sha) where ref is the actual deployment
tag (with the most recent timestamp) and sha is the sha it points at
"""
most_recent_dtime = None
most_recent_ref = None
most_recent_sha = None
pattern = re.compile(r"^refs/tags/paasta-%s-(\d{8}T\d{6})-deploy$" % deploy_group)
for ref_name, sha in refs.items():
match = pattern.match(ref_name)
if match:
dtime = match.groups()[0]
if most_recent_dtime is None or dtime > most_recent_dtime:
most_recent_dtime = dtime
most_recent_ref = ref_name
most_recent_sha = sha
return most_recent_ref, most_recent_sha
def get_deploy_group_mappings(
soa_dir: str, service: str
) -> Tuple[Dict[str, V1_Mapping], V2_Mappings]:
"""Gets mappings from service:deploy_group to services-service:paasta-hash,
where hash is the current SHA at the HEAD of branch_name.
    This is done for the given service in soa_dir.
:param soa_dir: The SOA configuration directory to read from
:returns: A dictionary mapping service:deploy_group to a dictionary
containing:
- 'docker_image': something like "services-service:paasta-hash". This is
relative to the paasta docker registry.
- 'desired_state': either 'start' or 'stop'. Says whether this branch
should be running.
- 'force_bounce': An arbitrary value, which may be None. A change in this
value should trigger a bounce, even if the other properties of this app
have not changed.
"""
mappings: Dict[str, V1_Mapping] = {}
v2_mappings: V2_Mappings = {"deployments": {}, "controls": {}}
git_url = get_git_url(service=service, soa_dir=soa_dir)
# Most of the time of this function is in two parts:
# 1. getting remote refs from git. (Mostly IO, just waiting for git to get back to us.)
# 2. loading instance configs. (Mostly CPU, copy.deepcopying yaml over and over again)
# Let's do these two things in parallel.
executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
remote_refs_future = executor.submit(remote_git.list_remote_refs, git_url)
service_configs = get_instance_configs_for_service(soa_dir=soa_dir, service=service)
deploy_group_branch_mappings = {
config.get_branch(): config.get_deploy_group() for config in service_configs
}
if not deploy_group_branch_mappings:
log.info("Service %s has no valid deploy groups. Skipping.", service)
return mappings, v2_mappings
remote_refs = remote_refs_future.result()
tag_by_deploy_group = {
dg: get_latest_deployment_tag(remote_refs, dg)
for dg in set(deploy_group_branch_mappings.values())
}
state_by_branch_and_sha = get_desired_state_by_branch_and_sha(remote_refs)
for control_branch, deploy_group in deploy_group_branch_mappings.items():
(deploy_ref_name, deploy_ref_sha) = tag_by_deploy_group[deploy_group]
if deploy_ref_name in remote_refs:
commit_sha = remote_refs[deploy_ref_name]
control_branch_alias = f"{service}:paasta-{control_branch}"
control_branch_alias_v2 = f"{service}:{control_branch}"
docker_image = build_docker_image_name(service, commit_sha)
desired_state, force_bounce = state_by_branch_and_sha.get(
(control_branch, deploy_ref_sha), ("start", None)
)
log.info("Mapping %s to docker image %s", control_branch, docker_image)
v2_mappings["deployments"][deploy_group] = {
"docker_image": docker_image,
"git_sha": commit_sha,
}
mappings[control_branch_alias] = {
"docker_image": docker_image,
"desired_state": desired_state,
"force_bounce": force_bounce,
}
v2_mappings["controls"][control_branch_alias_v2] = {
"desired_state": desired_state,
"force_bounce": force_bounce,
}
return mappings, v2_mappings
def build_docker_image_name(service: str, sha: str) -> str:
return f"services-{service}:paasta-{sha}"
def get_service_from_docker_image(image_name: str) -> str:
"""Does the opposite of build_docker_image_name and retrieves the
    name of a service out of a provided docker image
An image name has the full path, including the registry. Like:
docker-paasta.yelpcorp.com:443/services-example_service:paasta-591ae8a7b3224e3b3322370b858377dd6ef335b6
"""
matches = re.search(".*/services-(.*?):paasta-.*?", image_name)
return matches.group(1)
def get_desired_state_by_branch_and_sha(
remote_refs: Dict[str, str]
) -> Dict[Tuple[str, str], Tuple[str, Any]]:
tag_pattern = r"^refs/tags/(?:paasta-){0,2}(?P<branch>[a-zA-Z0-9-_.]+)-(?P<force_bounce>[^-]+)-(?P<state>(start|stop))$"
states_by_branch_and_sha: Dict[Tuple[str, str], List[Tuple[str, Any]]] = {}
for ref_name, sha in remote_refs.items():
match = re.match(tag_pattern, ref_name)
if match:
gd = match.groupdict()
states_by_branch_and_sha.setdefault((gd["branch"], sha), []).append(
(gd["state"], gd["force_bounce"])
)
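    # For each (branch, sha) pair, keep the entry with the greatest force_bounce
    # value; assuming force_bounce tokens sort chronologically (they are
    # timestamp-like in practice), this selects the most recent start/stop tag.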
return {
(branch, sha): sorted(states, key=lambda x: x[1])[-1]
for ((branch, sha), states) in states_by_branch_and_sha.items()
}
def get_deployments_dict_from_deploy_group_mappings(
deploy_group_mappings: Dict[str, V1_Mapping], v2_deploy_group_mappings: V2_Mappings
) -> DeploymentsDict:
return {"v1": deploy_group_mappings, "v2": v2_deploy_group_mappings}
def generate_deployments_for_service(service: str, soa_dir: str) -> None:
try:
with open(os.path.join(soa_dir, service, TARGET_FILE), "r") as oldf:
old_deployments_dict = json.load(oldf)
except (IOError, ValueError):
old_deployments_dict = {}
mappings, v2_mappings = get_deploy_group_mappings(soa_dir=soa_dir, service=service)
deployments_dict = get_deployments_dict_from_deploy_group_mappings(
mappings, v2_mappings
)
if deployments_dict != old_deployments_dict:
with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as newf:
json.dump(deployments_dict, newf)
def main() -> None:
args = parse_args()
soa_dir = os.path.abspath(args.soa_dir)
service = args.service
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
generate_deployments_for_service(service=service, soa_dir=soa_dir)
if __name__ == "__main__":
main()
|
from __future__ import division
from __future__ import print_function
from builtins import object
import os
import numpy as np
from scipy.linalg import orth
class PPCA(object):
def __init__(self):
self.raw = None
self.data = None
self.C = None
self.means = None
self.stds = None
def _standardize(self, X):
if self.means is None or self.stds is None:
raise RuntimeError("Fit model first")
return (X - self.means) / self.stds
def fit(self, data, d=None, tol=1e-4, min_obs=10, verbose=False):
self.raw = data
self.raw[np.isinf(self.raw)] = np.max(self.raw[np.isfinite(self.raw)])
valid_series = np.sum(~np.isnan(self.raw), axis=0) >= min_obs
data = self.raw[:, valid_series].copy()
N = data.shape[0]
D = data.shape[1]
self.means = np.nanmean(data, axis=0)
self.stds = np.nanstd(data, axis=0)
data = self._standardize(data)
observed = ~np.isnan(data)
missing = np.sum(~observed)
data[~observed] = 0
# initial
if d is None:
d = data.shape[1]
if self.C is None:
C = np.random.randn(D, d)
else:
C = self.C
CC = np.dot(C.T, C)
X = np.dot(np.dot(data, C), np.linalg.inv(CC))
recon = np.dot(X, C.T)
recon[~observed] = 0
ss = np.sum((recon - data)**2)/(N*D - missing)
v0 = np.inf
counter = 0
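        # EM loop for probabilistic PCA (after Tipping & Bishop): the E-step
        # imputes missing entries and the latent scores X, the M-step
        # re-estimates the loadings C and noise variance ss, until the
        # objective stabilises.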
while True:
Sx = np.linalg.inv(np.eye(d) + CC/ss)
# e-step
ss0 = ss
if missing > 0:
proj = np.dot(X, C.T)
data[~observed] = proj[~observed]
X = np.dot(np.dot(data, C), Sx) / ss
# m-step
XX = np.dot(X.T, X)
C = np.dot(np.dot(data.T, X), np.linalg.pinv(XX + N*Sx))
CC = np.dot(C.T, C)
recon = np.dot(X, C.T)
recon[~observed] = 0
ss = (np.sum((recon-data)**2) + N*np.sum(CC*Sx) + missing*ss0)/(N*D)
# calc diff for convergence
det = np.log(np.linalg.det(Sx))
if np.isinf(det):
det = abs(np.linalg.slogdet(Sx)[1])
v1 = N*(D*np.log(ss) + np.trace(Sx) - det) \
+ np.trace(XX) - missing*np.log(ss0)
diff = abs(v1/v0 - 1)
if verbose:
print(diff)
if (diff < tol) and (counter > 5):
break
counter += 1
v0 = v1
C = orth(C)
vals, vecs = np.linalg.eig(np.cov(np.dot(data, C).T))
order = np.flipud(np.argsort(vals))
vecs = vecs[:, order]
vals = vals[order]
C = np.dot(C, vecs)
# attach objects to class
self.C = C
self.data = data
self.eig_vals = vals
self._calc_var()
def transform(self, data=None):
if self.C is None:
raise RuntimeError('Fit the data model first.')
if data is None:
return np.dot(self.data, self.C)
return np.dot(data, self.C)
def _calc_var(self):
if self.data is None:
raise RuntimeError('Fit the data model first.')
data = self.data.T
# variance calc
var = np.nanvar(data, axis=1)
total_var = var.sum()
self.var_exp = self.eig_vals.cumsum() / total_var
def save(self, fpath):
np.save(fpath, self.C)
def load(self, fpath):
assert os.path.isfile(fpath)
self.C = np.load(fpath)
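
# Usage sketch (assumes a 2-D NumPy array `X` with NaN marking missing values):
#
#   ppca = PPCA()
#   ppca.fit(X, d=2)
#   scores = ppca.transform()   # latent coordinates, shape (n_samples, d)
#   print(ppca.var_exp)         # cumulative fraction of variance explained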
|
from flexx import app, ui
class Red(ui.Widget):
CSS = '.flx-Red { background: #ff0000;}'
class Deep2(ui.Widget):
def init(self):
with ui.VBox():
            ui.Label(text='Widgets in HFix/VFix layouts in a widget in a vbox')
with ui.Widget(flex=1):
with ui.VFix():
with ui.HFix():
Red(flex=1)
Red(flex=1)
with ui.HFix():
Red(flex=1)
Red(flex=1)
if __name__ == '__main__':
m = app.launch(Deep2, 'app')
app.run()
|
import mock
from paasta_tools.cli.cmds.cook_image import paasta_cook_image
from paasta_tools.utils import get_username
@mock.patch("paasta_tools.cli.cmds.cook_image.validate_service_name", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image.makefile_responds_to", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._run", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._log_audit", autospec=True)
def test_run_success(
mock_log_audit, mock_run, mock_makefile_responds_to, mock_validate_service_name
):
mock_run.return_value = (0, "Output")
mock_makefile_responds_to.return_value = True
mock_validate_service_name.return_value = True
args = mock.MagicMock()
args.service = "fake_service"
assert paasta_cook_image(args) == 0
mock_log_audit.assert_called_once_with(
action="cook-image",
action_details={
"tag": "paasta-cook-image-fake_service-{}".format(get_username())
},
service="fake_service",
)
@mock.patch("paasta_tools.cli.cmds.cook_image.validate_service_name", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image.makefile_responds_to", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._run", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._log_audit", autospec=True)
def test_run_makefile_fail(
mock_log_audit, mock_run, mock_makefile_responds_to, mock_validate_service_name
):
mock_run.return_value = (0, "Output")
mock_makefile_responds_to.return_value = False
mock_validate_service_name.return_value = True
args = mock.MagicMock()
args.service = "fake_service"
assert paasta_cook_image(args) == 1
assert not mock_log_audit.called
class FakeKeyboardInterrupt(KeyboardInterrupt):
pass
@mock.patch("paasta_tools.cli.cmds.cook_image.validate_service_name", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image.makefile_responds_to", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._run", autospec=True)
@mock.patch("paasta_tools.cli.cmds.cook_image._log_audit", autospec=True)
def test_run_keyboard_interrupt(
mock_log_audit, mock_run, mock_makefile_responds_to, mock_validate_service_name
):
mock_run.return_value = (0, "Output")
mock_makefile_responds_to.return_value = True
mock_validate_service_name.return_value = True
mock_run.side_effect = FakeKeyboardInterrupt
args = mock.MagicMock()
args.service = "fake_service"
assert paasta_cook_image(args) == 2
assert not mock_log_audit.called
|
from pypjlink import MUTE_AUDIO, Projector
from pypjlink.projector import ProjectorError
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
CONF_ENCODING = "encoding"
DEFAULT_PORT = 4352
DEFAULT_ENCODING = "utf-8"
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
}
)
SUPPORT_PJLINK = (
SUPPORT_VOLUME_MUTE | SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PJLink platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
name = config.get(CONF_NAME)
encoding = config.get(CONF_ENCODING)
password = config.get(CONF_PASSWORD)
if "pjlink" not in hass.data:
hass.data["pjlink"] = {}
hass_data = hass.data["pjlink"]
device_label = f"{host}:{port}"
if device_label in hass_data:
return
device = PjLinkDevice(host, port, name, encoding, password)
hass_data[device_label] = device
add_entities([device], True)
def format_input_source(input_source_name, input_source_number):
"""Format input source for display in UI."""
return f"{input_source_name} {input_source_number}"
class PjLinkDevice(MediaPlayerEntity):
"""Representation of a PJLink device."""
def __init__(self, host, port, name, encoding, password):
"""Iinitialize the PJLink device."""
self._host = host
self._port = port
self._name = name
self._password = password
self._encoding = encoding
self._muted = False
self._pwstate = STATE_OFF
self._current_source = None
with self.projector() as projector:
if not self._name:
self._name = projector.get_name()
inputs = projector.get_inputs()
self._source_name_mapping = {format_input_source(*x): x for x in inputs}
self._source_list = sorted(self._source_name_mapping.keys())
def projector(self):
"""Create PJLink Projector instance."""
projector = Projector.from_address(
self._host, self._port, self._encoding, DEFAULT_TIMEOUT
)
projector.authenticate(self._password)
return projector
def update(self):
"""Get the latest state from the device."""
with self.projector() as projector:
try:
pwstate = projector.get_power()
if pwstate in ("on", "warm-up"):
self._pwstate = STATE_ON
self._muted = projector.get_mute()[1]
self._current_source = format_input_source(*projector.get_input())
else:
self._pwstate = STATE_OFF
self._muted = False
self._current_source = None
except KeyError as err:
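                # Quirk handling (rationale inferred from the branches below):
                # pypjlink can surface the projector's raw "OK" reply as a
                # KeyError; treat exactly that case as powered off and
                # re-raise anything else.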
if str(err) == "'OK'":
self._pwstate = STATE_OFF
self._muted = False
self._current_source = None
else:
raise
except ProjectorError as err:
if str(err) == "unavailable time":
self._pwstate = STATE_OFF
self._muted = False
self._current_source = None
else:
raise
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._pwstate
@property
def is_volume_muted(self):
"""Return boolean indicating mute status."""
return self._muted
@property
def source(self):
"""Return current input source."""
return self._current_source
@property
def source_list(self):
"""Return all available input sources."""
return self._source_list
@property
def supported_features(self):
"""Return projector supported features."""
return SUPPORT_PJLINK
def turn_off(self):
"""Turn projector off."""
if self._pwstate == STATE_ON:
with self.projector() as projector:
projector.set_power("off")
def turn_on(self):
"""Turn projector on."""
if self._pwstate == STATE_OFF:
with self.projector() as projector:
projector.set_power("on")
def mute_volume(self, mute):
"""Mute (true) of unmute (false) media player."""
with self.projector() as projector:
projector.set_mute(MUTE_AUDIO, mute)
def select_source(self, source):
"""Set the input source."""
source = self._source_name_mapping[source]
with self.projector() as projector:
projector.set_input(*source)
|
import numpy as np
from .constants import FIFF
from .tag import Tag
from .tag import read_tag
from .write import write_id, start_block, end_block, _write
from ..utils import logger, verbose
def dir_tree_find(tree, kind):
"""Find nodes of the given kind from a directory tree structure.
Parameters
----------
tree : dict
Directory tree.
kind : int
Kind to find.
Returns
-------
nodes : list
List of matching nodes.
"""
nodes = []
if isinstance(tree, list):
for t in tree:
nodes += dir_tree_find(t, kind)
else:
# Am I desirable myself?
if tree['block'] == kind:
nodes.append(tree)
# Search the subtrees
for child in tree['children']:
nodes += dir_tree_find(child, kind)
return nodes
@verbose
def make_dir_tree(fid, directory, start=0, indent=0, verbose=None):
"""Create the directory tree structure."""
FIFF_BLOCK_START = 104
FIFF_BLOCK_END = 105
FIFF_FILE_ID = 100
FIFF_BLOCK_ID = 103
FIFF_PARENT_BLOCK_ID = 110
if directory[start].kind == FIFF_BLOCK_START:
tag = read_tag(fid, directory[start].pos)
block = tag.data
else:
block = 0
logger.debug(' ' * indent + 'start { %d' % block)
this = start
tree = dict()
tree['block'] = block
tree['id'] = None
tree['parent_id'] = None
tree['nent'] = 0
tree['nchild'] = 0
tree['directory'] = directory[this]
tree['children'] = []
while this < len(directory):
if directory[this].kind == FIFF_BLOCK_START:
if this != start:
child, this = make_dir_tree(fid, directory, this, indent + 1)
tree['nchild'] += 1
tree['children'].append(child)
elif directory[this].kind == FIFF_BLOCK_END:
tag = read_tag(fid, directory[start].pos)
if tag.data == block:
break
else:
tree['nent'] += 1
if tree['nent'] == 1:
tree['directory'] = list()
tree['directory'].append(directory[this])
# Add the id information if available
if block == 0:
if directory[this].kind == FIFF_FILE_ID:
tag = read_tag(fid, directory[this].pos)
tree['id'] = tag.data
else:
if directory[this].kind == FIFF_BLOCK_ID:
tag = read_tag(fid, directory[this].pos)
tree['id'] = tag.data
elif directory[this].kind == FIFF_PARENT_BLOCK_ID:
tag = read_tag(fid, directory[this].pos)
tree['parent_id'] = tag.data
this += 1
# Eliminate the empty directory
if tree['nent'] == 0:
tree['directory'] = None
logger.debug(' ' * (indent + 1) + 'block = %d nent = %d nchild = %d'
% (tree['block'], tree['nent'], tree['nchild']))
logger.debug(' ' * indent + 'end } %d' % block)
last = this
return tree, last
###############################################################################
# Writing
def copy_tree(fidin, in_id, nodes, fidout):
"""Copy directory subtrees from fidin to fidout."""
if len(nodes) <= 0:
return
if not isinstance(nodes, list):
nodes = [nodes]
for node in nodes:
start_block(fidout, node['block'])
if node['id'] is not None:
if in_id is not None:
write_id(fidout, FIFF.FIFF_PARENT_FILE_ID, in_id)
write_id(fidout, FIFF.FIFF_BLOCK_ID, in_id)
write_id(fidout, FIFF.FIFF_PARENT_BLOCK_ID, node['id'])
if node['directory'] is not None:
for d in node['directory']:
# Do not copy these tags
if d.kind == FIFF.FIFF_BLOCK_ID or \
d.kind == FIFF.FIFF_PARENT_BLOCK_ID or \
d.kind == FIFF.FIFF_PARENT_FILE_ID:
continue
# Read and write tags, pass data through transparently
fidin.seek(d.pos, 0)
tag = Tag(*np.fromfile(fidin, ('>i4,>I4,>i4,>i4'), 1)[0])
tag.data = np.fromfile(fidin, '>B', tag.size)
_write(fidout, tag.data, tag.kind, 1, tag.type, '>B')
for child in node['children']:
copy_tree(fidin, in_id, child, fidout)
end_block(fidout, node['block'])
|
from __future__ import absolute_import
from __future__ import print_function
import pytest
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Activation, Input
from elephas.spark_model import SparkModel
@pytest.mark.usefixtures("spark_context")
def test_sequential_serialization():
    # Spark context is provided by the spark_context fixture
seq_model = Sequential()
seq_model.add(Dense(128, input_dim=784))
seq_model.add(Activation('relu'))
seq_model.add(Dropout(0.2))
seq_model.add(Dense(128))
seq_model.add(Activation('relu'))
seq_model.add(Dropout(0.2))
seq_model.add(Dense(10))
seq_model.add(Activation('softmax'))
seq_model.compile(
optimizer="sgd", loss="categorical_crossentropy", metrics=["acc"])
spark_model = SparkModel(seq_model, frequency='epoch', mode='synchronous')
spark_model.save("elephas_sequential.h5")
def test_model_serialization():
# This returns a tensor
inputs = Input(shape=(784,))
# a layer instance is callable on a tensor, and returns a tensor
x = Dense(64, activation='relu')(inputs)
x = Dense(64, activation='relu')(x)
predictions = Dense(10, activation='softmax')(x)
# This creates a model that includes
# the Input layer and three Dense layers
model = Model(inputs=inputs, outputs=predictions)
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
spark_model = SparkModel(model, frequency='epoch',
mode='synchronous', foo="bar")
spark_model.save("elephas_model.h5")
@pytest.mark.skip(reason="not feasible on travis right now")
def test_java_avg_serde():
from elephas.dl4j import ParameterAveragingModel, ParameterSharingModel
inputs = Input(shape=(784,))
x = Dense(64, activation='relu')(inputs)
x = Dense(64, activation='relu')(x)
predictions = Dense(10, activation='softmax')(x)
# This creates a model that includes
# the Input layer and three Dense layers
model = Model(inputs=inputs, outputs=predictions)
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
spark_model = ParameterAveragingModel(java_spark_context=None, model=model, num_workers=4, batch_size=32,
averaging_frequency=5, num_batches_prefetch=0, collect_stats=False,
save_file='temp.h5')
spark_model.save("java_param_averaging_model.h5")
@pytest.mark.skip(reason="not feasible on travis right now")
def test_java_sharing_serde():
from elephas.dl4j import ParameterAveragingModel, ParameterSharingModel
inputs = Input(shape=(784,))
x = Dense(64, activation='relu')(inputs)
x = Dense(64, activation='relu')(x)
predictions = Dense(10, activation='softmax')(x)
model = Model(inputs=inputs, outputs=predictions)
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
spark_model = ParameterSharingModel(java_spark_context=None, model=model, num_workers=4, batch_size=32,
shake_frequency=0, min_threshold=1e-5, update_threshold=1e-3,
workers_per_node=-1, num_batches_prefetch=0, step_delay=50, step_trigger=0.05,
threshold_step=1e-5, collect_stats=False, save_file='temp.h5')
spark_model.save("java_param_sharing_model.h5")
if __name__ == '__main__':
pytest.main([__file__])
|
from datetime import timedelta
import logging
from pybotvac.exceptions import NeatoRobotException
from homeassistant.components.sensor import DEVICE_CLASS_BATTERY
from homeassistant.const import PERCENTAGE
from homeassistant.helpers.entity import Entity
from .const import NEATO_DOMAIN, NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
BATTERY = "Battery"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Neato sensor using config entry."""
dev = []
neato = hass.data.get(NEATO_LOGIN)
for robot in hass.data[NEATO_ROBOTS]:
dev.append(NeatoSensor(neato, robot))
if not dev:
return
_LOGGER.debug("Adding robots for sensors %s", dev)
async_add_entities(dev, True)
class NeatoSensor(Entity):
"""Neato sensor."""
def __init__(self, neato, robot):
"""Initialize Neato sensor."""
self.robot = robot
self._available = neato.logged_in if neato is not None else False
self._robot_name = f"{self.robot.name} {BATTERY}"
self._robot_serial = self.robot.serial
self._state = None
def update(self):
"""Update Neato Sensor."""
try:
self._state = self.robot.state
except NeatoRobotException as ex:
if self._available:
_LOGGER.error(
"Neato sensor connection error for '%s': %s", self.entity_id, ex
)
self._state = None
self._available = False
return
self._available = True
_LOGGER.debug("self._state=%s", self._state)
@property
def name(self):
"""Return the name of this sensor."""
return self._robot_name
@property
def unique_id(self):
"""Return unique ID."""
return self._robot_serial
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_BATTERY
@property
def available(self):
"""Return availability."""
return self._available
@property
def state(self):
"""Return the state."""
return self._state["details"]["charge"]
@property
def unit_of_measurement(self):
"""Return unit of measurement."""
return PERCENTAGE
@property
def device_info(self):
"""Device info for neato robot."""
return {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}}
|
from django.contrib.sites.models import Site
from django.views.generic.base import TemplateView
from zinnia.settings import COPYRIGHT
from zinnia.settings import FEEDS_FORMAT
from zinnia.settings import PROTOCOL
class CapabilityView(TemplateView):
"""
Base view for the weblog capabilities.
"""
def get_context_data(self, **kwargs):
"""
Populate the context of the template
        with technical information for building URLs.
"""
context = super(CapabilityView, self).get_context_data(**kwargs)
context.update({'protocol': PROTOCOL,
'copyright': COPYRIGHT,
'feeds_format': FEEDS_FORMAT,
'site': Site.objects.get_current()})
return context
class HumansTxt(CapabilityView):
"""
http://humanstxt.org/
"""
content_type = 'text/plain'
template_name = 'zinnia/humans.txt'
class RsdXml(CapabilityView):
"""
http://en.wikipedia.org/wiki/Really_Simple_Discovery
"""
content_type = 'application/rsd+xml'
template_name = 'zinnia/rsd.xml'
class WLWManifestXml(CapabilityView):
"""
http://msdn.microsoft.com/en-us/library/bb463260.aspx
"""
content_type = 'application/wlwmanifest+xml'
template_name = 'zinnia/wlwmanifest.xml'
class OpenSearchXml(CapabilityView):
"""
http://www.opensearch.org/
"""
content_type = 'application/opensearchdescription+xml'
template_name = 'zinnia/opensearch.xml'
|