from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import re
import struct
import sys
import textwrap
try:
import fcntl
except ImportError:
fcntl = None
try:
# Importing termios will fail on non-unix platforms.
import termios
except ImportError:
termios = None
import six
from six.moves import range # pylint: disable=redefined-builtin
_DEFAULT_HELP_WIDTH = 80 # Default width of help output.
_MIN_HELP_WIDTH = 40 # Minimal "sane" width of help output. We assume that any
# value below 40 is unreasonable.
# Define the allowed error rate in an input string to get suggestions.
#
# We lean towards a high threshold because we tend to be matching a phrase,
# and the simple algorithm used here is geared towards correcting word
# spellings.
#
# For manual testing, consider "<command> --list", which produced a large
# number of spurious suggestions when we used "least_errors > 0.5" instead of
# "least_errors >= 0.5".
_SUGGESTION_ERROR_RATE_THRESHOLD = 0.50
# Characters that cannot appear or are highly discouraged in an XML 1.0
# document. (See http://www.w3.org/TR/REC-xml/#charsets or
# https://en.wikipedia.org/wiki/Valid_characters_in_XML#XML_1.0)
_ILLEGAL_XML_CHARS_REGEX = re.compile(
u'[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]')
# This is a set of module ids for the modules that disclaim key flags.
# This module is explicitly added to this set so that we never consider it to
# define a key flag.
disclaim_module_ids = set([id(sys.modules[__name__])])
# Define special flags here so that help may be generated for them.
# NOTE: Please do NOT use SPECIAL_FLAGS from outside flags module.
# Initialized inside flagvalues.py.
SPECIAL_FLAGS = None
# This points to the flags module, initialized in flags/__init__.py.
# This should only be used in adopt_module_key_flags to take SPECIAL_FLAGS into
# account.
FLAGS_MODULE = None
class _ModuleObjectAndName(
collections.namedtuple('_ModuleObjectAndName', 'module module_name')):
"""Module object and name.
Fields:
- module: object, module object.
- module_name: str, module name.
"""
def get_module_object_and_name(globals_dict):
"""Returns the module that defines a global environment, and its name.
Args:
globals_dict: A dictionary that should correspond to an environment
providing the values of the globals.
Returns:
_ModuleObjectAndName - pair of module object & module name.
Returns (None, None) if the module could not be identified.
"""
name = globals_dict.get('__name__', None)
module = sys.modules.get(name, None)
# Pick a more informative name for the main module.
return _ModuleObjectAndName(module,
(sys.argv[0] if name == '__main__' else name))
def get_calling_module_object_and_name():
"""Returns the module that's calling into this module.
We generally use this function to get the name of the module calling a
DEFINE_foo... function.
Returns:
The module object and name (a _ModuleObjectAndName pair) of the module that
called into this one.
Raises:
AssertionError: Raised when no calling module could be identified.
"""
for depth in range(1, sys.getrecursionlimit()):
# sys._getframe is the right thing to use here, as it's the best
# way to walk up the call stack.
globals_for_frame = sys._getframe(depth).f_globals # pylint: disable=protected-access
module, module_name = get_module_object_and_name(globals_for_frame)
if id(module) not in disclaim_module_ids and module_name is not None:
return _ModuleObjectAndName(module, module_name)
raise AssertionError('No module was found')
def get_calling_module():
"""Returns the name of the module that's calling into this module."""
return get_calling_module_object_and_name().module_name
def str_or_unicode(value):
"""Converts a value to a python string.
Behavior of this function is intentionally different in Python2/3.
In Python 2, an attempt is made to convert the given value to a str (byte
string); if it contains non-ASCII characters, it is converted to a unicode
string instead.
In Python 3, the given value is always converted to a str (unicode string).
This behavior reflects the (bad) Python 2 practice of representing a string
as a str as long as it contains only ASCII characters.
Args:
value: An object to be converted to a string.
Returns:
A string representation of the given value. See the description above
for its type.
"""
try:
return str(value)
except UnicodeEncodeError:
return unicode(value) # Python3 should never come here
def create_xml_dom_element(doc, name, value):
"""Returns an XML DOM element with name and text value.
Args:
doc: minidom.Document, the DOM document it should create nodes from.
name: str, the tag of XML element.
value: object, whose string representation will be used
as the value of the XML element. Illegal or highly discouraged xml 1.0
characters are stripped.
Returns:
An instance of minidom.Element.
"""
s = str_or_unicode(value)
if six.PY2 and not isinstance(s, unicode):
# Get a valid unicode string.
s = s.decode('utf-8', 'ignore')
if isinstance(value, bool):
# Display boolean values as the C++ flag library does: no caps.
s = s.lower()
# Remove illegal xml characters.
s = _ILLEGAL_XML_CHARS_REGEX.sub(u'', s)
e = doc.createElement(name)
e.appendChild(doc.createTextNode(s))
return e
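# Illustrative usage of create_xml_dom_element (a sketch, not part of the
# original module; assumes xml.dom.minidom):
#   from xml.dom import minidom
#   doc = minidom.Document()
#   create_xml_dom_element(doc, 'allow_override', True).toxml()
#   # -> '<allow_override>true</allow_override>'  (booleans are lower-cased)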
def get_help_width():
"""Returns the integer width of help lines that is used in TextWrap."""
if not sys.stdout.isatty() or termios is None or fcntl is None:
return _DEFAULT_HELP_WIDTH
try:
data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
columns = struct.unpack('hh', data)[1]
# Emacs mode returns 0.
# Here we assume that any value below 40 is unreasonable.
if columns >= _MIN_HELP_WIDTH:
return columns
# Returning an int as default is fine, int(int) just returns the int.
return int(os.getenv('COLUMNS', _DEFAULT_HELP_WIDTH))
except (TypeError, IOError, struct.error):
return _DEFAULT_HELP_WIDTH
def get_flag_suggestions(attempt, longopt_list):
"""Returns helpful similar matches for an invalid flag."""
# Don't suggest on very short strings, or if no longopts are specified.
if len(attempt) <= 2 or not longopt_list:
return []
option_names = [v.split('=')[0] for v in longopt_list]
# Find close approximations in flag prefixes.
# This also handles the case where the flag is spelled right but ambiguous.
distances = [(_damerau_levenshtein(attempt, option[0:len(attempt)]), option)
for option in option_names]
# t[0] is distance, and sorting by t[1] allows us to have stable output.
distances.sort()
least_errors, _ = distances[0]
# Don't suggest excessively bad matches.
if least_errors >= _SUGGESTION_ERROR_RATE_THRESHOLD * len(attempt):
return []
suggestions = []
for errors, name in distances:
if errors == least_errors:
suggestions.append(name)
else:
break
return suggestions
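# Illustrative behaviour of get_flag_suggestions (example values assumed, not
# taken from the original module). The attempt is compared against prefixes of
# the known long options, so a correct prefix of a single option yields that
# option:
#   >>> get_flag_suggestions('vers', ['version', 'verbosity', 'helpshort'])
#   ['version']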
def _damerau_levenshtein(a, b):
"""Returns Damerau-Levenshtein edit distance from a to b."""
memo = {}
def distance(x, y):
"""Recursively defined string distance with memoization."""
if (x, y) in memo:
return memo[x, y]
if not x:
d = len(y)
elif not y:
d = len(x)
else:
d = min(
distance(x[1:], y) + 1, # correct an insertion error
distance(x, y[1:]) + 1, # correct a deletion error
distance(x[1:], y[1:]) + (x[0] != y[0])) # correct a wrong character
if len(x) >= 2 and len(y) >= 2 and x[0] == y[1] and x[1] == y[0]:
# Correct a transposition.
t = distance(x[2:], y[2:]) + 1
if d > t:
d = t
memo[x, y] = d
return d
return distance(a, b)
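# Example (illustrative, not part of the original module): an adjacent
# transposition counts as a single edit, which is what makes this metric a
# good fit for catching swapped letters in flag names.
#   >>> _damerau_levenshtein('flga', 'flag')
#   1
# Plain Levenshtein distance would report 2 for the same pair.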
def text_wrap(text, length=None, indent='', firstline_indent=None):
"""Wraps a given text to a maximum line length and returns it.
It turns lines that only contain whitespace into empty lines, keeps new lines,
and expands tabs using 4 spaces.
Args:
text: str, text to wrap.
length: int, maximum length of a line, includes indentation.
If this is None, use get_help_width().
indent: str, indent for all but first line.
firstline_indent: str, indent for first line; if None, fall back to indent.
Returns:
str, the wrapped text.
Raises:
ValueError: Raised if indent or firstline_indent is not shorter than length.
"""
# Get defaults where the caller used None
if length is None:
length = get_help_width()
if indent is None:
indent = ''
if firstline_indent is None:
firstline_indent = indent
if len(indent) >= length:
raise ValueError('Length of indent exceeds length')
if len(firstline_indent) >= length:
raise ValueError('Length of first line indent exceeds length')
text = text.expandtabs(4)
result = []
# Create one wrapper for the first paragraph and one for subsequent
# paragraphs that does not have the initial wrapping.
wrapper = textwrap.TextWrapper(
width=length, initial_indent=firstline_indent, subsequent_indent=indent)
subsequent_wrapper = textwrap.TextWrapper(
width=length, initial_indent=indent, subsequent_indent=indent)
# textwrap does not have any special treatment for newlines. From the docs:
# "...newlines may appear in the middle of a line and cause strange output.
# For this reason, text should be split into paragraphs (using
# str.splitlines() or similar) which are wrapped separately."
for paragraph in (p.strip() for p in text.splitlines()):
if paragraph:
result.extend(wrapper.wrap(paragraph))
else:
result.append('') # Keep empty lines.
# Replace initial wrapper with wrapper for subsequent paragraphs.
wrapper = subsequent_wrapper
return '\n'.join(result)
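# Usage sketch for text_wrap (argument values are assumptions chosen for
# illustration, not from the original source): wrapping a two-paragraph help
# string to 40 columns keeps the blank line between paragraphs and indents the
# second paragraph by two spaces while leaving the first line flush.
#   text_wrap('First paragraph of help text.\n\nSecond paragraph.',
#             length=40, indent='  ', firstline_indent='')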
def flag_dict_to_args(flag_map, multi_flags=None):
"""Convert a dict of values into process call parameters.
This method is used to convert a dictionary into a sequence of parameters
for a binary that parses arguments using this module.
Args:
flag_map: dict, a mapping where the keys are flag names (strings).
The values are treated according to their type:
* If value is None, then only the name is emitted.
* If value is True, then only the name is emitted.
* If value is False, then only the name prepended with 'no' is emitted.
* If value is a string then --name=value is emitted.
* If value is a collection, this will emit --name=value1,value2,value3,
unless the flag name is in multi_flags, in which case this will emit
--name=value1 --name=value2 --name=value3.
* Everything else is converted to a string and passed as such.
multi_flags: set, names (strings) of flags that should be treated as
multi-flags.
Yields:
Strings suitable for subprocess execution.
"""
for key, value in six.iteritems(flag_map):
if value is None:
yield '--%s' % key
elif isinstance(value, bool):
if value:
yield '--%s' % key
else:
yield '--no%s' % key
elif isinstance(value, (bytes, type(u''))):
# We don't want strings to be handled like python collections.
yield '--%s=%s' % (key, value)
else:
# Now we attempt to deal with collections.
try:
if multi_flags and key in multi_flags:
for item in value:
yield '--%s=%s' % (key, str(item))
else:
yield '--%s=%s' % (key, ','.join(str(item) for item in value))
except TypeError:
# Default case.
yield '--%s=%s' % (key, value)
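# Illustrative example (values assumed, not from the original module); note
# that the dict's iteration order determines the order of the emitted
# arguments, hence the sorted() below:
#   >>> sorted(flag_dict_to_args(
#   ...     {'verbose': True, 'dry_run': False, 'files': ['a', 'b']},
#   ...     multi_flags={'files'}))
#   ['--files=a', '--files=b', '--nodry_run', '--verbose']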
def trim_docstring(docstring):
"""Removes indentation from triple-quoted strings.
This is the function specified in PEP 257 to handle docstrings:
https://www.python.org/dev/peps/pep-0257/.
Args:
docstring: str, a python docstring.
Returns:
str, docstring with indentation removed.
"""
if not docstring:
return ''
# If you've got a line longer than this you have other problems...
max_indent = 1 << 29
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = max_indent
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < max_indent:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
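# Illustrative behaviour (example assumed, consistent with PEP 257): the
# common leading indentation of continuation lines is removed while the
# docstring's relative structure is preserved.
#   >>> trim_docstring('Summary line.\n\n    Indented details.\n    ')
#   'Summary line.\n\nIndented details.'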
def doc_to_help(doc):
"""Takes a __doc__ string and reformats it as help."""
# Get rid of leading and trailing whitespace of the whole string. Using
# lstrip() or even strip() on individual lines could drop more than just the
# first line's leading whitespace and the last line's trailing whitespace.
doc = doc.strip()
# Get rid of all empty lines.
whitespace_only_line = re.compile('^[ \t]+$', re.M)
doc = whitespace_only_line.sub('', doc)
# Cut out common space at line beginnings.
doc = trim_docstring(doc)
# Just like this module's docstring, docstrings tend to be aligned somehow,
# i.e. they all start with the same amount of whitespace.
# 1) keep double new lines;
# 2) keep ws after new lines if not empty line;
# 3) all other new lines shall be changed to a space;
# Solution: Match new lines between non white space and replace with space.
doc = re.sub(r'(?<=\S)\n(?=\S)', ' ', doc, flags=re.M)
return doc
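# Example (illustrative, not from the original module): single newlines inside
# a paragraph are collapsed to spaces while blank lines between paragraphs are
# kept.
#   >>> doc_to_help('First line\nsecond line.\n\nNew paragraph.')
#   'First line second line.\n\nNew paragraph.'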
def is_bytes_or_string(maybe_string):
if str is bytes:
return isinstance(maybe_string, basestring)
else:
return isinstance(maybe_string, (str, bytes))
|
from homeassistant.components.ozw.light import byte_to_zwave_brightness
from .common import setup_ozw
async def test_light(hass, light_data, light_msg, light_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_data)
# Test loaded
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test turning on
# Beware that due to rounding, a roundtrip conversion does not always work
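# (Background, not asserted by this test: Home Assistant brightness uses a
# 0-255 scale while Z-Wave multilevel switches use 0-99, so the conversion
# quantizes the value and converting back may land on a neighbouring
# brightness value.)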
new_brightness = 44
new_transition = 0
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"brightness": new_brightness,
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[0]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 1407375551070225}
msg = sent_messages[1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
# Test turning off
new_transition = 6553
await hass.services.async_call(
"light",
"turn_off",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 237, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = 0
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test turn on without brightness
new_transition = 127.0
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 6
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 127, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 255,
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
# Test set brightness to 0
new_brightness = 0
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"brightness": new_brightness,
},
blocking=True,
)
assert len(sent_messages) == 7
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test setting color_name
new_color = "blue"
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_name": new_color},
blocking=True,
)
assert len(sent_messages) == 9
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#0000ff0000", "ValueIDKey": 659341335}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#0000ff0000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["rgb_color"] == (0, 0, 255)
# Test setting hs_color
new_color = [300, 70]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "hs_color": new_color},
blocking=True,
)
assert len(sent_messages) == 11
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ff4cff0000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ff4cff0000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["hs_color"] == (300.0, 70.196)
# Test setting rgb_color
new_color = [255, 154, 0]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "rgb_color": new_color},
blocking=True,
)
assert len(sent_messages) == 13
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ff99000000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ff99000000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["rgb_color"] == (255, 153, 0)
# Test setting xy_color
new_color = [0.52, 0.43]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "xy_color": new_color},
blocking=True,
)
assert len(sent_messages) == 15
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ffbb370000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ffbb370000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["xy_color"] == (0.519, 0.429)
# Test setting color temp
new_color = 200
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_temp": new_color},
blocking=True,
)
assert len(sent_messages) == 17
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#00000037c8", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#00000037c8"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["color_temp"] == 200
# Test setting invalid color temp
new_color = 120
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_temp": new_color},
blocking=True,
)
assert len(sent_messages) == 19
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#00000000ff", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#00000000ff"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["color_temp"] == 153
async def test_pure_rgb_dimmer_light(
hass, light_data, light_pure_rgb_msg, sent_messages
):
"""Test light with no color channels command class."""
receive_message = await setup_ozw(hass, fixture=light_data)
# Test loaded
state = hass.states.get("light.kitchen_rgb_strip_level")
assert state is not None
assert state.state == "on"
assert state.attributes["supported_features"] == 17
# Test setting hs_color
new_color = [300, 70]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.kitchen_rgb_strip_level", "hs_color": new_color},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 122257425}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ff4cff00", "ValueIDKey": 122470423}
# Feedback on state
light_pure_rgb_msg.decode()
light_pure_rgb_msg.payload["Value"] = "#ff4cff00"
light_pure_rgb_msg.encode()
receive_message(light_pure_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.kitchen_rgb_strip_level")
assert state is not None
assert state.state == "on"
assert state.attributes["hs_color"] == (300.0, 70.196)
async def test_no_rgb_light(hass, light_data, light_no_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_data)
# Test loaded no RGBW support (dimmer only)
state = hass.states.get("light.master_bedroom_l_level")
assert state is not None
assert state.state == "off"
# Turn on the light
new_brightness = 44
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.master_bedroom_l_level", "brightness": new_brightness},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 38371345,
}
# Feedback on state
light_no_rgb_msg.decode()
light_no_rgb_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_no_rgb_msg.encode()
receive_message(light_no_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.master_bedroom_l_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
async def test_no_ww_light(
hass, light_no_ww_data, light_msg, light_rgb_msg, sent_messages
):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_no_ww_data)
# Test loaded no ww support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Turn on the light
white_color = 190
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"white_value": white_color,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#00000000be", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#00000000be"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["white_value"] == 190
async def test_no_cw_light(
hass, light_no_cw_data, light_msg, light_rgb_msg, sent_messages
):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_no_cw_data)
# Test loaded no cw support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Turn on the light
white_color = 190
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"white_value": white_color,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#000000be", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#000000be"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["white_value"] == 190
async def test_wc_light(hass, light_wc_data, light_msg, light_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_wc_data)
# Test loaded only white LED support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
assert state.attributes["min_mireds"] == 153
assert state.attributes["max_mireds"] == 370
# Turn on the light
new_color = 190
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_temp": new_color},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#0000002bd4", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#0000002bd4"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["color_temp"] == 190
async def test_new_ozw_light(hass, light_new_ozw_data, light_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_new_ozw_data)
# Test loaded only white LED support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test turning on with new duration (newer openzwave)
new_transition = 4180
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 4180, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = 255
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
# Test turning off with new duration (newer openzwave)(new max)
await hass.services.async_call(
"light",
"turn_off",
{"entity_id": "light.led_bulb_6_multi_colour_level"},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 7621, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = 0
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
# Test turning on with new duration (newer openzwave)(factory default)
new_transition = 8000
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 6
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 6553, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = 255
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
|
from __future__ import division
import argparse
import multiprocessing
import numpy as np
import chainer
from chainer import iterators
from chainer.links import Classifier
from chainer.optimizer import WeightDecay
from chainer.optimizers import CorrectedMomentumSGD
from chainer import training
from chainer.training import extensions
from chainercv.chainer_experimental.datasets.sliceable import TransformDataset
from chainercv.datasets import directory_parsing_label_names
from chainercv.datasets import DirectoryParsingLabelDataset
from chainercv.transforms import center_crop
from chainercv.transforms import random_flip
from chainercv.transforms import random_sized_crop
from chainercv.transforms import resize
from chainercv.transforms import scale
from chainercv.chainer_experimental.training.extensions import make_shift
from chainercv.links.model.resnet import Bottleneck
from chainercv.links import ResNet101
from chainercv.links import ResNet152
from chainercv.links import ResNet50
import chainermn
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
class TrainTransform(object):
def __init__(self, mean):
self.mean = mean
def __call__(self, in_data):
img, label = in_data
img = random_sized_crop(img)
img = resize(img, (224, 224))
img = random_flip(img, x_random=True)
img -= self.mean
return img, label
class ValTransform(object):
def __init__(self, mean):
self.mean = mean
def __call__(self, in_data):
img, label = in_data
img = scale(img, 256)
img = center_crop(img, (224, 224))
img -= self.mean
return img, label
def main():
model_cfgs = {
'resnet50': {'class': ResNet50, 'score_layer_name': 'fc6',
'kwargs': {'arch': 'fb'}},
'resnet101': {'class': ResNet101, 'score_layer_name': 'fc6',
'kwargs': {'arch': 'fb'}},
'resnet152': {'class': ResNet152, 'score_layer_name': 'fc6',
'kwargs': {'arch': 'fb'}}
}
parser = argparse.ArgumentParser(
description='Learning convnet from ILSVRC2012 dataset')
parser.add_argument('train', help='Path to root of the train dataset')
parser.add_argument('val', help='Path to root of the validation dataset')
parser.add_argument('--model',
'-m', choices=model_cfgs.keys(), default='resnet50',
help='Convnet models')
parser.add_argument('--communicator', type=str,
default='pure_nccl', help='Type of communicator')
parser.add_argument('--loaderjob', type=int, default=4)
parser.add_argument('--batchsize', type=int, default=32,
help='Batch size for each worker')
parser.add_argument('--lr', type=float)
parser.add_argument('--momentum', type=float, default=0.9)
parser.add_argument('--weight-decay', type=float, default=0.0001)
parser.add_argument('--out', type=str, default='result')
parser.add_argument('--epoch', type=int, default=90)
args = parser.parse_args()
# https://docs.chainer.org/en/stable/chainermn/tutorial/tips_faqs.html#using-multiprocessiterator
if hasattr(multiprocessing, 'set_start_method'):
multiprocessing.set_start_method('forkserver')
p = multiprocessing.Process()
p.start()
p.join()
comm = chainermn.create_communicator(args.communicator)
device = comm.intra_rank
if args.lr is not None:
lr = args.lr
else:
lr = 0.1 * (args.batchsize * comm.size) / 256
if comm.rank == 0:
print('lr={}: lr is selected based on the linear '
'scaling rule'.format(lr))
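# Worked example of the linear scaling rule above (numbers are illustrative):
# with --batchsize 32 on 8 workers the effective batch size is 256, so the
# learning rate stays at 0.1 * 256 / 256 = 0.1; doubling the number of
# workers to 16 would double it to 0.2.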
label_names = directory_parsing_label_names(args.train)
model_cfg = model_cfgs[args.model]
extractor = model_cfg['class'](
n_class=len(label_names), **model_cfg['kwargs'])
extractor.pick = model_cfg['score_layer_name']
model = Classifier(extractor)
# Following https://arxiv.org/pdf/1706.02677.pdf,
# the gamma of the last BN of each resblock is initialized to zero.
for l in model.links():
if isinstance(l, Bottleneck):
l.conv3.bn.gamma.data[:] = 0
train_data = DirectoryParsingLabelDataset(args.train)
val_data = DirectoryParsingLabelDataset(args.val)
train_data = TransformDataset(
train_data, ('img', 'label'), TrainTransform(extractor.mean))
val_data = TransformDataset(
val_data, ('img', 'label'), ValTransform(extractor.mean))
print('finished loading dataset')
if comm.rank == 0:
train_indices = np.arange(len(train_data))
val_indices = np.arange(len(val_data))
else:
train_indices = None
val_indices = None
train_indices = chainermn.scatter_dataset(
train_indices, comm, shuffle=True)
val_indices = chainermn.scatter_dataset(val_indices, comm, shuffle=True)
train_data = train_data.slice[train_indices]
val_data = val_data.slice[val_indices]
train_iter = chainer.iterators.MultiprocessIterator(
train_data, args.batchsize, n_processes=args.loaderjob)
val_iter = iterators.MultiprocessIterator(
val_data, args.batchsize,
repeat=False, shuffle=False, n_processes=args.loaderjob)
optimizer = chainermn.create_multi_node_optimizer(
CorrectedMomentumSGD(lr=lr, momentum=args.momentum), comm)
optimizer.setup(model)
for param in model.params():
if param.name not in ('beta', 'gamma'):
param.update_rule.add_hook(WeightDecay(args.weight_decay))
if device >= 0:
chainer.cuda.get_device(device).use()
model.to_gpu()
updater = chainer.training.StandardUpdater(
train_iter, optimizer, device=device)
trainer = training.Trainer(
updater, (args.epoch, 'epoch'), out=args.out)
@make_shift('lr')
def warmup_and_exponential_shift(trainer):
epoch = trainer.updater.epoch_detail
warmup_epoch = 5
if epoch < warmup_epoch:
if lr > 0.1:
warmup_rate = 0.1 / lr
rate = warmup_rate \
+ (1 - warmup_rate) * epoch / warmup_epoch
else:
rate = 1
elif epoch < 30:
rate = 1
elif epoch < 60:
rate = 0.1
elif epoch < 80:
rate = 0.01
else:
rate = 0.001
return rate * lr
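# Resulting schedule (follows from the code above, with lr being the scaled
# base rate): the first 5 epochs ramp linearly from 0.1 up to lr (no warmup
# when lr <= 0.1), epochs 5-30 train at lr, and the rate then drops by 10x at
# epochs 30, 60 and 80.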
trainer.extend(warmup_and_exponential_shift)
evaluator = chainermn.create_multi_node_evaluator(
extensions.Evaluator(val_iter, model, device=device), comm)
trainer.extend(evaluator, trigger=(1, 'epoch'))
log_interval = 0.1, 'epoch'
print_interval = 0.1, 'epoch'
if comm.rank == 0:
trainer.extend(chainer.training.extensions.observe_lr(),
trigger=log_interval)
trainer.extend(
extensions.snapshot_object(
extractor, 'snapshot_model_{.updater.epoch}.npz'),
trigger=(args.epoch, 'epoch'))
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.PrintReport(
['iteration', 'epoch', 'elapsed_time', 'lr',
'main/loss', 'validation/main/loss',
'main/accuracy', 'validation/main/accuracy']
), trigger=print_interval)
trainer.extend(extensions.ProgressBar(update_interval=10))
trainer.run()
if __name__ == '__main__':
main()
|
from weblate.accounts.notifications import FREQ_INSTANT, SCOPE_ADMIN, SCOPE_WATCHED
DEFAULT_NOTIFICATIONS = [
(SCOPE_WATCHED, FREQ_INSTANT, "LastAuthorCommentNotificaton"),
(SCOPE_WATCHED, FREQ_INSTANT, "MentionCommentNotificaton"),
(SCOPE_WATCHED, FREQ_INSTANT, "NewAnnouncementNotificaton"),
(SCOPE_ADMIN, FREQ_INSTANT, "MergeFailureNotification"),
(SCOPE_ADMIN, FREQ_INSTANT, "ParseErrorNotification"),
(SCOPE_ADMIN, FREQ_INSTANT, "NewTranslationNotificaton"),
(SCOPE_ADMIN, FREQ_INSTANT, "NewAlertNotificaton"),
(SCOPE_ADMIN, FREQ_INSTANT, "NewAnnouncementNotificaton"),
]
def create_default_notifications(user):
for scope, frequency, notification in DEFAULT_NOTIFICATIONS:
user.subscription_set.get_or_create(
scope=scope, notification=notification, defaults={"frequency": frequency}
)
|
from aiohttp.web_exceptions import (
HTTPBadRequest,
HTTPInternalServerError,
HTTPUnauthorized,
)
import pytest
import voluptuous as vol
from homeassistant.components.http.view import (
HomeAssistantView,
request_handler_factory,
)
from homeassistant.exceptions import ServiceNotFound, Unauthorized
from tests.async_mock import AsyncMock, Mock
@pytest.fixture
def mock_request():
"""Mock a request."""
return Mock(app={"hass": Mock(is_stopping=False)}, match_info={})
@pytest.fixture
def mock_request_with_stopping():
"""Mock a request."""
return Mock(app={"hass": Mock(is_stopping=True)}, match_info={})
async def test_invalid_json(caplog):
"""Test trying to return invalid JSON."""
view = HomeAssistantView()
with pytest.raises(HTTPInternalServerError):
view.json(float("NaN"))
assert str(float("NaN")) in caplog.text
async def test_handling_unauthorized(mock_request):
"""Test handling unauth exceptions."""
with pytest.raises(HTTPUnauthorized):
await request_handler_factory(
Mock(requires_auth=False), AsyncMock(side_effect=Unauthorized)
)(mock_request)
async def test_handling_invalid_data(mock_request):
"""Test handling unauth exceptions."""
with pytest.raises(HTTPBadRequest):
await request_handler_factory(
Mock(requires_auth=False), AsyncMock(side_effect=vol.Invalid("yo"))
)(mock_request)
async def test_handling_service_not_found(mock_request):
"""Test handling unauth exceptions."""
with pytest.raises(HTTPInternalServerError):
await request_handler_factory(
Mock(requires_auth=False),
AsyncMock(side_effect=ServiceNotFound("test", "test")),
)(mock_request)
async def test_not_running(mock_request_with_stopping):
"""Test we get a 503 when not running."""
response = await request_handler_factory(
Mock(requires_auth=False), AsyncMock(side_effect=Unauthorized)
)(mock_request_with_stopping)
assert response.status == 503
|
import logging
from requests import RequestException
from requests.exceptions import HTTPError
from stringcase import camelcase, snakecase
import thermoworks_smoke
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
CONF_EMAIL,
CONF_EXCLUDE,
CONF_MONITORED_CONDITIONS,
CONF_PASSWORD,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
PROBE_1 = "probe1"
PROBE_2 = "probe2"
PROBE_1_MIN = "probe1_min"
PROBE_1_MAX = "probe1_max"
PROBE_2_MIN = "probe2_min"
PROBE_2_MAX = "probe2_max"
BATTERY_LEVEL = "battery"
FIRMWARE = "firmware"
SERIAL_REGEX = "^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$"
# map types to labels
SENSOR_TYPES = {
PROBE_1: "Probe 1",
PROBE_2: "Probe 2",
PROBE_1_MIN: "Probe 1 Min",
PROBE_1_MAX: "Probe 1 Max",
PROBE_2_MIN: "Probe 2 Min",
PROBE_2_MAX: "Probe 2 Max",
}
# exclude these keys from thermoworks data
EXCLUDE_KEYS = [FIRMWARE]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=[PROBE_1, PROBE_2]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_EXCLUDE, default=[]): vol.All(
cv.ensure_list, [cv.matches_regex(SERIAL_REGEX)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the thermoworks sensor."""
email = config[CONF_EMAIL]
password = config[CONF_PASSWORD]
monitored_variables = config[CONF_MONITORED_CONDITIONS]
excluded = config[CONF_EXCLUDE]
try:
mgr = thermoworks_smoke.initialize_app(email, password, True, excluded)
# list of sensor devices
dev = []
# get list of registered devices
for serial in mgr.serials():
for variable in monitored_variables:
dev.append(ThermoworksSmokeSensor(variable, serial, mgr))
add_entities(dev, True)
except HTTPError as error:
msg = f"{error.strerror}"
if "EMAIL_NOT_FOUND" in msg or "INVALID_PASSWORD" in msg:
_LOGGER.error("Invalid email and password combination")
else:
_LOGGER.error(msg)
class ThermoworksSmokeSensor(Entity):
"""Implementation of a thermoworks smoke sensor."""
def __init__(self, sensor_type, serial, mgr):
"""Initialize the sensor."""
self._name = "{name} {sensor}".format(
name=mgr.name(serial), sensor=SENSOR_TYPES[sensor_type]
)
self.type = sensor_type
self._state = None
self._attributes = {}
self._unit_of_measurement = TEMP_FAHRENHEIT
self._unique_id = f"{serial}-{sensor_type}"
self.serial = serial
self.mgr = mgr
self.update_unit()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique id for the sensor."""
return self._unique_id
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this sensor."""
return self._unit_of_measurement
def update_unit(self):
"""Set the units from the data."""
if PROBE_2 in self.type:
self._unit_of_measurement = self.mgr.units(self.serial, PROBE_2)
else:
self._unit_of_measurement = self.mgr.units(self.serial, PROBE_1)
def update(self):
"""Get the monitored data from firebase."""
try:
values = self.mgr.data(self.serial)
# set state from data based on type of sensor
self._state = values.get(camelcase(self.type))
# set units
self.update_unit()
# set basic attributes for all sensors
self._attributes = {
"time": values["time"],
"localtime": values["localtime"],
}
# set extended attributes for main probe sensors
if self.type in [PROBE_1, PROBE_2]:
for key, val in values.items():
# add all attributes that don't contain any probe name
# or contain a matching probe name
if (self.type == PROBE_1 and key.find(PROBE_2) == -1) or (
self.type == PROBE_2 and key.find(PROBE_1) == -1
):
if key == BATTERY_LEVEL:
key = ATTR_BATTERY_LEVEL
else:
# strip probe label and convert to snake_case
key = snakecase(key.replace(self.type, ""))
# add to attrs
if key and key not in EXCLUDE_KEYS:
self._attributes[key] = val
# store actual unit because attributes are not converted
self._attributes["unit_of_min_max"] = self._unit_of_measurement
except (RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update status for %s", self.name)
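# Illustrative attribute mapping (raw key names are assumed, not taken from
# the integration): for the probe1 sensor a reading key such as "probe1Max"
# would have the probe prefix stripped and be stored as the "max" attribute,
# while any key mentioning probe2 is skipped entirely.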
|
import logging
from typing import List, Optional, cast
import voluptuous as vol
from homeassistant.auth import EVENT_USER_REMOVED
from homeassistant.components import websocket_api
from homeassistant.components.device_tracker import (
ATTR_SOURCE_TYPE,
DOMAIN as DEVICE_TRACKER_DOMAIN,
SOURCE_TYPE_GPS,
)
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_ENTITY_ID,
ATTR_GPS_ACCURACY,
ATTR_ID,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_ID,
CONF_NAME,
CONF_TYPE,
EVENT_HOMEASSISTANT_START,
SERVICE_RELOAD,
STATE_HOME,
STATE_NOT_HOME,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import (
Event,
HomeAssistant,
ServiceCall,
State,
callback,
split_entity_id,
)
from homeassistant.helpers import (
collection,
config_validation as cv,
entity_registry,
service,
)
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
ATTR_SOURCE = "source"
ATTR_USER_ID = "user_id"
CONF_DEVICE_TRACKERS = "device_trackers"
CONF_USER_ID = "user_id"
CONF_PICTURE = "picture"
DOMAIN = "person"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 2
# Device tracker states to ignore
IGNORE_STATES = (STATE_UNKNOWN, STATE_UNAVAILABLE)
PERSON_SCHEMA = vol.Schema(
{
vol.Required(CONF_ID): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_USER_ID): cv.string,
vol.Optional(CONF_DEVICE_TRACKERS, default=[]): vol.All(
cv.ensure_list, cv.entities_domain(DEVICE_TRACKER_DOMAIN)
),
vol.Optional(CONF_PICTURE): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN, default=[]): vol.All(
cv.ensure_list, cv.remove_falsy, [PERSON_SCHEMA]
)
},
extra=vol.ALLOW_EXTRA,
)
_UNDEF = object()
@bind_hass
async def async_create_person(hass, name, *, user_id=None, device_trackers=None):
"""Create a new person."""
await hass.data[DOMAIN][1].async_create_item(
{
ATTR_NAME: name,
ATTR_USER_ID: user_id,
CONF_DEVICE_TRACKERS: device_trackers or [],
}
)
@bind_hass
async def async_add_user_device_tracker(
hass: HomeAssistant, user_id: str, device_tracker_entity_id: str
):
"""Add a device tracker to a person linked to a user."""
coll = cast(PersonStorageCollection, hass.data[DOMAIN][1])
for person in coll.async_items():
if person.get(ATTR_USER_ID) != user_id:
continue
device_trackers = person[CONF_DEVICE_TRACKERS]
if device_tracker_entity_id in device_trackers:
return
await coll.async_update_item(
person[collection.CONF_ID],
{CONF_DEVICE_TRACKERS: device_trackers + [device_tracker_entity_id]},
)
break
CREATE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_USER_ID): vol.Any(str, None),
vol.Optional(CONF_DEVICE_TRACKERS, default=list): vol.All(
cv.ensure_list, cv.entities_domain(DEVICE_TRACKER_DOMAIN)
),
vol.Optional(CONF_PICTURE): vol.Any(str, None),
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional(CONF_USER_ID): vol.Any(str, None),
vol.Optional(CONF_DEVICE_TRACKERS, default=list): vol.All(
cv.ensure_list, cv.entities_domain(DEVICE_TRACKER_DOMAIN)
),
vol.Optional(CONF_PICTURE): vol.Any(str, None),
}
class PersonStore(Store):
"""Person storage."""
async def _async_migrate_func(self, old_version, old_data):
"""Migrate to the new version.
Migrate storage to use format of collection helper.
"""
return {"items": old_data["persons"]}
class PersonStorageCollection(collection.StorageCollection):
"""Person collection stored in storage."""
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
def __init__(
self,
store: Store,
logger: logging.Logger,
id_manager: collection.IDManager,
yaml_collection: collection.YamlCollection,
):
"""Initialize a person storage collection."""
super().__init__(store, logger, id_manager)
self.yaml_collection = yaml_collection
async def _async_load_data(self) -> Optional[dict]:
"""Load the data.
A past bug caused onboarding to create invalid person objects.
This patches it up.
"""
data = await super()._async_load_data()
if data is None:
return data
for person in data["items"]:
if person[CONF_DEVICE_TRACKERS] is None:
person[CONF_DEVICE_TRACKERS] = []
return data
async def async_load(self) -> None:
"""Load the Storage collection."""
await super().async_load()
self.hass.bus.async_listen(
entity_registry.EVENT_ENTITY_REGISTRY_UPDATED, self._entity_registry_updated
)
async def _entity_registry_updated(self, event) -> None:
"""Handle entity registry updated."""
if event.data["action"] != "remove":
return
entity_id = event.data[ATTR_ENTITY_ID]
if split_entity_id(entity_id)[0] != "device_tracker":
return
for person in list(self.data.values()):
if entity_id not in person[CONF_DEVICE_TRACKERS]:
continue
await self.async_update_item(
person[collection.CONF_ID],
{
CONF_DEVICE_TRACKERS: [
devt
for devt in person[CONF_DEVICE_TRACKERS]
if devt != entity_id
]
},
)
async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
data = self.CREATE_SCHEMA(data)
user_id = data.get(CONF_USER_ID)
if user_id is not None:
await self._validate_user_id(user_id)
return data
@callback
def _get_suggested_id(self, info: dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_NAME]
async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
user_id = update_data.get(CONF_USER_ID)
if user_id is not None and user_id != data.get(CONF_USER_ID):
await self._validate_user_id(user_id)
return {**data, **update_data}
async def _validate_user_id(self, user_id):
"""Validate the used user_id."""
if await self.hass.auth.async_get_user(user_id) is None:
raise ValueError("User does not exist")
for persons in (self.data.values(), self.yaml_collection.async_items()):
if any(person for person in persons if person.get(CONF_USER_ID) == user_id):
raise ValueError("User already taken")
async def filter_yaml_data(hass: HomeAssistantType, persons: List[dict]) -> List[dict]:
"""Validate YAML data that we can't validate via schema."""
filtered = []
person_invalid_user = []
for person_conf in persons:
user_id = person_conf.get(CONF_USER_ID)
if user_id is not None:
if await hass.auth.async_get_user(user_id) is None:
_LOGGER.error(
"Invalid user_id detected for person %s",
person_conf[collection.CONF_ID],
)
person_invalid_user.append(
f"- Person {person_conf[CONF_NAME]} (id: {person_conf[collection.CONF_ID]}) points at invalid user {user_id}"
)
continue
filtered.append(person_conf)
if person_invalid_user:
hass.components.persistent_notification.async_create(
f"""
The following persons point at invalid users:
{"- ".join(person_invalid_user)}
""",
"Invalid Person Configuration",
DOMAIN,
)
return filtered
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the person component."""
entity_component = EntityComponent(_LOGGER, DOMAIN, hass)
id_manager = collection.IDManager()
yaml_collection = collection.YamlCollection(
logging.getLogger(f"{__name__}.yaml_collection"), id_manager
)
storage_collection = PersonStorageCollection(
PersonStore(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
id_manager,
yaml_collection,
)
collection.attach_entity_component_collection(
entity_component, yaml_collection, lambda conf: Person(conf, False)
)
collection.attach_entity_component_collection(
entity_component, storage_collection, lambda conf: Person(conf, True)
)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, yaml_collection)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, storage_collection)
await yaml_collection.async_load(
await filter_yaml_data(hass, config.get(DOMAIN, []))
)
await storage_collection.async_load()
hass.data[DOMAIN] = (yaml_collection, storage_collection)
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass, create_list=False)
websocket_api.async_register_command(hass, ws_list_person)
async def _handle_user_removed(event: Event) -> None:
"""Handle a user being removed."""
user_id = event.data[ATTR_USER_ID]
for person in storage_collection.async_items():
if person[CONF_USER_ID] == user_id:
await storage_collection.async_update_item(
person[CONF_ID], {CONF_USER_ID: None}
)
hass.bus.async_listen(EVENT_USER_REMOVED, _handle_user_removed)
async def async_reload_yaml(call: ServiceCall):
"""Reload YAML."""
conf = await entity_component.async_prepare_reload(skip_reset=True)
if conf is None:
return
await yaml_collection.async_load(
await filter_yaml_data(hass, conf.get(DOMAIN, []))
)
service.async_register_admin_service(
hass, DOMAIN, SERVICE_RELOAD, async_reload_yaml
)
return True
class Person(RestoreEntity):
"""Represent a tracked person."""
def __init__(self, config, editable):
"""Set up person."""
self._config = config
self._editable = editable
self._latitude = None
self._longitude = None
self._gps_accuracy = None
self._source = None
self._state = None
self._unsub_track_device = None
@property
def name(self):
"""Return the name of the entity."""
return self._config[CONF_NAME]
@property
def entity_picture(self) -> Optional[str]:
"""Return entity picture."""
return self._config.get(CONF_PICTURE)
@property
def should_poll(self):
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
def state(self):
"""Return the state of the person."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes of the person."""
data = {ATTR_EDITABLE: self._editable, ATTR_ID: self.unique_id}
if self._latitude is not None:
data[ATTR_LATITUDE] = self._latitude
if self._longitude is not None:
data[ATTR_LONGITUDE] = self._longitude
if self._gps_accuracy is not None:
data[ATTR_GPS_ACCURACY] = self._gps_accuracy
if self._source is not None:
data[ATTR_SOURCE] = self._source
user_id = self._config.get(CONF_USER_ID)
if user_id is not None:
data[ATTR_USER_ID] = user_id
return data
@property
def unique_id(self):
"""Return a unique ID for the person."""
return self._config[CONF_ID]
async def async_added_to_hass(self):
"""Register device trackers."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state:
self._parse_source_state(state)
if self.hass.is_running:
# Update person now if hass is already running.
await self.async_update_config(self._config)
else:
# Wait for Home Assistant to start so that there is no race between the
# person entity and device trackers finishing setup.
async def person_start_hass(now):
await self.async_update_config(self._config)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, person_start_hass
)
async def async_update_config(self, config):
"""Handle when the config is updated."""
self._config = config
if self._unsub_track_device is not None:
self._unsub_track_device()
self._unsub_track_device = None
trackers = self._config[CONF_DEVICE_TRACKERS]
if trackers:
_LOGGER.debug("Subscribe to device trackers for %s", self.entity_id)
self._unsub_track_device = async_track_state_change_event(
self.hass, trackers, self._async_handle_tracker_update
)
self._update_state()
@callback
def _async_handle_tracker_update(self, event):
"""Handle the device tracker state changes."""
self._update_state()
@callback
def _update_state(self):
"""Update the state."""
latest_non_gps_home = latest_not_home = latest_gps = latest = None
for entity_id in self._config[CONF_DEVICE_TRACKERS]:
state = self.hass.states.get(entity_id)
if not state or state.state in IGNORE_STATES:
continue
if state.attributes.get(ATTR_SOURCE_TYPE) == SOURCE_TYPE_GPS:
latest_gps = _get_latest(latest_gps, state)
elif state.state == STATE_HOME:
latest_non_gps_home = _get_latest(latest_non_gps_home, state)
elif state.state == STATE_NOT_HOME:
latest_not_home = _get_latest(latest_not_home, state)
if latest_non_gps_home:
latest = latest_non_gps_home
elif latest_gps:
latest = latest_gps
else:
latest = latest_not_home
if latest:
self._parse_source_state(latest)
else:
self._state = None
self._source = None
self._latitude = None
self._longitude = None
self._gps_accuracy = None
self.async_write_ha_state()
@callback
def _parse_source_state(self, state):
"""Parse source state and set person attributes.
This is a device tracker state or the restored person state.
"""
self._state = state.state
self._source = state.entity_id
self._latitude = state.attributes.get(ATTR_LATITUDE)
self._longitude = state.attributes.get(ATTR_LONGITUDE)
self._gps_accuracy = state.attributes.get(ATTR_GPS_ACCURACY)
@websocket_api.websocket_command({vol.Required(CONF_TYPE): "person/list"})
def ws_list_person(
hass: HomeAssistantType, connection: websocket_api.ActiveConnection, msg
):
"""List persons."""
yaml, storage = hass.data[DOMAIN]
connection.send_result(
msg[ATTR_ID], {"storage": storage.async_items(), "config": yaml.async_items()}
)
def _get_latest(prev: Optional[State], curr: State):
"""Get latest state."""
if prev is None or curr.last_updated > prev.last_updated:
return curr
return prev
|
from typing import Collection
from typing import List
from service_configuration_lib import read_service_configuration
from paasta_tools.cli.cmds.status import get_actual_deployments
from paasta_tools.cli.utils import figure_out_service_name
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.marathon_tools import get_all_namespaces_for_service
from paasta_tools.marathon_tools import load_service_namespace_config
from paasta_tools.monitoring_tools import get_runbook
from paasta_tools.monitoring_tools import get_team
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_git_url
from paasta_tools.utils import list_services
from paasta_tools.utils import NoDeploymentsAvailable
from paasta_tools.utils import PaastaColors
NO_DESCRIPTION_MESSAGE = "No 'description' entry in service.yaml. Please add a one-line sentence that describes this service"
NO_EXTERNAL_LINK_MESSAGE = (
"No 'external_link' entry in service.yaml. "
"Please add one that points to a reference doc for your service"
)
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"info",
help="Prints the general information about a service.",
description=(
"'paasta info' gathers information about a service from soa-configs "
"and prints it in a human-friendly way. It does no API calls, it "
"just analyzes the config files."
),
)
list_parser.add_argument(
"-s", "--service", help="The name of the service you wish to inspect"
).completer = lazy_choices_completer(list_services)
list_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
list_parser.set_defaults(command=paasta_info)
def deployments_to_clusters(deployments: Collection[str]) -> Collection[str]:
clusters = []
for deployment in deployments:
cluster = deployment.split(".")[0]
clusters.append(cluster)
return set(clusters)
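# Illustrative sketch (hypothetical deployment names, not part of the original
# module): deployments are assumed to be named "<cluster>.<instance>", so e.g.
#     deployments_to_clusters(["norcal-prod.main", "norcal-prod.canary", "pnw-stage.main"])
# returns {"norcal-prod", "pnw-stage"}.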
def get_smartstack_endpoints(service, soa_dir):
endpoints = []
for name, config in get_all_namespaces_for_service(
service, full_name=False, soa_dir=soa_dir
):
mode = config.get("mode", "http")
port = config.get("proxy_port")
endpoints.append(f"{mode}://169.254.255.254:{port} ({name})")
return endpoints
def get_deployments_strings(service: str, soa_dir: str) -> List[str]:
output = []
try:
deployments = get_actual_deployments(service, soa_dir)
except NoDeploymentsAvailable:
deployments = {}
if deployments == {}:
output.append(" - N/A: Not deployed to any PaaSTA Clusters")
else:
service_config = load_service_namespace_config(
service=service, namespace="main", soa_dir=soa_dir
)
service_mode = service_config.get_mode()
for cluster in deployments_to_clusters(deployments):
if service_mode == "tcp":
service_port = service_config.get("proxy_port")
link = PaastaColors.cyan(
"%s://paasta-%s.yelp:%d/" % (service_mode, cluster, service_port)
)
elif service_mode == "http" or service_mode == "https":
link = PaastaColors.cyan(
f"{service_mode}://{service}.paasta-{cluster}.yelp/"
)
else:
link = "N/A"
output.append(f" - {cluster} ({link})")
return output
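# Illustrative sketch (hypothetical service and cluster names, not part of the
# original module): for an http service "myservice" deployed to cluster
# "norcal-prod" this produces a line like
#     - norcal-prod (http://myservice.paasta-norcal-prod.yelp/)
# and " - N/A: Not deployed to any PaaSTA Clusters" when nothing is deployed.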
def get_dashboard_urls(service):
output = [
" - %s (Sensu Alerts)"
% (PaastaColors.cyan("https://uchiwa.yelpcorp.com/#/events?q=%s" % service))
]
return output
def get_service_info(service, soa_dir):
service_configuration = read_service_configuration(service, soa_dir)
description = service_configuration.get("description", NO_DESCRIPTION_MESSAGE)
external_link = service_configuration.get("external_link", NO_EXTERNAL_LINK_MESSAGE)
smartstack_endpoints = get_smartstack_endpoints(service, soa_dir)
git_url = get_git_url(service, soa_dir)
output = []
output.append("Service Name: %s" % service)
output.append("Description: %s" % description)
output.append("External Link: %s" % PaastaColors.cyan(external_link))
output.append(
"Monitored By: team %s"
% get_team(service=service, overrides={}, soa_dir=soa_dir)
)
output.append(
"Runbook: %s"
% PaastaColors.cyan(get_runbook(service=service, overrides={}, soa_dir=soa_dir))
)
output.append("Git Repo: %s" % git_url)
output.append("Deployed to the following clusters:")
output.extend(get_deployments_strings(service, soa_dir))
if smartstack_endpoints:
output.append("Smartstack endpoint(s):")
for endpoint in smartstack_endpoints:
output.append(" - %s" % endpoint)
output.append("Dashboard(s):")
output.extend(get_dashboard_urls(service))
return "\n".join(output)
def paasta_info(args):
"""Prints general information about a service"""
soa_dir = args.soa_dir
service = figure_out_service_name(args, soa_dir=soa_dir)
print(get_service_info(service, soa_dir))
|
import pathlib
import tempfile
from aiohttp import ClientSession, ClientWebSocketResponse
from homeassistant.components.websocket_api import const as ws_const
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as util_dt
from tests.async_mock import patch
async def test_upload_image(hass, hass_client, hass_ws_client):
"""Test we can upload an image."""
now = util_dt.utcnow()
test_image = pathlib.Path(__file__).parent / "logo.png"
with tempfile.TemporaryDirectory() as tempdir, patch.object(
hass.config, "path", return_value=tempdir
), patch("homeassistant.util.dt.utcnow", return_value=now):
assert await async_setup_component(hass, "image", {})
ws_client: ClientWebSocketResponse = await hass_ws_client()
client: ClientSession = await hass_client()
with test_image.open("rb") as fp:
res = await client.post("/api/image/upload", data={"file": fp})
assert res.status == 200
item = await res.json()
assert item["content_type"] == "image/png"
assert item["filesize"] == 38847
assert item["name"] == "logo.png"
assert item["uploaded_at"] == now.isoformat()
tempdir = pathlib.Path(tempdir)
item_folder: pathlib.Path = tempdir / item["id"]
assert (item_folder / "original").read_bytes() == test_image.read_bytes()
# fetch non-existing image
res = await client.get("/api/image/serve/non-existing/256x256")
assert res.status == 404
# fetch invalid sizes
for inv_size in ("256", "256x25A", "100x100", "25Ax256"):
res = await client.get(f"/api/image/serve/{item['id']}/{inv_size}")
assert res.status == 400
# fetch resized version
res = await client.get(f"/api/image/serve/{item['id']}/256x256")
assert res.status == 200
assert (item_folder / "256x256").is_file()
# List item
await ws_client.send_json({"id": 6, "type": "image/list"})
msg = await ws_client.receive_json()
assert msg["id"] == 6
assert msg["type"] == ws_const.TYPE_RESULT
assert msg["success"]
assert msg["result"] == [item]
# Delete item
await ws_client.send_json(
{"id": 7, "type": "image/delete", "image_id": item["id"]}
)
msg = await ws_client.receive_json()
assert msg["id"] == 7
assert msg["type"] == ws_const.TYPE_RESULT
assert msg["success"]
# Ensure removed from disk
assert not item_folder.is_dir()
|
import json
import pytest
from lemur.authorities.views import * # noqa
from lemur.tests.factories import AuthorityFactory, RoleFactory
from lemur.tests.vectors import (
VALID_ADMIN_API_TOKEN,
VALID_ADMIN_HEADER_TOKEN,
VALID_USER_HEADER_TOKEN,
)
def test_authority_input_schema(client, role, issuer_plugin, logged_in_user):
from lemur.authorities.schemas import AuthorityInputSchema
input_data = {
"name": "Example Authority",
"owner": "[email protected]",
"description": "An example authority.",
"commonName": "An Example Authority",
"plugin": {
"slug": "test-issuer",
"plugin_options": [{"name": "test", "value": "blah"}],
},
"type": "root",
"signingAlgorithm": "sha256WithRSA",
"keyType": "RSA2048",
"sensitivity": "medium",
}
data, errors = AuthorityInputSchema().load(input_data)
assert not errors
def test_authority_input_schema_ecc(client, role, issuer_plugin, logged_in_user):
from lemur.authorities.schemas import AuthorityInputSchema
input_data = {
"name": "Example Authority",
"owner": "[email protected]",
"description": "An example authority.",
"commonName": "An Example Authority",
"plugin": {
"slug": "test-issuer",
"plugin_options": [{"name": "test", "value": "blah"}],
},
"type": "root",
"signingAlgorithm": "sha256WithECDSA",
"keyType": "ECCPRIME256V1",
"sensitivity": "medium",
}
data, errors = AuthorityInputSchema().load(input_data)
assert not errors
def test_user_authority(session, client, authority, role, user, issuer_plugin):
u = user["user"]
u.roles.append(role)
authority.roles.append(role)
session.commit()
assert (
client.get(api.url_for(AuthoritiesList), headers=user["token"]).json["total"]
== 1
)
u.roles.remove(role)
session.commit()
assert (
client.get(api.url_for(AuthoritiesList), headers=user["token"]).json["total"]
== 0
)
def test_create_authority(issuer_plugin, user):
from lemur.authorities.service import create
authority = create(
plugin={"plugin_object": issuer_plugin, "slug": issuer_plugin.slug},
owner="[email protected]",
type="root",
creator=user["user"],
)
assert authority.authority_certificate
@pytest.mark.parametrize(
"token, count",
[
(VALID_USER_HEADER_TOKEN, 0),
(VALID_ADMIN_HEADER_TOKEN, 3),
(VALID_ADMIN_API_TOKEN, 3),
],
)
def test_admin_authority(client, authority, issuer_plugin, token, count):
assert (
client.get(api.url_for(AuthoritiesList), headers=token).json["total"] == count
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
(VALID_ADMIN_API_TOKEN, 200),
("", 401),
],
)
def test_authority_get(client, token, status):
assert (
client.get(api.url_for(Authorities, authority_id=1), headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authority_post(client, token, status):
assert (
client.post(
api.url_for(Authorities, authority_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 400),
(VALID_ADMIN_HEADER_TOKEN, 400),
(VALID_ADMIN_API_TOKEN, 400),
("", 401),
],
)
def test_authority_put(client, token, status):
assert (
client.put(
api.url_for(Authorities, authority_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authority_delete(client, token, status):
assert (
client.delete(
api.url_for(Authorities, authority_id=1), headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authority_patch(client, token, status):
assert (
client.patch(
api.url_for(Authorities, authority_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
(VALID_ADMIN_API_TOKEN, 200),
("", 401),
],
)
def test_authorities_get(client, token, status):
assert client.get(api.url_for(AuthoritiesList), headers=token).status_code == status
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 400),
(VALID_ADMIN_HEADER_TOKEN, 400),
(VALID_ADMIN_API_TOKEN, 400),
("", 401),
],
)
def test_authorities_post(client, token, status):
assert (
client.post(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authorities_put(client, token, status):
assert (
client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authorities_delete(client, token, status):
assert (
client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_authorities_patch(client, token, status):
assert (
client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
(VALID_ADMIN_API_TOKEN, 200),
("", 401),
],
)
def test_certificate_authorities_get(client, token, status):
assert client.get(api.url_for(AuthoritiesList), headers=token).status_code == status
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 400),
(VALID_ADMIN_HEADER_TOKEN, 400),
(VALID_ADMIN_API_TOKEN, 400),
("", 401),
],
)
def test_certificate_authorities_post(client, token, status):
assert (
client.post(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_certificate_authorities_put(client, token, status):
assert (
client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_certificate_authorities_delete(client, token, status):
assert (
client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_certificate_authorities_patch(client, token, status):
assert (
client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code
== status
)
def test_authority_roles(client, session, issuer_plugin):
auth = AuthorityFactory()
role = RoleFactory()
session.flush()
data = {
"owner": auth.owner,
"name": auth.name,
"description": auth.description,
"active": True,
"roles": [{"id": role.id}],
}
# Add role
resp = client.put(
api.url_for(Authorities, authority_id=auth.id),
data=json.dumps(data),
headers=VALID_ADMIN_HEADER_TOKEN,
)
assert resp.status_code == 200
assert len(resp.json["roles"]) == 1
assert set(auth.roles) == {role}
# Remove role
del data["roles"][0]
resp = client.put(
api.url_for(Authorities, authority_id=auth.id),
data=json.dumps(data),
headers=VALID_ADMIN_HEADER_TOKEN,
)
assert resp.status_code == 200
assert len(resp.json["roles"]) == 0
|
from datetime import timedelta
import os
import pytest
import voluptuous as vol
from homeassistant.components.device_tracker import (
CONF_CONSIDER_HOME,
CONF_NEW_DEVICE_DEFAULTS,
CONF_TRACK_NEW,
)
from homeassistant.components.device_tracker.legacy import YAML_DEVICES
from homeassistant.components.unifi_direct.device_tracker import (
CONF_PORT,
DOMAIN,
PLATFORM_SCHEMA,
UnifiDeviceScanner,
_response_to_json,
get_scanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, call, patch
from tests.common import assert_setup_component, load_fixture, mock_component
scanner_path = "homeassistant.components.unifi_direct.device_tracker.UnifiDeviceScanner"
@pytest.fixture(autouse=True)
def setup_comp(hass):
"""Initialize components."""
mock_component(hass, "zone")
yaml_devices = hass.config.path(YAML_DEVICES)
yield
if os.path.isfile(yaml_devices):
os.remove(yaml_devices)
@patch(scanner_path, return_value=MagicMock(spec=UnifiDeviceScanner))
async def test_get_scanner(unifi_mock, hass):
"""Test creating an Unifi direct scanner with a password."""
conf_dict = {
DOMAIN: {
CONF_PLATFORM: "unifi_direct",
CONF_HOST: "fake_host",
CONF_USERNAME: "fake_user",
CONF_PASSWORD: "fake_pass",
CONF_TRACK_NEW: True,
CONF_CONSIDER_HOME: timedelta(seconds=180),
CONF_NEW_DEVICE_DEFAULTS: {CONF_TRACK_NEW: True},
}
}
with assert_setup_component(1, DOMAIN):
assert await async_setup_component(hass, DOMAIN, conf_dict)
conf_dict[DOMAIN][CONF_PORT] = 22
assert unifi_mock.call_args == call(conf_dict[DOMAIN])
@patch("pexpect.pxssh.pxssh")
async def test_get_device_name(mock_ssh, hass):
"""Testing MAC matching."""
conf_dict = {
DOMAIN: {
CONF_PLATFORM: "unifi_direct",
CONF_HOST: "fake_host",
CONF_USERNAME: "fake_user",
CONF_PASSWORD: "fake_pass",
CONF_PORT: 22,
CONF_TRACK_NEW: True,
CONF_CONSIDER_HOME: timedelta(seconds=180),
}
}
mock_ssh.return_value.before = load_fixture("unifi_direct.txt")
scanner = get_scanner(hass, conf_dict)
devices = scanner.scan_devices()
assert 23 == len(devices)
assert "iPhone" == scanner.get_device_name("98:00:c6:56:34:12")
assert "iPhone" == scanner.get_device_name("98:00:C6:56:34:12")
@patch("pexpect.pxssh.pxssh.logout")
@patch("pexpect.pxssh.pxssh.login")
async def test_failed_to_log_in(mock_login, mock_logout, hass):
"""Testing exception at login results in False."""
from pexpect import exceptions
conf_dict = {
DOMAIN: {
CONF_PLATFORM: "unifi_direct",
CONF_HOST: "fake_host",
CONF_USERNAME: "fake_user",
CONF_PASSWORD: "fake_pass",
CONF_PORT: 22,
CONF_TRACK_NEW: True,
CONF_CONSIDER_HOME: timedelta(seconds=180),
}
}
mock_login.side_effect = exceptions.EOF("Test")
scanner = get_scanner(hass, conf_dict)
assert not scanner
@patch("pexpect.pxssh.pxssh.logout")
@patch("pexpect.pxssh.pxssh.login", autospec=True)
@patch("pexpect.pxssh.pxssh.prompt")
@patch("pexpect.pxssh.pxssh.sendline")
async def test_to_get_update(mock_sendline, mock_prompt, mock_login, mock_logout, hass):
"""Testing exception in get_update matching."""
conf_dict = {
DOMAIN: {
CONF_PLATFORM: "unifi_direct",
CONF_HOST: "fake_host",
CONF_USERNAME: "fake_user",
CONF_PASSWORD: "fake_pass",
CONF_PORT: 22,
CONF_TRACK_NEW: True,
CONF_CONSIDER_HOME: timedelta(seconds=180),
}
}
scanner = get_scanner(hass, conf_dict)
# mock_sendline.side_effect = AssertionError("Test")
mock_prompt.side_effect = AssertionError("Test")
devices = scanner._get_update() # pylint: disable=protected-access
assert devices is None
def test_good_response_parses(hass):
"""Test that the response form the AP parses to JSON correctly."""
response = _response_to_json(load_fixture("unifi_direct.txt"))
assert response != {}
def test_bad_response_returns_none(hass):
"""Test that a bad response form the AP parses to JSON correctly."""
assert _response_to_json("{(}") == {}
def test_config_error():
"""Test for configuration errors."""
with pytest.raises(vol.Invalid):
PLATFORM_SCHEMA(
{
# no username
CONF_PASSWORD: "password",
CONF_PLATFORM: DOMAIN,
CONF_HOST: "myhost",
"port": 123,
}
)
with pytest.raises(vol.Invalid):
PLATFORM_SCHEMA(
{
# no password
CONF_USERNAME: "foo",
CONF_PLATFORM: DOMAIN,
CONF_HOST: "myhost",
"port": 123,
}
)
with pytest.raises(vol.Invalid):
PLATFORM_SCHEMA(
{
CONF_PLATFORM: DOMAIN,
CONF_USERNAME: "foo",
CONF_PASSWORD: "password",
CONF_HOST: "myhost",
"port": "foo", # bad port!
}
)
|
import pickle
import requests
from src.s3_client_deeposm import post_findings_to_s3
from src.single_layer_network import train_on_cached_data
from src.training_data import CACHE_PATH, METADATA_PATH, download_and_serialize
def main():
"""Analyze each state and publish results to deeposm.org."""
naip_year = 2013
naip_states = {'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
'ia': ['http://download.geofabrik.de/north-america/us/iowa-latest.osm.pbf'],
'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']
}
number_of_naips = 175
extract_type = 'highway'
bands = [1, 1, 1, 1]
tile_size = 64
pixels_to_fatten_roads = 3
tile_overlap = 1
neural_net = 'two_layer_relu_conv'
number_of_epochs = 10
randomize_naips = False
for state in naip_states:
filenames = naip_states[state]
raster_data_paths = download_and_serialize(number_of_naips,
randomize_naips,
state,
naip_year,
extract_type,
bands,
tile_size,
pixels_to_fatten_roads,
filenames,
tile_overlap)
model = train_on_cached_data(neural_net, number_of_epochs)
with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:  # pickle files must be read in binary mode
training_info = pickle.load(infile)
post_findings_to_s3(raster_data_paths, model, training_info, training_info['bands'], False)
requests.get('http://www.deeposm.org/refresh_findings/')
if __name__ == "__main__":
main()
|
from flask import Flask
from flasgger import Swagger
app = Flask(__name__)
swag = Swagger(app, config={
'headers': [],
'specs': [
{
'endpoint': 'apispec',
'route': '/apispec.json'
}
],
'openapi': '3.0.1'
})
def test_swag(client, specs_data):
"""
This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
for spec in specs_data.values():
assert 'openapi' in spec
assert '3.0.1' == spec['openapi']
assert 'swagger' not in spec
if __name__ == '__main__':
app.run(debug=True)
|
import os
import sys
import site
from .environment import env
@env.catch_exceptions
def enable_virtualenv():
"""Enable virtualenv for vim.
:return bool:
"""
path = env.var('g:pymode_virtualenv_path')
# Normalize path to be an absolute path
# If an absolute path is provided, that path will be returned, otherwise
# the returned path will be an absolute path but computed relative
# to the current working directory
path = os.path.abspath(path)
enabled = env.var('g:pymode_virtualenv_enabled')
if path == enabled:
env.message('Virtualenv %s already enabled.' % path)
return env.stop()
activate_env_from_path(path)
env.message('Activate virtualenv: ' + path)
env.let('g:pymode_virtualenv_enabled', path)
return True
def activate_env_from_path(env_path):
"""Activate given virtualenv."""
prev_sys_path = list(sys.path)
if sys.platform == 'win32':
site_packages_paths = [os.path.join(env_path, 'Lib', 'site-packages')]
else:
lib_path = os.path.join(env_path, 'lib')
site_packages_paths = [os.path.join(lib_path, lib, 'site-packages')
for lib in os.listdir(lib_path)]
for site_packages_path in site_packages_paths:
site.addsitedir(site_packages_path)
sys.real_prefix = sys.prefix
sys.prefix = env_path
sys.exec_prefix = env_path
# Move the added items to the front of the path:
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
|
import itertools
import tempfile
import os
import random
import collections
import warnings
import functools
from typing import (Iterator,
Tuple,
Mapping,
Sequence,
Union,
Generator,
Optional,
Any,
Type,
Iterable, cast)
from dedupe._typing import (RecordPairs,
RecordID,
RecordDict,
Blocks,
Data,
Literal)
import numpy
import multiprocessing
import multiprocessing.dummy
class ChildProcessError(Exception):
pass
class BlockingError(Exception):
pass
_Queue = Union[multiprocessing.dummy.Queue, multiprocessing.Queue]
_SimpleQueue = Union[multiprocessing.dummy.Queue, multiprocessing.SimpleQueue]
IndicesIterator = Iterator[Tuple[int, int]]
def randomPairs(n_records: int, sample_size: int) -> IndicesIterator:
"""
Return random combinations of indices for a square matrix of size n
records. For a discussion of how this works see
http://stackoverflow.com/a/14839010/98080
"""
n: int = int(n_records * (n_records - 1) / 2)
if sample_size >= n:
random_pairs = numpy.arange(n, dtype='uint')
else:
try:
random_pairs = numpy.array(random.sample(range(n), sample_size),
dtype='uint')
except OverflowError:
return randomPairsWithReplacement(n_records, sample_size)
b: int = 1 - 2 * n_records
root = (-b - 2 * numpy.sqrt(2 * (n - random_pairs) + 0.25)) / 2
i = numpy.floor(root).astype('uint')
j = numpy.rint(random_pairs + i * (b + i + 2) / 2 + 1).astype('uint')
return zip(i, j)
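# Added note (not in the original module): each flat index drawn from
# range(n_records * (n_records - 1) / 2) names a unique pair (i, j) with i < j;
# the quadratic root above recovers the row i and the remainder gives j.
# Hypothetical usage sketch (sampling every pair is deterministic):
#     >>> sorted((int(i), int(j)) for i, j in randomPairs(5, 10))
#     [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]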
def randomPairsMatch(n_records_A: int, n_records_B: int, sample_size: int) -> IndicesIterator:
"""
Return random combinations of indices for record lists A and B
"""
n: int = int(n_records_A * n_records_B)
if sample_size >= n:
random_pairs = numpy.arange(n)
else:
random_pairs = numpy.array(random.sample(range(n), sample_size),
dtype=int)
i, j = numpy.unravel_index(random_pairs, (n_records_A, n_records_B))
return zip(i, j)
def randomPairsWithReplacement(n_records: int, sample_size: int) -> IndicesIterator:
# If the population is very large relative to the sample
# size then we'll get very few duplicates by chance
warnings.warn("The same record pair may appear more than once in the sample")
try:
random_indices = numpy.random.randint(n_records,
size=sample_size * 2)
except (OverflowError, ValueError):
max_int: int = numpy.iinfo('int').max
warnings.warn("Asked to sample pairs from %d records, will only sample pairs from first %d records" % (n_records, max_int))
random_indices = numpy.random.randint(max_int,
size=sample_size * 2)
random_indices = random_indices.reshape((-1, 2))
random_indices.sort(axis=1)
return ((p.item(), q.item()) for p, q in random_indices)
class ScoreDupes(object):
def __init__(self,
data_model,
classifier,
records_queue: _Queue,
score_queue: _SimpleQueue):
self.data_model = data_model
self.classifier = classifier
self.records_queue = records_queue
self.score_queue = score_queue
def __call__(self) -> None:
while True:
record_pairs: Optional[RecordPairs] = self.records_queue.get()
if record_pairs is None:
break
try:
filtered_pairs: Optional[Tuple] = self.fieldDistance(record_pairs)
if filtered_pairs is not None:
self.score_queue.put(filtered_pairs)
except Exception as e:
self.score_queue.put(e)
raise
self.score_queue.put(None)
def fieldDistance(self, record_pairs: RecordPairs) -> Optional[Tuple]:
record_ids, records = zip(*(zip(*record_pair) for record_pair in record_pairs)) # type: ignore
record_ids = cast(Tuple[Tuple[RecordID, RecordID], ...], record_ids)
records = cast(Tuple[Tuple[RecordDict, RecordDict], ...], records)
if records:
distances = self.data_model.distances(records)
scores = self.classifier.predict_proba(distances)[:, -1]
if scores.any():
id_type = sniff_id_type(record_ids)
ids = numpy.array(record_ids, dtype=id_type)
dtype = numpy.dtype([('pairs', id_type, 2),
('score', 'f4')])
temp_file, file_path = tempfile.mkstemp()
os.close(temp_file)
scored_pairs = numpy.memmap(file_path,
shape=len(scores),
dtype=dtype)
scored_pairs['pairs'] = ids
scored_pairs['score'] = scores
return file_path, dtype
return None
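# Note (added for clarity, not in the original module): ScoreDupes workers write
# their scored pairs into a temporary numpy memmap and only push the file path
# and dtype through the score queue; mergeScores below stitches those chunks
# into a single memmap, so large score arrays never have to be pickled between
# processes.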
def mergeScores(score_queue: _SimpleQueue,
result_queue: _SimpleQueue,
stop_signals: int):
scored_pairs_file, file_path = tempfile.mkstemp()
os.close(scored_pairs_file)
seen_signals = 0
end = 0
while seen_signals < stop_signals:
score_chunk = score_queue.get()
if isinstance(score_chunk, Exception):
result_queue.put(score_chunk)
raise score_chunk  # re-raise the worker's exception after reporting it
elif score_chunk is None:
seen_signals += 1
else:
score_file, dtype = score_chunk
score_chunk = numpy.memmap(score_file, mode='r', dtype=dtype)
chunk_size = len(score_chunk)
fp = numpy.memmap(file_path, dtype=dtype,
offset=(end * dtype.itemsize),
shape=(chunk_size, ))
fp[:chunk_size] = score_chunk
end += chunk_size
del score_chunk
os.remove(score_file)
if end:
result_queue.put((file_path, dtype, end))
else:
result_queue.put(None)
def scoreDuplicates(record_pairs: RecordPairs,
data_model,
classifier,
num_cores: int = 1):
if num_cores < 2:
from multiprocessing.dummy import Process, Queue
SimpleQueue = Queue
else:
from .backport import Process, SimpleQueue, Queue # type: ignore
first, record_pairs = peek(record_pairs)
if first is None:
raise BlockingError("No records have been blocked together. "
"Is the data you are trying to match like "
"the data you trained on?")
record_pairs_queue: _Queue = Queue(2)
score_queue: _SimpleQueue = SimpleQueue()
result_queue: _SimpleQueue = SimpleQueue()
n_map_processes = max(num_cores, 1)
score_records = ScoreDupes(data_model,
classifier,
record_pairs_queue,
score_queue)
map_processes = [Process(target=score_records)
for _ in range(n_map_processes)]
for process in map_processes:
process.start()
reduce_process = Process(target=mergeScores,
args=(score_queue,
result_queue,
n_map_processes))
reduce_process.start()
fillQueue(record_pairs_queue, record_pairs, n_map_processes)
result = result_queue.get()
if isinstance(result, Exception):
raise ChildProcessError
if result:
scored_pairs_file, dtype, size = result
scored_pairs = numpy.memmap(scored_pairs_file,
dtype=dtype,
shape=(size,))
else:
dtype = numpy.dtype([('pairs', object, 2),
('score', 'f4', 1)])
scored_pairs = numpy.array([], dtype=dtype)
reduce_process.join()
for process in map_processes:
process.join()
return scored_pairs
def fillQueue(queue: _Queue,
iterable: RecordPairs,
stop_signals: int,
chunk_size: int = 20000) -> None:
iterable = iter(iterable)
while True:
chunk = tuple(itertools.islice(iterable, chunk_size))
if chunk:
queue.put(chunk)
del chunk
else:
# put poison pills in queue to tell scorers that they are
# done
for _ in range(stop_signals):
queue.put(None)
break
class ScoreGazette(object):
def __init__(self, data_model, classifier):
self.data_model = data_model
self.classifier = classifier
def __call__(self, block: RecordPairs) -> numpy.ndarray:
record_ids, records = zip(*(zip(*each) for each in block)) # type: ignore
record_ids = cast(Tuple[Tuple[RecordID, RecordID], ...], record_ids)
records = cast(Tuple[Tuple[RecordDict, RecordDict], ...], records)
distances = self.data_model.distances(records)
scores = self.classifier.predict_proba(distances)[:, -1]
id_type = sniff_id_type(record_ids)
ids = numpy.array(record_ids, dtype=id_type)
dtype = numpy.dtype([('pairs', id_type, 2),
('score', 'f4')])
scored_pairs = numpy.empty(shape=len(scores),
dtype=dtype)
scored_pairs['pairs'] = ids
scored_pairs['score'] = scores
return scored_pairs
def scoreGazette(record_pairs: Blocks,
data_model,
classifier,
num_cores: int = 1) -> Generator[numpy.ndarray, None, None]:
first, record_pairs = peek(record_pairs)
if first is None:
raise ValueError("No records to match")
imap, pool = appropriate_imap(num_cores)
score_records = ScoreGazette(data_model, classifier)
for scored_pairs in imap(score_records, record_pairs):
yield scored_pairs
# The underlying processes in the pool should terminate when the
# pool is garbage collected, but sometimes it takes a while
# before GC, so do it explicitly here
pool.close()
pool.join()
def appropriate_imap(num_cores):
if num_cores < 2:
imap = map
# in order to make it simpler to cleanup a pool of processes
# always return something that we can close and join
class MockPool(object):
def close(self):
pass
def join(self):
pass
pool = MockPool()
else:
from .backport import Pool
pool = Pool(processes=num_cores)
imap = functools.partial(pool.imap_unordered, chunksize=20000)
return imap, pool
def peek(seq: Iterator) -> Tuple[Optional[Any], Iterator]:
try:
first = next(seq)
except TypeError as e:
if "not an iterator" not in str(e):
raise
try:
seq = iter(seq)
first = next(seq)
except StopIteration:
return None, iter(seq)
except StopIteration:
return None, iter(seq)
return first, itertools.chain([first], seq)
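# Hypothetical usage sketch (not part of the original module): peek() returns the
# first element plus an iterator that still yields it, so callers can inspect a
# stream without consuming it.
#     >>> first, rest = peek(iter([1, 2, 3]))
#     >>> first, list(rest)
#     (1, [1, 2, 3])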
def isIndexed(data: Mapping, offset: int) -> bool:
return all(i in data for i in range(offset, offset + len(data)))
def index(data: Mapping[Any, Any], offset: int = 0) -> Mapping[int, Any]:
if isIndexed(data, offset):
return data
else:
data = dict(zip(itertools.count(offset),
data.values()))
return data
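# Hypothetical usage sketch (not part of the original module): mappings that are
# already keyed offset..offset+n-1 are returned untouched; anything else is
# re-keyed with consecutive integers starting at `offset`.
#     >>> index({'x': 'a', 'y': 'b'})
#     {0: 'a', 1: 'b'}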
def Enumerator(start: int = 0, initial: tuple = ()) -> collections.defaultdict:
return collections.defaultdict(itertools.count(start).__next__, initial)
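# Hypothetical usage sketch (not part of the original module): an Enumerator
# hands out the next integer the first time a key is seen and repeats it
# afterwards.
#     >>> ids = Enumerator()
#     >>> ids['a'], ids['b'], ids['a']
#     (0, 1, 0)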
def sniff_id_type(ids: Sequence[Tuple[RecordID, RecordID]]) -> Union[Type[int], Tuple[Type[str], int]]:
example = ids[0][0]
python_type = type(example)
if python_type is bytes or python_type is str:
dtype: Union[Type[int], Tuple[Type[str], int]] = (str, 256)
elif python_type is int:
int(example) # make sure we can cast to int
dtype: Union[Type[int], Tuple[Type[str], int]] = int # type: ignore
else:
raise ValueError('Invalid type for record id')
return dtype
def sqlite_id_type(data: Data) -> Literal['text', 'integer']:
example = next(iter(data.keys()))
python_type = type(example)
if python_type is bytes or python_type is str:
return 'text'
elif python_type is int:
return 'integer'
else:
raise ValueError('Invalid type for record id')
def unique(seq: Iterable) -> list:
"""Return the unique elements of a collection even if those elements are
unhashable and unsortable, like dicts and sets"""
cleaned: list = []
for each in seq:
if each not in cleaned:
cleaned.append(each)
return cleaned
|
from __future__ import division
import itertools
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer.links import Convolution2D
from chainercv.links import Conv2DBNActiv
from chainercv import utils
from chainercv.links.model.yolo.yolo_base import YOLOBase
def _leaky_relu(x):
return F.leaky_relu(x, slope=0.1)
def _upsample(x):
return F.unpooling_2d(x, 2, cover_all=False)
class ResidualBlock(chainer.ChainList):
"""ChainList with a residual connection."""
def __init__(self, *links):
super(ResidualBlock, self).__init__(*links)
def forward(self, x):
h = x
for link in self:
h = link(h)
h += x
return h
class Darknet53Extractor(chainer.ChainList):
"""A Darknet53 based feature extractor for YOLOv3.
This is a feature extractor for :class:`~chainercv.links.model.yolo.YOLOv3`
"""
insize = 416
grids = (13, 26, 52)
def __init__(self):
super(Darknet53Extractor, self).__init__()
# Darknet53
self.append(Conv2DBNActiv(32, 3, pad=1, activ=_leaky_relu))
for k, n_block in enumerate((1, 2, 8, 8, 4)):
self.append(Conv2DBNActiv(
32 << (k + 1), 3, stride=2, pad=1, activ=_leaky_relu))
for _ in range(n_block):
self.append(ResidualBlock(
Conv2DBNActiv(32 << k, 1, activ=_leaky_relu),
Conv2DBNActiv(32 << (k + 1), 3, pad=1, activ=_leaky_relu)))
# additional links
for i, n in enumerate((512, 256, 128)):
if i > 0:
self.append(Conv2DBNActiv(n, 1, activ=_leaky_relu))
self.append(Conv2DBNActiv(n, 1, activ=_leaky_relu))
self.append(Conv2DBNActiv(n * 2, 3, pad=1, activ=_leaky_relu))
self.append(Conv2DBNActiv(n, 1, activ=_leaky_relu))
self.append(Conv2DBNActiv(n * 2, 3, pad=1, activ=_leaky_relu))
self.append(Conv2DBNActiv(n, 1, activ=_leaky_relu))
def forward(self, x):
"""Compute feature maps from a batch of images.
This method extracts feature maps from 3 layers.
Args:
x (ndarray): An array holding a batch of images.
The images should be resized to :math:`416\\times 416`.
Returns:
list of Variable:
Each variable contains a feature map.
"""
ys = []
h = x
hs = []
for i, link in enumerate(self):
h = link(h)
if i in {33, 39, 45}:
ys.append(h)
elif i in {14, 23}:
hs.append(h)
elif i in {34, 40}:
h = F.concat((_upsample(h), hs.pop()))
return ys
class YOLOv3(YOLOBase):
"""YOLOv3.
This is a model of YOLOv3 [#]_.
This model uses :class:`~chainercv.links.model.yolo.Darknet53Extractor` as
its feature extractor.
.. [#] Joseph Redmon, Ali Farhadi.
YOLOv3: An Incremental Improvement. arXiv 2018.
Args:
n_fg_class (int): The number of classes excluding the background.
pretrained_model (string): The weight file to be loaded.
This can take :obj:`'voc0712'`, `filepath` or :obj:`None`.
The default value is :obj:`None`.
* :obj:`'voc0712'`: Load weights trained on trainval split of \
PASCAL VOC 2007 and 2012. \
The weight file is downloaded and cached automatically. \
:obj:`n_fg_class` must be :obj:`20` or :obj:`None`. \
These weights were converted from the darknet model. \
The conversion code is \
`chainercv/examples/yolo/darknet2npz.py`.
* `filepath`: A path of npz file. In this case, :obj:`n_fg_class` \
must be specified properly.
* :obj:`None`: Do not load weights.
"""
_models = {
'voc0712': {
'param': {'n_fg_class': 20},
'url': 'https://chainercv-models.preferred.jp/'
'yolo_v3_voc0712_converted_2018_05_01.npz',
'cv2': True
},
}
_anchors = (
((90, 116), (198, 156), (326, 373)),
((61, 30), (45, 62), (119, 59)),
((13, 10), (30, 16), (23, 33)))
def __init__(self, n_fg_class=None, pretrained_model=None):
super(YOLOv3, self).__init__()
param, path = utils.prepare_pretrained_model(
{'n_fg_class': n_fg_class}, pretrained_model, self._models)
self.n_fg_class = param['n_fg_class']
self.use_preset('visualize')
with self.init_scope():
self.extractor = Darknet53Extractor()
self.subnet = chainer.ChainList()
for i, n in enumerate((512, 256, 128)):
self.subnet.append(chainer.Sequential(
Conv2DBNActiv(n * 2, 3, pad=1, activ=_leaky_relu),
Convolution2D(
len(self._anchors[i]) * (4 + 1 + self.n_fg_class), 1)))
default_bbox = []
step = []
for k, grid in enumerate(self.extractor.grids):
for v, u in itertools.product(range(grid), repeat=2):
for h, w in self._anchors[k]:
default_bbox.append((v, u, h, w))
step.append(self.insize / grid)
self._default_bbox = np.array(default_bbox, dtype=np.float32)
self._step = np.array(step, dtype=np.float32)
if path:
chainer.serializers.load_npz(path, self, strict=False)
def to_cpu(self):
super(YOLOv3, self).to_cpu()
self._default_bbox = cuda.to_cpu(self._default_bbox)
self._step = cuda.to_cpu(self._step)
def to_gpu(self, device=None):
super(YOLOv3, self).to_gpu(device)
self._default_bbox = cuda.to_gpu(self._default_bbox, device)
self._step = cuda.to_gpu(self._step, device)
def forward(self, x):
"""Compute localization, objectness, and classification from a batch of images.
This method computes three variables, :obj:`locs`, :obj:`objs`,
and :obj:`confs`.
:meth:`self._decode` converts these variables to bounding box
coordinates and confidence scores.
These variables are also used in training YOLOv3.
Args:
x (chainer.Variable): A variable holding a batch of images.
Returns:
tuple of chainer.Variable:
This method returns three variables, :obj:`locs`,
:obj:`objs`, and :obj:`confs`.
* **locs**: A variable of float arrays of shape \
:math:`(B, K, 4)`, \
where :math:`B` is the number of samples in the batch and \
:math:`K` is the number of default bounding boxes.
* **objs**: A variable of float arrays of shape \
:math:`(B, K)`.
* **confs**: A variable of float arrays of shape \
:math:`(B, K, n\_fg\_class)`.
"""
ys = []
for i, h in enumerate(self.extractor(x)):
h = self.subnet[i](h)
h = F.transpose(h, (0, 2, 3, 1))
h = F.reshape(h, (h.shape[0], -1, 4 + 1 + self.n_fg_class))
ys.append(h)
y = F.concat(ys)
locs = y[:, :, :4]
objs = y[:, :, 4]
confs = y[:, :, 5:]
return locs, objs, confs
def _decode(self, loc, obj, conf):
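# Added note (not in the original source): this follows the usual YOLOv3
# decoding. The sigmoid of the first two offsets shifts the box centre within
# its grid cell (scaled by the cell size in self._step), the exponential of the
# last two scales the anchor priors, and the centre/size form is converted to
# corner coordinates before objectness * class confidence is thresholded and
# passed through per-class non-maximum suppression.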
raw_bbox = self._default_bbox.copy()
raw_bbox[:, :2] += 1 / (1 + self.xp.exp(-loc[:, :2]))
raw_bbox[:, :2] *= self._step[:, None]
raw_bbox[:, 2:] *= self.xp.exp(loc[:, 2:])
raw_bbox[:, :2] -= raw_bbox[:, 2:] / 2
raw_bbox[:, 2:] += raw_bbox[:, :2]
obj = 1 / (1 + self.xp.exp(-obj))
conf = 1 / (1 + self.xp.exp(-conf))
raw_score = obj[:, None] * conf
bbox = []
label = []
score = []
for l in range(self.n_fg_class):
bbox_l = raw_bbox
score_l = raw_score[:, l]
mask = score_l >= self.score_thresh
bbox_l = bbox_l[mask]
score_l = score_l[mask]
indices = utils.non_maximum_suppression(
bbox_l, self.nms_thresh, score_l)
bbox_l = bbox_l[indices]
score_l = score_l[indices]
bbox.append(bbox_l)
label.append(self.xp.array((l,) * len(bbox_l)))
score.append(score_l)
bbox = self.xp.vstack(bbox).astype(np.float32)
label = self.xp.hstack(label).astype(np.int32)
score = self.xp.hstack(score).astype(np.float32)
return bbox, label, score
|
import re
import sys
import inspect
import os.path
import collections
import enum
from typing import Callable, MutableMapping, Optional, List, Union
import qutebrowser
from qutebrowser.utils import log, utils
def is_git_repo() -> bool:
"""Check if we're running from a git repository."""
gitfolder = os.path.join(qutebrowser.basedir, os.path.pardir, '.git')
return os.path.isdir(gitfolder)
def docs_up_to_date(path: str) -> bool:
"""Check if the generated html documentation is up to date.
Args:
path: The path of the document to check.
Return:
True if they are up to date or we couldn't check.
False if they are outdated.
"""
if hasattr(sys, 'frozen') or not is_git_repo():
return True
html_path = os.path.join(qutebrowser.basedir, 'html', 'doc', path)
filename = os.path.splitext(path)[0]
asciidoc_path = os.path.join(qutebrowser.basedir, os.path.pardir,
'doc', 'help', filename + '.asciidoc')
try:
html_time = os.path.getmtime(html_path)
asciidoc_time = os.path.getmtime(asciidoc_path)
except FileNotFoundError:
return True
return asciidoc_time <= html_time
class DocstringParser:
"""Generate documentation based on a docstring of a command handler.
The docstring needs to follow the format described in doc/contributing.
Attributes:
_state: The current state of the parser state machine.
_cur_arg_name: The name of the argument we're currently handling.
_short_desc_parts: The short description of the function as list.
_long_desc_parts: The long description of the function as list.
short_desc: The short description of the function.
long_desc: The long description of the function.
arg_descs: A dict of argument names to their descriptions
"""
class State(enum.Enum):
"""The current state of the parser."""
short = enum.auto()
desc = enum.auto()
desc_hidden = enum.auto()
arg_start = enum.auto()
arg_inside = enum.auto()
misc = enum.auto()
def __init__(self, func: Callable) -> None:
"""Constructor.
Args:
func: The function to parse the docstring for.
"""
self._state = self.State.short
self._cur_arg_name: Optional[str] = None
self._short_desc_parts: List[str] = []
self._long_desc_parts: List[str] = []
self.arg_descs: MutableMapping[
str, Union[str, List[str]]] = collections.OrderedDict()
doc = inspect.getdoc(func)
handlers = {
self.State.short: self._parse_short,
self.State.desc: self._parse_desc,
self.State.desc_hidden: self._skip,
self.State.arg_start: self._parse_arg_start,
self.State.arg_inside: self._parse_arg_inside,
self.State.misc: self._skip,
}
if doc is None:
if sys.flags.optimize < 2:
log.commands.warning(
"Function {}() from {} has no docstring".format(
utils.qualname(func),
inspect.getsourcefile(func)))
self.long_desc = ""
self.short_desc = ""
return
for line in doc.splitlines():
handler = handlers[self._state]
stop = handler(line)
if stop:
break
for k, v in self.arg_descs.items():
desc = ' '.join(v)
desc = re.sub(r', or None($|\.)', r'\1', desc)
desc = re.sub(r', or None', r', or not given', desc)
self.arg_descs[k] = desc
self.long_desc = ' '.join(self._long_desc_parts)
self.short_desc = ' '.join(self._short_desc_parts)
def _process_arg(self, line: str) -> None:
"""Helper method to process a line like 'fooarg: Blah blub'."""
self._cur_arg_name, argdesc = line.split(':', maxsplit=1)
self._cur_arg_name = self._cur_arg_name.strip().lstrip('*')
self.arg_descs[self._cur_arg_name] = [argdesc.strip()]
def _skip(self, line: str) -> None:
"""Handler to ignore everything until we get 'Args:'."""
if line.startswith('Args:'):
self._state = self.State.arg_start
def _parse_short(self, line: str) -> None:
"""Parse the short description (first block) in the docstring."""
if not line:
self._state = self.State.desc
else:
self._short_desc_parts.append(line.strip())
def _parse_desc(self, line: str) -> None:
"""Parse the long description in the docstring."""
if line.startswith('Args:'):
self._state = self.State.arg_start
elif line.strip() == '//' or line.startswith('Attributes:'):
self._state = self.State.desc_hidden
elif line.strip():
self._long_desc_parts.append(line.strip())
def _parse_arg_start(self, line: str) -> None:
"""Parse first argument line."""
self._process_arg(line)
self._state = self.State.arg_inside
def _parse_arg_inside(self, line: str) -> bool:
"""Parse subsequent argument lines."""
argname = self._cur_arg_name
assert argname is not None
descs = self.arg_descs[argname]
assert isinstance(descs, list)
if re.fullmatch(r'[A-Z][a-z]+:', line):
if not descs[-1].strip():
del descs[-1]
return True
elif not line.strip():
descs.append('\n\n')
elif line[4:].startswith(' '):
descs.append(line.strip() + '\n')
else:
self._process_arg(line)
return False
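# Hypothetical illustration (not part of qutebrowser): for a handler whose
# docstring reads "Open a URL." followed by an "Args:" block containing
# "url: The address to open.", DocstringParser(handler).short_desc becomes
# 'Open a URL.' and arg_descs['url'] becomes 'The address to open.'.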
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from nfs import NfsCollector
##########################################################################
class TestNfsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NfsCollector', {
'interval': 1
})
self.collector = NfsCollector(config, None)
def test_import(self):
self.assertTrue(NfsCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/net/rpc/nfs')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_rhel5(self, publish_mock):
NfsCollector.PROC = self.getFixturePath('rhel5-1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NfsCollector.PROC = self.getFixturePath('rhel5-2')
self.collector.collect()
metrics = {
'net.packets': 0.0,
'net.tcpcnt': 0.0,
'net.tcpconn': 0.0,
'net.udpcnt': 0.0,
'rpc.authrefrsh': 0.0,
'rpc.calls': 8042864.0,
'rpc.retrans': 0.0,
'v2.create': 0.0,
'v2.fsstat': 0.0,
'v2.getattr': 0.0,
'v2.link': 0.0,
'v2.lookup': 0.0,
'v2.mkdir': 0.0,
'v2.null': 0.0,
'v2.read': 0.0,
'v2.readdir': 0.0,
'v2.readlink': 0.0,
'v2.remove': 0.0,
'v2.rename': 0.0,
'v2.rmdir': 0.0,
'v2.root': 0.0,
'v2.setattr': 0.0,
'v2.symlink': 0.0,
'v2.wrcache': 0.0,
'v2.write': 0.0,
'v3.access': 40672.0,
'v3.commit': 0.0,
'v3.create': 91.0,
'v3.fsinfo': 0.0,
'v3.fsstat': 20830.0,
'v3.getattr': 162507.0,
'v3.link': 0.0,
'v3.lookup': 89.0,
'v3.mkdir': 0.0,
'v3.mknod': 0.0,
'v3.null': 0.0,
'v3.pathconf': 0.0,
'v3.read': 6093419.0,
'v3.readdir': 4002.0,
'v3.readdirplus': 0.0,
'v3.readlink': 0.0,
'v3.remove': 9.0,
'v3.rename': 0.0,
'v3.rmdir': 0.0,
'v3.setattr': 8640.0,
'v3.symlink': 0.0,
'v3.write': 1712605.0,
'v4.access': 0.0,
'v4.close': 0.0,
'v4.commit': 0.0,
'v4.confirm': 0.0,
'v4.create': 0.0,
'v4.delegreturn': 0.0,
'v4.fs_locations': 0.0,
'v4.fsinfo': 0.0,
'v4.getacl': 0.0,
'v4.getattr': 0.0,
'v4.link': 0.0,
'v4.lock': 0.0,
'v4.lockt': 0.0,
'v4.locku': 0.0,
'v4.lookup': 0.0,
'v4.lookup_root': 0.0,
'v4.null': 0.0,
'v4.open': 0.0,
'v4.open_conf': 0.0,
'v4.open_dgrd': 0.0,
'v4.open_noat': 0.0,
'v4.pathconf': 0.0,
'v4.read': 0.0,
'v4.readdir': 0.0,
'v4.readlink': 0.0,
'v4.rel_lkowner': 0.0,
'v4.remove': 0.0,
'v4.rename': 0.0,
'v4.renew': 0.0,
'v4.server_caps': 0.0,
'v4.setacl': 0.0,
'v4.setattr': 0.0,
'v4.setclntid': 0.0,
'v4.statfs': 0.0,
'v4.symlink': 0.0,
'v4.write': 0.0
}
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_rhel6(self, publish_mock):
NfsCollector.PROC = self.getFixturePath('rhel6-1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NfsCollector.PROC = self.getFixturePath('rhel6-2')
self.collector.collect()
metrics = {
'net.packets': 0.0,
'net.tcpcnt': 0.0,
'net.tcpconn': 0.0,
'net.udpcnt': 0.0,
'rpc.authrefrsh': 32.0,
'rpc.calls': 32.0,
'rpc.retrans': 0.0,
'v2.create': 0.0,
'v2.fsstat': 0.0,
'v2.getattr': 0.0,
'v2.link': 0.0,
'v2.lookup': 0.0,
'v2.mkdir': 0.0,
'v2.null': 0.0,
'v2.read': 0.0,
'v2.readdir': 0.0,
'v2.readlink': 0.0,
'v2.remove': 0.0,
'v2.rename': 0.0,
'v2.rmdir': 0.0,
'v2.root': 0.0,
'v2.setattr': 0.0,
'v2.symlink': 0.0,
'v2.wrcache': 0.0,
'v2.write': 0.0,
'v3.access': 6.0,
'v3.commit': 0.0,
'v3.create': 0.0,
'v3.fsinfo': 0.0,
'v3.fsstat': 17.0,
'v3.getattr': 7.0,
'v3.link': 0.0,
'v3.lookup': 0.0,
'v3.mkdir': 0.0,
'v3.mknod': 0.0,
'v3.null': 0.0,
'v3.pathconf': 0.0,
'v3.read': 0.0,
'v3.readdir': 0.0,
'v3.readdirplus': 0.0,
'v3.readlink': 0.0,
'v3.remove': 0.0,
'v3.rename': 0.0,
'v3.rmdir': 0.0,
'v3.setattr': 1.0,
'v3.symlink': 0.0,
'v3.write': 1.0,
'v4.access': 0.0,
'v4.close': 0.0,
'v4.commit': 0.0,
'v4.confirm': 0.0,
'v4.create': 0.0,
'v4.create_ses': 0.0,
'v4.delegreturn': 0.0,
'v4.destroy_ses': 0.0,
'v4.ds_write': 0.0,
'v4.exchange_id': 0.0,
'v4.fs_locations': 0.0,
'v4.fsinfo': 0.0,
'v4.get_lease_t': 0.0,
'v4.getacl': 0.0,
'v4.getattr': 0.0,
'v4.getdevinfo': 0.0,
'v4.getdevlist': 0.0,
'v4.layoutcommit': 0.0,
'v4.layoutget': 0.0,
'v4.layoutreturn': 0.0,
'v4.link': 0.0,
'v4.lock': 0.0,
'v4.lockt': 0.0,
'v4.locku': 0.0,
'v4.lookup': 0.0,
'v4.lookup_root': 0.0,
'v4.null': 0.0,
'v4.open': 0.0,
'v4.open_conf': 0.0,
'v4.open_dgrd': 0.0,
'v4.open_noat': 0.0,
'v4.pathconf': 0.0,
'v4.read': 0.0,
'v4.readdir': 0.0,
'v4.readlink': 0.0,
'v4.reclaim_comp': 0.0,
'v4.rel_lkowner': 0.0,
'v4.remove': 0.0,
'v4.rename': 0.0,
'v4.renew': 0.0,
'v4.sequence': 0.0,
'v4.server_caps': 0.0,
'v4.setacl': 0.0,
'v4.setattr': 0.0,
'v4.setclntid': 0.0,
'v4.statfs': 0.0,
'v4.symlink': 0.0,
'v4.write': 0.0,
}
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import collections
import json
import unittest
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.windows_packages import psping
psping_results = """
PsPing v2.10 - PsPing - ping, latency, bandwidth measurement utility
Copyright (C) 2012-2016 Mark Russinovich
Sysinternals - www.sysinternals.com
TCP latency test connecting to 10.138.0.2:47001: Connected
15 iterations (warmup 5) sending 8192 bytes TCP latency test: 0%
Connected
15 iterations (warmup 5) sending 8192 bytes TCP latency test: 100%
TCP roundtrip latency statistics (post warmup):
Sent = 10, Size = 8192, Total Bytes: 81920,
Minimum = 0.19ms, Maxiumum = 0.58ms, Average = 0.27ms
Latency Count
0.30\t688
0.51\t292
0.71\t15
0.92\t2
1.13\t0
"""
class PspingBenchmarkTestCase(unittest.TestCase, test_util.SamplesTestMixin):
def testPspingParsing(self):
minimum = 0.19
maximum = 0.58
average = 0.27
use_internal_ip = True
machine = collections.namedtuple('machine', 'zone machine_type')
client = machine(machine_type='cA', zone='cZ')
server = machine(machine_type='sB', zone='sZ')
samples = psping.ParsePspingResults(psping_results, client, server,
use_internal_ip)
expected_metadata = {
'internal_ip_used': use_internal_ip,
'sending_zone': client.zone,
'sending_machine_type': client.machine_type,
'receiving_zone': server.zone,
'receiving_machine_type': server.machine_type,
}
histogram = json.dumps([
{'latency': 0.3, 'count': 688, 'bucket_number': 1},
{'latency': 0.51, 'count': 292, 'bucket_number': 2},
{'latency': 0.71, 'count': 15, 'bucket_number': 3},
{'latency': 0.92, 'count': 2, 'bucket_number': 4},
{'latency': 1.13, 'count': 0, 'bucket_number': 5},
])
expected_samples = [
sample.Sample('latency', average, 'ms', expected_metadata),
sample.Sample('latency:maximum', maximum, 'ms', expected_metadata),
sample.Sample('latency:minimum', minimum, 'ms', expected_metadata),
]
expected_histogram_metadata = expected_metadata.copy()
expected_histogram_metadata['histogram'] = histogram
expected_samples.append(sample.Sample('latency:histogram', 0, 'ms',
expected_histogram_metadata))
self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
if __name__ == '__main__':
unittest.main()
|
from functools import partial
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_SENSOR_STATE,
ATTR_SENSOR_TYPE_SENSOR as ENTITY_TYPE,
ATTR_SENSOR_UNIQUE_ID,
ATTR_SENSOR_UOM,
DATA_DEVICES,
DOMAIN,
)
from .entity import MobileAppEntity, sensor_id
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up mobile app sensor from a config entry."""
entities = []
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
for config in hass.data[DOMAIN][ENTITY_TYPE].values():
if config[CONF_WEBHOOK_ID] != webhook_id:
continue
device = hass.data[DOMAIN][DATA_DEVICES][webhook_id]
entities.append(MobileAppSensor(config, device, config_entry))
async_add_entities(entities)
@callback
def handle_sensor_registration(webhook_id, data):
if data[CONF_WEBHOOK_ID] != webhook_id:
return
unique_id = sensor_id(data[CONF_WEBHOOK_ID], data[ATTR_SENSOR_UNIQUE_ID])
entity = hass.data[DOMAIN][ENTITY_TYPE][unique_id]
if "added" in entity:
return
entity["added"] = True
device = hass.data[DOMAIN][DATA_DEVICES][data[CONF_WEBHOOK_ID]]
async_add_entities([MobileAppSensor(data, device, config_entry)])
async_dispatcher_connect(
hass,
f"{DOMAIN}_{ENTITY_TYPE}_register",
partial(handle_sensor_registration, webhook_id),
)
class MobileAppSensor(MobileAppEntity):
"""Representation of an mobile app sensor."""
@property
def state(self):
"""Return the state of the sensor."""
return self._config[ATTR_SENSOR_STATE]
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._config.get(ATTR_SENSOR_UOM)
|
from cerberus import errors
from cerberus.tests import assert_fail
def test_items(validator):
field = 'a_list_of_values'
value = ['a string', 'not an integer']
assert_fail(
document={field: value},
validator=validator,
error=(
field,
(field, 'items'),
errors.ITEMS,
({'type': ('string',)}, {'type': ('integer',)}),
),
child_errors=[
((field, 1), (field, 'items', 1, 'type'), errors.TYPE, ('integer',))
],
)
assert (
errors.BasicErrorHandler.messages[errors.TYPE.code].format(
constraint=('integer',)
)
in validator.errors[field][-1][1]
)
def test_items_with_extra_item():
field = 'a_list_of_values'
assert_fail(
document={field: ['a string', 10, 'an extra item']},
error=(
field,
(field, 'items'),
errors.ITEMS_LENGTH,
({'type': ('string',)}, {'type': ('integer',)}),
(2, 3),
),
)
|
import logging
from ciscomobilityexpress.ciscome import CiscoMobilityExpress
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a Cisco ME scanner."""
config = config[DOMAIN]
controller = CiscoMobilityExpress(
config[CONF_HOST],
config[CONF_USERNAME],
config[CONF_PASSWORD],
config[CONF_SSL],
config[CONF_VERIFY_SSL],
)
if not controller.is_logged_in():
return None
return CiscoMEDeviceScanner(controller)
class CiscoMEDeviceScanner(DeviceScanner):
"""This class scans for devices associated to a Cisco ME controller."""
def __init__(self, controller):
"""Initialize the scanner."""
self.controller = controller
self.last_results = {}
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [device.macaddr for device in self.last_results]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
name = next(
(result.clId for result in self.last_results if result.macaddr == device),
None,
)
return name
def get_extra_attributes(self, device):
"""
Get extra attributes of a device.
Some known extra attributes that may be returned in the device tuple
include SSID, PT (e.g. 802.11ac), and devtype (e.g. iPhone 7), among others.
"""
device = next(
(result for result in self.last_results if result.macaddr == device), None
)
return device._asdict()
def _update_info(self):
"""Check the Cisco ME controller for devices."""
self.last_results = self.controller.get_associated_devices()
_LOGGER.debug(
"Cisco Mobility Express controller returned: %s", self.last_results
)
|
import io
import csv
import datetime
import os
from urllib.parse import urlparse
from lxml import etree, html
from mako.template import Template
from pkg_resources import resource_filename
from nikola import utils
links = {}
class ImportMixin(object):
"""Mixin with common used methods."""
name = "import_mixin"
needs_config = False
doc_usage = "[options] export_file"
doc_purpose = "import a dump from a different engine."
cmd_options = [
{
'name': 'output_folder',
'long': 'output-folder',
'short': 'o',
'default': 'new_site',
'help': 'Location to write imported content.'
},
]
def _execute(self, options={}, args=[]):
"""Import a blog from an export into a Nikola site."""
raise NotImplementedError("Must be implemented by a subclass.")
@classmethod
def get_channel_from_file(cls, filename):
"""Get channel from XML file."""
tree = etree.fromstring(cls.read_xml_file(filename))
channel = tree.find('channel')
return channel
@staticmethod
def configure_redirections(url_map, base_dir=''):
"""Configure redirections from an url_map."""
index = base_dir + 'index.html'
if index.startswith('/'):
index = index[1:]
redirections = []
for k, v in url_map.items():
if not k[-1] == '/':
k = k + '/'
# remove the initial "/" because src is a relative file path
src = (urlparse(k).path + 'index.html')[1:]
dst = (urlparse(v).path)
if src == index:
utils.LOGGER.warning("Can't do a redirect for: {0!r}".format(k))
else:
redirections.append((src, dst))
return redirections
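# Illustrative sketch (hypothetical URLs): given
#   url_map = {'http://old.example.com/foo': 'http://new.example.com/posts/foo/'}
# configure_redirections(url_map) returns relative (src, dst) pairs such as
#   [('foo/index.html', '/posts/foo/')]
# which can be fed into Nikola's REDIRECTIONS setting.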
def generate_base_site(self):
"""Generate a base Nikola site."""
if not os.path.exists(self.output_folder):
os.system('nikola init -q ' + self.output_folder)
else:
self.import_into_existing_site = True
utils.LOGGER.warning('The folder {0} already exists - assuming that this is an '
'already existing Nikola site.'.format(self.output_folder))
filename = resource_filename('nikola', 'conf.py.in')
# The 'strict_undefined=True' option will report the name of the missing symbol,
# if any (e.g. NameError: 'THEME' is not defined).
# For other errors coming from mako/runtime.py, you can add format_exceptions=True,
# then more info will be written to *somefile* (most probably conf.py).
conf_template = Template(filename=filename, strict_undefined=True)
return conf_template
@staticmethod
def populate_context(channel):
"""Populate context with settings."""
raise NotImplementedError("Must be implemented by a subclass.")
@classmethod
def transform_content(cls, content):
"""Transform content to a Nikola-friendly format."""
return content
@classmethod
def write_content(cls, filename, content, rewrite_html=True):
"""Write content to file."""
if rewrite_html:
try:
doc = html.document_fromstring(content)
doc.rewrite_links(replacer)
content = html.tostring(doc, encoding='utf8')
except etree.ParserError:
content = content.encode('utf-8')
else:
content = content.encode('utf-8')
utils.makedirs(os.path.dirname(filename))
with open(filename, "wb+") as fd:
fd.write(content)
@classmethod
def write_post(cls, filename, content, headers, compiler, rewrite_html=True):
"""Ask the specified compiler to write the post to disk."""
if rewrite_html:
try:
doc = html.document_fromstring(content)
doc.rewrite_links(replacer)
content = html.tostring(doc, encoding='utf8')
except etree.ParserError:
pass
if isinstance(content, bytes):
content = content.decode('utf-8')
compiler.create_post(
filename,
content=content,
onefile=True,
**headers)
def write_metadata(self, filename, title, slug, post_date, description, tags, **kwargs):
"""Write metadata to meta file."""
if not description:
description = ""
utils.makedirs(os.path.dirname(filename))
with io.open(filename, "w+", encoding="utf8") as fd:
data = {'title': title, 'slug': slug, 'date': post_date, 'tags': ','.join(tags), 'description': description}
data.update(kwargs)
fd.write(utils.write_metadata(data, site=self.site, comment_wrap=False))
@staticmethod
def write_urlmap_csv(output_file, url_map):
"""Write urlmap to csv file."""
utils.makedirs(os.path.dirname(output_file))
fmode = 'w+'
with io.open(output_file, fmode) as fd:
csv_writer = csv.writer(fd)
for item in url_map.items():
csv_writer.writerow(item)
def get_configuration_output_path(self):
"""Get path for the output configuration file."""
if not self.import_into_existing_site:
filename = 'conf.py'
else:
filename = 'conf.py.{name}-{time}'.format(
time=datetime.datetime.now().strftime('%Y%m%d_%H%M%S'),
name=self.name)
config_output_path = os.path.join(self.output_folder, filename)
utils.LOGGER.info('Configuration will be written to: {0}'.format(config_output_path))
return config_output_path
@staticmethod
def write_configuration(filename, rendered_template):
"""Write the configuration file."""
utils.makedirs(os.path.dirname(filename))
with io.open(filename, 'w+', encoding='utf8') as fd:
fd.write(rendered_template)
def replacer(dst):
"""Replace links."""
return links.get(dst, dst)
|
import os
from urllib.parse import urlsplit
from django.db import models
from django.http.response import Http404, HttpResponseRedirect
from django.http.request import QueryDict
from django.shortcuts import get_object_or_404
from django.utils.cache import add_never_cache_headers
from django.utils.encoding import force_str
from django.utils.translation import get_language_from_request
from rest_framework import generics
from rest_framework import pagination
from rest_framework import status
from rest_framework import views
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.response import Response
from rest_framework.utils.urls import replace_query_param
from cms.views import details
from shop.conf import app_settings
from shop.models.product import ProductModel
from shop.rest.filters import CMSPagesFilterBackend
from shop.rest.money import JSONRenderer
from shop.rest.renderers import ShopTemplateHTMLRenderer, CMSPageRenderer
from shop.serializers.bases import ProductSerializer
from shop.serializers.defaults.catalog import AddToCartSerializer
class ProductListPagination(pagination.LimitOffsetPagination):
"""
If the catalog's list is rendered with manual pagination, we typically want to render all rows
without "widow" items (widows are single items spawning a new row). With a limit of 16 items
per page, we can render 2 or 4 columns without problems; however, with 3 or 5 columns there is
one widow item, which breaks the layout. To prevent this problem, configure the
``ProductListView`` to use this pagination class. It behaves so that the last product items of
a page reappear on the next page. The number of reappearing items defaults to 1 and can be
modified by changing ``overlapping`` to a different value.
By its nature, the rendering view cannot know the current media breakpoint, and hence the number
of columns. Therefore simply hide (with ``display: none;``) potential widow items by using the
media breakpoints provided by CSS (see ``static/shop/css/prevent-widows.scss`` for details).
Since the last product items overlap with the first ones on the next page, hiding them does not
lose any items. This allows us to switch between layouts with a different number of columns,
keeping the last row of each page in balance.
"""
template = 'shop/templatetags/paginator.html'
default_limit = 16
overlapping = 1
def adjust_offset(self, url, page_offset):
if url is None:
return
(scheme, netloc, path, query, fragment) = urlsplit(force_str(url))
query_dict = QueryDict(query)
try:
offset = pagination._positive_int(
query_dict[self.offset_query_param],
)
except (KeyError, ValueError):
pass
else:
if offset > page_offset:
url = replace_query_param(url, self.offset_query_param, max(0, offset - self.overlapping))
elif offset < page_offset:
url = replace_query_param(url, self.offset_query_param, offset + self.overlapping)
return url
def get_html_context(self):
context = super().get_html_context()
page_offset = self.get_offset(self.request)
context['previous_url'] = self.adjust_offset(context['previous_url'], page_offset)
context['next_url'] = self.adjust_offset(context['next_url'], page_offset)
for k, pl in enumerate(context['page_links']):
url = self.adjust_offset(pl.url, page_offset)
page_link = pagination.PageLink(url=url, number=pl.number, is_active=pl.is_active, is_break=pl.is_break)
context['page_links'][k] = page_link
return context
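# Illustrative sketch (hypothetical numbers) of how the overlap is applied: with
# the defaults above (default_limit = 16, overlapping = 1), a request at offset 0
# gets a next_url whose "offset" query parameter is rewritten from 16 to 15, so
# the last product of one page reappears as the first product of the next page;
# URLs pointing below the current offset are shifted forward by ``overlapping``.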
class ProductListView(generics.ListAPIView):
"""
This view is used to list all products which shall be visible below a certain URL.
Usage: Add it to the urlpatterns responsible for rendering the catalog's views. The
file containing these patterns can be referenced by the CMS apphook used by the CMS pages
responsible for rendering the catalog's list view.
```
urlpatterns = [
...
url(r'^(?P<slug>[\w-]+)/?$', ProductRetrieveView.as_view(**params)), # see below
url(r'^$', ProductListView.as_view()),
]
```
These attributes can be added to the ``as_view(**params)`` method:
:param renderer_classes: A list or tuple of REST renderer classes.
:param product_model: The product model onto which the filter set is applied.
:param serializer_class: The serializer class used to process the queryset returned
by the catalog's product list view.
:param limit_choices_to: Limit the queryset of product models to these choices.
:param filter_class: A filter set which must inherit from :class:`django_filters.FilterSet`.
:param pagination_class: A pagination class inheriting from :class:`rest_framework.pagination.BasePagination`.
:param redirect_to_lonely_product: If ``True``, redirect onto a lonely product in the
catalog. Defaults to ``False``.
"""
renderer_classes = (CMSPageRenderer, JSONRenderer, BrowsableAPIRenderer)
product_model = ProductModel
serializer_class = app_settings.PRODUCT_SUMMARY_SERIALIZER
limit_choices_to = models.Q()
filter_class = None
pagination_class = ProductListPagination
redirect_to_lonely_product = False
def get(self, request, *args, **kwargs):
if self.redirect_to_lonely_product and self.get_queryset().count() == 1:
redirect_to = self.get_queryset().first().get_absolute_url()
return HttpResponseRedirect(redirect_to)
response = self.list(request, *args, **kwargs)
# TODO: we must find a better way to invalidate the cache.
# Simply adding a no-cache header eventually decreases the performance dramatically.
add_never_cache_headers(response)
return response
def get_queryset(self):
qs = self.product_model.objects.filter(self.limit_choices_to, active=True)
# restrict queryset by language
if hasattr(self.product_model, 'translations'):
language = get_language_from_request(self.request)
qs = qs.prefetch_related('translations').filter(translations__language_code=language)
qs = qs.select_related('polymorphic_ctype')
return qs
class SyncCatalogView(views.APIView):
"""
This view is used to synchronize the number of items in the cart directly from the catalog's
list view. It is intended for sites where we do not want to force the user to open each
product's detail view in order to add that product to the cart.
Use Angular directive <ANY shop-sync-catalog-item="..."> on each catalog item to set up
the communication with this view.
To the ``urlpatterns`` responsible for the list view, add
```
urlpatterns = [
...
url(r'^sync-catalog$', SyncCatalogView.as_view(
serializer_class=SyncCatalogSerializer,
)),
...
]
```
to the URLs as specified by the merchant's implementation of its catalog list.
The class ``SyncCatalogSerializer`` must be provided by the merchant implementation.
"""
renderer_classes = (JSONRenderer, BrowsableAPIRenderer)
product_model = ProductModel
product_field = 'product'
serializer_class = None # must be overridden by SyncCatalogView.as_view()
filter_class = None # may be overridden by SyncCatalogView.as_view()
limit_choices_to = models.Q()
def get_context(self, request, **kwargs):
filter_kwargs = {'id': request.data.get('id')}
if hasattr(self.product_model, 'translations'):
filter_kwargs.update(translations__language_code=get_language_from_request(self.request))
queryset = self.product_model.objects.filter(self.limit_choices_to, **filter_kwargs)
product = get_object_or_404(queryset)
return {self.product_field: product, 'request': request}
def get(self, request, *args, **kwargs):
context = self.get_context(request, **kwargs)
serializer = self.serializer_class(context=context, **kwargs)
return Response(serializer.data)
def post(self, request, *args, **kwargs):
context = self.get_context(request, **kwargs)
serializer = self.serializer_class(data=request.data, context=context)
if serializer.is_valid():
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AddToCartView(views.APIView):
"""
Handle the "Add to Cart" dialog on the products detail page.
"""
renderer_classes = (JSONRenderer, BrowsableAPIRenderer)
product_model = ProductModel
serializer_class = AddToCartSerializer
lookup_field = lookup_url_kwarg = 'slug'
limit_choices_to = models.Q()
def get_context(self, request, **kwargs):
assert self.lookup_url_kwarg in kwargs
filter_kwargs = {self.lookup_field: kwargs.pop(self.lookup_url_kwarg)}
if hasattr(self.product_model, 'translations'):
filter_kwargs.update(translations__language_code=get_language_from_request(self.request))
queryset = self.product_model.objects.filter(self.limit_choices_to, **filter_kwargs)
product = get_object_or_404(queryset)
return {'product': product, 'request': request}
def get(self, request, *args, **kwargs):
context = self.get_context(request, **kwargs)
serializer = self.serializer_class(context=context, **kwargs)
return Response(serializer.data)
def post(self, request, *args, **kwargs):
context = self.get_context(request, **kwargs)
serializer = self.serializer_class(data=request.data, context=context)
if serializer.is_valid():
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ProductRetrieveView(generics.RetrieveAPIView):
"""
This view is used to retrieve and render a certain product.
Usage: Add it to the urlpatterns responsible for rendering the catalog's views. The
file containing these patterns can be referenced by the CMS apphook ``CatalogListCMSApp``
and used by the CMS pages responsible for rendering the catalog's list.
```
urlpatterns = [
...
url(r'^(?P<slug>[\w-]+)', ProductRetrieveView.as_view()),
url(r'^', ProductListView.as_view()), # see above
]
```
You may add these attributes to the ``as_view()`` method:
:param renderer_classes: A list or tuple of REST renderer classes.
:param lookup_field: The model field used to retrieve the product instance.
:param lookup_url_kwarg: The name of the parsed URL fragment.
:param serializer_class: The serializer class used to process the queryset returned
by the catalog's product detail view.
:param limit_choices_to: Limit the queryset of product models to these choices.
:param use_modal_dialog: If ``True`` (default), render a modal dialog to confirm adding the
product to the cart.
"""
renderer_classes = (ShopTemplateHTMLRenderer, JSONRenderer, BrowsableAPIRenderer)
lookup_field = lookup_url_kwarg = 'slug'
product_model = ProductModel
serializer_class = ProductSerializer
limit_choices_to = models.Q()
use_modal_dialog = True
def dispatch(self, request, *args, **kwargs):
"""
In some Shop configurations, it is common to render the catalog's list view right on
the main landing page. Therefore we have a combination of the ``ProductListView`` and the
``ProductRetrieveView`` interfering with the CMS's root page, which means that we have
overlapping namespaces. For example, the URL ``/awesome-toy`` must be served by the
``ProductRetrieveView``, but ``/cart`` is served by **django-CMS**.
In such a situation, the CMS is not able to intercept all requests intended for itself.
Instead this ``ProductRetrieveView`` would not find a product if we query for, say
``/cart``, and hence would raise a Not Found exception. However, since we have overlapping
namespaces, this method first attempts to resolve by product, and if that fails, it
forwards the request to django-CMS.
"""
try:
return super().dispatch(request, *args, **kwargs)
except Http404:
if request.current_page.node.is_root():
return details(request, kwargs.get('slug'))
raise
def get_template_names(self):
product = self.get_object()
app_label = product._meta.app_label.lower()
basename = '{}-detail.html'.format(product._meta.model_name)
return [
os.path.join(app_label, 'catalog', basename),
os.path.join(app_label, 'catalog/product-detail.html'),
'shop/catalog/product-detail.html',
]
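# Illustrative sketch (hypothetical app and model names): for a product model
# ``SmartCard`` defined in an app ``myshop``, the templates are searched in this
# order:
#   myshop/catalog/smartcard-detail.html
#   myshop/catalog/product-detail.html
#   shop/catalog/product-detail.html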
def get_renderer_context(self):
renderer_context = super().get_renderer_context()
if renderer_context['request'].accepted_renderer.format == 'html':
# add the product as Python object to the context
product = self.get_object()
renderer_context.update(
app_label=product._meta.app_label.lower(),
product=product,
use_modal_dialog=self.use_modal_dialog,
)
return renderer_context
def get_object(self):
if not hasattr(self, '_product'):
assert self.lookup_url_kwarg in self.kwargs
filter_kwargs = {
'active': True,
self.lookup_field: self.kwargs[self.lookup_url_kwarg],
}
if hasattr(self.product_model, 'translations'):
filter_kwargs.update(translations__language_code=get_language_from_request(self.request))
queryset = self.product_model.objects.filter(self.limit_choices_to, **filter_kwargs)
self._product = get_object_or_404(queryset)
return self._product
class OnePageResultsSetPagination(pagination.PageNumberPagination):
def __init__(self):
self.page_size = ProductModel.objects.count()
class ProductSelectView(generics.ListAPIView):
"""
A simple list view, which is used only by the admin backend. It is required to fetch
the data for rendering the select widget when looking up for a product.
"""
renderer_classes = (JSONRenderer, BrowsableAPIRenderer)
serializer_class = app_settings.PRODUCT_SELECT_SERIALIZER
pagination_class = OnePageResultsSetPagination
def get_queryset(self):
term = self.request.GET.get('term', '')
if len(term) >= 2:
return ProductModel.objects.select_lookup(term)
return ProductModel.objects.all()
class AddFilterContextMixin:
"""
A mixin to enrich the render context by ``filter`` containing information
on how to render the filter set, supplied by attribute ``filter_class``.
"""
def get_renderer_context(self):
renderer_context = super().get_renderer_context()
if self.filter_class and renderer_context['request'].accepted_renderer.format == 'html':
# restrict filter set to products associated to this CMS page only
queryset = self.product_model.objects.filter(self.limit_choices_to)
queryset = CMSPagesFilterBackend().filter_queryset(self.request, queryset, self)
renderer_context['filter'] = self.filter_class.get_render_context(self.request, queryset)
return renderer_context
|
import pandas as pd
from arctic.date import DateRange, to_pandas_closed_closed
from ._chunker import Chunker, START, END
class DateChunker(Chunker):
TYPE = 'date'
def to_chunks(self, df, chunk_size='D', func=None, **kwargs):
"""
Chunk the dataframe/series by dates.
Parameters
----------
df: pandas dataframe or series
chunk_size: str
any valid Pandas frequency string
func: function
func will be applied to each `chunk` generated by the chunker.
This function CANNOT modify the date column of the dataframe!
Returns
-------
generator that produces tuples: (start date, end date,
chunk_size, dataframe/series)
"""
if 'date' in df.index.names:
dates = df.index.get_level_values('date')
if not df.index.is_monotonic_increasing:
df = df.sort_index()
elif 'date' in df.columns:
dates = pd.DatetimeIndex(df.date)
if not dates.is_monotonic_increasing:
# providing support for pandas 0.16.2 to 0.20.x
# neither sort method exists in both
try:
df = df.sort_values('date')
except AttributeError:
df = df.sort(columns='date')
dates = pd.DatetimeIndex(df.date)
else:
raise Exception("Data must be datetime indexed or have a column named 'date'")
period_obj = dates.to_period(chunk_size)
period_obj_reduced = period_obj.drop_duplicates()
count = 0
for _, g in df.groupby(period_obj._data):
start = period_obj_reduced[count].start_time.to_pydatetime(warn=False)
end = period_obj_reduced[count].end_time.to_pydatetime(warn=False)
count += 1
if func:
yield start, end, chunk_size, func(g)
else:
yield start, end, chunk_size, g
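# Illustrative usage sketch (hypothetical data): chunking a date-indexed frame
# by month yields one (start, end, chunk_size, sub-frame) tuple per period.
#
#   df = pd.DataFrame({'v': range(4)},
#                     index=pd.DatetimeIndex(['2016-01-01', '2016-01-15',
#                                             '2016-02-01', '2016-02-10'],
#                                            name='date'))
#   for start, end, size, chunk in DateChunker().to_chunks(df, chunk_size='M'):
#       ...  # two chunks: one for January 2016, one for February 2016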
def to_range(self, start, end):
"""
takes start, end from to_chunks and returns a "range" that can be used
as the argument to methods that require a chunk_range
returns
-------
A range object (dependent on type of chunker)
"""
return DateRange(start, end)
def chunk_to_str(self, chunk_id):
"""
Converts parts of a chunk range (start or end) to a string. These
chunk ids/indexes/markers are produced by to_chunks.
(See to_chunks)
returns
-------
bytes (ASCII-encoded string)
"""
return str(chunk_id).encode('ascii')
def to_mongo(self, range_obj):
"""
takes the range object used for this chunker type
and converts it into a query document that can be used in a
mongo query that filters by the range
returns
-------
dict
"""
if isinstance(range_obj, (pd.DatetimeIndex, tuple)):
range_obj = DateRange(range_obj[0], range_obj[-1])
if range_obj.start and range_obj.end:
return {'$and': [{START: {'$lte': range_obj.end}}, {END: {'$gte': range_obj.start}}]}
elif range_obj.start:
return {END: {'$gte': range_obj.start}}
elif range_obj.end:
return {START: {'$lte': range_obj.end}}
else:
return {}
def filter(self, data, range_obj):
"""
ensures data is properly subset to the range in range_obj.
(Depending on how the chunking is implemented, it might be possible
to specify a chunk range that reads out more than the actual range
eg: date range, chunked monthly. read out 2016-01-01 to 2016-01-02.
This will read ALL of January 2016 but it should be subset to just
the first two days)
returns
-------
data, filtered by range_obj
"""
if isinstance(range_obj, (pd.DatetimeIndex, tuple)):
range_obj = DateRange(range_obj[0], range_obj[-1])
range_obj = to_pandas_closed_closed(range_obj, add_tz=False)
start = range_obj.start
end = range_obj.end
if 'date' in data.index.names:
return data[start:end]
elif 'date' in data.columns:
if start and end:
return data[(data.date >= start) & (data.date <= end)]
elif start:
return data[(data.date >= start)]
elif end:
return data[(data.date <= end)]
else:
return data
else:
return data
def exclude(self, data, range_obj):
"""
Removes data within the bounds of the range object (inclusive)
returns
-------
data, filtered by range_obj
"""
if isinstance(range_obj, (pd.DatetimeIndex, tuple)):
range_obj = DateRange(range_obj[0], range_obj[-1])
if 'date' in data.index.names:
return data[(data.index.get_level_values('date') < range_obj.start) | (data.index.get_level_values('date') > range_obj.end)]
elif 'date' in data.columns:
return data[(data.date < range_obj.start) | (data.date > range_obj.end)]
else:
return data
|
from unittest.mock import Mock
import pandas as pd
import pytest
import pytz
from qstrader.portcon.order_sizer.dollar_weighted import (
DollarWeightedCashBufferedOrderSizer
)
@pytest.mark.parametrize(
"cash_buffer_perc,expected",
[
(-1.0, None),
(0.0, 0.0),
(0.5, 0.5),
(0.99, 0.99),
(1.0, 1.0),
(1.5, None)
]
)
def test_check_set_cash_buffer(cash_buffer_perc, expected):
"""
Checks that the cash buffer falls into the appropriate
range and raises otherwise.
"""
broker = Mock()
broker_portfolio_id = "1234"
data_handler = Mock()
if expected is None:
with pytest.raises(ValueError):
order_sizer = DollarWeightedCashBufferedOrderSizer(
broker, broker_portfolio_id, data_handler, cash_buffer_perc
)
else:
order_sizer = DollarWeightedCashBufferedOrderSizer(
broker, broker_portfolio_id, data_handler, cash_buffer_perc
)
assert order_sizer.cash_buffer_percentage == cash_buffer_perc
@pytest.mark.parametrize(
"weights,expected",
[
(
{'EQ:ABC': 0.2, 'EQ:DEF': 0.6},
{'EQ:ABC': 0.25, 'EQ:DEF': 0.75}
),
(
{'EQ:ABC': 0.5, 'EQ:DEF': 0.5},
{'EQ:ABC': 0.5, 'EQ:DEF': 0.5}
),
(
{'EQ:ABC': 0.01, 'EQ:DEF': 0.01},
{'EQ:ABC': 0.5, 'EQ:DEF': 0.5}
),
(
{'EQ:ABC': 0.1, 'EQ:DEF': 0.3, 'EQ:GHI': 0.02, 'EQ:JKL': 0.8},
{'EQ:ABC': 0.1 / 1.22, 'EQ:DEF': 0.3 / 1.22, 'EQ:GHI': 0.02 / 1.22, 'EQ:JKL': 0.8 / 1.22},
),
(
{'EQ:ABC': 0.0, 'EQ:DEF': 0.0},
{'EQ:ABC': 0.0, 'EQ:DEF': 0.0}
),
(
{'EQ:ABC': -0.2, 'EQ:DEF': 0.6},
None
),
]
)
def test_normalise_weights(weights, expected):
"""
Checks that the _normalise_weights method rescales the weights
to ensure that they sum to unity.
"""
broker = Mock()
broker_portfolio_id = "1234"
data_handler = Mock()
cash_buffer_perc = 0.05
order_sizer = DollarWeightedCashBufferedOrderSizer(
broker, broker_portfolio_id, data_handler, cash_buffer_perc
)
if expected is None:
with pytest.raises(ValueError):
result = order_sizer._normalise_weights(weights)
else:
result = order_sizer._normalise_weights(weights)
assert result == pytest.approx(expected)
@pytest.mark.parametrize(
"total_equity,cash_buffer_perc,weights,asset_prices,expected",
[
(
1e6,
0.05,
{'EQ:SPY': 0.5, 'EQ:AGG': 0.5},
{'EQ:SPY': 250.0, 'EQ:AGG': 150.0},
{'EQ:SPY': {'quantity': 1900}, 'EQ:AGG': {'quantity': 3166}}
),
(
325000.0,
0.15,
{'EQ:SPY': 0.6, 'EQ:AGG': 0.4},
{'EQ:SPY': 352.0, 'EQ:AGG': 178.0},
{'EQ:SPY': {'quantity': 470}, 'EQ:AGG': {'quantity': 620}}
),
(
687523.0,
0.025,
{'EQ:SPY': 0.05, 'EQ:AGG': 0.328, 'EQ:TLT': 0.842, 'EQ:GLD': 0.9113},
{'EQ:SPY': 1036.23, 'EQ:AGG': 456.55, 'EQ:TLT': 987.63, 'EQ:GLD': 14.76},
{
'EQ:SPY': {'quantity': 15},
'EQ:AGG': {'quantity': 225},
'EQ:TLT': {'quantity': 268},
'EQ:GLD': {'quantity': 19418},
}
)
]
)
def test_call(total_equity, cash_buffer_perc, weights, asset_prices, expected):
"""
Checks that the __call__ method correctly outputs the target
portfolio from a given set of weights and a timestamp.
"""
dt = pd.Timestamp('2019-01-01 15:00:00', tz=pytz.utc)
broker_portfolio_id = "1234"
broker = Mock()
broker.get_portfolio_total_equity.return_value = total_equity
broker.fee_model.calc_total_cost.return_value = 0.0
data_handler = Mock()
data_handler.get_asset_latest_ask_price.side_effect = lambda self, x: asset_prices[x]
order_sizer = DollarWeightedCashBufferedOrderSizer(
broker, broker_portfolio_id, data_handler, cash_buffer_perc
)
result = order_sizer(dt, weights)
assert result == expected
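# Worked example for the first parametrisation above: with total equity of
# 1,000,000 and a 5% cash buffer, 950,000 is investable; a 0.5 weight per asset
# allocates 475,000 each, i.e. floor(475000 / 250) = 1900 shares of EQ:SPY and
# floor(475000 / 150) = 3166 shares of EQ:AGG, matching the expected quantities.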
|
import requests
from django.core.cache import cache
from weblate import USER_AGENT
from weblate.logger import LOGGER
from weblate.utils.errors import report_error
def request(method, url, headers=None, **kwargs):
agent = {"User-Agent": USER_AGENT}
if headers:
headers.update(agent)
else:
headers = agent
response = requests.request(method, url, headers=headers, **kwargs)
response.raise_for_status()
return response
def get_uri_error(uri):
"""Return error for fetching the URL or None if it works."""
if uri.startswith("https://nonexisting.weblate.org/"):
return "Non existing test URL"
cache_key = f"uri-check-{uri}"
cached = cache.get(cache_key)
if cached:
LOGGER.debug("URL check for %s, cached success", uri)
return None
try:
with request("get", uri, stream=True):
cache.set(cache_key, True, 3600)
LOGGER.debug("URL check for %s, tested success", uri)
return None
except (
requests.exceptions.HTTPError,
requests.exceptions.ConnectionError,
) as error:
report_error(cause="URL check failed")
return str(error)
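# Illustrative usage sketch (hypothetical URL): get_uri_error() returns None for
# a reachable URL (caching the success for an hour) and the error text otherwise.
#
#   error = get_uri_error("https://example.com/docs")
#   if error is not None:
#       print(f"Broken link: {error}")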
|
import re
import sys
import glob
import os.path
import platform
import subprocess
import importlib
import collections
import enum
import datetime
import getpass
import functools
from typing import Mapping, Optional, Sequence, Tuple, cast
import attr
import pkg_resources
from PyQt5.QtCore import PYQT_VERSION_STR, QLibraryInfo
from PyQt5.QtNetwork import QSslSocket
from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
QOffscreenSurface)
from PyQt5.QtWidgets import QApplication
try:
from PyQt5.QtWebKit import qWebKitVersion
except ImportError: # pragma: no cover
qWebKitVersion = None # type: ignore[assignment] # noqa: N816
import qutebrowser
from qutebrowser.utils import log, utils, standarddir, usertypes, message
from qutebrowser.misc import objects, earlyinit, sql, httpclient, pastebin
from qutebrowser.browser import pdfjs
from qutebrowser.config import config
try:
from qutebrowser.browser.webengine import webenginesettings
except ImportError: # pragma: no cover
webenginesettings = None # type: ignore[assignment]
_LOGO = r'''
______ ,,
,.-"` | ,-` |
.^ || |
/ ,-*^| || |
; / | || ;-*```^*.
; ; | |;,-*` \
| | | ,-*` ,-"""\ \
| \ ,-"` ,-^`| \ |
\ `^^ ,-;| | ; |
*; ,-*` || | / ;;
`^^`` | || | ,^ /
| || `^^` ,^
| _,"| _,-"
-*` ****"""``
'''
@attr.s
class DistributionInfo:
"""Information about the running distribution."""
id: Optional[str] = attr.ib()
parsed: 'Distribution' = attr.ib()
version: Optional[Tuple[str, ...]] = attr.ib()
pretty: str = attr.ib()
pastebin_url = None
class Distribution(enum.Enum):
"""A known Linux distribution.
Usually lines up with ID=... in /etc/os-release.
"""
unknown = enum.auto()
ubuntu = enum.auto()
debian = enum.auto()
void = enum.auto()
arch = enum.auto()
gentoo = enum.auto() # includes funtoo
fedora = enum.auto()
opensuse = enum.auto()
linuxmint = enum.auto()
manjaro = enum.auto()
kde_flatpak = enum.auto() # org.kde.Platform
def distribution() -> Optional[DistributionInfo]:
"""Get some information about the running Linux distribution.
Returns:
A DistributionInfo object, or None if no info could be determined.
parsed: A Distribution enum member
version: A Version object, or None
pretty: Always a string (might be "Unknown")
"""
filename = os.environ.get('QUTE_FAKE_OS_RELEASE', '/etc/os-release')
info = {}
try:
with open(filename, 'r', encoding='utf-8') as f:
for line in f:
line = line.strip()
if (not line) or line.startswith('#') or '=' not in line:
continue
k, v = line.split("=", maxsplit=1)
info[k] = v.strip('"')
except (OSError, UnicodeDecodeError):
return None
pretty = info.get('PRETTY_NAME', None)
if pretty in ['Linux', None]: # Funtoo has PRETTY_NAME=Linux
pretty = info.get('NAME', 'Unknown')
assert pretty is not None
if 'VERSION_ID' in info:
dist_version: Optional[Tuple[str, ...]] = pkg_resources.parse_version(
info['VERSION_ID'])
else:
dist_version = None
dist_id = info.get('ID', None)
id_mappings = {
'funtoo': 'gentoo', # does not have ID_LIKE=gentoo
'org.kde.Platform': 'kde_flatpak',
}
parsed = Distribution.unknown
if dist_id is not None:
try:
parsed = Distribution[id_mappings.get(dist_id, dist_id)]
except KeyError:
pass
return DistributionInfo(parsed=parsed, version=dist_version, pretty=pretty,
id=dist_id)
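# Illustrative sketch of the parsing above (hypothetical /etc/os-release):
#
#   ID=ubuntu
#   VERSION_ID="20.04"
#   PRETTY_NAME="Ubuntu 20.04.1 LTS"
#
# yields DistributionInfo(id='ubuntu', parsed=Distribution.ubuntu,
# version=pkg_resources.parse_version('20.04'), pretty='Ubuntu 20.04.1 LTS').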
def is_sandboxed() -> bool:
"""Whether the environment has restricted access to the host system."""
current_distro = distribution()
if current_distro is None:
return False
return current_distro.parsed == Distribution.kde_flatpak
def _git_str() -> Optional[str]:
"""Try to find out git version.
Return:
string containing the git commit ID.
None if there was an error or we're not in a git repo.
"""
# First try via subprocess if possible
commit = None
if not hasattr(sys, "frozen"):
try:
gitpath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
os.path.pardir, os.path.pardir)
except (NameError, OSError):
log.misc.exception("Error while getting git path")
else:
commit = _git_str_subprocess(gitpath)
if commit is not None:
return commit
# If that fails, check the git-commit-id file.
try:
return utils.read_file('git-commit-id')
except (OSError, ImportError):
return None
def _call_git(gitpath: str, *args: str) -> str:
"""Call a git subprocess."""
return subprocess.run(
['git'] + list(args),
cwd=gitpath, check=True,
stdout=subprocess.PIPE).stdout.decode('UTF-8').strip()
def _git_str_subprocess(gitpath: str) -> Optional[str]:
"""Try to get the git commit ID and timestamp by calling git.
Args:
gitpath: The path where the .git folder is.
Return:
The ID/timestamp on success, None on failure.
"""
if not os.path.isdir(os.path.join(gitpath, ".git")):
return None
try:
# https://stackoverflow.com/questions/21017300/21017394#21017394
commit_hash = _call_git(gitpath, 'describe', '--match=NeVeRmAtCh',
'--always', '--dirty')
date = _call_git(gitpath, 'show', '-s', '--format=%ci', 'HEAD')
branch = _call_git(gitpath, 'rev-parse', '--abbrev-ref', 'HEAD')
return '{} on {} ({})'.format(commit_hash, branch, date)
except (subprocess.CalledProcessError, OSError):
return None
def _release_info() -> Sequence[Tuple[str, str]]:
"""Try to gather distribution release information.
Return:
list of (filename, content) tuples.
"""
blacklisted = ['ANSI_COLOR=', 'HOME_URL=', 'SUPPORT_URL=',
'BUG_REPORT_URL=']
data = []
for fn in glob.glob("/etc/*-release"):
lines = []
try:
with open(fn, 'r', encoding='utf-8') as f:
for line in f.read().strip().splitlines():
if not any(line.startswith(bl) for bl in blacklisted):
lines.append(line)
if lines:
data.append((fn, '\n'.join(lines)))
except OSError:
log.misc.exception("Error while reading {}.".format(fn))
return data
def _module_versions() -> Sequence[str]:
"""Get versions of optional modules.
Return:
A list of lines with version info.
"""
lines = []
modules: Mapping[str, Sequence[str]] = collections.OrderedDict([
('sip', ['SIP_VERSION_STR']),
('colorama', ['VERSION', '__version__']),
('pypeg2', ['__version__']),
('jinja2', ['__version__']),
('pygments', ['__version__']),
('yaml', ['__version__']),
('cssutils', ['__version__']),
('attr', ['__version__']),
('PyQt5.QtWebEngineWidgets', []),
('PyQt5.QtWebEngine', ['PYQT_WEBENGINE_VERSION_STR']),
('PyQt5.QtWebKitWidgets', []),
])
for modname, attributes in modules.items():
try:
module = importlib.import_module(modname)
except (ImportError, ValueError):
text = '{}: no'.format(modname)
else:
for name in attributes:
try:
text = '{}: {}'.format(modname, getattr(module, name))
except AttributeError:
pass
else:
break
else:
text = '{}: yes'.format(modname)
lines.append(text)
return lines
def _path_info() -> Mapping[str, str]:
"""Get info about important path names.
Return:
A dictionary of descriptive to actual path names.
"""
info = {
'config': standarddir.config(),
'data': standarddir.data(),
'cache': standarddir.cache(),
'runtime': standarddir.runtime(),
}
if standarddir.config() != standarddir.config(auto=True):
info['auto config'] = standarddir.config(auto=True)
if standarddir.data() != standarddir.data(system=True):
info['system data'] = standarddir.data(system=True)
return info
def _os_info() -> Sequence[str]:
"""Get operating system info.
Return:
A list of lines with version info.
"""
lines = []
releaseinfo = None
if utils.is_linux:
osver = ''
releaseinfo = _release_info()
elif utils.is_windows:
osver = ', '.join(platform.win32_ver())
elif utils.is_mac:
release, info_tpl, machine = platform.mac_ver()
if all(not e for e in info_tpl):
versioninfo = ''
else:
versioninfo = '.'.join(info_tpl)
osver = ', '.join(e for e in [release, versioninfo, machine] if e)
elif utils.is_posix:
osver = ' '.join(platform.uname())
else:
osver = '?'
lines.append('OS Version: {}'.format(osver))
if releaseinfo is not None:
for (fn, data) in releaseinfo:
lines += ['', '--- {} ---'.format(fn), data]
return lines
def _pdfjs_version() -> str:
"""Get the pdf.js version.
Return:
A string with the version number.
"""
try:
pdfjs_file, file_path = pdfjs.get_pdfjs_res_and_path('build/pdf.js')
except pdfjs.PDFJSNotFound:
return 'no'
else:
pdfjs_file = pdfjs_file.decode('utf-8')
version_re = re.compile(
r"^ *(PDFJS\.version|(var|const) pdfjsVersion) = '(?P<version>[^']+)';$",
re.MULTILINE)
match = version_re.search(pdfjs_file)
pdfjs_version = 'unknown' if not match else match.group('version')
if file_path is None:
file_path = 'bundled'
return '{} ({})'.format(pdfjs_version, file_path)
def _chromium_version() -> str:
"""Get the Chromium version for QtWebEngine.
This can also be checked by looking at this file with the right Qt tag:
http://code.qt.io/cgit/qt/qtwebengine.git/tree/tools/scripts/version_resolver.py#n41
Quick reference:
Qt 5.12: Chromium 69
(LTS) 69.0.3497.128 (~2018-09-11)
5.12.0: Security fixes up to 70.0.3538.102 (~2018-10-24)
5.12.1: Security fixes up to 71.0.3578.94 (2018-12-12)
5.12.2: Security fixes up to 72.0.3626.121 (2019-03-01)
5.12.3: Security fixes up to 73.0.3683.75 (2019-03-12)
5.12.4: Security fixes up to 74.0.3729.157 (2019-05-14)
5.12.5: Security fixes up to 76.0.3809.87 (2019-07-30)
5.12.6: Security fixes up to 77.0.3865.120 (~2019-09-10)
5.12.7: Security fixes up to 79.0.3945.130 (2020-01-16)
5.12.8: Security fixes up to 80.0.3987.149 (2020-03-18)
5.12.9: Security fixes up to 83.0.4103.97 (2020-06-03)
5.12.10: Security fixes up to 86.0.4240.75 (2020-10-06)
Qt 5.13: Chromium 73
73.0.3683.105 (~2019-02-28)
5.13.0: Security fixes up to 74.0.3729.157 (2019-05-14)
5.13.1: Security fixes up to 76.0.3809.87 (2019-07-30)
5.13.2: Security fixes up to 77.0.3865.120 (2019-10-10)
Qt 5.14: Chromium 77
77.0.3865.129 (~2019-10-10)
5.14.0: Security fixes up to 77.0.3865.129 (~2019-09-10)
5.14.1: Security fixes up to 79.0.3945.117 (2020-01-07)
5.14.2: Security fixes up to 80.0.3987.132 (2020-03-03)
Qt 5.15: Chromium 80
80.0.3987.163 (2020-04-02)
5.15.0: Security fixes up to 81.0.4044.138 (2020-05-05)
5.15.1: Security fixes up to 85.0.4183.83 (2020-08-25)
5.15.2: Updated to 83.0.4103.122 (~2020-06-24)
Security fixes up to 86.0.4240.111 (2020-10-20)
Also see:
- https://chromiumdash.appspot.com/schedule
- https://www.chromium.org/developers/calendar
- https://chromereleases.googleblog.com/
"""
if webenginesettings is None:
return 'unavailable' # type: ignore[unreachable]
if webenginesettings.parsed_user_agent is None:
if 'avoid-chromium-init' in objects.debug_flags:
return 'avoided'
webenginesettings.init_user_agent()
assert webenginesettings.parsed_user_agent is not None
return webenginesettings.parsed_user_agent.upstream_browser_version
def _backend() -> str:
"""Get the backend line with relevant information."""
if objects.backend == usertypes.Backend.QtWebKit:
return 'new QtWebKit (WebKit {})'.format(qWebKitVersion())
elif objects.backend == usertypes.Backend.QtWebEngine:
webengine = usertypes.Backend.QtWebEngine
assert objects.backend == webengine, objects.backend
return 'QtWebEngine (Chromium {})'.format(_chromium_version())
raise utils.Unreachable(objects.backend)
def _uptime() -> datetime.timedelta:
launch_time = QApplication.instance().launch_time
time_delta = datetime.datetime.now() - launch_time
# Round off microseconds
time_delta -= datetime.timedelta(microseconds=time_delta.microseconds)
return time_delta
def _autoconfig_loaded() -> str:
return "yes" if config.instance.yaml_loaded else "no"
def _config_py_loaded() -> str:
if config.instance.config_py_loaded:
return "{} has been loaded".format(standarddir.config_py())
else:
return "no config.py was loaded"
def version_info() -> str:
"""Return a string with various version information."""
lines = _LOGO.lstrip('\n').splitlines()
lines.append("qutebrowser v{}".format(qutebrowser.__version__))
gitver = _git_str()
if gitver is not None:
lines.append("Git commit: {}".format(gitver))
lines.append('Backend: {}'.format(_backend()))
lines.append('Qt: {}'.format(earlyinit.qt_version()))
lines += [
'',
'{}: {}'.format(platform.python_implementation(),
platform.python_version()),
'PyQt: {}'.format(PYQT_VERSION_STR),
'',
]
lines += _module_versions()
lines += [
'pdf.js: {}'.format(_pdfjs_version()),
'sqlite: {}'.format(sql.version()),
'QtNetwork SSL: {}\n'.format(QSslSocket.sslLibraryVersionString()
if QSslSocket.supportsSsl() else 'no'),
]
qapp = QApplication.instance()
if qapp:
style = qapp.style()
lines.append('Style: {}'.format(style.metaObject().className()))
lines.append('Platform plugin: {}'.format(qapp.platformName()))
lines.append('OpenGL: {}'.format(opengl_info()))
importpath = os.path.dirname(os.path.abspath(qutebrowser.__file__))
lines += [
'Platform: {}, {}'.format(platform.platform(),
platform.architecture()[0]),
]
dist = distribution()
if dist is not None:
lines += [
'Linux distribution: {} ({})'.format(dist.pretty, dist.parsed.name)
]
lines += [
'Frozen: {}'.format(hasattr(sys, 'frozen')),
"Imported from {}".format(importpath),
"Using Python from {}".format(sys.executable),
"Qt library executable path: {}, data path: {}".format(
QLibraryInfo.location(QLibraryInfo.LibraryExecutablesPath),
QLibraryInfo.location(QLibraryInfo.DataPath)
)
]
if not dist or dist.parsed == Distribution.unknown:
lines += _os_info()
lines += [
'',
'Paths:',
]
for name, path in sorted(_path_info().items()):
lines += ['{}: {}'.format(name, path)]
lines += [
'',
'Autoconfig loaded: {}'.format(_autoconfig_loaded()),
'Config.py: {}'.format(_config_py_loaded()),
'Uptime: {}'.format(_uptime())
]
return '\n'.join(lines)
@attr.s
class OpenGLInfo:
"""Information about the OpenGL setup in use."""
# If we're using OpenGL ES. If so, no further information is available.
gles: bool = attr.ib(False)
# The name of the vendor. Examples:
# - nouveau
# - "Intel Open Source Technology Center", "Intel", "Intel Inc."
vendor: Optional[str] = attr.ib(None)
# The OpenGL version as a string. See tests for examples.
version_str: Optional[str] = attr.ib(None)
# The parsed version as a (major, minor) tuple of ints
version: Optional[Tuple[int, ...]] = attr.ib(None)
# The vendor specific information following the version number
vendor_specific: Optional[str] = attr.ib(None)
def __str__(self) -> str:
if self.gles:
return 'OpenGL ES'
return '{}, {}'.format(self.vendor, self.version_str)
@classmethod
def parse(cls, *, vendor: str, version: str) -> 'OpenGLInfo':
"""Parse OpenGL version info from a string.
The arguments should be the strings returned by OpenGL for GL_VENDOR
and GL_VERSION, respectively.
According to the OpenGL reference, the version string should have the
following format:
<major>.<minor>[.<release>] <vendor-specific info>
"""
if ' ' not in version:
log.misc.warning("Failed to parse OpenGL version (missing space): "
"{}".format(version))
return cls(vendor=vendor, version_str=version)
num_str, vendor_specific = version.split(' ', maxsplit=1)
try:
parsed_version = tuple(int(i) for i in num_str.split('.'))
except ValueError:
log.misc.warning("Failed to parse OpenGL version (parsing int): "
"{}".format(version))
return cls(vendor=vendor, version_str=version)
return cls(vendor=vendor, version_str=version,
version=parsed_version, vendor_specific=vendor_specific)
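# Illustrative sketch (hypothetical driver strings):
#
#   OpenGLInfo.parse(vendor='Intel', version='3.0 Mesa 20.0.8')
#
# yields version=(3, 0) and vendor_specific='Mesa 20.0.8'; a version string
# without a space, or with a non-numeric prefix, falls back to storing only
# version_str.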
@functools.lru_cache(maxsize=1)
def opengl_info() -> Optional[OpenGLInfo]: # pragma: no cover
"""Get the OpenGL vendor used.
This returns a string such as 'nouveau' or
'Intel Open Source Technology Center'; or None if the vendor can't be
determined.
"""
assert QApplication.instance()
# Some setups can segfault in here if we don't do this.
utils.libgl_workaround()
override = os.environ.get('QUTE_FAKE_OPENGL')
if override is not None:
log.init.debug("Using override {}".format(override))
vendor, version = override.split(', ', maxsplit=1)
return OpenGLInfo.parse(vendor=vendor, version=version)
old_context = cast(Optional[QOpenGLContext], QOpenGLContext.currentContext())
old_surface = None if old_context is None else old_context.surface()
surface = QOffscreenSurface()
surface.create()
ctx = QOpenGLContext()
ok = ctx.create()
if not ok:
log.init.debug("Creating context failed!")
return None
ok = ctx.makeCurrent(surface)
if not ok:
log.init.debug("Making context current failed!")
return None
try:
if ctx.isOpenGLES():
# Can't use versionFunctions there
return OpenGLInfo(gles=True)
vp = QOpenGLVersionProfile()
vp.setVersion(2, 0)
try:
vf = ctx.versionFunctions(vp)
except ImportError as e:
log.init.debug("Importing version functions failed: {}".format(e))
return None
if vf is None:
log.init.debug("Getting version functions failed!")
return None
vendor = vf.glGetString(vf.GL_VENDOR)
version = vf.glGetString(vf.GL_VERSION)
return OpenGLInfo.parse(vendor=vendor, version=version)
finally:
ctx.doneCurrent()
if old_context and old_surface:
old_context.makeCurrent(old_surface)
def pastebin_version(pbclient: pastebin.PastebinClient = None) -> None:
"""Pastebin the version and log the url to messages."""
def _yank_url(url: str) -> None:
utils.set_clipboard(url)
message.info("Version url {} yanked to clipboard.".format(url))
def _on_paste_version_success(url: str) -> None:
assert pbclient is not None
global pastebin_url
url = url.strip()
_yank_url(url)
pbclient.deleteLater()
pastebin_url = url
def _on_paste_version_err(text: str) -> None:
assert pbclient is not None
message.error("Failed to pastebin version"
" info: {}".format(text))
pbclient.deleteLater()
if pastebin_url:
_yank_url(pastebin_url)
return
app = QApplication.instance()
http_client = httpclient.HTTPClient()
misc_api = pastebin.PastebinClient.MISC_API_URL
pbclient = pbclient or pastebin.PastebinClient(http_client, parent=app,
api_url=misc_api)
pbclient.success.connect(_on_paste_version_success)
pbclient.error.connect(_on_paste_version_err)
pbclient.paste(getpass.getuser(),
"qute version info {}".format(qutebrowser.__version__),
version_info(),
private=True)
|
from collections import namedtuple
import logging
import requests
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_TYPE,
CONF_USERNAME,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_TYPE = "rogers"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE): cv.string,
}
)
def get_scanner(_hass, config):
"""Validate the configuration and return a Hitron CODA-4582U scanner."""
scanner = HitronCODADeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
Device = namedtuple("Device", ["mac", "name"])
class HitronCODADeviceScanner(DeviceScanner):
"""This class scans for devices using the CODA's web interface."""
def __init__(self, config):
"""Initialize the scanner."""
self.last_results = []
host = config[CONF_HOST]
self._url = f"http://{host}/data/getConnectInfo.asp"
self._loginurl = f"http://{host}/goform/login"
self._username = config.get(CONF_USERNAME)
self._password = config.get(CONF_PASSWORD)
if config.get(CONF_TYPE) == "shaw":
self._type = "pwd"
else:
self._type = "pws"
self._userid = None
self.success_init = self._update_info()
_LOGGER.info("Scanner initialized")
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [device.mac for device in self.last_results]
def get_device_name(self, device):
"""Return the name of the device with the given MAC address."""
name = next(
(result.name for result in self.last_results if result.mac == device), None
)
return name
def _login(self):
"""Log in to the router. This is required for subsequent api calls."""
_LOGGER.info("Logging in to CODA...")
try:
data = [("user", self._username), (self._type, self._password)]
res = requests.post(self._loginurl, data=data, timeout=10)
except requests.exceptions.Timeout:
_LOGGER.error("Connection to the router timed out at URL %s", self._url)
return False
if res.status_code != HTTP_OK:
_LOGGER.error("Connection failed with http code %s", res.status_code)
return False
try:
self._userid = res.cookies["userid"]
return True
except KeyError:
_LOGGER.error("Failed to log in to router")
return False
def _update_info(self):
"""Get ARP from router."""
_LOGGER.info("Fetching...")
if self._userid is None:
if not self._login():
_LOGGER.error("Could not obtain a user ID from the router")
return False
last_results = []
# doing a request
try:
res = requests.get(self._url, timeout=10, cookies={"userid": self._userid})
except requests.exceptions.Timeout:
_LOGGER.error("Connection to the router timed out at URL %s", self._url)
return False
if res.status_code != HTTP_OK:
_LOGGER.error("Connection failed with http code %s", res.status_code)
return False
try:
result = res.json()
except ValueError:
# If json decoder could not parse the response
_LOGGER.error("Failed to parse response from router")
return False
# parsing response
for info in result:
mac = info["macAddr"]
name = info["hostName"]
# No address = no item :)
if mac is None:
continue
last_results.append(Device(mac.upper(), name))
self.last_results = last_results
_LOGGER.info("Request successful")
return True
|
import numpy as np
from numpy.polynomial.legendre import legval
from scipy import linalg
from ..utils import logger, warn, verbose
from ..io.meas_info import _simplify_info
from ..io.pick import pick_types, pick_channels, pick_info
from ..surface import _normalize_vectors
from ..forward import _map_meg_or_eeg_channels
from ..utils import _check_option, _validate_type
def _calc_h(cosang, stiffness=4, n_legendre_terms=50):
"""Calculate spherical spline h function between points on a sphere.
Parameters
----------
cosang : array-like | float
cosine of angles between pairs of points on a spherical surface. This
is equivalent to the dot product of unit vectors.
stiffness : float
stiffness of the spline. Also referred to as ``m``.
n_legendre_terms : int
number of Legendre terms to evaluate.
"""
factors = [(2 * n + 1) /
(n ** (stiffness - 1) * (n + 1) ** (stiffness - 1) * 4 * np.pi)
for n in range(1, n_legendre_terms + 1)]
return legval(cosang, [0] + factors)
def _calc_g(cosang, stiffness=4, n_legendre_terms=50):
"""Calculate spherical spline g function between points on a sphere.
Parameters
----------
cosang : array-like of float, shape(n_channels, n_channels)
cosine of angles between pairs of points on a spherical surface. This
is equivalent to the dot product of unit vectors.
stiffness : float
stiffness of the spline.
n_legendre_terms : int
number of Legendre terms to evaluate.
Returns
-------
G : np.ndarray of float, shape(n_channels, n_channels)
The G matrix.
"""
factors = [(2 * n + 1) / (n ** stiffness * (n + 1) ** stiffness *
4 * np.pi)
for n in range(1, n_legendre_terms + 1)]
return legval(cosang, [0] + factors)
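# For reference, _calc_g evaluates the spherical spline kernel of Perrin et al.
# (1989):
#
#   g_m(x) = 1 / (4 * pi) * sum_{n=1}^{N} (2n + 1) / (n * (n + 1))**m * P_n(x)
#
# where x is the cosine of the angle between two sensors, m is ``stiffness``,
# N is ``n_legendre_terms`` and P_n are the Legendre polynomials (evaluated via
# numpy's legval). _calc_h uses the exponent m - 1 instead of m.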
def _make_interpolation_matrix(pos_from, pos_to, alpha=1e-5):
"""Compute interpolation matrix based on spherical splines.
Implementation based on [1]
Parameters
----------
pos_from : np.ndarray of float, shape(n_good_sensors, 3)
The positions to interpolate from.
pos_to : np.ndarray of float, shape(n_bad_sensors, 3)
The positions to interpolate to.
alpha : float
Regularization parameter. Defaults to 1e-5.
Returns
-------
interpolation : np.ndarray of float, shape(len(pos_from), len(pos_to))
The interpolation matrix that maps good signals to the location
of bad signals.
References
----------
[1] Perrin, F., Pernier, J., Bertrand, O. and Echallier, JF. (1989).
Spherical splines for scalp potential and current density mapping.
Electroencephalography Clinical Neurophysiology, Feb; 72(2):184-7.
"""
pos_from = pos_from.copy()
pos_to = pos_to.copy()
n_from = pos_from.shape[0]
n_to = pos_to.shape[0]
# normalize sensor positions to sphere
_normalize_vectors(pos_from)
_normalize_vectors(pos_to)
# cosine angles between source positions
cosang_from = pos_from.dot(pos_from.T)
cosang_to_from = pos_to.dot(pos_from.T)
G_from = _calc_g(cosang_from)
G_to_from = _calc_g(cosang_to_from)
assert G_from.shape == (n_from, n_from)
assert G_to_from.shape == (n_to, n_from)
if alpha is not None:
G_from.flat[::len(G_from) + 1] += alpha
C = np.vstack([np.hstack([G_from, np.ones((n_from, 1))]),
np.hstack([np.ones((1, n_from)), [[0]]])])
C_inv = linalg.pinv(C)
interpolation = np.hstack([G_to_from, np.ones((n_to, 1))]) @ C_inv[:, :-1]
assert interpolation.shape == (n_to, n_from)
return interpolation
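# The matrix computed above is applied as data_bad ≈ interpolation @ data_good
# (see _do_interp_dots below), i.e. each bad-channel signal is reconstructed as
# a weighted combination of the good-channel signals.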
def _do_interp_dots(inst, interpolation, goods_idx, bads_idx):
"""Dot product of channel mapping matrix to channel data."""
from ..io.base import BaseRaw
from ..epochs import BaseEpochs
from ..evoked import Evoked
_validate_type(inst, (BaseRaw, BaseEpochs, Evoked), 'inst')
inst._data[..., bads_idx, :] = np.matmul(
interpolation, inst._data[..., goods_idx, :])
@verbose
def _interpolate_bads_eeg(inst, origin, verbose=None):
"""Interpolate bad EEG channels.
Operates in place.
Parameters
----------
inst : mne.io.Raw, mne.Epochs or mne.Evoked
The data to interpolate. Must be preloaded.
"""
bads_idx = np.zeros(len(inst.ch_names), dtype=bool)
goods_idx = np.zeros(len(inst.ch_names), dtype=bool)
picks = pick_types(inst.info, meg=False, eeg=True, exclude=[])
inst.info._check_consistency()
bads_idx[picks] = [inst.ch_names[ch] in inst.info['bads'] for ch in picks]
if len(picks) == 0 or bads_idx.sum() == 0:
return
goods_idx[picks] = True
goods_idx[bads_idx] = False
pos = inst._get_channel_positions(picks)
# Make sure only EEG are used
bads_idx_pos = bads_idx[picks]
goods_idx_pos = goods_idx[picks]
# test spherical fit
distance = np.linalg.norm(pos - origin, axis=-1)
distance = np.mean(distance / np.mean(distance))
if np.abs(1. - distance) > 0.1:
warn('Your spherical fit is poor, interpolation results are '
'likely to be inaccurate.')
pos_good = pos[goods_idx_pos] - origin
pos_bad = pos[bads_idx_pos] - origin
logger.info('Computing interpolation matrix from {} sensor '
'positions'.format(len(pos_good)))
interpolation = _make_interpolation_matrix(pos_good, pos_bad)
logger.info('Interpolating {} sensors'.format(len(pos_bad)))
_do_interp_dots(inst, interpolation, goods_idx, bads_idx)
def _interpolate_bads_meg(inst, mode='accurate', origin=(0., 0., 0.04),
verbose=None, ref_meg=False):
return _interpolate_bads_meeg(
inst, mode, origin, ref_meg=ref_meg, eeg=False, verbose=verbose)
@verbose
def _interpolate_bads_meeg(inst, mode='accurate', origin=(0., 0., 0.04),
meg=True, eeg=True, ref_meg=False, verbose=None):
"""Interpolate bad channels from data in good channels.
Parameters
----------
inst : mne.io.Raw, mne.Epochs or mne.Evoked
The data to interpolate. Must be preloaded.
mode : str
Either `'accurate'` or `'fast'`, determines the quality of the
Legendre polynomial expansion used for interpolation. `'fast'` should
be sufficient for most applications.
origin : array-like, shape (3,) | str
Origin of the sphere in the head coordinate frame and in meters.
Can be ``'auto'``, which means a head-digitization-based origin
fit. Default is ``(0., 0., 0.04)``.
%(verbose)s
ref_meg : bool
Should always be False; only exists for testing purpose.
meg : bool
If True, interpolate bad MEG channels.
eeg : bool
If True, interpolate bad EEG channels.
"""
bools = dict(meg=meg, eeg=eeg)
info = _simplify_info(inst.info)
for ch_type, do in bools.items():
if not do:
continue
kw = dict(meg=False, eeg=False)
kw[ch_type] = True
picks_type = pick_types(info, ref_meg=ref_meg, exclude=[], **kw)
picks_good = pick_types(info, ref_meg=ref_meg, exclude='bads', **kw)
use_ch_names = [inst.info['ch_names'][p] for p in picks_type]
bads_type = [ch for ch in inst.info['bads'] if ch in use_ch_names]
if len(bads_type) == 0 or len(picks_type) == 0:
continue
# select the bad channels to be interpolated
picks_bad = pick_channels(inst.info['ch_names'], bads_type,
exclude=[])
if ch_type == 'eeg':
picks_to = picks_type
bad_sel = np.in1d(picks_type, picks_bad)
else:
picks_to = picks_bad
bad_sel = slice(None)
info_from = pick_info(inst.info, picks_good)
info_to = pick_info(inst.info, picks_to)
mapping = _map_meg_or_eeg_channels(
info_from, info_to, mode=mode, origin=origin)
mapping = mapping[bad_sel]
_do_interp_dots(inst, mapping, picks_good, picks_bad)
@verbose
def _interpolate_bads_nirs(inst, method='nearest', verbose=None):
"""Interpolate bad nirs channels. Simply replaces by closest non bad.
Parameters
----------
inst : mne.io.Raw, mne.Epochs or mne.Evoked
The data to interpolate. Must be preloaded.
method : str
Only the method 'nearest' is currently available. This method replaces
each bad channel with the nearest non-bad channel.
%(verbose)s
"""
from scipy.spatial.distance import pdist, squareform
from mne.preprocessing.nirs import _channel_frequencies,\
_check_channels_ordered
# Returns pick of all nirs and ensures channels are correctly ordered
freqs = np.unique(_channel_frequencies(inst))
picks_nirs = _check_channels_ordered(inst, freqs)
if len(picks_nirs) == 0:
return
nirs_ch_names = [inst.info['ch_names'][p] for p in picks_nirs]
bads_nirs = [ch for ch in inst.info['bads'] if ch in nirs_ch_names]
if len(bads_nirs) == 0:
return
picks_bad = pick_channels(inst.info['ch_names'], bads_nirs, exclude=[])
bads_mask = [p in picks_bad for p in picks_nirs]
chs = [inst.info['chs'][i] for i in picks_nirs]
locs3d = np.array([ch['loc'][:3] for ch in chs])
_check_option('fnirs_method', method, ['nearest'])
if method == 'nearest':
dist = pdist(locs3d)
dist = squareform(dist)
for bad in picks_bad:
dists_to_bad = dist[bad]
# Ignore distances to self
dists_to_bad[dists_to_bad == 0] = np.inf
# Ignore distances to other bad channels
dists_to_bad[bads_mask] = np.inf
# Find closest remaining channels for same frequency
closest_idx = np.argmin(dists_to_bad) + (bad % 2)
inst._data[bad] = inst._data[closest_idx]
inst.info['bads'] = []
return inst
|
import contextlib
import logging
import urllib.parse
import pytest
asyncio = pytest.importorskip("asyncio")
aiohttp = pytest.importorskip("aiohttp")
import vcr # noqa: E402
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
def run_in_loop(fn):
with contextlib.closing(asyncio.new_event_loop()) as loop:
asyncio.set_event_loop(loop)
task = loop.create_task(fn(loop))
return loop.run_until_complete(task)
def request(method, url, output="text", **kwargs):
def run(loop):
return aiohttp_request(loop, method, url, output=output, **kwargs)
return run_in_loop(run)
def get(url, output="text", **kwargs):
return request("GET", url, output=output, **kwargs)
def post(url, output="text", **kwargs):
return request("POST", url, output="text", **kwargs)
@pytest.fixture(params=["https", "http"])
def scheme(request):
"""Fixture that returns both http and https."""
return request.param
def test_status(tmpdir, scheme):
url = scheme + "://httpbin.org"
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("status.yaml"))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == response.status
assert cassette.play_count == 1
@pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")])
def test_headers(tmpdir, scheme, auth):
url = scheme + "://httpbin.org"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
response, _ = get(url, auth=auth)
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cassette:
if auth is not None:
request = cassette.requests[0]
assert "AUTHORIZATION" in request.headers
cassette_response, _ = get(url, auth=auth)
assert dict(cassette_response.headers) == dict(response.headers)
assert cassette.play_count == 1
assert "istr" not in cassette.data[0]
assert "yarl.URL" not in cassette.data[0]
def test_case_insensitive_headers(tmpdir, scheme):
url = scheme + "://httpbin.org"
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
_, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))) as cassette:
cassette_response, _ = get(url)
assert "Content-Type" in cassette_response.headers
assert "content-type" in cassette_response.headers
assert cassette.play_count == 1
def test_text(tmpdir, scheme):
url = scheme + "://httpbin.org"
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
_, response_text = get(url)
with vcr.use_cassette(str(tmpdir.join("text.yaml"))) as cassette:
_, cassette_response_text = get(url)
assert cassette_response_text == response_text
assert cassette.play_count == 1
def test_json(tmpdir, scheme):
url = scheme + "://httpbin.org/get"
headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
_, response_json = get(url, output="json", headers=headers)
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", headers=headers)
assert cassette_response_json == response_json
assert cassette.play_count == 1
def test_binary(tmpdir, scheme):
url = scheme + "://httpbin.org/image/png"
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))):
_, response_binary = get(url, output="raw")
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))) as cassette:
_, cassette_response_binary = get(url, output="raw")
assert cassette_response_binary == response_binary
assert cassette.play_count == 1
def test_stream(tmpdir, scheme):
url = scheme + "://httpbin.org/get"
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))):
resp, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))) as cassette:
cassette_resp, cassette_body = get(url, output="stream")
assert cassette_body == body
assert cassette.play_count == 1
@pytest.mark.parametrize("body", ["data", "json"])
def test_post(tmpdir, scheme, body, caplog):
caplog.set_level(logging.INFO)
data = {"key1": "value1", "key2": "value2"}
url = scheme + "://httpbin.org/post"
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
_, response_json = post(url, **{body: data})
with vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette:
request = cassette.requests[0]
assert request.body == data
_, cassette_response_json = post(url, **{body: data})
assert cassette_response_json == response_json
assert cassette.play_count == 1
assert next(
(
log
for log in caplog.records
if log.getMessage() == "<Request (POST) {}> not in cassette, sending to real server".format(url)
),
None,
), "Log message not found."
def test_params(tmpdir, scheme):
url = scheme + "://httpbin.org/get"
headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": False, "c": "c"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", params=params, headers=headers)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
assert cassette_response_json == response_json
assert cassette.play_count == 1
def test_params_same_url_distinct_params(tmpdir, scheme):
url = scheme + "://httpbin.org/get"
headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": False, "c": "c"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", params=params, headers=headers)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
assert cassette_response_json == response_json
assert cassette.play_count == 1
other_params = {"other": "params"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
get(url, output="text", params=other_params)
def test_params_on_url(tmpdir, scheme):
url = scheme + "://httpbin.org/get?a=1&b=foo"
headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", headers=headers)
request = cassette.requests[0]
assert request.url == url
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", headers=headers)
request = cassette.requests[0]
assert request.url == url
assert cassette_response_json == response_json
assert cassette.play_count == 1
def test_aiohttp_test_client(aiohttp_client, tmpdir):
loop = asyncio.get_event_loop()
app = aiohttp_app()
url = "/"
client = loop.run_until_complete(aiohttp_client(app))
with vcr.use_cassette(str(tmpdir.join("get.yaml"))):
response = loop.run_until_complete(client.get(url))
assert response.status == 200
response_text = loop.run_until_complete(response.text())
assert response_text == "hello"
response_text = loop.run_until_complete(response.text(errors="replace"))
assert response_text == "hello"
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
response = loop.run_until_complete(client.get(url))
request = cassette.requests[0]
assert request.url == str(client.make_url(url))
response_text = loop.run_until_complete(response.text())
assert response_text == "hello"
assert cassette.play_count == 1
def test_aiohttp_test_client_json(aiohttp_client, tmpdir):
loop = asyncio.get_event_loop()
app = aiohttp_app()
url = "/json/empty"
client = loop.run_until_complete(aiohttp_client(app))
with vcr.use_cassette(str(tmpdir.join("get.yaml"))):
response = loop.run_until_complete(client.get(url))
assert response.status == 200
response_json = loop.run_until_complete(response.json())
assert response_json is None
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
response = loop.run_until_complete(client.get(url))
request = cassette.requests[0]
assert request.url == str(client.make_url(url))
response_json = loop.run_until_complete(response.json())
assert response_json is None
assert cassette.play_count == 1
def test_redirect(aiohttp_client, tmpdir):
url = "https://mockbin.org/redirect/302/2"
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == response.status
assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 3
assert cassette.play_count == 3
# Assert that the real response and the cassette response have a similar
# looking request_info.
assert cassette_response.request_info.url == response.request_info.url
assert cassette_response.request_info.method == response.request_info.method
assert {k: v for k, v in cassette_response.request_info.headers.items()} == {
k: v for k, v in response.request_info.headers.items()
}
assert cassette_response.request_info.real_url == response.request_info.real_url
def test_not_modified(aiohttp_client, tmpdir):
"""It doesn't try to redirect on 304"""
url = "https://httpbin.org/status/304"
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == 304
assert response.status == 304
assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 1
assert cassette.play_count == 1
def test_double_requests(tmpdir):
"""We should capture, record, and replay all requests and response chains,
even if there are duplicate ones.
We should replay in the order we saw them.
"""
url = "https://httpbin.org/get"
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
_, response_text1 = get(url, output="text")
_, response_text2 = get(url, output="text")
with vcr.use_cassette(str(tmpdir.join("text.yaml"))) as cassette:
resp, cassette_response_text = get(url, output="text")
assert resp.status == 200
assert cassette_response_text == response_text1
# We made only one request, so we should only play 1 recording.
assert cassette.play_count == 1
# Now make the second test to url
resp, cassette_response_text = get(url, output="text")
assert resp.status == 200
assert cassette_response_text == response_text2
# Now that we made both requests, we should have played both.
assert cassette.play_count == 2
def test_cookies(scheme, tmpdir):
async def run(loop):
cookies_url = scheme + (
"://httpbin.org/response-headers?"
"set-cookie=" + urllib.parse.quote("cookie_1=val_1; Path=/") + "&"
"Set-Cookie=" + urllib.parse.quote("Cookie_2=Val_2; Path=/")
)
home_url = scheme + "://httpbin.org/"
tmp = str(tmpdir.join("cookies.yaml"))
req_cookies = {"Cookie_3": "Val_3"}
req_headers = {"Cookie": "Cookie_4=Val_4"}
# ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url)
home_resp = await session.get(home_url, cookies=req_cookies, headers=req_headers)
assert cassette.play_count == 0
assert_responses(cookies_resp, home_resp)
# -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url)
home_resp = await session.get(home_url, cookies=req_cookies, headers=req_headers)
assert cassette.play_count == 2
assert_responses(cookies_resp, home_resp)
def assert_responses(cookies_resp, home_resp):
assert cookies_resp.cookies.get("cookie_1").value == "val_1"
assert cookies_resp.cookies.get("Cookie_2").value == "Val_2"
request_cookies = home_resp.request_info.headers["cookie"]
assert "cookie_1=val_1" in request_cookies
assert "Cookie_2=Val_2" in request_cookies
assert "Cookie_3=Val_3" in request_cookies
assert "Cookie_4=Val_4" in request_cookies
run_in_loop(run)
def test_cookies_redirect(scheme, tmpdir):
async def run(loop):
# Sets cookie as provided by the query string and redirects
cookies_url = scheme + "://httpbin.org/cookies/set?Cookie_1=Val_1"
tmp = str(tmpdir.join("cookies.yaml"))
# ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 0
            assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 2
            assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# Assert that it's ignoring expiration date
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
cassette.responses[0]["headers"]["set-cookie"] = [
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT"
]
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
run_in_loop(run)
|
from PyQt5.QtWebEngineWidgets import QWebEngineCertificateError
from qutebrowser.utils import usertypes, utils, debug
class CertificateErrorWrapper(usertypes.AbstractCertificateErrorWrapper):
"""A wrapper over a QWebEngineCertificateError."""
def __init__(self, error):
super().__init__(error)
self.ignore = False
def __str__(self):
return self._error.errorDescription()
def __repr__(self):
return utils.get_repr(
self, error=debug.qenum_key(QWebEngineCertificateError,
self._error.error()),
string=str(self))
def url(self):
return self._error.url()
def is_overridable(self):
return self._error.isOverridable()
|
import argparse
import chainer
from chainer import iterators
from chainercv.datasets import ade20k_semantic_segmentation_label_names
from chainercv.datasets import ADE20KSemanticSegmentationDataset
from chainercv.datasets import camvid_label_names
from chainercv.datasets import CamVidDataset
from chainercv.datasets import cityscapes_semantic_segmentation_label_names
from chainercv.datasets import CityscapesSemanticSegmentationDataset
from chainercv.datasets import voc_semantic_segmentation_label_names
from chainercv.datasets import VOCSemanticSegmentationDataset
from chainercv.evaluations import eval_semantic_segmentation
from chainercv.experimental.links import PSPNetResNet101
from chainercv.experimental.links import PSPNetResNet50
from chainercv.links import DeepLabV3plusXception65
from chainercv.links import SegNetBasic
from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook
models = {
'pspnet_resnet50': (PSPNetResNet50, {}, 1),
'pspnet_resnet101': (PSPNetResNet101, {}, 1),
'segnet': (SegNetBasic, {}, 1),
'deeplab_v3plus_xception65': (DeepLabV3plusXception65, {}, 1),
}
def setup(dataset, model, pretrained_model, batchsize, input_size):
dataset_name = dataset
if dataset_name == 'cityscapes':
dataset = CityscapesSemanticSegmentationDataset(
split='val', label_resolution='fine')
label_names = cityscapes_semantic_segmentation_label_names
elif dataset_name == 'ade20k':
dataset = ADE20KSemanticSegmentationDataset(split='val')
label_names = ade20k_semantic_segmentation_label_names
elif dataset_name == 'camvid':
dataset = CamVidDataset(split='test')
label_names = camvid_label_names
elif dataset_name == 'voc':
dataset = VOCSemanticSegmentationDataset(split='val')
label_names = voc_semantic_segmentation_label_names
def eval_(out_values, rest_values):
pred_labels, = out_values
gt_labels, = rest_values
result = eval_semantic_segmentation(pred_labels, gt_labels)
for iu, label_name in zip(result['iou'], label_names):
print('{:>23} : {:.4f}'.format(label_name, iu))
print('=' * 34)
print('{:>23} : {:.4f}'.format('mean IoU', result['miou']))
print('{:>23} : {:.4f}'.format(
'Class average accuracy', result['mean_class_accuracy']))
print('{:>23} : {:.4f}'.format(
'Global average accuracy', result['pixel_accuracy']))
cls, pretrained_models, default_batchsize = models[model]
if pretrained_model is None:
pretrained_model = pretrained_models.get(dataset_name, dataset_name)
    if input_size is not None:
        input_size = (input_size, input_size)
kwargs = {
'n_class': len(label_names),
'pretrained_model': pretrained_model,
}
if model in ['pspnet_resnet50', 'pspnet_resnet101']:
kwargs.update({'input_size': input_size})
elif model == 'deeplab_v3plus_xception65':
kwargs.update({'min_input_size': input_size})
model = cls(**kwargs)
if batchsize is None:
batchsize = default_batchsize
return dataset, eval_, model, batchsize
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--dataset', choices=('cityscapes', 'ade20k', 'camvid', 'voc'))
parser.add_argument('--model', choices=sorted(models.keys()))
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model')
parser.add_argument('--batchsize', type=int)
parser.add_argument('--input-size', type=int, default=None)
args = parser.parse_args()
dataset, eval_, model, batchsize = setup(
args.dataset, args.model, args.pretrained_model,
args.batchsize, args.input_size)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
iterator = iterators.SerialIterator(
dataset, batchsize, repeat=False, shuffle=False)
in_values, out_values, rest_values = apply_to_iterator(
model.predict, iterator, hook=ProgressHook(len(dataset)))
# Delete an iterator of images to save memory usage.
del in_values
eval_(out_values, rest_values)
if __name__ == '__main__':
main()
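# Editor's note: example invocation (the script file name is hypothetical; the
# dataset and model names come from the --dataset choices and the `models` dict
# above, and pretrained weights are resolved from the dataset name when
# --pretrained-model is omitted):
#
#     python eval_semantic_segmentation.py --dataset camvid --model segnet --gpu 0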
|
from django.db import migrations
def fix_rename_changes(apps, schema_editor):
Change = apps.get_model("trans", "Change")
db_alias = schema_editor.connection.alias
for change in (
Change.objects.using(db_alias)
.filter(action__in=[41, 42, 43])
.select_related("project", "component")
):
change.old = change.target
if change.action in (43, 41):
# Move component, rename project
change.target = change.project.slug
elif change.action == 42:
# Rename component
change.target = change.component.slug
change.save(update_fields=["target", "old"])
class Migration(migrations.Migration):
dependencies = [
("trans", "0100_auto_20200912_1131"),
]
operations = [migrations.RunPython(fix_rename_changes, elidable=True)]
|
import socket
import unittest
from homeassistant.const import TEMP_CELSIUS
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import get_test_home_assistant
VALID_CONFIG_MINIMAL = {"sensor": {"platform": "hddtemp"}}
VALID_CONFIG_NAME = {"sensor": {"platform": "hddtemp", "name": "FooBar"}}
VALID_CONFIG_ONE_DISK = {"sensor": {"platform": "hddtemp", "disks": ["/dev/sdd1"]}}
VALID_CONFIG_WRONG_DISK = {"sensor": {"platform": "hddtemp", "disks": ["/dev/sdx1"]}}
VALID_CONFIG_MULTIPLE_DISKS = {
"sensor": {
"platform": "hddtemp",
"host": "foobar.local",
"disks": ["/dev/sda1", "/dev/sdb1", "/dev/sdc1"],
}
}
VALID_CONFIG_HOST = {"sensor": {"platform": "hddtemp", "host": "alice.local"}}
VALID_CONFIG_HOST_UNREACHABLE = {"sensor": {"platform": "hddtemp", "host": "bob.local"}}
class TelnetMock:
"""Mock class for the telnetlib.Telnet object."""
def __init__(self, host, port, timeout=0):
"""Initialize Telnet object."""
self.host = host
self.port = port
self.timeout = timeout
self.sample_data = bytes(
"|/dev/sda1|WDC WD30EZRX-12DC0B0|29|C|"
+ "|/dev/sdb1|WDC WD15EADS-11P7B2|32|C|"
+ "|/dev/sdc1|WDC WD20EARX-22MMMB0|29|C|"
+ "|/dev/sdd1|WDC WD15EARS-00Z5B1|89|F|",
"ascii",
)
def read_all(self):
"""Return sample values."""
if self.host == "alice.local":
raise ConnectionRefusedError
if self.host == "bob.local":
raise socket.gaierror
return self.sample_data
class TestHDDTempSensor(unittest.TestCase):
"""Test the hddtemp sensor."""
def setUp(self):
"""Set up things to run when tests begin."""
self.hass = get_test_home_assistant()
self.config = VALID_CONFIG_ONE_DISK
self.reference = {
"/dev/sda1": {
"device": "/dev/sda1",
"temperature": "29",
"unit_of_measurement": TEMP_CELSIUS,
"model": "WDC WD30EZRX-12DC0B0",
},
"/dev/sdb1": {
"device": "/dev/sdb1",
"temperature": "32",
"unit_of_measurement": TEMP_CELSIUS,
"model": "WDC WD15EADS-11P7B2",
},
"/dev/sdc1": {
"device": "/dev/sdc1",
"temperature": "29",
"unit_of_measurement": TEMP_CELSIUS,
"model": "WDC WD20EARX-22MMMB0",
},
"/dev/sdd1": {
"device": "/dev/sdd1",
"temperature": "32",
"unit_of_measurement": TEMP_CELSIUS,
"model": "WDC WD15EARS-00Z5B1",
},
}
self.addCleanup(self.hass.stop)
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_min_config(self):
"""Test minimal hddtemp configuration."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_MINIMAL)
self.hass.block_till_done()
entity = self.hass.states.all()[0].entity_id
state = self.hass.states.get(entity)
reference = self.reference[state.attributes.get("device")]
assert state.state == reference["temperature"]
assert state.attributes.get("device") == reference["device"]
assert state.attributes.get("model") == reference["model"]
assert (
state.attributes.get("unit_of_measurement")
== reference["unit_of_measurement"]
)
assert (
state.attributes.get("friendly_name")
== f"HD Temperature {reference['device']}"
)
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_rename_config(self):
"""Test hddtemp configuration with different name."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_NAME)
self.hass.block_till_done()
entity = self.hass.states.all()[0].entity_id
state = self.hass.states.get(entity)
reference = self.reference[state.attributes.get("device")]
assert state.attributes.get("friendly_name") == f"FooBar {reference['device']}"
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_one_disk(self):
"""Test hddtemp one disk configuration."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_ONE_DISK)
self.hass.block_till_done()
state = self.hass.states.get("sensor.hd_temperature_dev_sdd1")
reference = self.reference[state.attributes.get("device")]
assert state.state == reference["temperature"]
assert state.attributes.get("device") == reference["device"]
assert state.attributes.get("model") == reference["model"]
assert (
state.attributes.get("unit_of_measurement")
== reference["unit_of_measurement"]
)
assert (
state.attributes.get("friendly_name")
== f"HD Temperature {reference['device']}"
)
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_wrong_disk(self):
"""Test hddtemp wrong disk configuration."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_WRONG_DISK)
self.hass.block_till_done()
assert len(self.hass.states.all()) == 1
state = self.hass.states.get("sensor.hd_temperature_dev_sdx1")
assert state.attributes.get("friendly_name") == "HD Temperature /dev/sdx1"
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_multiple_disks(self):
"""Test hddtemp multiple disk configuration."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_MULTIPLE_DISKS)
self.hass.block_till_done()
for sensor in [
"sensor.hd_temperature_dev_sda1",
"sensor.hd_temperature_dev_sdb1",
"sensor.hd_temperature_dev_sdc1",
]:
state = self.hass.states.get(sensor)
reference = self.reference[state.attributes.get("device")]
assert state.state == reference["temperature"]
assert state.attributes.get("device") == reference["device"]
assert state.attributes.get("model") == reference["model"]
assert (
state.attributes.get("unit_of_measurement")
== reference["unit_of_measurement"]
)
assert (
state.attributes.get("friendly_name")
== f"HD Temperature {reference['device']}"
)
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_host_refused(self):
"""Test hddtemp if host unreachable."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_HOST)
self.hass.block_till_done()
assert len(self.hass.states.all()) == 0
@patch("telnetlib.Telnet", new=TelnetMock)
def test_hddtemp_host_unreachable(self):
"""Test hddtemp if host unreachable."""
assert setup_component(self.hass, "sensor", VALID_CONFIG_HOST_UNREACHABLE)
self.hass.block_till_done()
assert len(self.hass.states.all()) == 0
|
from typing import Any, Optional, Union, Text, Sequence, Tuple, List
import numpy as np
Tensor = Any
def tensordot(tf, a, b, axes, name: Optional[Text] = None) -> Tensor:
r"""Tensor contraction of a and b along specified axes.
Tensordot (also known as tensor contraction) sums the product of elements
from `a` and `b` over the indices specified by `a_axes` and `b_axes`.
The lists `a_axes` and `b_axes` specify those pairs of axes along which to
contract the tensors. The axis `a_axes[i]` of `a` must have the same dimension
as axis `b_axes[i]` of `b` for all `i` in `range(0, len(a_axes))`. The lists
`a_axes` and `b_axes` must have identical length and consist of unique
integers that specify valid axes for each of the tensors.
This operation corresponds to `numpy.tensordot(a, b, axes)`.
Example 1: When `a` and `b` are matrices (order 2), the case `axes = 1`
is equivalent to matrix multiplication.
Example 2: When `a` and `b` are matrices (order 2), the case
`axes = [[1], [0]]` is equivalent to matrix multiplication.
Example 3: Suppose that \\(a_{ijk}\\) and \\(b_{lmn}\\) represent two
tensors of order 3. Then, `contract(a, b, [[0], [2]])` is the order 4 tensor
\\(c_{jklm}\\) whose entry
corresponding to the indices \\((j,k,l,m)\\) is given by:
\\( c_{jklm} = \sum_i a_{ijk} b_{lmi} \\).
In general, `order(c) = order(a) + order(b) - 2*len(axes[0])`.
Args:
tf: The TensorFlow module. This must be passed in instead of imported
since we don't assume users have TensorFlow installed.
a: `Tensor` of type `float32` or `float64`.
b: `Tensor` with the same type as `a`.
axes: Either a scalar `N`, or a list or an `int32` `Tensor` of shape [2, k].
If axes is a scalar, sum over the last N axes of a and the first N axes of
b in order. If axes is a list or `Tensor` the first and second row contain
the set of unique integers specifying axes along which the contraction is
computed, for `a` and `b`, respectively. The number of axes for `a` and
`b` must be equal.
name: A name for the operation (optional).
Returns:
A `Tensor` with the same type as `a`.
Raises:
ValueError: If the shapes of `a`, `b`, and `axes` are incompatible.
IndexError: If the values in axes exceed the rank of the corresponding
tensor.
"""
def _tensordot_should_flip(contraction_axes: List[int],
free_axes: List[int]) -> bool:
"""Helper method to determine axis ordering.
We minimize the average distance the indices would have to move under the
transposition.
Args:
contraction_axes: The axes to be contracted.
free_axes: The free axes.
Returns:
should_flip: `True` if `contraction_axes` should be moved to the left,
`False` if they should be moved to the right.
"""
# NOTE: This will fail if the arguments contain any Tensors.
if contraction_axes and free_axes:
return bool(np.mean(contraction_axes) < np.mean(free_axes))
return False
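  # Editor's note: e.g. with contraction_axes=[2, 3] and free_axes=[0, 1] the
  # contracted axes already sit to the right (mean 2.5 > 0.5), so no flip is
  # needed; with contraction_axes=[0, 1] the means swap and the contracted axes
  # are moved to the left instead.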
def _tranpose_if_necessary(tensor: Tensor, perm: List[int]) -> Tensor:
"""Like transpose(), but avoids creating a new tensor if possible.
Although the graph optimizer should kill trivial transposes, it is
best not to add them in the first place!
"""
if perm == list(range(len(perm))):
return tensor
return tf.transpose(tensor, perm)
def _reshape_if_necessary(tensor: Tensor, new_shape: List[int]) -> Tensor:
"""Like reshape(), but avoids creating a new tensor if possible.
Assumes shapes are both fully specified.
"""
cur_shape = tensor.get_shape().as_list()
if (len(new_shape) == len(cur_shape) and
all(d0 == d1 for d0, d1 in zip(cur_shape, new_shape))):
return tensor
return tf.reshape(tensor, new_shape)
def _tensordot_reshape(
a: Tensor,
axes: Union[Sequence[int], Tensor],
is_right_term=False
) -> Tuple[Tensor, Union[List[int], Tensor], Optional[List[int]], bool]:
"""Helper method to perform transpose and reshape for contraction op.
This method is helpful in reducing `math_ops.tensordot` to `math_ops.matmul`
using `array_ops.transpose` and `array_ops.reshape`. The method takes a
tensor and performs the correct transpose and reshape operation for a given
set of indices. It returns the reshaped tensor as well as a list of indices
necessary to reshape the tensor again after matrix multiplication.
Args:
a: `Tensor`.
axes: List or `int32` `Tensor` of unique indices specifying valid axes of
`a`.
is_right_term: Whether `a` is the right (second) argument to `matmul`.
Returns:
A tuple `(reshaped_a, free_dims, free_dims_static, transpose_needed)`
where `reshaped_a` is the tensor `a` reshaped to allow contraction via
`matmul`, `free_dims` is either a list of integers or an `int32`
`Tensor`, depending on whether the shape of a is fully specified, and
free_dims_static is either a list of integers and None values, or None,
representing the inferred static shape of the free dimensions.
`transpose_needed` indicates whether `reshaped_a` must be transposed,
or not, when calling `matmul`.
"""
if a.get_shape().is_fully_defined() and isinstance(axes, (list, tuple)):
shape_a = a.get_shape().as_list()
# NOTE: This will fail if axes contains any tensors
axes = [i if i >= 0 else i + len(shape_a) for i in axes]
free = [i for i in range(len(shape_a)) if i not in axes]
flipped = _tensordot_should_flip(axes, free)
free_dims = [shape_a[i] for i in free]
prod_free = int(np.prod([shape_a[i] for i in free]))
prod_axes = int(np.prod([shape_a[i] for i in axes]))
perm = axes + free if flipped else free + axes
new_shape = [prod_axes, prod_free] if flipped else [prod_free, prod_axes]
transposed_a = _tranpose_if_necessary(a, perm)
reshaped_a = _reshape_if_necessary(transposed_a, new_shape)
transpose_needed = (not flipped) if is_right_term else flipped
return reshaped_a, free_dims, free_dims, transpose_needed
if a.get_shape().ndims is not None and isinstance(axes, (list, tuple)):
shape_a = a.get_shape().as_list()
axes = [i if i >= 0 else i + len(shape_a) for i in axes]
free = [i for i in range(len(shape_a)) if i not in axes]
flipped = _tensordot_should_flip(axes, free)
perm = axes + free if flipped else free + axes
axes_dims = [shape_a[i] for i in axes]
free_dims = [shape_a[i] for i in free]
free_dims_static = free_dims
axes = tf.convert_to_tensor(axes, dtype=tf.dtypes.int32, name="axes")
free = tf.convert_to_tensor(free, dtype=tf.dtypes.int32, name="free")
shape_a = tf.shape(a)
transposed_a = _tranpose_if_necessary(a, perm)
else:
free_dims_static = None
shape_a = tf.shape(a)
rank_a = tf.rank(a)
axes = tf.convert_to_tensor(axes, dtype=tf.dtypes.int32, name="axes")
axes = tf.where(axes >= 0, axes, axes + rank_a)
free, _ = tf.compat.v1.setdiff1d(tf.range(rank_a), axes)
# Matmul does not accept tensors for its transpose arguments, so fall
# back to the previous, fixed behavior.
# NOTE(amilsted): With a suitable wrapper for `matmul` using e.g. `case`
# to match transpose arguments to tensor values, we could also avoid
# unneeded tranposes in this case at the expense of a somewhat more
# complicated graph. Unclear whether this would be beneficial overall.
flipped = is_right_term
perm = (
tf.concat([axes, free], 0) if flipped else tf.concat([free, axes], 0))
transposed_a = tf.transpose(a, perm)
free_dims = tf.gather(shape_a, free)
axes_dims = tf.gather(shape_a, axes)
prod_free_dims = tf.reduce_prod(free_dims)
prod_axes_dims = tf.reduce_prod(axes_dims)
if flipped:
new_shape = tf.stack([prod_axes_dims, prod_free_dims])
else:
new_shape = tf.stack([prod_free_dims, prod_axes_dims])
reshaped_a = tf.reshape(transposed_a, new_shape)
transpose_needed = (not flipped) if is_right_term else flipped
return reshaped_a, free_dims, free_dims_static, transpose_needed
def _tensordot_axes(a: Tensor, axes) -> Tuple[Any, Any]:
"""Generates two sets of contraction axes for the two tensor arguments."""
a_shape = a.get_shape()
if isinstance(axes, tf.compat.integral_types):
if axes < 0:
raise ValueError("'axes' must be at least 0.")
if a_shape.ndims is not None:
if axes > a_shape.ndims:
raise ValueError("'axes' must not be larger than the number of "
"dimensions of tensor %s." % a)
return (list(range(a_shape.ndims - axes,
a_shape.ndims)), list(range(axes)))
rank = tf.rank(a)
return (tf.range(rank - axes, rank,
dtype=tf.int32), tf.range(axes, dtype=tf.int32))
if isinstance(axes, (list, tuple)):
if len(axes) != 2:
raise ValueError("'axes' must be an integer or have length 2.")
a_axes = axes[0]
b_axes = axes[1]
if isinstance(a_axes, tf.compat.integral_types) and \
isinstance(b_axes, tf.compat.integral_types):
a_axes = [a_axes]
b_axes = [b_axes]
      # NOTE: This fails if either a_axes or b_axes is a Tensor.
if len(a_axes) != len(b_axes):
raise ValueError(
"Different number of contraction axes 'a' and 'b', %s != %s." %
(len(a_axes), len(b_axes)))
# The contraction indices do not need to be permuted.
# Sort axes to avoid unnecessary permutations of a.
      # NOTE: This fails if either a_axes or b_axes contains Tensors.
# pylint: disable=len-as-condition
if len(a_axes) > 0:
a_axes, b_axes = list(zip(*sorted(zip(a_axes, b_axes))))
return a_axes, b_axes
axes = tf.convert_to_tensor(axes, name="axes", dtype=tf.int32)
return axes[0], axes[1]
with tf.compat.v1.name_scope(name, "Tensordot", [a, b, axes]) as _name:
a = tf.convert_to_tensor(a, name="a")
b = tf.convert_to_tensor(b, name="b")
a_axes, b_axes = _tensordot_axes(a, axes)
a_reshape, a_free_dims, a_free_dims_static, a_transp = _tensordot_reshape(
a, a_axes)
b_reshape, b_free_dims, b_free_dims_static, b_transp = _tensordot_reshape(
b, b_axes, is_right_term=True)
ab_matmul = tf.matmul(
a_reshape, b_reshape, transpose_a=a_transp, transpose_b=b_transp)
if isinstance(a_free_dims, list) and isinstance(b_free_dims, list):
return tf.reshape(ab_matmul, a_free_dims + b_free_dims, name=_name)
a_free_dims = tf.convert_to_tensor(a_free_dims, dtype=tf.dtypes.int32)
b_free_dims = tf.convert_to_tensor(b_free_dims, dtype=tf.dtypes.int32)
product = tf.reshape(
ab_matmul, tf.concat([a_free_dims, b_free_dims], 0), name=_name)
if a_free_dims_static is not None and b_free_dims_static is not None:
product.set_shape(a_free_dims_static + b_free_dims_static)
return product
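# Editor's note: a small NumPy-only check (hypothetical helper, not part of this
# module) of the contraction semantics documented in the docstring above: for
# matrices, axes=1 and axes=[[1], [0]] both reduce to ordinary matrix
# multiplication, and the explicit-axes form matches the einsum in Example 3.
def _tensordot_semantics_sketch():
    import numpy as np
    a = np.arange(12.).reshape(3, 4)
    b = np.arange(20.).reshape(4, 5)
    assert np.allclose(np.tensordot(a, b, axes=1), a @ b)
    assert np.allclose(np.tensordot(a, b, axes=[[1], [0]]), a @ b)
    a3 = np.arange(24.).reshape(2, 3, 4)  # a_{ijk}
    b3 = np.arange(20.).reshape(5, 2, 2)  # b_{lmi}
    assert np.allclose(np.tensordot(a3, b3, axes=[[0], [2]]),
                       np.einsum('ijk,lmi->jklm', a3, b3))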
|
import io
import os
import re
import sys
import time
import unittest
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from spinners.spinners import Spinners
from halo import Halo
from halo._utils import get_terminal_columns, is_supported
from tests._utils import strip_ansi, find_colors, encode_utf_8_text, decode_utf_8_text
from termcolor import COLORS
if sys.version_info.major == 2:
get_coded_text = encode_utf_8_text
else:
get_coded_text = decode_utf_8_text
if is_supported():
frames = [get_coded_text(frame) for frame in Spinners['dots'].value['frames']]
default_spinner = Spinners['dots'].value
else:
frames = [get_coded_text(frame) for frame in Spinners['line'].value['frames']]
default_spinner = Spinners['line'].value
class SpecificException(Exception):
"""A unique exc class we know only our tests would raise"""
class TestHalo(unittest.TestCase):
"""Test Halo enum for attribute values.
"""
TEST_FOLDER = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
"""Set up things before beginning of each test.
"""
self._stream_file = os.path.join(self.TEST_FOLDER, 'test.txt')
self._stream = io.open(self._stream_file, 'w+')
self._stream_no_tty = StringIO()
def _get_test_output(self, no_ansi=True):
"""Clean the output from stream and return it in list form.
Returns
-------
list
Clean output from stream
"""
self._stream.seek(0)
data = self._stream.readlines()
output = {}
output_text = []
output_colors = []
for line in data:
clean_line = strip_ansi(line.strip('\n')) if no_ansi else line.strip('\n')
if clean_line != '':
output_text.append(get_coded_text(clean_line))
colors_found = find_colors(line.strip('\n'))
if colors_found:
tmp = []
for color in colors_found:
tmp.append(re.sub(r'[^0-9]', '', color, flags=re.I))
output_colors.append(tmp)
output['text'] = output_text
output['colors'] = output_colors
return output
def test_basic_spinner(self):
"""Test the basic of basic spinners.
"""
spinner = Halo(text='foo', spinner='dots', stream=self._stream)
spinner.start()
time.sleep(1)
spinner.stop()
output = self._get_test_output()['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
def test_text_spinner_color(self):
"""Test basic spinner with available colors color (both spinner and text)
"""
for color, color_int in COLORS.items():
self._stream_file = os.path.join(self.TEST_FOLDER, 'test.txt')
self._stream = io.open(self._stream_file, 'w+')
spinner = Halo(
text='foo',
text_color=color,
color=color,
spinner='dots',
stream=self._stream
)
spinner.start()
time.sleep(1)
spinner.stop()
output = self._get_test_output()['colors']
# check if spinner colors match
self.assertEqual(color_int, int(output[0][0]))
self.assertEqual(color_int, int(output[1][0]))
self.assertEqual(color_int, int(output[2][0]))
# check if text colors match
self.assertEqual(color_int, int(output[0][1]))
self.assertEqual(color_int, int(output[1][1]))
self.assertEqual(color_int, int(output[2][1]))
def test_spinner_getter(self):
instance = Halo()
if is_supported():
default_spinner_value = "dots"
else:
default_spinner_value = "line"
instance.spinner = default_spinner_value
self.assertEqual(default_spinner, instance.spinner)
instance.spinner = "This_spinner_do_not_exist"
self.assertEqual(default_spinner, instance.spinner)
instance.spinner = -123
self.assertEqual(default_spinner, instance.spinner)
def test_text_stripping(self):
"""Test the text being stripped before output.
"""
spinner = Halo(text='foo\n', spinner='dots', stream=self._stream)
spinner.start()
time.sleep(1)
spinner.succeed('foo\n')
output = self._get_test_output()['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
pattern = re.compile(r'(✔|v) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_text_ellipsing(self):
"""Test the text gets ellipsed if it's too long
"""
text = 'This is a text that it is too long. In fact, it exceeds the eighty column standard ' \
'terminal width, which forces the text frame renderer to add an ellipse at the end of the ' \
'text. ' * 6
spinner = Halo(text=text, spinner='dots', stream=self._stream)
spinner.start()
time.sleep(1)
spinner.succeed('End!')
output = self._get_test_output()['text']
terminal_width = get_terminal_columns()
        # -6 for the ' (...)' ellipsis, -2 for the spinner and trailing space
self.assertEqual(output[0], '{} {} (...)'.format(frames[0], text[:terminal_width - 6 - 2]))
self.assertEqual(output[1], '{} {} (...)'.format(frames[1], text[:terminal_width - 6 - 2]))
self.assertEqual(output[2], '{} {} (...)'.format(frames[2], text[:terminal_width - 6 - 2]))
pattern = re.compile(r'(✔|v) End!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_text_animation(self):
"""Test the text gets animated when it is too long
"""
text = 'This is a text that it is too long. In fact, it exceeds the eighty column standard ' \
'terminal width, which forces the text frame renderer to add an ellipse at the end of the ' \
'text. ' * 6
spinner = Halo(text=text, spinner='dots', stream=self._stream, animation='marquee')
spinner.start()
time.sleep(1)
spinner.succeed('End!')
output = self._get_test_output()['text']
terminal_width = get_terminal_columns()
self.assertEqual(output[0], '{} {}'.format(frames[0], text[:terminal_width - 2]))
self.assertEqual(output[1], '{} {}'.format(frames[1], text[1:terminal_width - 1]))
self.assertEqual(output[2], '{} {}'.format(frames[2], text[2:terminal_width]))
pattern = re.compile(r'(✔|v) End!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_context_manager(self):
"""Test the basic of basic spinners used through the with statement.
"""
with Halo(text='foo', spinner='dots', stream=self._stream):
time.sleep(1)
output = self._get_test_output()['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
def test_context_manager_exceptions(self):
"""Test Halo context manager allows exceptions to bubble up
"""
with self.assertRaises(SpecificException):
with Halo(text='foo', spinner='dots', stream=self._stream):
raise SpecificException
def test_decorator_spinner(self):
"""Test basic usage of spinners with the decorator syntax."""
@Halo(text="foo", spinner="dots", stream=self._stream)
def decorated_function():
time.sleep(1)
decorated_function()
output = self._get_test_output()['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
def test_decorator_exceptions(self):
"""Test Halo decorator allows exceptions to bubble up"""
@Halo(text="foo", spinner="dots", stream=self._stream)
def decorated_function():
raise SpecificException
with self.assertRaises(SpecificException):
decorated_function()
def test_initial_title_spinner(self):
"""Test Halo with initial title.
"""
spinner = Halo('bar', stream=self._stream)
spinner.start()
time.sleep(1)
spinner.stop()
output = self._get_test_output()['text']
self.assertEqual(output[0], '{} bar'.format(frames[0]))
self.assertEqual(output[1], '{} bar'.format(frames[1]))
self.assertEqual(output[2], '{} bar'.format(frames[2]))
def test_id_not_created_before_start(self):
"""Test Spinner ID not created before start.
"""
spinner = Halo(stream=self._stream)
self.assertEqual(spinner.spinner_id, None)
def test_ignore_multiple_start_calls(self):
"""Test ignoring of multiple start calls.
"""
spinner = Halo(stream=self._stream)
spinner.start()
spinner_id = spinner.spinner_id
spinner.start()
self.assertEqual(spinner.spinner_id, spinner_id)
spinner.stop()
def test_chaining_start(self):
"""Test chaining start with constructor
"""
spinner = Halo(stream=self._stream).start()
spinner_id = spinner.spinner_id
self.assertIsNotNone(spinner_id)
spinner.stop()
def test_succeed(self):
"""Test succeed method
"""
spinner = Halo(stream=self._stream)
spinner.start('foo')
spinner.succeed('foo')
output = self._get_test_output()['text']
pattern = re.compile(r'(✔|v) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_succeed_with_new_text(self):
"""Test succeed method with new text
"""
spinner = Halo(stream=self._stream)
spinner.start('foo')
spinner.succeed('bar')
output = self._get_test_output()['text']
pattern = re.compile(r'(✔|v) bar', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_info(self):
"""Test info method
"""
spinner = Halo(stream=self._stream)
spinner.start('foo')
spinner.info()
output = self._get_test_output()['text']
pattern = re.compile(r'(ℹ|¡) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_fail(self):
"""Test fail method
"""
spinner = Halo(stream=self._stream)
spinner.start('foo')
spinner.fail()
output = self._get_test_output()['text']
pattern = re.compile(r'(✖|×) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_warning(self):
"""Test warn method
"""
spinner = Halo(stream=self._stream)
spinner.start('foo')
spinner.warn('Warning!')
output = self._get_test_output()['text']
pattern = re.compile(r'(⚠|!!) Warning!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_spinner_getters_setters(self):
"""Test spinner getters and setters.
"""
spinner = Halo()
self.assertEqual(spinner.text, '')
        self.assertIsNone(spinner.text_color)
self.assertEqual(spinner.color, 'cyan')
self.assertIsNone(spinner.spinner_id)
spinner.spinner = 'dots12'
spinner.text = 'bar'
spinner.text_color = 'red'
spinner.color = 'red'
self.assertEqual(spinner.text, 'bar')
self.assertEqual(spinner.text_color, 'red')
self.assertEqual(spinner.color, 'red')
if is_supported():
self.assertEqual(spinner.spinner, Spinners['dots12'].value)
else:
self.assertEqual(spinner.spinner, default_spinner)
spinner.spinner = 'dots11'
if is_supported():
self.assertEqual(spinner.spinner, Spinners['dots11'].value)
else:
self.assertEqual(spinner.spinner, default_spinner)
spinner.spinner = 'foo_bar'
self.assertEqual(spinner.spinner, default_spinner)
# Color is None
spinner.text_color = None
spinner.color = None
spinner.start()
spinner.stop()
self.assertIsNone(spinner.text_color)
self.assertIsNone(spinner.color)
def test_unavailable_spinner_defaults(self):
"""Test unavailable spinner defaults.
"""
spinner = Halo('dot')
self.assertEqual(spinner.text, 'dot')
self.assertEqual(spinner.spinner, default_spinner)
def test_if_enabled(self):
"""Test if spinner is enabled
"""
spinner = Halo(text='foo', enabled=False, stream=self._stream)
spinner.start()
time.sleep(1)
spinner.fail()
output = self._get_test_output()['text']
self.assertEqual(len(output), 0)
self.assertEqual(output, [])
def test_writing_disabled_on_closed_stream(self):
"""Test no I/O is performed on closed streams
"""
# BytesIO supports the writable() method, while StringIO does not, in
# some versions of Python. We want to check whether the stream is
# writable (e.g. for file streams which can be open but not writable),
# but only if the stream supports it — otherwise we assume
# open == writable.
for io_class in (io.StringIO, io.BytesIO):
stream = io_class()
stream.close()
# sanity checks
self.assertTrue(stream.closed)
self.assertRaises(ValueError, stream.isatty)
self.assertRaises(ValueError, stream.write, u'')
try:
spinner = Halo(text='foo', stream=stream)
spinner.start()
time.sleep(0.5)
spinner.stop()
except ValueError as e:
self.fail('Attempted to write to a closed stream: {}'.format(e))
def test_closing_stream_before_stopping(self):
"""Test no I/O is performed on streams closed before stop is called
"""
stream = io.StringIO()
spinner = Halo(text='foo', stream=stream)
spinner.start()
time.sleep(0.5)
# no exception raised after closing the stream means test was successful
try:
stream.close()
time.sleep(0.5)
spinner.stop()
except ValueError as e:
self.fail('Attempted to write to a closed stream: {}'.format(e))
def test_closing_stream_before_persistent(self):
"""Test no I/O is performed on streams closed before stop_and_persist is called
"""
stream = io.StringIO()
spinner = Halo(text='foo', stream=stream)
spinner.start()
time.sleep(0.5)
# no exception raised after closing the stream means test was successful
try:
stream.close()
time.sleep(0.5)
spinner.stop_and_persist('done')
except ValueError as e:
self.fail('Attempted to write to a closed stream: {}'.format(e))
def test_setting_enabled_property(self):
"""Test if spinner stops writing when enabled property set to False
"""
spinner = Halo(text='foo', stream=self._stream)
spinner.start()
time.sleep(0.5)
spinner.enabled = False
bytes_written = self._stream.tell()
time.sleep(0.5)
spinner.stop()
total_bytes_written = self._stream.tell()
self.assertEqual(total_bytes_written, bytes_written)
def test_spinner_interval_default(self):
"""Test proper assignment of the default interval value.
"""
spinner = Halo()
self.assertEqual(spinner._interval, default_spinner['interval'])
def test_spinner_interval_argument(self):
"""Test proper assignment of the interval value from the constructor argument.
"""
spinner = Halo(interval=123)
self.assertEqual(spinner._interval, 123)
def test_spinner_interval_dict(self):
"""Test proper assignment of the interval value from a dictionary.
"""
spinner = Halo(spinner={'interval': 321, 'frames': ['+', '-']})
self.assertEqual(spinner._interval, 321)
def test_invalid_placement(self):
"""Test invalid placement of spinner.
"""
        for placement in ('', 'foo', None):
            with self.assertRaises(ValueError):
                Halo(placement=placement)
        spinner = Halo(placement='left')
        for placement in ('', 'foo', None):
            with self.assertRaises(ValueError):
                spinner.placement = placement
def test_default_placement(self):
"""Test default placement of spinner.
"""
spinner = Halo()
self.assertEqual(spinner.placement, 'left')
def test_right_placement(self):
"""Test right placement of spinner.
"""
spinner = Halo(text='foo', placement='right', stream=self._stream)
spinner.start()
time.sleep(1)
output = self._get_test_output()['text']
(text, _) = output[-1].split(' ')
self.assertEqual(text, 'foo')
spinner.succeed()
output = self._get_test_output()['text']
(text, symbol) = output[-1].split(' ')
pattern = re.compile(r"(✔|v)", re.UNICODE)
self.assertEqual(text, 'foo')
self.assertRegexpMatches(symbol, pattern)
spinner.stop()
def test_bounce_animation(self):
def filler_text(n_chars):
return "_" * n_chars
terminal_width = get_terminal_columns()
text = "{}abc".format(filler_text(terminal_width))
expected_frames_without_appended_spinner = [
"{}".format(filler_text(terminal_width - 2)),
"{}".format(filler_text(terminal_width - 2)),
"{}".format(filler_text(terminal_width - 2)),
"{}a".format(filler_text(terminal_width - 3)),
"{}ab".format(filler_text(terminal_width - 4)),
"{}abc".format(filler_text(terminal_width - 5)),
"{}abc".format(filler_text(terminal_width - 5)),
"{}ab".format(filler_text(terminal_width - 4)),
"{}a".format(filler_text(terminal_width - 3)),
"{}".format(filler_text(terminal_width - 2)),
"{}".format(filler_text(terminal_width - 2)),
"{}".format(filler_text(terminal_width - 2)),
]
# Prepend the actual spinner
expected_frames = [
"{} {}".format(frames[idx % frames.__len__()], frame)
for idx, frame in enumerate(expected_frames_without_appended_spinner)
]
spinner = Halo(text, animation="bounce", stream=self._stream)
spinner.start()
# Sleep a full bounce cycle
time.sleep(1.2)
spinner.stop()
output = self._get_test_output()['text']
zipped_expected_and_actual_frame = zip(expected_frames, output)
for multiple_frames in zipped_expected_and_actual_frame:
expected_frame, actual_frame = multiple_frames
            self.assertEqual(expected_frame, actual_frame)
def test_animation_setter(self):
spinner = Halo("Asdf")
spinner.animation = "bounce"
self.assertEquals("bounce", spinner.animation)
spinner.animation = "marquee"
self.assertEquals("marquee", spinner.animation)
def test_spinner_color(self):
"""Test ANSI escape characters are present
"""
for color, color_int in COLORS.items():
self._stream = io.open(self._stream_file, 'w+') # reset stream
spinner = Halo(color=color, stream=self._stream)
spinner.start()
spinner.stop()
output = self._get_test_output(no_ansi=False)
output_merged = [arr for c in output['colors'] for arr in c]
            self.assertIn(str(color_int), output_merged)
def test_redirect_stdout(self):
"""Test redirect stdout
"""
out = self._stream
try:
self._stream = self._stream_no_tty
spinner = Halo(text='foo', spinner='dots', stream=self._stream)
spinner.start()
time.sleep(1)
spinner.stop()
output = self._get_test_output()['text']
finally:
self._stream = out
self.assertIn('foo', output[0])
def tearDown(self):
pass
if __name__ == '__main__':
SUITE = unittest.TestLoader().loadTestsFromTestCase(TestHalo)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
import os
import sys
import shutil
try:
import setuptools # noqa, analysis:ignore
except ImportError:
pass # setuptools allows for "develop", but it's not essential
from distutils.core import setup
## Function we need
def get_version_and_doc(filename):
NS = dict(__version__='', __doc__='')
    docStatus = 0  # 0: not started, 1: in progress, 2: done
for line in open(filename, 'rb').read().decode().splitlines():
if line.startswith('__version__'):
exec(line.strip(), NS, NS)
elif line.startswith('"""'):
if docStatus == 0:
docStatus = 1
line = line.lstrip('"')
elif docStatus == 1:
docStatus = 2
if docStatus == 1:
NS['__doc__'] += line.rstrip() + '\n'
if not NS['__version__']:
raise RuntimeError('Could not find __version__')
return NS['__version__'], NS['__doc__']
def get_readme_as_rst(filename):
lines = []
for line in open(filename, 'rb').read().decode().splitlines():
lines.append(line)
# Convert links, images, and images with links
i1, i2 = line.find('['), line.find(']')
i3, i4 = line.find('(', i2), line.find(')', i2)
i5, i6 = line.find('(', i4), line.find(')', i4+1)
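        # i1/i2 delimit the [text], i3/i4 the first (...) target, and i5/i6 a
        # possible second (...) target used by badge images that also link out.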
if '[Documentation Status' in line:
line.find('x')
if i1 >=0 and i2 > i1 and i3 == i2 + 1 and i4 > i3:
text, link = line[i1+1:i2], line[i3+1:i4]
if i1 == 1 and line[0] == '!':
# Image
lines[-1] = '\n.. image:: %s\n' % link
elif i1 == 0 and line.startswith('[![') and i5 == i4 + 2 and i6 > i5:
# Image with link
link2 = line[i5+1:i6]
lines[-1] = '\n.. image:: %s\n :target: %s\n' % (link, link2)
else:
# RST link: `link text </the/link>`_
lines[-1] = '%s`%s <%s>`_%s' % (line[:i1], text, link, line[i4+1:])
return '\n'.join(lines)
def package_tree(pkgroot):
subdirs = [os.path.relpath(i[0], THIS_DIR).replace(os.path.sep, '.')
for i in os.walk(os.path.join(THIS_DIR, pkgroot))
if '__init__.py' in i[2]]
return subdirs
def get_all_resources():
import logging # noqa - prevent mixup with logging module inside flexx.util
sys.path.insert(0, os.path.join(THIS_DIR, 'flexx', 'util'))
from getresource import RESOURCES, get_resoure_path
for name in RESOURCES.keys():
get_resoure_path(name)
sys.path.pop(0)
## Collect info for setup()
THIS_DIR = os.path.dirname(__file__)
# Define name and description
name = 'flexx'
description = "Write desktop and web apps in pure Python."
# Get version and docstring (i.e. long description)
version, doc = get_version_and_doc(os.path.join(THIS_DIR, name, '__init__.py'))
if os.path.isfile(os.path.join(THIS_DIR, 'README.md')):
doc = get_readme_as_rst(os.path.join(THIS_DIR, 'README.md'))
# Install resources (e.g. phosphor.js)
get_all_resources()
## Setup
setup(
name=name,
version=version,
author='Flexx contributors',
author_email='[email protected]',
license='(new) BSD',
url='http://flexx.readthedocs.io',
download_url='https://pypi.python.org/pypi/flexx',
keywords="ui design, GUI, web, runtime, pyscript, events, properties",
description=description,
long_description=doc,
platforms='any',
provides=[name],
python_requires='>=3.5',
install_requires=['tornado', 'pscript>=0.7.0', 'webruntime>=0.5.6', 'dialite>=0.5.2'],
packages=package_tree('flexx') + package_tree('flexxamples'),
package_dir={'flexx': 'flexx', 'flexxamples': 'flexxamples'},
package_data={name: ['resources/*']},
entry_points={'console_scripts': ['flexx = flexx.__main__:main'], },
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Topic :: Software Development :: User Interfaces',
'Topic :: Internet :: WWW/HTTP',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: JavaScript',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
|
import copy
import httplib2
import pytest
from homeassistant.components.google import (
CONF_CAL_ID,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_DEVICE_ID,
CONF_ENTITIES,
CONF_NAME,
CONF_TRACK,
DEVICE_SCHEMA,
SERVICE_SCAN_CALENDARS,
do_setup,
)
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.template import DATE_STR_FORMAT
from homeassistant.setup import async_setup_component
from homeassistant.util import slugify
import homeassistant.util.dt as dt_util
from tests.async_mock import Mock, patch
from tests.common import async_mock_service
GOOGLE_CONFIG = {CONF_CLIENT_ID: "client_id", CONF_CLIENT_SECRET: "client_secret"}
TEST_ENTITY = "calendar.we_are_we_are_a_test_calendar"
TEST_ENTITY_NAME = "We are, we are, a... Test Calendar"
TEST_EVENT = {
"summary": "Test All Day Event",
"start": {},
"end": {},
"location": "Test Cases",
"description": "test event",
"kind": "calendar#event",
"created": "2016-06-23T16:37:57.000Z",
"transparency": "transparent",
"updated": "2016-06-24T01:57:21.045Z",
"reminders": {"useDefault": True},
"organizer": {
"email": "[email protected]",
"displayName": "Organizer Name",
"self": True,
},
"sequence": 0,
"creator": {
"email": "[email protected]",
"displayName": "Organizer Name",
"self": True,
},
"id": "_c8rinwq863h45qnucyoi43ny8",
"etag": '"2933466882090000"',
"htmlLink": "https://www.google.com/calendar/event?eid=*******",
"iCalUID": "[email protected]",
"status": "confirmed",
}
def get_calendar_info(calendar):
"""Convert data from Google into DEVICE_SCHEMA."""
calendar_info = DEVICE_SCHEMA(
{
CONF_CAL_ID: calendar["id"],
CONF_ENTITIES: [
{
CONF_TRACK: calendar["track"],
CONF_NAME: calendar["summary"],
CONF_DEVICE_ID: slugify(calendar["summary"]),
}
],
}
)
return calendar_info
@pytest.fixture(autouse=True)
def mock_google_setup(hass, test_calendar):
"""Mock the google set up functions."""
hass.loop.run_until_complete(async_setup_component(hass, "group", {"group": {}}))
calendar = get_calendar_info(test_calendar)
calendars = {calendar[CONF_CAL_ID]: calendar}
patch_google_auth = patch(
"homeassistant.components.google.do_authentication", side_effect=do_setup
)
patch_google_load = patch(
"homeassistant.components.google.load_config", return_value=calendars
)
patch_google_services = patch("homeassistant.components.google.setup_services")
async_mock_service(hass, "google", SERVICE_SCAN_CALENDARS)
with patch_google_auth, patch_google_load, patch_google_services:
yield
@pytest.fixture(autouse=True)
def mock_http(hass):
"""Mock the http component."""
hass.http = Mock()
@pytest.fixture(autouse=True)
def set_time_zone():
"""Set the time zone for the tests."""
# Set our timezone to CST/Regina so we can check calculations
# This keeps UTC-6 all year round
dt_util.set_default_time_zone(dt_util.get_time_zone("America/Regina"))
yield
dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
@pytest.fixture(name="google_service")
def mock_google_service():
"""Mock google service."""
patch_google_service = patch(
"homeassistant.components.google.calendar.GoogleCalendarService"
)
with patch_google_service as mock_service:
yield mock_service
async def test_all_day_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
week_from_today = dt_util.dt.date.today() + dt_util.dt.timedelta(days=7)
end_event = week_from_today + dt_util.dt.timedelta(days=1)
event = copy.deepcopy(TEST_EVENT)
start = week_from_today.isoformat()
end = end_event.isoformat()
event["start"]["date"] = start
event["end"]["date"] = end
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_OFF
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event["summary"],
"all_day": True,
"offset_reached": False,
"start_time": week_from_today.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
async def test_future_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
one_hour_from_now = dt_util.now() + dt_util.dt.timedelta(minutes=30)
end_event = one_hour_from_now + dt_util.dt.timedelta(minutes=60)
start = one_hour_from_now.isoformat()
end = end_event.isoformat()
event = copy.deepcopy(TEST_EVENT)
event["start"]["dateTime"] = start
event["end"]["dateTime"] = end
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_OFF
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event["summary"],
"all_day": False,
"offset_reached": False,
"start_time": one_hour_from_now.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
async def test_in_progress_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() - dt_util.dt.timedelta(minutes=30)
end_event = middle_of_event + dt_util.dt.timedelta(minutes=60)
start = middle_of_event.isoformat()
end = end_event.isoformat()
event = copy.deepcopy(TEST_EVENT)
event["start"]["dateTime"] = start
event["end"]["dateTime"] = end
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_ON
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event["summary"],
"all_day": False,
"offset_reached": False,
"start_time": middle_of_event.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
async def test_offset_in_progress_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() + dt_util.dt.timedelta(minutes=14)
end_event = middle_of_event + dt_util.dt.timedelta(minutes=60)
start = middle_of_event.isoformat()
end = end_event.isoformat()
event_summary = "Test Event in Progress"
event = copy.deepcopy(TEST_EVENT)
event["start"]["dateTime"] = start
event["end"]["dateTime"] = end
event["summary"] = f"{event_summary} !!-15"
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_OFF
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event_summary,
"all_day": False,
"offset_reached": True,
"start_time": middle_of_event.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
@pytest.mark.skip
async def test_all_day_offset_in_progress_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
tomorrow = dt_util.dt.date.today() + dt_util.dt.timedelta(days=1)
end_event = tomorrow + dt_util.dt.timedelta(days=1)
start = tomorrow.isoformat()
end = end_event.isoformat()
event_summary = "Test All Day Event Offset In Progress"
event = copy.deepcopy(TEST_EVENT)
event["start"]["date"] = start
event["end"]["date"] = end
event["summary"] = f"{event_summary} !!-25:0"
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_OFF
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event_summary,
"all_day": True,
"offset_reached": True,
"start_time": tomorrow.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
async def test_all_day_offset_event(hass, mock_next_event):
"""Test that we can create an event trigger on device."""
tomorrow = dt_util.dt.date.today() + dt_util.dt.timedelta(days=2)
end_event = tomorrow + dt_util.dt.timedelta(days=1)
start = tomorrow.isoformat()
end = end_event.isoformat()
offset_hours = 1 + dt_util.now().hour
event_summary = "Test All Day Event Offset"
event = copy.deepcopy(TEST_EVENT)
event["start"]["date"] = start
event["end"]["date"] = end
event["summary"] = f"{event_summary} !!-{offset_hours}:0"
mock_next_event.return_value.event = event
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == STATE_OFF
assert dict(state.attributes) == {
"friendly_name": TEST_ENTITY_NAME,
"message": event_summary,
"all_day": True,
"offset_reached": False,
"start_time": tomorrow.strftime(DATE_STR_FORMAT),
"end_time": end_event.strftime(DATE_STR_FORMAT),
"location": event["location"],
"description": event["description"],
}
async def test_update_error(hass, google_service):
"""Test that the calendar handles a server error."""
google_service.return_value.get = Mock(
side_effect=httplib2.ServerNotFoundError("unit test")
)
assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG})
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY)
assert state.name == TEST_ENTITY_NAME
assert state.state == "off"
|
import logging
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_SUPPORTED_MODES, DATA_COORDINATOR, DATA_INFO, DOMAIN
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
CM_TO_HA_STATE = {
"heat": HVAC_MODE_HEAT,
"cool": HVAC_MODE_COOL,
"auto": HVAC_MODE_HEAT_COOL,
"dry": HVAC_MODE_DRY,
"fan": HVAC_MODE_FAN_ONLY,
}
HA_STATE_TO_CM = {value: key for key, value in CM_TO_HA_STATE.items()}
FAN_MODES = ["low", "med", "high", "auto"]
_LOGGER = logging.getLogger(__name__)
def _build_entity(coordinator, unit_id, unit, supported_modes, info):
_LOGGER.debug("Found device %s", unit_id)
return CoolmasterClimate(coordinator, unit_id, unit, supported_modes, info)
async def async_setup_entry(hass, config_entry, async_add_devices):
"""Set up the CoolMasterNet climate platform."""
supported_modes = config_entry.data.get(CONF_SUPPORTED_MODES)
info = hass.data[DOMAIN][config_entry.entry_id][DATA_INFO]
coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR]
all_devices = [
_build_entity(coordinator, unit_id, unit, supported_modes, info)
for (unit_id, unit) in coordinator.data.items()
]
async_add_devices(all_devices)
class CoolmasterClimate(CoordinatorEntity, ClimateEntity):
"""Representation of a coolmaster climate device."""
def __init__(self, coordinator, unit_id, unit, supported_modes, info):
"""Initialize the climate device."""
super().__init__(coordinator)
self._unit_id = unit_id
self._unit = unit
self._hvac_modes = supported_modes
self._info = info
@callback
def _handle_coordinator_update(self):
self._unit = self.coordinator.data[self._unit_id]
super()._handle_coordinator_update()
@property
def device_info(self):
"""Return device info for this device."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "CoolAutomation",
"model": "CoolMasterNet",
"sw_version": self._info["version"],
}
@property
def unique_id(self):
"""Return unique ID for this device."""
return self._unit_id
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def name(self):
"""Return the name of the climate device."""
return self.unique_id
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self._unit.temperature_unit == "celsius":
return TEMP_CELSIUS
return TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
return self._unit.temperature
@property
def target_temperature(self):
"""Return the temperature we are trying to reach."""
return self._unit.thermostat
@property
def hvac_mode(self):
"""Return hvac target hvac state."""
mode = self._unit.mode
is_on = self._unit.is_on
if not is_on:
return HVAC_MODE_OFF
return CM_TO_HA_STATE[mode]
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return self._hvac_modes
@property
def fan_mode(self):
"""Return the fan setting."""
return self._unit.fan_speed
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return FAN_MODES
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is not None:
_LOGGER.debug("Setting temp of %s to %s", self.unique_id, str(temp))
self._unit = await self._unit.set_thermostat(temp)
self.async_write_ha_state()
async def async_set_fan_mode(self, fan_mode):
"""Set new fan mode."""
_LOGGER.debug("Setting fan mode of %s to %s", self.unique_id, fan_mode)
self._unit = await self._unit.set_fan_speed(fan_mode)
self.async_write_ha_state()
async def async_set_hvac_mode(self, hvac_mode):
"""Set new operation mode."""
_LOGGER.debug("Setting operation mode of %s to %s", self.unique_id, hvac_mode)
if hvac_mode == HVAC_MODE_OFF:
await self.async_turn_off()
else:
self._unit = await self._unit.set_mode(HA_STATE_TO_CM[hvac_mode])
await self.async_turn_on()
async def async_turn_on(self):
"""Turn on."""
_LOGGER.debug("Turning %s on", self.unique_id)
self._unit = await self._unit.turn_on()
self.async_write_ha_state()
async def async_turn_off(self):
"""Turn off."""
_LOGGER.debug("Turning %s off", self.unique_id)
self._unit = await self._unit.turn_off()
self.async_write_ha_state()
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
import xmltodict
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "http://www.zillow.com/webservice/GetZestimate.htm"
ATTRIBUTION = "Data provided by Zillow.com"
CONF_ZPID = "zpid"
DEFAULT_NAME = "Zestimate"
NAME = "zestimate"
ZESTIMATE = f"{DEFAULT_NAME}:{NAME}"
ICON = "mdi:home-variant"
ATTR_AMOUNT = "amount"
ATTR_CHANGE = "amount_change_30_days"
ATTR_CURRENCY = "amount_currency"
ATTR_LAST_UPDATED = "amount_last_updated"
ATTR_VAL_HI = "valuation_range_high"
ATTR_VAL_LOW = "valuation_range_low"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ZPID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
SCAN_INTERVAL = timedelta(minutes=30)
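# Illustrative configuration sketch (hypothetical values, not part of the
# original module): the PLATFORM_SCHEMA above is typically satisfied by a
# configuration.yaml entry along these lines:
#
#     sensor:
#       - platform: zestimate
#         api_key: YOUR_ZWS_ID
#         zpid:
#           - "12345678"
#         name: Zestimate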
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Zestimate sensor."""
name = config.get(CONF_NAME)
properties = config[CONF_ZPID]
sensors = []
for zpid in properties:
params = {"zws-id": config[CONF_API_KEY]}
params["zpid"] = zpid
sensors.append(ZestimateDataSensor(name, params))
add_entities(sensors, True)
class ZestimateDataSensor(Entity):
"""Implementation of a Zestimate sensor."""
def __init__(self, name, params):
"""Initialize the sensor."""
self._name = name
self.params = params
self.data = None
self.address = None
self._state = None
@property
def unique_id(self):
"""Return the ZPID."""
return self.params["zpid"]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self.address}"
@property
def state(self):
"""Return the state of the sensor."""
try:
return round(float(self._state), 1)
except (TypeError, ValueError):
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
attributes = {}
if self.data is not None:
attributes = self.data
attributes["address"] = self.address
attributes[ATTR_ATTRIBUTION] = ATTRIBUTION
return attributes
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and update the states."""
try:
response = requests.get(_RESOURCE, params=self.params, timeout=5)
data = response.content.decode("utf-8")
data_dict = xmltodict.parse(data).get(ZESTIMATE)
error_code = int(data_dict["message"]["code"])
if error_code != 0:
_LOGGER.error("The API returned: %s", data_dict["message"]["text"])
return
except requests.exceptions.ConnectionError:
_LOGGER.error("Unable to retrieve data from %s", _RESOURCE)
return
data = data_dict["response"][NAME]
details = {}
if "amount" in data and data["amount"] is not None:
details[ATTR_AMOUNT] = data["amount"]["#text"]
details[ATTR_CURRENCY] = data["amount"]["@currency"]
if "last-updated" in data and data["last-updated"] is not None:
details[ATTR_LAST_UPDATED] = data["last-updated"]
if "valueChange" in data and data["valueChange"] is not None:
details[ATTR_CHANGE] = int(data["valueChange"]["#text"])
if "valuationRange" in data and data["valuationRange"] is not None:
details[ATTR_VAL_HI] = int(data["valuationRange"]["high"]["#text"])
details[ATTR_VAL_LOW] = int(data["valuationRange"]["low"]["#text"])
self.address = data_dict["response"]["address"]["street"]
self.data = details
if self.data is not None:
self._state = self.data[ATTR_AMOUNT]
else:
self._state = None
_LOGGER.error("Unable to parase Zestimate data from response")
|
from matchzoo.data_pack import DataPack
from .units import Vocabulary
from .build_unit_from_data_pack import build_unit_from_data_pack
from .units import BertVocabulary
def build_vocab_unit(
data_pack: DataPack,
mode: str = 'both',
verbose: int = 1
) -> Vocabulary:
"""
Build a :class:`preprocessor.units.Vocabulary` given `data_pack`.
The `data_pack` should be preprocessed beforehand, and each item in
`text_left` and `text_right` columns of the `data_pack` should be a list
of tokens.
:param data_pack: The :class:`DataPack` to build vocabulary upon.
:param mode: One of 'left', 'right', and 'both', to determine the source
data for building the :class:`VocabularyUnit`.
:param verbose: Verbosity.
:return: A built vocabulary unit.
"""
return build_unit_from_data_pack(
unit=Vocabulary(),
data_pack=data_pack,
mode=mode,
flatten=True, verbose=verbose
)
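# Usage sketch (illustrative, not from the original module): `tokenized_pack`
# is a hypothetical DataPack whose `text_left`/`text_right` columns already
# hold lists of tokens, as the docstring above requires.
#
#     vocab_unit = build_vocab_unit(tokenized_pack, mode='both')
#     term_index = vocab_unit.state['term_index']  # token -> integer index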
def built_bert_vocab_unit(vocab_path: str) -> BertVocabulary:
"""
Build a :class:`preprocessor.units.BertVocabulary` given `vocab_path`.
:param vocab_path: bert vocabulary path.
:return: A built vocabulary unit.
"""
vocab_unit = BertVocabulary(pad_value='[PAD]', oov_value='[UNK]')
vocab_unit.fit(vocab_path)
return vocab_unit
|
import numpy as np
import matchzoo as mz
from matchzoo.data_generator.callbacks import Callback
class Histogram(Callback):
"""
Generate data with matching histogram.
:param embedding_matrix: The embedding matrix used to generate the
matching histogram.
:param bin_size: The number of bins in the histogram.
:param hist_mode: The mode of the :class:`MatchingHistogramUnit`, one of
`CH`, `NH`, and `LCH`.
"""
def __init__(
self,
embedding_matrix: np.ndarray,
bin_size: int = 30,
hist_mode: str = 'CH',
):
"""Init."""
self._match_hist_unit = mz.preprocessors.units.MatchingHistogram(
bin_size=bin_size,
embedding_matrix=embedding_matrix,
normalize=True,
mode=hist_mode
)
def on_batch_unpacked(self, x, y):
"""Insert `match_histogram` to `x`."""
x['match_histogram'] = _build_match_histogram(x, self._match_hist_unit)
def _trunc_text(input_text: list, length: list) -> list:
"""
Truncate the input text according to the given lengths.
:param input_text: The input text to be truncated.
:param length: The lengths used to truncate the text.
:return: The truncated text.
"""
return [row[:length[idx]] for idx, row in enumerate(input_text)]
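# Worked example (illustrative): _trunc_text([[1, 2, 3], [4, 5, 6]], [2, 1])
# keeps the first two tokens of the first row and the first token of the
# second row, returning [[1, 2], [4]].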
def _build_match_histogram(
x: dict,
match_hist_unit: mz.preprocessors.units.MatchingHistogram
) -> np.ndarray:
"""
Generate the matching histogram for the input.
:param x: The input `dict`.
:param match_hist_unit: The histogram unit :class:`MatchingHistogramUnit`.
:return: The matching histogram.
"""
match_hist = []
text_left = x['text_left'].tolist()
text_right = _trunc_text(x['text_right'].tolist(),
x['length_right'].tolist())
for pair in zip(text_left, text_right):
match_hist.append(match_hist_unit.transform(list(pair)))
return np.asarray(match_hist)
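# Usage sketch (illustrative, not from the original module): the callback is
# intended to be attached to a data generator so each unpacked batch gains a
# 'match_histogram' field; `embedding_matrix` is a hypothetical
# (vocab_size, embedding_dim) numpy array.
#
#     histogram_callback = Histogram(embedding_matrix, bin_size=30,
#                                    hist_mode='LCH')
#     # e.g. mz.DataGenerator(data_pack, callbacks=[histogram_callback])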
|
import base64
import getpass
import logging
import bson
import pytest as pytest
from .. import arctic as m
from ..chunkstore.chunkstore import CHUNK_STORE_TYPE
from ..store.bitemporal_store import BitemporalStore
from ..tickstore.tickstore import TICK_STORE_TYPE
logger = logging.getLogger(__name__)
@pytest.fixture(scope="function")
def mongo_host(mongo_server):
return str(mongo_server.hostname) + ":" + str(mongo_server.port)
@pytest.fixture(scope="function")
def arctic(mongo_server):
logger.info('arctic.fixtures: arctic init()')
mongo_server.api.drop_database('arctic')
mongo_server.api.drop_database('arctic_{}'.format(getpass.getuser()))
arctic = m.Arctic(mongo_host=mongo_server.api)
# Do not add global libraries here: use specific fixtures below.
# Remember, for testing it does not usually matter what your libraries are called.
return arctic
# An Arctic instance which allows reads to hit the secondary
@pytest.fixture(scope="function")
def arctic_secondary(mongo_server, arctic):
arctic = m.Arctic(mongo_host=mongo_server.api, allow_secondary=True)
return arctic
@pytest.fixture(scope="function")
def multicolumn_store_with_uncompressed_write(mongo_server):
"""
The database state created by this fixture is equivalent to the following operations using arctic 1.40
or earlier:
arctic.initialize_library('arctic_test.TEST', m.VERSION_STORE, segment='month')
library = arctic.get_library('arctic_test.TEST')
df = pd.DataFrame([[1,2], [3,4]], index=['x','y'], columns=[['a','w'], ['a','v']])
library.write('pandas', df)
Different from newer versions, the last write creates an uncompressed chunk.
"""
mongo_server.api.drop_database('arctic_test')
library_name = 'arctic_test.TEST'
arctic = m.Arctic(mongo_host=mongo_server.api)
arctic.initialize_library(library_name, m.VERSION_STORE, segment='month')
db = mongo_server.api.arctic_test
db.TEST.insert_many([
{
'parent': [bson.ObjectId('5ad0dc065c911d1188b512d8')],
'data': bson.Binary(b'\x11\x00\x00\x002x\x01\x00\x01\x00\x80\x02\x00\x00\x00\x00\x00\x00\x00', 0),
'symbol': 'pandas',
'sha': bson.Binary(b'\xaa\\`\x0e\xc2D-\xc1_\xf7\xfd\x12\xfa\xd2\x17\x05`\x00\x98\xe2', 0),
'compressed': True,
'_id': bson.ObjectId('5ad0dc067934ecad404070be'),
'segment': 0
},
{
'parent': [bson.ObjectId('5ad0dc065c911d1188b512d8')],
'data': bson.Binary(b'y\x03\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00', 0),
'symbol': 'pandas',
'sha': bson.Binary(b'\xfe=WQ\xb5\xfdL\xb7\xcavd\x85o\x04]\x04\xdb\xa8]3', 0),
'compressed': False,
'_id': bson.ObjectId('5ad0dc077934ecad404070bf'),
'segment': 1
}
])
db.TEST.ARCTIC.update_one({"_id": "ARCTIC_META"}, {"$set": {"_id": "ARCTIC_META", "TYPE": "VersionStore", "QUOTA": 10737418240}})
db.TEST.version_nums.insert_one({'symbol': 'pandas', '_id': bson.ObjectId('5ad0dc067934ecad404070bd'), 'version': 2})
db.TEST.versions.insert_many([
{
'append_count': 0,
'dtype_metadata': {
'index': ['index'],
'columns': ["('a', 'a')", "('w', 'v')"]
},
'segment_count': 1,
'dtype': '[(\'index\', \'S1\'), ("(\'a\', \'a\')", \'<i8\'), ("(\'w\', \'v\')", \'<i8\')]',
'symbol': 'pandas',
'up_to': 1,
'metadata': None,
'sha': bson.Binary(b'\xf2\x15h\x9d\x925\x95\xa5\x0e\x95J\xc4x\xfc\xfc\xd5\x80\xe0\x1d\xef', 0),
'shape': [-1],
'version': 1,
'base_sha': bson.Binary(b'\xf2\x15h\x9d\x925\x95\xa5\x0e\x95J\xc4x\xfc\xfc\xd5\x80\xe0\x1d\xef', 0),
'_id': bson.ObjectId('5ad0dc065c911d1188b512d8'),
'type': 'pandasdf',
'append_size': 0
},
{
'append_count': 1,
'dtype_metadata': {
'index': ['index'],
'columns': ["('a', 'a')", "('w', 'v')"]
},
'segment_count': 2,
'sha': bson.Binary(b'1\x83[ZO\xec\x080D\x80f\xe4@\xe4\xd3\x94yG\xe2\x08', 0),
'dtype': '[(\'index\', \'S1\'), ("(\'a\', \'a\')", \'<i8\'), ("(\'w\', \'v\')", \'<i8\')]',
'symbol': 'pandas',
'up_to': 2,
'metadata': None,
'base_version_id': bson.ObjectId('5ad0dc065c911d1188b512d8'),
'shape': [-1],
'version': 2,
'base_sha': bson.Binary(b'\xf2\x15h\x9d\x925\x95\xa5\x0e\x95J\xc4x\xfc\xfc\xd5\x80\xe0\x1d\xef', 0),
'_id': bson.ObjectId('5ad0dc075c911d1188b512d9'),
'type': 'pandasdf',
'append_size': 17
}
])
return {'symbol': 'pandas', 'store': arctic.get_library('arctic_test.TEST')}
@pytest.fixture(scope="function")
def ndarray_store_with_uncompressed_write(mongo_server):
"""
The database state created by this fixture is equivalent to the following operations using arctic 1.40
or earlier:
arctic.initialize_library('arctic_test.TEST', m.VERSION_STORE, segment='month')
library = arctic.get_library('arctic_test.TEST')
arr = np.arange(2).astype([('abc', 'int64')])
library.write('MYARR', arr[:1])
library.write('MYARR', arr)
Different from newer versions, the last write creates an uncompressed chunk.
"""
mongo_server.api.drop_database('arctic_test')
library_name = 'arctic_test.TEST'
arctic = m.Arctic(mongo_host=mongo_server.api)
arctic.initialize_library(library_name, m.VERSION_STORE, segment='month')
db = mongo_server.api.arctic_test
db.TEST.insert_many([
{
"_id": bson.ObjectId("5ad0742ca0949de6727cf994"),
"segment": 0,
"sha": bson.Binary(base64.b64decode("Fk+quqPVSDfaajYJkOAvnDyXtGQ="), 0),
"symbol": "MYARR",
"data": bson.Binary(base64.b64decode("CAAAAIAAAAAAAAAAAA=="), 0),
"compressed": True,
"parent": [bson.ObjectId("5ad0742c5c911d4d80ee2ea3")]
},
{
"_id": bson.ObjectId("5ad0742ca0949de6727cf995"),
"sha": bson.Binary(base64.b64decode("eqpp8VOJBttTz0j5H+QGtOQ+r44="), 0),
"symbol": "MYARR",
"segment": 1,
"data": bson.Binary(base64.b64decode("AQAAAAAAAAA="), 0),
"compressed": False,
"parent": [bson.ObjectId("5ad0742c5c911d4d80ee2ea3")]
}
])
db.TEST.ARCTIC.update_one({"_id": "ARCTIC_META"}, {"$set": {"_id": "ARCTIC_META", "TYPE": "VersionStore", "QUOTA": 10737418240}})
db.TEST.version_nums.insert_one({"_id": bson.ObjectId("5ad0742ca0949de6727cf993"), "symbol": "MYARR", "version": 2})
db.TEST.versions.insert_many([
{
"_id": bson.ObjectId("5ad0742c5c911d4d80ee2ea3"),
"append_count": 0,
"dtype_metadata": {},
"segment_count": 1,
"dtype": "[('abc', '<i8')]",
"symbol": "MYARR",
"up_to": 1,
"append_size": 0,
"sha": bson.Binary(base64.b64decode("Bf5AV1MWbxJVWefJrFWGVPEHx+k="), 0),
"shape": [-1],
"version": 1,
"base_sha": bson.Binary(base64.b64decode("Bf5AV1MWbxJVWefJrFWGVPEHx+k="), 0),
"type": "ndarray",
"metadata": None
},
{
"_id": bson.ObjectId("5ad0742c5c911d4d80ee2ea4"),
"append_count": 1,
"dtype_metadata": {},
"segment_count": 2,
"base_version_id": bson.ObjectId("5ad0742c5c911d4d80ee2ea3"),
"dtype": "[('abc', '<i8')]",
"symbol": "MYARR",
"up_to": 2,
"append_size": 8,
"sha": bson.Binary(base64.b64decode("Ax7oBxVFw1/9wKog2gfOLjbOVD8="), 0),
"shape": [-1],
"version": 2,
"base_sha": bson.Binary(base64.b64decode("Bf5AV1MWbxJVWefJrFWGVPEHx+k="), 0),
"type": "ndarray",
"metadata": None
}
])
return {'symbol': 'MYARR', 'store': arctic.get_library('arctic_test.TEST')}
@pytest.fixture(scope="function")
def library_name():
return 'test.TEST'
@pytest.fixture(scope="function")
def user_library_name():
return "{}.TEST".format(getpass.getuser())
@pytest.fixture(scope="function")
def overlay_library_name():
return "test.OVERLAY"
@pytest.fixture(scope="function")
def library(arctic, library_name):
# Add a single test library
arctic.initialize_library(library_name, m.VERSION_STORE, segment='month')
return arctic.get_library(library_name)
@pytest.fixture(scope="function")
def bitemporal_library(arctic, library_name):
arctic.initialize_library(library_name, m.VERSION_STORE, segment='month')
return BitemporalStore(arctic.get_library(library_name))
@pytest.fixture(scope="function")
def library_secondary(arctic_secondary, library_name):
arctic_secondary.initialize_library(library_name, m.VERSION_STORE, segment='month')
return arctic_secondary.get_library(library_name)
@pytest.fixture(scope="function")
def user_library(arctic, user_library_name):
arctic.initialize_library(user_library_name, m.VERSION_STORE, segment='month')
return arctic.get_library(user_library_name)
@pytest.fixture(scope="function")
def overlay_library(arctic, overlay_library_name):
""" Overlay library fixture, returns a pair of libs, read-write: ${name} and read-only: ${name}_RAW
"""
# Call _overlay_library (not the fixture) to avoid:
# RemovedInPytest4Warning: Fixture overlay_library called directly. Fixtures are not meant to be called directly
return _overlay_library(arctic, overlay_library_name)
def _overlay_library(arctic, overlay_library_name):
rw_name = overlay_library_name
ro_name = '{}_RAW'.format(overlay_library_name)
arctic.initialize_library(rw_name, m.VERSION_STORE, segment='year')
arctic.initialize_library(ro_name, m.VERSION_STORE, segment='year')
return arctic.get_library(rw_name), arctic.get_library(ro_name)
@pytest.fixture(scope="function")
def tickstore_lib(arctic, library_name):
# Call _tickstore_lib (not the fixture) to avoid:
# RemovedInPytest4Warning: Fixture tickstore_lib called directly. Fixtures are not meant to be called directly
return _tickstore_lib(arctic, library_name)
def _tickstore_lib(arctic, library_name):
arctic.initialize_library(library_name, TICK_STORE_TYPE)
return arctic.get_library(library_name)
@pytest.fixture(scope="function")
def chunkstore_lib(arctic, library_name):
arctic.initialize_library(library_name, CHUNK_STORE_TYPE)
return arctic.get_library(library_name)
@pytest.fixture(scope="function")
def ms_lib(arctic, library_name):
arctic.initialize_library(library_name, m.METADATA_STORE)
return arctic.get_library(library_name)
|
import logging
from typing import Any, Callable, Dict, List, Optional
import attr
from huawei_lte_api.enums.cradle import ConnectionStatusEnum
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from . import HuaweiLteBaseEntity
from .const import (
DOMAIN,
KEY_MONITORING_CHECK_NOTIFICATIONS,
KEY_MONITORING_STATUS,
KEY_WLAN_WIFI_FEATURE_SWITCH,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up from config entry."""
router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]]
entities: List[Entity] = []
if router.data.get(KEY_MONITORING_STATUS):
entities.append(HuaweiLteMobileConnectionBinarySensor(router))
entities.append(HuaweiLteWifiStatusBinarySensor(router))
entities.append(HuaweiLteWifi24ghzStatusBinarySensor(router))
entities.append(HuaweiLteWifi5ghzStatusBinarySensor(router))
if router.data.get(KEY_MONITORING_CHECK_NOTIFICATIONS):
entities.append(HuaweiLteSmsStorageFullBinarySensor(router))
async_add_entities(entities, True)
@attr.s
class HuaweiLteBaseBinarySensor(HuaweiLteBaseEntity, BinarySensorEntity):
"""Huawei LTE binary sensor device base class."""
key: str
item: str
_raw_state: Optional[str] = attr.ib(init=False, default=None)
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return False
@property
def _device_unique_id(self) -> str:
return f"{self.key}.{self.item}"
async def async_added_to_hass(self) -> None:
"""Subscribe to needed data on add."""
await super().async_added_to_hass()
self.router.subscriptions[self.key].add(f"{BINARY_SENSOR_DOMAIN}/{self.item}")
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from needed data on remove."""
await super().async_will_remove_from_hass()
self.router.subscriptions[self.key].remove(
f"{BINARY_SENSOR_DOMAIN}/{self.item}"
)
async def async_update(self) -> None:
"""Update state."""
try:
value = self.router.data[self.key][self.item]
except KeyError:
value = None
_LOGGER.debug("%s[%s] not in data", self.key, self.item)
if value is None:
self._raw_state = value
self._available = False
else:
self._raw_state = str(value)
self._available = True
CONNECTION_STATE_ATTRIBUTES = {
str(ConnectionStatusEnum.CONNECTING): "Connecting",
str(ConnectionStatusEnum.DISCONNECTING): "Disconnecting",
str(ConnectionStatusEnum.CONNECT_FAILED): "Connect failed",
str(ConnectionStatusEnum.CONNECT_STATUS_NULL): "Status not available",
str(ConnectionStatusEnum.CONNECT_STATUS_ERROR): "Status error",
}
@attr.s
class HuaweiLteMobileConnectionBinarySensor(HuaweiLteBaseBinarySensor):
"""Huawei LTE mobile connection binary sensor."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_MONITORING_STATUS
self.item = "ConnectionStatus"
@property
def _entity_name(self) -> str:
return "Mobile connection"
@property
def is_on(self) -> bool:
"""Return whether the binary sensor is on."""
return bool(
self._raw_state
and int(self._raw_state)
in (ConnectionStatusEnum.CONNECTED, ConnectionStatusEnum.DISCONNECTING)
)
@property
def assumed_state(self) -> bool:
"""Return True if real state is assumed, not known."""
return not self._raw_state or int(self._raw_state) not in (
ConnectionStatusEnum.CONNECT_FAILED,
ConnectionStatusEnum.CONNECTED,
ConnectionStatusEnum.DISCONNECTED,
)
@property
def icon(self) -> str:
"""Return mobile connectivity sensor icon."""
return "mdi:signal" if self.is_on else "mdi:signal-off"
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return True
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Get additional attributes related to connection status."""
attributes = super().device_state_attributes
if self._raw_state in CONNECTION_STATE_ATTRIBUTES:
if attributes is None:
attributes = {}
attributes["additional_state"] = CONNECTION_STATE_ATTRIBUTES[
self._raw_state
]
return attributes
class HuaweiLteBaseWifiStatusBinarySensor(HuaweiLteBaseBinarySensor):
"""Huawei LTE WiFi status binary sensor base class."""
@property
def is_on(self) -> bool:
"""Return whether the binary sensor is on."""
return self._raw_state is not None and int(self._raw_state) == 1
@property
def assumed_state(self) -> bool:
"""Return True if real state is assumed, not known."""
return self._raw_state is None
@property
def icon(self) -> str:
"""Return WiFi status sensor icon."""
return "mdi:wifi" if self.is_on else "mdi:wifi-off"
@attr.s
class HuaweiLteWifiStatusBinarySensor(HuaweiLteBaseWifiStatusBinarySensor):
"""Huawei LTE WiFi status binary sensor."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_MONITORING_STATUS
self.item = "WifiStatus"
@property
def _entity_name(self) -> str:
return "WiFi status"
@attr.s
class HuaweiLteWifi24ghzStatusBinarySensor(HuaweiLteBaseWifiStatusBinarySensor):
"""Huawei LTE 2.4GHz WiFi status binary sensor."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_WLAN_WIFI_FEATURE_SWITCH
self.item = "wifi24g_switch_enable"
@property
def _entity_name(self) -> str:
return "2.4GHz WiFi status"
@attr.s
class HuaweiLteWifi5ghzStatusBinarySensor(HuaweiLteBaseWifiStatusBinarySensor):
"""Huawei LTE 5GHz WiFi status binary sensor."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_WLAN_WIFI_FEATURE_SWITCH
self.item = "wifi5g_enabled"
@property
def _entity_name(self) -> str:
return "5GHz WiFi status"
@attr.s
class HuaweiLteSmsStorageFullBinarySensor(HuaweiLteBaseBinarySensor):
"""Huawei LTE SMS storage full binary sensor."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_MONITORING_CHECK_NOTIFICATIONS
self.item = "SmsStorageFull"
@property
def _entity_name(self) -> str:
return "SMS storage full"
@property
def is_on(self) -> bool:
"""Return whether the binary sensor is on."""
return self._raw_state is not None and int(self._raw_state) != 0
@property
def assumed_state(self) -> bool:
"""Return True if real state is assumed, not known."""
return self._raw_state is None
@property
def icon(self) -> str:
"""Return WiFi status sensor icon."""
return "mdi:email-alert" if self.is_on else "mdi:email-off"
|
from collections import deque
import io
import logging
import time
import av
from .const import (
MAX_MISSING_DTS,
MAX_TIMESTAMP_GAP,
MIN_SEGMENT_DURATION,
PACKETS_TO_WAIT_FOR_AUDIO,
STREAM_TIMEOUT,
)
from .core import Segment, StreamBuffer
_LOGGER = logging.getLogger(__name__)
def create_stream_buffer(stream_output, video_stream, audio_stream, sequence):
"""Create a new StreamBuffer."""
segment = io.BytesIO()
container_options = (
stream_output.container_options(sequence)
if stream_output.container_options
else {}
)
output = av.open(
segment,
mode="w",
format=stream_output.format,
container_options={
"video_track_timescale": str(int(1 / video_stream.time_base)),
**container_options,
},
)
vstream = output.add_stream(template=video_stream)
# Check if audio is requested
astream = None
if audio_stream and audio_stream.name in stream_output.audio_codecs:
astream = output.add_stream(template=audio_stream)
return StreamBuffer(segment, output, vstream, astream)
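# Worked example (illustrative): for a typical RTSP video stream with
# time_base == 1/90000, str(int(1 / video_stream.time_base)) above evaluates
# to "90000", so the muxed segment uses a 90 kHz video track timescale.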
def stream_worker(hass, stream, quit_event):
"""Handle consuming streams and restart keepalive streams."""
wait_timeout = 0
while not quit_event.wait(timeout=wait_timeout):
start_time = time.time()
try:
_stream_worker_internal(hass, stream, quit_event)
except av.error.FFmpegError: # pylint: disable=c-extension-no-member
_LOGGER.exception("Stream connection failed: %s", stream.source)
if not stream.keepalive or quit_event.is_set():
break
# To avoid excessive restarts, don't restart faster than once every 40 seconds.
wait_timeout = max(40 - (time.time() - start_time), 0)
_LOGGER.debug(
"Restarting stream worker in %d seconds: %s",
wait_timeout,
stream.source,
)
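# Worked example (illustrative) of the restart back-off above: if the worker
# failed after running for 10 seconds, wait_timeout becomes max(40 - 10, 0) = 30
# and the next attempt starts 30 seconds later; after 60 seconds of runtime,
# wait_timeout is 0 and the stream restarts immediately.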
def _stream_worker_internal(hass, stream, quit_event):
"""Handle consuming streams."""
container = av.open(stream.source, options=stream.options, timeout=STREAM_TIMEOUT)
try:
video_stream = container.streams.video[0]
except (KeyError, IndexError):
_LOGGER.error("Stream has no video")
container.close()
return
try:
audio_stream = container.streams.audio[0]
except (KeyError, IndexError):
audio_stream = None
# These formats need aac_adtstoasc bitstream filter, but auto_bsf not
# compatible with empty_moov and manual bitstream filters not in PyAV
if container.format.name in {"hls", "mpegts"}:
audio_stream = None
# Some audio streams do not have a profile and throw errors when remuxing
if audio_stream and audio_stream.profile is None:
audio_stream = None
# Iterator for demuxing
container_packets = None
# The presentation timestamps of the first packet in each stream we receive
# Use to adjust before muxing or outputting, but we don't adjust internally
first_pts = {}
# The decoder timestamps of the latest packet in each stream we processed
last_dts = None
# Keep track of consecutive packets without a dts to detect end of stream.
missing_dts = 0
# Holds the buffers for each stream provider
outputs = None
# Keep track of the number of segments we've processed
sequence = 0
# The video pts at the beginning of the segment
segment_start_pts = None
# Because of problems 1 and 2 below, we need to store the first few packets and replay them
initial_packets = deque()
# Have to work around two problems with RTSP feeds in ffmpeg
# 1 - first frame has bad pts/dts https://trac.ffmpeg.org/ticket/5018
# 2 - seeking can be problematic https://trac.ffmpeg.org/ticket/7815
def peek_first_pts():
nonlocal first_pts, audio_stream, container_packets
missing_dts = 0
def empty_stream_dict():
return {
video_stream: None,
**({audio_stream: None} if audio_stream else {}),
}
try:
container_packets = container.demux((video_stream, audio_stream))
first_packet = empty_stream_dict()
first_pts = empty_stream_dict()
# Get to first video keyframe
while first_packet[video_stream] is None:
packet = next(container_packets)
if (
packet.dts is None
): # Allow MAX_MISSING_DTS packets with no dts, raise error on the next one
if missing_dts >= MAX_MISSING_DTS:
raise StopIteration(
f"Invalid data - got {MAX_MISSING_DTS+1} packets with missing DTS while initializing"
)
missing_dts += 1
continue
if packet.stream == video_stream and packet.is_keyframe:
first_packet[video_stream] = packet
initial_packets.append(packet)
# Get first_pts from subsequent frame to first keyframe
while any(
[pts is None for pts in {**first_packet, **first_pts}.values()]
) and (len(initial_packets) < PACKETS_TO_WAIT_FOR_AUDIO):
packet = next(container_packets)
if (
packet.dts is None
): # Allow MAX_MISSING_DTS packets with no dts, raise error on the next one
if missing_dts >= MAX_MISSING_DTS:
raise StopIteration(
f"Invalid data - got {MAX_MISSING_DTS+1} packets with missing DTS while initializing"
)
missing_dts += 1
continue
if (
first_packet[packet.stream] is None
): # actually video already found above so only for audio
if packet.is_keyframe:
first_packet[packet.stream] = packet
else: # Discard leading non-keyframes
continue
else: # This is the second frame to calculate first_pts from
if first_pts[packet.stream] is None:
first_pts[packet.stream] = packet.dts - packet.duration
first_packet[packet.stream].pts = first_pts[packet.stream]
first_packet[packet.stream].dts = first_pts[packet.stream]
initial_packets.append(packet)
if audio_stream and first_packet[audio_stream] is None:
_LOGGER.warning(
"Audio stream not found"
) # Some streams declare an audio stream and never send any packets
del first_pts[audio_stream]
audio_stream = None
except (av.AVError, StopIteration) as ex:
_LOGGER.error(
"Error demuxing stream while finding first packet: %s", str(ex)
)
finalize_stream()
return False
return True
def initialize_segment(video_pts):
"""Reset some variables and initialize outputs for each segment."""
nonlocal outputs, sequence, segment_start_pts
# Clear outputs and increment sequence
outputs = {}
sequence += 1
segment_start_pts = video_pts
for stream_output in stream.outputs.values():
if video_stream.name not in stream_output.video_codecs:
continue
buffer = create_stream_buffer(
stream_output, video_stream, audio_stream, sequence
)
outputs[stream_output.name] = (
buffer,
{video_stream: buffer.vstream, audio_stream: buffer.astream},
)
def mux_video_packet(packet):
# adjust pts and dts before muxing
packet.pts -= first_pts[video_stream]
packet.dts -= first_pts[video_stream]
# mux packets to each buffer
for buffer, output_streams in outputs.values():
# Assign the packet to the new stream & mux
packet.stream = output_streams[video_stream]
buffer.output.mux(packet)
def mux_audio_packet(packet):
# almost the same as muxing video but add extra check
# adjust pts and dts before muxing
packet.pts -= first_pts[audio_stream]
packet.dts -= first_pts[audio_stream]
for buffer, output_streams in outputs.values():
# Assign the packet to the new stream & mux
if output_streams.get(audio_stream):
packet.stream = output_streams[audio_stream]
buffer.output.mux(packet)
def finalize_stream():
if not stream.keepalive:
# End of stream, clear listeners and stop thread
for fmt in stream.outputs:
hass.loop.call_soon_threadsafe(stream.outputs[fmt].put, None)
if not peek_first_pts():
container.close()
return
last_dts = {k: v - 1 for k, v in first_pts.items()}
initialize_segment(first_pts[video_stream])
while not quit_event.is_set():
try:
if len(initial_packets) > 0:
packet = initial_packets.popleft()
else:
packet = next(container_packets)
if packet.dts is None:
# Allow MAX_MISSING_DTS consecutive packets without dts. Terminate the stream on the next one.
if missing_dts >= MAX_MISSING_DTS:
raise StopIteration(
f"No dts in {MAX_MISSING_DTS+1} consecutive packets"
)
missing_dts += 1
continue
missing_dts = 0
except (av.AVError, StopIteration) as ex:
_LOGGER.error("Error demuxing stream: %s", str(ex))
finalize_stream()
break
# Discard packet if dts is not monotonic
if packet.dts <= last_dts[packet.stream]:
if (
packet.time_base * (last_dts[packet.stream] - packet.dts)
> MAX_TIMESTAMP_GAP
):
_LOGGER.warning(
"Timestamp overflow detected: last dts %s, dts = %s, resetting stream",
last_dts[packet.stream],
packet.dts,
)
finalize_stream()
break
continue
# Check for end of segment
if packet.stream == video_stream and packet.is_keyframe:
segment_duration = (packet.pts - segment_start_pts) * packet.time_base
if segment_duration >= MIN_SEGMENT_DURATION:
# Save segment to outputs
for fmt, (buffer, _) in outputs.items():
buffer.output.close()
if stream.outputs.get(fmt):
hass.loop.call_soon_threadsafe(
stream.outputs[fmt].put,
Segment(
sequence,
buffer.segment,
segment_duration,
),
)
# Reinitialize
initialize_segment(packet.pts)
# Update last_dts processed
last_dts[packet.stream] = packet.dts
# mux packets
if packet.stream == video_stream:
mux_video_packet(packet) # mutates packet timestamps
else:
mux_audio_packet(packet) # mutates packet timestamps
# Close stream
for buffer, _ in outputs.values():
buffer.output.close()
container.close()
|
import json
from flask import Response
def build_json(result):
return Response(response=json.dumps(result),
status=200,
mimetype="application/json")
def sent_json(result):
return Response(response=result,
status=200,
mimetype="application/json")
def sent_ok():
return Response(response=json.dumps({"result": True}),
status=200,
mimetype="application/json")
def sent_plain_text(result):
return Response(response=result.strip(), status=200, mimetype="text")
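# Usage sketch (illustrative, not part of the original module): these helpers
# are meant to be returned from Flask view functions; `app` is a hypothetical
# Flask application defined elsewhere.
#
#     @app.route("/health")
#     def health():
#         return sent_ok()
#
#     @app.route("/items")
#     def items():
#         return build_json({"items": [1, 2, 3]})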
|
from homeassistant.const import (
ATTR_ATTRIBUTION,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
LENGTH_KILOMETERS,
PERCENTAGE,
SPEED_MILES_PER_HOUR,
TEMP_CELSIUS,
UV_INDEX,
)
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
ATTRIBUTION,
CONDITION_CLASSES,
DOMAIN,
METOFFICE_COORDINATOR,
METOFFICE_DATA,
METOFFICE_NAME,
VISIBILITY_CLASSES,
VISIBILITY_DISTANCE_CLASSES,
)
ATTR_LAST_UPDATE = "last_update"
ATTR_SENSOR_ID = "sensor_id"
ATTR_SITE_ID = "site_id"
ATTR_SITE_NAME = "site_name"
# Sensor types are defined as:
# variable -> [0]title, [1]device_class, [2]units, [3]icon, [4]enabled_by_default
SENSOR_TYPES = {
"name": ["Station Name", None, None, "mdi:label-outline", False],
"weather": [
"Weather",
None,
None,
"mdi:weather-sunny", # but will adapt to current conditions
True,
],
"temperature": ["Temperature", DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, None, True],
"feels_like_temperature": [
"Feels Like Temperature",
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
None,
False,
],
"wind_speed": [
"Wind Speed",
None,
SPEED_MILES_PER_HOUR,
"mdi:weather-windy",
True,
],
"wind_direction": ["Wind Direction", None, None, "mdi:compass-outline", False],
"wind_gust": ["Wind Gust", None, SPEED_MILES_PER_HOUR, "mdi:weather-windy", False],
"visibility": ["Visibility", None, None, "mdi:eye", False],
"visibility_distance": [
"Visibility Distance",
None,
LENGTH_KILOMETERS,
"mdi:eye",
False,
],
"uv": ["UV Index", None, UV_INDEX, "mdi:weather-sunny-alert", True],
"precipitation": [
"Probability of Precipitation",
None,
PERCENTAGE,
"mdi:weather-rainy",
True,
],
"humidity": ["Humidity", DEVICE_CLASS_HUMIDITY, PERCENTAGE, None, False],
}
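# Worked example (illustrative) of the positional layout documented above:
# SENSOR_TYPES["temperature"] == ["Temperature", DEVICE_CLASS_TEMPERATURE,
# TEMP_CELSIUS, None, True], so [0] is the title, [1] the device class,
# [2] the unit, [3] the icon (none here) and [4] the enabled-by-default flag.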
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigType, async_add_entities
) -> None:
"""Set up the Met Office weather sensor platform."""
hass_data = hass.data[DOMAIN][entry.entry_id]
async_add_entities(
[
MetOfficeCurrentSensor(entry.data, hass_data, sensor_type)
for sensor_type in SENSOR_TYPES
],
False,
)
class MetOfficeCurrentSensor(Entity):
"""Implementation of a Met Office current weather condition sensor."""
def __init__(self, entry_data, hass_data, sensor_type):
"""Initialize the sensor."""
self._data = hass_data[METOFFICE_DATA]
self._coordinator = hass_data[METOFFICE_COORDINATOR]
self._type = sensor_type
self._name = f"{hass_data[METOFFICE_NAME]} {SENSOR_TYPES[self._type][0]}"
self._unique_id = f"{SENSOR_TYPES[self._type][0]}_{self._data.latitude}_{self._data.longitude}"
self.metoffice_site_id = None
self.metoffice_site_name = None
self.metoffice_now = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique of the sensor."""
return self._unique_id
@property
def state(self):
"""Return the state of the sensor."""
value = None
if self._type == "visibility_distance" and hasattr(
self.metoffice_now, "visibility"
):
value = VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value)
if self._type == "visibility" and hasattr(self.metoffice_now, "visibility"):
value = VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value)
elif self._type == "weather" and hasattr(self.metoffice_now, self._type):
value = [
k
for k, v in CONDITION_CLASSES.items()
if self.metoffice_now.weather.value in v
][0]
elif hasattr(self.metoffice_now, self._type):
value = getattr(self.metoffice_now, self._type)
if not isinstance(value, int):
value = value.value
return value
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSOR_TYPES[self._type][2]
@property
def icon(self):
"""Return the icon for the entity card."""
value = SENSOR_TYPES[self._type][3]
if self._type == "weather":
value = self.state
if value is None:
value = "sunny"
elif value == "partlycloudy":
value = "partly-cloudy"
value = f"mdi:weather-{value}"
return value
@property
def device_class(self):
"""Return the device class of the sensor."""
return SENSOR_TYPES[self._type][1]
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_LAST_UPDATE: self.metoffice_now.date if self.metoffice_now else None,
ATTR_SENSOR_ID: self._type,
ATTR_SITE_ID: self.metoffice_site_id if self.metoffice_site_id else None,
ATTR_SITE_NAME: self.metoffice_site_name
if self.metoffice_site_name
else None,
}
async def async_added_to_hass(self) -> None:
"""Set up a listener and load data."""
self.async_on_remove(
self._coordinator.async_add_listener(self._update_callback)
)
self._update_callback()
async def async_update(self):
"""Schedule a custom update via the common entity update service."""
await self._coordinator.async_request_refresh()
@callback
def _update_callback(self) -> None:
"""Load data from integration."""
self.metoffice_site_id = self._data.site_id
self.metoffice_site_name = self._data.site_name
self.metoffice_now = self._data.now
self.async_write_ha_state()
@property
def should_poll(self) -> bool:
"""Entities do not individually poll."""
return False
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return SENSOR_TYPES[self._type][4]
@property
def available(self):
"""Return if state is available."""
return self.metoffice_site_id is not None and self.metoffice_now is not None
|
import copy
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import sample
from perfkitbenchmarker.dpb_service import BaseDpbService
BENCHMARK_NAME = 'dpb_cluster_boot_benchmark'
BENCHMARK_CONFIG = """
dpb_cluster_boot_benchmark:
description: Run dpb cluster boot on dataproc and emr
dpb_service:
service_type: dataproc
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-1
boot_disk_size: 50
AWS:
machine_type: m5.large
disk_spec:
GCP:
disk_type: nodisk
AWS:
disk_size: 1500
disk_type: gp2
worker_count: 2
"""
flags.DEFINE_enum('dpb_cluster_boot_fs_type', 'ephemeral',
['ephemeral', 'persistent'],
'File System to use in dpb cluster boot benchmark')
flags.DEFINE_enum(
'dpb_cluster_boot_fs', BaseDpbService.GCS_FS,
[BaseDpbService.GCS_FS, BaseDpbService.S3_FS, BaseDpbService.HDFS_FS],
'File System to use in the dpb cluster boot benchmark')
FLAGS = flags.FLAGS
SUPPORTED_DPB_BACKENDS = [dpb_service.DATAPROC, dpb_service.EMR]
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Args:
benchmark_config: Config needed to run the dpb cluster boot benchmark.
Raises:
perfkitbenchmarker.errors.Config.InvalidValue: On encountering invalid
configuration.
"""
dpb_service_type = benchmark_config.dpb_service.service_type
if dpb_service_type not in SUPPORTED_DPB_BACKENDS:
raise errors.Config.InvalidValue(
'Invalid backend for dpb cluster boot. Not in: {}'.format(
str(SUPPORTED_DPB_BACKENDS)))
def Prepare(benchmark_spec):
del benchmark_spec # Unused.
def Run(benchmark_spec):
"""Runs the dpb cluster boot benchmark.
The benchmark computes and publishes the time taken from the issuance of
cluster creation command till the cluster is ready to accept jobs for
processing.
Args:
benchmark_spec: Spec needed to run the dpb cluster boot benchmark
Returns:
A list of samples, comprised of the dpb cluster boot latency in seconds.
"""
results = [] # list of the samples that will be returned
dpb_service_instance = benchmark_spec.dpb_service
metadata = copy.copy(benchmark_spec.dpb_service.GetMetadata())
logging.info('metadata %s ', str(metadata))
logging.info('Resource create_start_time %s ',
str(dpb_service_instance.create_start_time))
logging.info('Resource resource_ready_time %s ',
str(dpb_service_instance.resource_ready_time))
create_time = (
dpb_service_instance.resource_ready_time -
dpb_service_instance.create_start_time)
logging.info('create_time %s ', str(create_time))
results.append(
sample.Sample('dpb_cluster_create_time', create_time, 'seconds',
metadata))
return results
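# Worked example (illustrative, hypothetical timestamps): if the service
# reported create_start_time == 100.0 and resource_ready_time == 145.5, then
# create_time is 45.5 and the published sample would be
# sample.Sample('dpb_cluster_create_time', 45.5, 'seconds', metadata).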
def Cleanup(benchmark_spec):
"""Cleans up the dpb cluster boot benchmark.
Args:
benchmark_spec: Spec needed to run the dpb cluster boot benchmark
"""
del benchmark_spec # Unused.
|
from __future__ import division
import numpy as np
import chainer
import chainer.functions as F
from chainer import initializers
import chainer.links as L
from chainercv.links import Conv2DBNActiv
from chainercv.links.model.resnet.resblock import ResBlock
from chainercv.links import PickableSequentialChain
from chainercv import utils
# RGB order
# This is channel wise mean of mean image distributed at
# https://github.com/KaimingHe/deep-residual-networks
_imagenet_mean = np.array(
[123.15163084, 115.90288257, 103.0626238],
dtype=np.float32)[:, np.newaxis, np.newaxis]
class ResNet(PickableSequentialChain):
"""Base class for ResNet architecture.
This is a pickable sequential link.
The network can choose output layers from set of all
intermediate layers.
The attribute :obj:`pick` is the names of the layers that are going
to be picked by :meth:`__call__`.
The attribute :obj:`layer_names` is the names of all layers
that can be picked.
Examples:
>>> model = ResNet50()
# By default, __call__ returns a probability score (after Softmax).
>>> prob = model(imgs)
>>> model.pick = 'res5'
# This is layer res5
>>> res5 = model(imgs)
>>> model.pick = ['res5', 'fc6']
>>> # These are layers res5 and fc6.
>>> res5, fc6 = model(imgs)
.. seealso::
:class:`chainercv.links.model.PickableSequentialChain`
When :obj:`pretrained_model` is the path of a pre-trained chainer model
serialized as a :obj:`.npz` file in the constructor, this chain model
automatically initializes all the parameters with it.
When a string in the prespecified set is provided, a pretrained model is
loaded from weights distributed on the Internet.
The list of pretrained models supported are as follows:
* :obj:`imagenet`: Loads weights trained with ImageNet. \
When :obj:`arch=='he'`, the weights distributed \
at `Model Zoo \
<https://github.com/BVLC/caffe/wiki/Model-Zoo>`_ \
are used.
Args:
n_layer (int): The number of layers.
n_class (int): The number of classes. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the number of classes used to train the pretrained model
is used. Otherwise, the number of classes in ILSVRC 2012 dataset
is used.
pretrained_model (string): The destination of the pre-trained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
mean (numpy.ndarray): A mean value. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the mean value used to train the pretrained model is used.
Otherwise, the mean value calculated from ILSVRC 2012 dataset
is used.
initialW (callable): Initializer for the weights of
convolution kernels.
fc_kwargs (dict): Keyword arguments passed to initialize
the :class:`chainer.links.Linear`.
arch (string): If :obj:`fb`, use Facebook ResNet
architecture. When :obj:`he`, use the architecture presented
by `the original ResNet paper \
<https://arxiv.org/pdf/1512.03385.pdf>`_.
This option changes where to apply strided convolution.
The default value is :obj:`fb`.
"""
_blocks = {
50: [3, 4, 6, 3],
101: [3, 4, 23, 3],
152: [3, 8, 36, 3]
}
_models = {
'fb': {
50: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet50_imagenet_trained_2018_11_26.npz',
'cv2': True,
},
},
101: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet101_imagenet_trained_2018_11_26.npz',
'cv2': True,
},
},
152: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet152_imagenet_trained_2018_11_26.npz',
'cv2': True,
},
},
},
'he': {
50: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet50_imagenet_converted_2018_03_07.npz'
},
},
101: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet101_imagenet_converted_2018_03_07.npz'
},
},
152: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'resnet152_imagenet_converted_2018_03_07.npz'
},
}
}
}
def __init__(self, n_layer,
n_class=None,
pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}, arch='fb'):
if arch == 'fb':
stride_first = False
conv1_no_bias = True
elif arch == 'he':
stride_first = True
# Kaiming He uses bias only for ResNet50
conv1_no_bias = n_layer != 50
else:
raise ValueError('arch is expected to be one of [\'he\', \'fb\']')
blocks = self._blocks[n_layer]
param, path = utils.prepare_pretrained_model(
{'n_class': n_class, 'mean': mean},
pretrained_model, self._models[arch][n_layer],
{'n_class': 1000, 'mean': _imagenet_mean})
self.mean = param['mean']
if initialW is None:
initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
if 'initialW' not in fc_kwargs:
fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
if pretrained_model:
# Random sampling by the initializers is time-consuming and the values
# are overwritten by the pretrained weights below anyway, so employ a
# zero initializer for faster construction.
initialW = initializers.constant.Zero()
fc_kwargs['initialW'] = initializers.constant.Zero()
kwargs = {'initialW': initialW, 'stride_first': stride_first}
super(ResNet, self).__init__()
with self.init_scope():
self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=conv1_no_bias,
initialW=initialW)
self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
self.res2 = ResBlock(blocks[0], None, 64, 256, 1, **kwargs)
self.res3 = ResBlock(blocks[1], None, 128, 512, 2, **kwargs)
self.res4 = ResBlock(blocks[2], None, 256, 1024, 2, **kwargs)
self.res5 = ResBlock(blocks[3], None, 512, 2048, 2, **kwargs)
self.pool5 = lambda x: F.average(x, axis=(2, 3))
self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
self.prob = F.softmax
if path:
chainer.serializers.load_npz(path, self)
class ResNet50(ResNet):
"""ResNet-50 Network.
Please consult the documentation for :class:`ResNet`.
.. seealso::
:class:`chainercv.links.model.resnet.ResNet`
"""
def __init__(self, n_class=None, pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}, arch='fb'):
super(ResNet50, self).__init__(
50, n_class, pretrained_model,
mean, initialW, fc_kwargs, arch)
class ResNet101(ResNet):
"""ResNet-101 Network.
Please consult the documentation for :class:`ResNet`.
.. seealso::
:class:`chainercv.links.model.resnet.ResNet`
"""
def __init__(self, n_class=None, pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}, arch='fb'):
super(ResNet101, self).__init__(
101, n_class, pretrained_model,
mean, initialW, fc_kwargs, arch)
class ResNet152(ResNet):
"""ResNet-152 Network.
Please consult the documentation for :class:`ResNet`.
.. seealso::
:class:`chainercv.links.model.resnet.ResNet`
"""
def __init__(self, n_class=None, pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}, arch='fb'):
super(ResNet152, self).__init__(
152, n_class, pretrained_model,
mean, initialW, fc_kwargs, arch)
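# Usage sketch (illustrative, not part of the original module). It assumes
# chainercv is installed, the 'imagenet' weights can be downloaded, and `imgs`
# is a batch of preprocessed images of shape (N, 3, H, W):
#
#     model = ResNet50(pretrained_model='imagenet', arch='fb')
#     model.pick = ['res5', 'fc6']   # request intermediate features
#     res5, fc6 = model(imgs)        # instead of the default softmax scores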
|
import json
import logging
import string
import threading
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.alicloud import util
FLAGS = flags.FLAGS
DISK_TYPE = {
disk.STANDARD: 'cloud',
disk.REMOTE_SSD: 'cloud_ssd',
disk.PIOPS: 'cloud_efficiency',
disk.LOCAL: 'ephemeral_ssd',
disk.REMOTE_ESSD: 'cloud_essd',
}
class AliDisk(disk.BaseDisk):
"""Object representing an AliCloud Disk."""
_lock = threading.Lock()
vm_devices = {}
def __init__(self, disk_spec, zone):
super(AliDisk, self).__init__(disk_spec)
self.id = None
self.zone = zone
self.region = util.GetRegionByZone(self.zone)
self.attached_vm_id = None
def _Create(self):
"""Creates the disk."""
create_cmd = util.ALI_PREFIX + [
'ecs',
'CreateDisk',
'--RegionId %s' % self.region,
'--ZoneId %s' % self.zone,
'--Size %s' % self.disk_size,
'--DiskCategory %s' % DISK_TYPE[self.disk_type]]
create_cmd = util.GetEncodedCmd(create_cmd)
stdout, _, _ = vm_util.IssueCommand(create_cmd, raise_on_failure=False)
response = json.loads(stdout)
self.id = response['DiskId']
def _Delete(self):
"""Deletes the disk."""
delete_cmd = util.ALI_PREFIX + [
'ecs',
'DeleteDisk',
'--DiskId %s' % self.id]
logging.info('Deleting AliCloud disk %s. This may fail if the disk is not '
'yet detached, but will be retried.', self.id)
delete_cmd = util.GetEncodedCmd(delete_cmd)
vm_util.IssueRetryableCommand(delete_cmd)
def Attach(self, vm):
"""Attaches the disk to a VM.
Args:
vm: The AliVirtualMachine instance to which the disk will be attached.
"""
with self._lock:
self.attached_vm_id = vm.id
if self.attached_vm_id not in AliDisk.vm_devices:
AliDisk.vm_devices[self.attached_vm_id] = set(
string.ascii_lowercase[1:])
self.device_letter = min(AliDisk.vm_devices[self.attached_vm_id])
AliDisk.vm_devices[self.attached_vm_id].remove(self.device_letter)
attach_cmd = util.ALI_PREFIX + [
'ecs',
'AttachDisk',
'--InstanceId %s' % self.attached_vm_id,
'--DiskId %s' % self.id,
'--Device %s' % self.GetVirtualDevicePath()]
attach_cmd = util.GetEncodedCmd(attach_cmd)
vm_util.IssueRetryableCommand(attach_cmd)
def Detach(self):
"""Detaches the disk from a VM."""
detach_cmd = util.ALI_PREFIX + [
'ecs',
'DetachDisk',
'--InstanceId %s' % self.attached_vm_id,
'--DiskId %s' % self.id]
detach_cmd = util.GetEncodedCmd(detach_cmd)
vm_util.IssueRetryableCommand(detach_cmd)
with self._lock:
assert self.attached_vm_id in AliDisk.vm_devices
AliDisk.vm_devices[self.attached_vm_id].add(self.device_letter)
self.attached_vm_id = None
self.device_letter = None
def GetDevicePath(self):
"""Returns the path to the device inside the VM."""
return '/dev/vd%s' % self.device_letter
def GetVirtualDevicePath(self):
"""Returns the path to the device visible to console users."""
return '/dev/xvd%s' % self.device_letter
@vm_util.Retry(poll_interval=5, max_retries=30, log_errors=False)
def WaitForDiskStatus(self, status_list):
"""Waits until disk is attach to the instance"""
logging.info('Waits until the disk\'s status is one of statuses: %s',
status_list)
describe_cmd = util.ALI_PREFIX + [
'ecs',
'DescribeDisks',
'--RegionId %s' % self.region,
'--ZoneId %s' % self.zone,
'--DiskIds \'["%s"]\'' % self.id]
describe_cmd = util.GetEncodedCmd(describe_cmd)
stdout, _ = vm_util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
disks = response['Disks']['Disk']
assert len(disks) == 1
status = disks[0]['Status']
assert status in status_list
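# Illustration (not part of the original module): Attach() above hands out the
# lowest free device letter per VM, reserving 'a' for the system disk. A minimal
# standalone sketch of the same bookkeeping, using a fresh letter pool:
if __name__ == '__main__':
  free_letters = set(string.ascii_lowercase[1:])  # 'b'..'z', as in vm_devices
  letter = min(free_letters)                      # lowest free letter -> 'b'
  free_letters.remove(letter)
  print('/dev/vd%s' % letter)    # path seen inside the VM (GetDevicePath)
  print('/dev/xvd%s' % letter)   # path passed to AttachDisk (GetVirtualDevicePath)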
|
import sys
import urwid
class DialogExit(Exception):
pass
class DialogDisplay:
palette = [
('body','black','light gray', 'standout'),
('border','black','dark blue'),
('shadow','white','black'),
('selectable','black', 'dark cyan'),
('focus','white','dark blue','bold'),
('focustext','light gray','dark blue'),
]
def __init__(self, text, height, width, body=None):
width = int(width)
if width <= 0:
width = ('relative', 80)
height = int(height)
if height <= 0:
height = ('relative', 80)
self.body = body
if body is None:
# fill space with nothing
body = urwid.Filler(urwid.Divider(),'top')
self.frame = urwid.Frame( body, focus_part='footer')
if text is not None:
self.frame.header = urwid.Pile( [urwid.Text(text),
urwid.Divider()] )
w = self.frame
# pad area around listbox
w = urwid.Padding(w, ('fixed left',2), ('fixed right',2))
w = urwid.Filler(w, ('fixed top',1), ('fixed bottom',1))
w = urwid.AttrWrap(w, 'body')
# "shadow" effect
w = urwid.Columns( [w,('fixed', 2, urwid.AttrWrap(
urwid.Filler(urwid.Text(('border',' ')), "top")
,'shadow'))])
w = urwid.Frame( w, footer =
urwid.AttrWrap(urwid.Text(('border',' ')),'shadow'))
# outermost border area
w = urwid.Padding(w, 'center', width )
w = urwid.Filler(w, 'middle', height )
w = urwid.AttrWrap( w, 'border' )
self.view = w
def add_buttons(self, buttons):
l = []
for name, exitcode in buttons:
b = urwid.Button( name, self.button_press )
b.exitcode = exitcode
b = urwid.AttrWrap( b, 'selectable','focus' )
l.append( b )
self.buttons = urwid.GridFlow(l, 10, 3, 1, 'center')
self.frame.footer = urwid.Pile( [ urwid.Divider(),
self.buttons ], focus_item = 1)
def button_press(self, button):
raise DialogExit(button.exitcode)
def main(self):
self.loop = urwid.MainLoop(self.view, self.palette)
try:
self.loop.run()
except DialogExit as e:
return self.on_exit( e.args[0] )
def on_exit(self, exitcode):
return exitcode, ""
class InputDialogDisplay(DialogDisplay):
def __init__(self, text, height, width):
self.edit = urwid.Edit()
body = urwid.ListBox(urwid.SimpleListWalker([self.edit]))
body = urwid.AttrWrap(body, 'selectable','focustext')
DialogDisplay.__init__(self, text, height, width, body)
self.frame.set_focus('body')
def unhandled_key(self, size, k):
if k in ('up','page up'):
self.frame.set_focus('body')
if k in ('down','page down'):
self.frame.set_focus('footer')
if k == 'enter':
# pass enter to the "ok" button
self.frame.set_focus('footer')
self.view.keypress( size, k )
def on_exit(self, exitcode):
return exitcode, self.edit.get_edit_text()
class TextDialogDisplay(DialogDisplay):
def __init__(self, file, height, width):
l = []
# read the whole file (being slow, not lazy this time)
for line in open(file).readlines():
l.append( urwid.Text( line.rstrip() ))
body = urwid.ListBox(urwid.SimpleListWalker(l))
body = urwid.AttrWrap(body, 'selectable','focustext')
DialogDisplay.__init__(self, None, height, width, body)
def unhandled_key(self, size, k):
if k in ('up','page up','down','page down'):
self.frame.set_focus('body')
self.view.keypress( size, k )
self.frame.set_focus('footer')
class ListDialogDisplay(DialogDisplay):
def __init__(self, text, height, width, constr, items, has_default):
j = []
if has_default:
k, tail = 3, ()
else:
k, tail = 2, ("no",)
while items:
j.append( items[:k] + tail )
items = items[k:]
l = []
self.items = []
for tag, item, default in j:
w = constr( tag, default=="on" )
self.items.append(w)
w = urwid.Columns( [('fixed', 12, w),
urwid.Text(item)], 2 )
w = urwid.AttrWrap(w, 'selectable','focus')
l.append(w)
lb = urwid.ListBox(urwid.SimpleListWalker(l))
lb = urwid.AttrWrap( lb, "selectable" )
DialogDisplay.__init__(self, text, height, width, lb )
self.frame.set_focus('body')
def unhandled_key(self, size, k):
if k in ('up','page up'):
self.frame.set_focus('body')
if k in ('down','page down'):
self.frame.set_focus('footer')
if k == 'enter':
# pass enter to the "ok" button
self.frame.set_focus('footer')
self.buttons.set_focus(0)
self.view.keypress( size, k )
def on_exit(self, exitcode):
"""Print the tag of the item selected."""
if exitcode != 0:
return exitcode, ""
s = ""
for i in self.items:
if i.get_state():
s = i.get_label()
break
return exitcode, s
class CheckListDialogDisplay(ListDialogDisplay):
def on_exit(self, exitcode):
"""
Mimic dialog(1)'s --checklist exit.
Put each checked item in double quotes with a trailing space.
"""
if exitcode != 0:
return exitcode, ""
l = []
for i in self.items:
if i.get_state():
l.append(i.get_label())
return exitcode, "".join(['"'+tag+'" ' for tag in l])
class MenuItem(urwid.Text):
"""A custom widget for the --menu option"""
def __init__(self, label):
urwid.Text.__init__(self, label)
self.state = False
def selectable(self):
return True
def keypress(self,size,key):
if key == "enter":
self.state = True
raise DialogExit(0)
return key
def mouse_event(self,size,event,button,col,row,focus):
if event=='mouse release':
self.state = True
raise DialogExit(0)
return False
def get_state(self):
return self.state
def get_label(self):
text, attr = self.get_text()
return text
def do_checklist(text, height, width, list_height, *items):
def constr(tag, state):
return urwid.CheckBox(tag, state)
d = CheckListDialogDisplay( text, height, width, constr, items, True)
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_inputbox(text, height, width):
d = InputDialogDisplay( text, height, width )
d.add_buttons([ ("Exit", 0) ])
return d
def do_menu(text, height, width, menu_height, *items):
def constr(tag, state ):
return MenuItem(tag)
d = ListDialogDisplay(text, height, width, constr, items, False)
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_msgbox(text, height, width):
d = DialogDisplay( text, height, width )
d.add_buttons([ ("OK", 0) ])
return d
def do_radiolist(text, height, width, list_height, *items):
radiolist = []
def constr(tag, state, radiolist=radiolist):
return urwid.RadioButton(radiolist, tag, state)
d = ListDialogDisplay( text, height, width, constr, items, True )
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_textbox(file, height, width):
d = TextDialogDisplay( file, height, width )
d.add_buttons([ ("Exit", 0) ])
return d
def do_yesno(text, height, width):
d = DialogDisplay( text, height, width )
d.add_buttons([ ("Yes", 0), ("No", 1) ])
return d
MODES={ '--checklist': (do_checklist,
"text height width list-height [ tag item status ] ..."),
'--inputbox': (do_inputbox,
"text height width"),
'--menu': (do_menu,
"text height width menu-height [ tag item ] ..."),
'--msgbox': (do_msgbox,
"text height width"),
'--radiolist': (do_radiolist,
"text height width list-height [ tag item status ] ..."),
'--textbox': (do_textbox,
"file height width"),
'--yesno': (do_yesno,
"text height width"),
}
def show_usage():
"""
Display a helpful usage message.
"""
modelist = [(mode, help) for (mode, (fn, help)) in MODES.items()]
modelist.sort()
sys.stdout.write(
(__doc__ or "") +  # tolerate a missing module docstring
"\n".join(["%-15s %s"%(mode,help) for (mode,help) in modelist])
+ """
height and width may be set to 0 to auto-size.
list-height and menu-height are currently ignored.
status may be either on or off.
""" )
def main():
if len(sys.argv) < 2 or sys.argv[1] not in MODES:
show_usage()
return
# Create a DialogDisplay instance
fn, help = MODES[sys.argv[1]]
d = fn( * sys.argv[2:] )
# Run it
exitcode, exitstring = d.main()
# Exit
if exitstring:
sys.stderr.write(exitstring+"\n")
sys.exit(exitcode)
if __name__=="__main__":
main()
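# Example invocation (illustrative): running this script as, e.g.,
#   python dialog.py --yesno "Continue?" 0 0
# dispatches to do_yesno() via the MODES table; a height or width of 0 becomes
# ('relative', 80) so the dialog auto-sizes, and the process exit code is the
# exitcode of the pressed button (0 for "Yes", 1 for "No").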
|
import pytest
import voluptuous as vol
import homeassistant.components.media_player as mp
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_ENTITY_PICTURE,
ATTR_SUPPORTED_FEATURES,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.helpers.aiohttp_client import DATA_CLIENTSESSION
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
TEST_ENTITY_ID = "media_player.walkman"
@pytest.fixture(name="mock_media_seek")
def media_player_media_seek_fixture():
"""Mock demo YouTube player media seek."""
with patch(
"homeassistant.components.demo.media_player.DemoYoutubePlayer.media_seek",
autospec=True,
) as seek:
yield seek
async def test_source_select(hass):
"""Test the input source service."""
entity_id = "media_player.lounge_room"
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: None},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: "xbox"},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "xbox"
async def test_repeat_set(hass):
"""Test the repeat set service."""
entity_id = "media_player.walkman"
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_OFF
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_REPEAT_SET,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_MEDIA_REPEAT: mp.const.REPEAT_MODE_ALL},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_ALL
async def test_clear_playlist(hass):
"""Test clear playlist."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_CLEAR_PLAYLIST,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF
async def test_volume_services(hass):
"""Test the volume service."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: None},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: 0.5},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_DOWN,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.4
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_UP,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_MUTE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: None},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_MUTE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: True},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is True
async def test_turning_off_and_on(hass):
"""Test turn_on and turn_off."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF
assert not mp.is_on(hass, TEST_ENTITY_ID)
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TURN_ON,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
assert mp.is_on(hass, TEST_ENTITY_ID)
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TOGGLE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF
assert not mp.is_on(hass, TEST_ENTITY_ID)
async def test_playing_pausing(hass):
"""Test media_pause."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PAUSED
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PAUSED
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
async def test_prev_next_track(hass):
"""Test media_next_track and media_previous_track ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 1
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 3
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PREVIOUS_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
ent_id = "media_player.lounge_room"
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: ent_id},
blocking=True,
)
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 2
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PREVIOUS_TRACK,
{ATTR_ENTITY_ID: ent_id},
blocking=True,
)
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1
async def test_play_media(hass):
"""Test play_media ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
ent_id = "media_player.living_room"
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) is not None
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_PLAY_MEDIA,
{ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_CONTENT_ID: "some_id"},
blocking=True,
)
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) != "some_id"
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: ent_id,
mp.ATTR_MEDIA_CONTENT_TYPE: "youtube",
mp.ATTR_MEDIA_CONTENT_ID: "some_id",
},
blocking=True,
)
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) == "some_id"
async def test_seek(hass, mock_media_seek):
"""Test seek."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
ent_id = "media_player.living_room"
state = hass.states.get(ent_id)
assert state.attributes[ATTR_SUPPORTED_FEATURES] & mp.SUPPORT_SEEK
assert not mock_media_seek.called
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_SEEK,
{
ATTR_ENTITY_ID: ent_id,
mp.ATTR_MEDIA_SEEK_POSITION: None,
},
blocking=True,
)
assert not mock_media_seek.called
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_SEEK,
{
ATTR_ENTITY_ID: ent_id,
mp.ATTR_MEDIA_SEEK_POSITION: 100,
},
blocking=True,
)
assert mock_media_seek.called
async def test_media_image_proxy(hass, hass_client):
"""Test the media server image proxy server ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
fake_picture_data = "test.test"
class MockResponse:
"""Test response."""
def __init__(self):
"""Test response init."""
self.status = 200
self.headers = {"Content-Type": "sometype"}
async def read(self):
"""Test response read."""
return fake_picture_data.encode("ascii")
async def release(self):
"""Test response release."""
class MockWebsession:
"""Test websession."""
async def get(self, url):
"""Test websession get."""
return MockResponse()
def detach(self):
"""Test websession detach."""
hass.data[DATA_CLIENTSESSION] = MockWebsession()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
client = await hass_client()
req = await client.get(state.attributes.get(ATTR_ENTITY_PICTURE))
assert req.status == 200
assert await req.text() == fake_picture_data
|
import logging
from kalliope.core.ConfigurationManager import BrainLoader
from kalliope.core.Cortex import Cortex
from kalliope.core.HookManager import HookManager
from kalliope.core.Lifo.LifoManager import LifoManager
from kalliope.core.Models.MatchedSynapse import MatchedSynapse
from kalliope.core.OrderAnalyser import OrderAnalyser
logging.basicConfig()
logger = logging.getLogger("kalliope")
class SynapseNameNotFound(Exception):
"""
The Synapse has not been found
.. seealso: Synapse
"""
pass
class SynapseLauncher(object):
@classmethod
def start_synapse_by_list_name(cls, list_name, brain=None, overriding_parameter_dict=None, new_lifo=False):
"""
Start synapses by their names.
:param list_name: list of names of the synapses to launch
:param brain: Brain instance
:param overriding_parameter_dict: parameters to pass to the neurons
:param new_lifo: If True, ask the LifoManager to return a new lifo and not the singleton
"""
logger.debug("[SynapseLauncher] start_synapse_by_list_name called with synapse list: %s " % list_name)
if list_name:
if brain is None:
brain = BrainLoader().brain
if overriding_parameter_dict:
# this dict is used by signals to pass parameters to neurons;
# save it in temporary memory in case the user wants to persist it in kalliope memory
Cortex.add_parameters_from_order(overriding_parameter_dict)
# get all synapse objects
list_synapse_object_to_start = list()
for name in list_name:
synapse_to_start = brain.get_synapse_by_name(synapse_name=name)
if not synapse_to_start:
raise SynapseNameNotFound("[SynapseLauncher] The synapse name \"%s\" does not exist "
"in the brain file" % name)
if synapse_to_start.enabled:
list_synapse_object_to_start.append(synapse_to_start)
else:
    logger.debug("[SynapseLauncher] Synapse not activated: %s" % synapse_to_start)
# run the LIFO with all synapse
if new_lifo:
lifo_buffer = LifoManager.get_new_lifo()
else:
lifo_buffer = LifoManager.get_singleton_lifo()
list_synapse_to_process = list()
for synapse in list_synapse_object_to_start:
if synapse is not None:
new_matching_synapse = MatchedSynapse(matched_synapse=synapse,
matched_order=None,
user_order=None,
overriding_parameter=overriding_parameter_dict)
list_synapse_to_process.append(new_matching_synapse)
lifo_buffer.add_synapse_list_to_lifo(list_synapse_to_process)
return lifo_buffer.execute(is_api_call=True)
return None
@classmethod
def run_matching_synapse_from_order(cls, order_to_process, brain, settings, is_api_call=False):
"""
:param order_to_process: the spoken order sent by the user
:param brain: Brain object
:param settings: Settings object
:param is_api_call: if True, the current call comes from the API. This info must be known by the launched neurons
:return: list of matched synapses
"""
# get our singleton LIFO
lifo_buffer = LifoManager.get_singleton_lifo()
# if the LIFO is not empty, the current order is passed to the currently processing synapse as an answer
if len(lifo_buffer.lifo_list) > 0:
# the LIFO is not empty, this is an answer to a previous call
return lifo_buffer.execute(answer=order_to_process, is_api_call=is_api_call)
else: # the LIFO is empty, this is a new call
# get a list of matched synapse from the order
list_synapse_to_process = OrderAnalyser.get_matching_synapse(order=order_to_process, brain=brain)
if not list_synapse_to_process: # the order analyser returned us an empty list
return HookManager.on_order_not_found()
else:
HookManager.on_order_found()
lifo_buffer.add_synapse_list_to_lifo(list_synapse_to_process)
lifo_buffer.api_response.user_order = order_to_process
execdata = lifo_buffer.execute(is_api_call=is_api_call)
HookManager.on_processed_synapses()
return execdata
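# Usage sketch (illustrative, not part of the original module). Assuming
# kalliope is configured and the brain file defines a synapse named
# "say-hello", it could be launched programmatically with:
#
#     SynapseLauncher.start_synapse_by_list_name(
#         ["say-hello"], overriding_parameter_dict={"name": "John"})
#
# Disabled synapses are skipped and an unknown name raises SynapseNameNotFound.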
|
import unittest
import numpy as np
import chainer
from chainer.backends import cuda
from chainer.functions import relu
from chainer import testing
from chainer.testing import attr
from chainercv.links import Conv2DActiv
def _add_one(x):
return x + 1
@testing.parameterize(*testing.product({
'dilate': [1, 2],
'args_style': ['explicit', 'None', 'omit'],
'activ': ['relu', 'add_one', None]
}))
class TestConv2DActiv(unittest.TestCase):
in_channels = 1
out_channels = 1
ksize = 3
stride = 1
pad = 1
def setUp(self):
if self.activ == 'relu':
activ = relu
elif self.activ == 'add_one':
activ = _add_one
elif self.activ is None:
activ = None
self.x = np.random.uniform(
-1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
self.gy = np.random.uniform(
-1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)
# Convolution is the identity function.
initialW = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
dtype=np.float32).reshape((1, 1, 3, 3))
initial_bias = 0
if self.args_style == 'explicit':
self.l = Conv2DActiv(
self.in_channels, self.out_channels, self.ksize,
self.stride, self.pad, self.dilate,
initialW=initialW, initial_bias=initial_bias,
activ=activ)
elif self.args_style == 'None':
self.l = Conv2DActiv(
None, self.out_channels, self.ksize, self.stride, self.pad,
self.dilate, initialW=initialW, initial_bias=initial_bias,
activ=activ)
elif self.args_style == 'omit':
self.l = Conv2DActiv(
self.out_channels, self.ksize, stride=self.stride,
pad=self.pad, dilate=self.dilate, initialW=initialW,
initial_bias=initial_bias, activ=activ)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
y = self.l(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, self.l.xp.ndarray)
if self.dilate == 1:
_x_data = x_data
elif self.dilate == 2:
_x_data = x_data[:, :, 1:-1, 1:-1]
if self.activ == 'relu':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), np.maximum(cuda.to_cpu(_x_data), 0),
decimal=6
)
elif self.activ == 'add_one':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data) + 1,
decimal=6
)
elif self.activ is None:
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data),
decimal=6)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.l(x)
if self.dilate == 1:
y.grad = y_grad
elif self.dilate == 2:
y.grad = y_grad[:, :, 1:-1, 1:-1]
y.backward()
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.l.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
import numpy as np
def check_indices(indices):
"""Check indices parameter."""
if not isinstance(indices, tuple) or len(indices) != 2:
raise ValueError('indices must be a tuple of length 2')
if len(indices[0]) != len(indices[1]):
raise ValueError('Index arrays indices[0] and indices[1] must '
'have the same length')
return indices
def seed_target_indices(seeds, targets):
"""Generate indices parameter for seed based connectivity analysis.
Parameters
----------
seeds : array of int | int
Seed indices.
targets : array of int | int
Indices of signals for which to compute connectivity.
Returns
-------
indices : tuple of array
The indices parameter used for connectivity computation.
"""
# make them arrays
seeds = np.asarray((seeds,)).ravel()
targets = np.asarray((targets,)).ravel()
n_seeds = len(seeds)
n_targets = len(targets)
indices = (np.concatenate([np.tile(i, n_targets) for i in seeds]),
np.tile(targets, n_seeds))
return indices
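# Worked example (illustrative): seed_target_indices([0, 1], [2, 3, 4]) returns
#     (array([0, 0, 0, 1, 1, 1]), array([2, 3, 4, 2, 3, 4]))
# i.e. every seed is paired with every target, giving n_seeds * n_targets pairs
# in the two-array form that check_indices() above expects.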
def degree(connectivity, threshold_prop=0.2):
"""Compute the undirected degree of a connectivity matrix.
Parameters
----------
connectivity : ndarray, shape (n_nodes, n_nodes)
The connectivity matrix.
threshold_prop : float
The proportion of edges to keep in the graph before
computing the degree. The value should be between 0
and 1.
Returns
-------
degree : ndarray, shape (n_nodes,)
The computed degree.
Notes
-----
During thresholding, the symmetry of the connectivity matrix is
auto-detected based on :func:`numpy.allclose` of it with its transpose.
"""
connectivity = np.array(connectivity)
if connectivity.ndim != 2 or \
connectivity.shape[0] != connectivity.shape[1]:
raise ValueError('connectivity must have shape (n_nodes, n_nodes), '
'got %s' % (connectivity.shape,))
n_nodes = len(connectivity)
if np.allclose(connectivity, connectivity.T):
split = 2.
connectivity[np.tril_indices(n_nodes)] = 0
else:
split = 1.
threshold_prop = float(threshold_prop)
if not 0 < threshold_prop <= 1:
raise ValueError('threshold_prop must satisfy 0 < threshold_prop <= 1, got %s'
% (threshold_prop,))
degree = connectivity.ravel() # no need to copy because np.array does
degree[::n_nodes + 1] = 0.
n_keep = int(round((degree.size - len(connectivity)) *
threshold_prop / split))
degree[np.argsort(degree)[:-n_keep]] = 0
degree.shape = connectivity.shape
if split == 2:
degree += degree.T # normally unsafe, but we know where our zeros are
degree = np.sum(degree > 0, axis=0)
return degree
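# Illustrative check (not part of the original module): degree() keeps only the
# strongest `threshold_prop` fraction of off-diagonal connections and counts,
# for each node, how many surviving edges touch it. With one clearly dominant
# edge and threshold_prop=0.34, only the edge between nodes 0 and 1 survives:
if __name__ == '__main__':
    conn = np.array([[0., 0.9, 0.1],
                     [0.9, 0., 0.2],
                     [0.1, 0.2, 0.]])
    print(degree(conn, threshold_prop=0.34))  # expected output: [1 1 0]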
|
import diamond.collector
import json
import urllib2
METRICS_KEYS = ['sendPkgRate',
'recvPkgRate',
'sendAppendRequestCnt',
'recvAppendRequestCnt',
'sendBandwidthRate',
'recvBandwidthRate']
class EtcdCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(EtcdCollector,
self).get_default_config_help()
config_help.update({
'host': 'Hostname',
'port': 'Port (default is 2379)',
'timeout': 'Timeout per HTTP(s) call',
'use_tls': 'Use TLS/SSL or plain insecure HTTP (default is insecure)',
'ca_file': 'Only applies when use_tls=true. Path to CA certificate'
' file to use for server identity verification',
})
return config_help
def get_default_config(self):
config = super(EtcdCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 2379,
'path': 'etcd',
'timeout': 5,
'use_tls': False,
'ca_file': '',
})
return config
def __init__(self, *args, **kwargs):
super(EtcdCollector, self).__init__(*args, **kwargs)
def collect(self):
self.collect_self_metrics()
self.collect_store_metrics()
def collect_self_metrics(self):
metrics = self.get_self_metrics()
if 'state' in metrics and metrics['state'] == "StateLeader":
self.publish("self.is_leader", 1)
else:
self.publish("self.is_leader", 0)
for k in METRICS_KEYS:
if k not in metrics:
continue
v = metrics[k]
key = self.clean_up(k)
self.publish("self.%s" % key, v)
def collect_store_metrics(self):
metrics = self.get_store_metrics()
for k, v in metrics.iteritems():
key = self.clean_up(k)
self.publish("store.%s" % key, v)
def get_self_metrics(self):
return self.get_metrics("self")
def get_store_metrics(self):
return self.get_metrics("store")
def get_metrics(self, category):
try:
opts = {
'timeout': int(self.config['timeout']),
}
if self.config['use_tls']:
protocol = "https"
opts['cafile'] = self.config['ca_file']
else:
protocol = "http"
url = "%s://%s:%s/v2/stats/%s" % (protocol, self.config['host'],
self.config['port'], category)
return json.load(urllib2.urlopen(url, **opts))
except (urllib2.HTTPError, ValueError) as err:
self.log.error('Unable to read JSON response: %s' % err)
return {}
def clean_up(self, text):
return text.replace('/', '.')
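# Illustrative sketch (not part of the original collector): how get_metrics()
# above builds the stats URL from the default config values.
if __name__ == '__main__':
    cfg = {'host': 'localhost', 'port': 2379, 'use_tls': False}
    protocol = 'https' if cfg['use_tls'] else 'http'
    print("%s://%s:%s/v2/stats/%s" % (protocol, cfg['host'], cfg['port'], 'self'))
    # -> http://localhost:2379/v2/stats/self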
|
import copy as cp
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_equal)
import pytest
from scipy import linalg
from mne import (compute_proj_epochs, compute_proj_evoked, compute_proj_raw,
pick_types, read_events, Epochs, sensitivity_map,
read_source_estimate, compute_raw_covariance, create_info,
read_forward_solution, convert_forward_solution)
from mne.cov import regularize, compute_whitener
from mne.datasets import testing
from mne.io import read_raw_fif, RawArray
from mne.io.proj import (make_projector, activate_proj,
_needs_eeg_average_ref_proj)
from mne.preprocessing import maxwell_filter
from mne.proj import (read_proj, write_proj, make_eeg_average_ref_proj,
_has_eeg_average_ref_proj)
from mne.rank import _compute_rank_int
from mne.utils import run_tests_if_main
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_fname = op.join(base_dir, 'test-eve.fif')
proj_fname = op.join(base_dir, 'test-proj.fif')
proj_gz_fname = op.join(base_dir, 'test-proj.fif.gz')
bads_fname = op.join(base_dir, 'test_bads.txt')
sample_path = op.join(testing.data_path(download=False), 'MEG', 'sample')
fwd_fname = op.join(sample_path, 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
sensmap_fname = op.join(sample_path,
'sample_audvis_trunc-%s-oct-4-fwd-sensmap-%s.w')
eog_fname = op.join(sample_path, 'sample_audvis_eog-proj.fif')
ecg_fname = op.join(sample_path, 'sample_audvis_ecg-proj.fif')
def test_bad_proj():
"""Test dealing with bad projection application."""
raw = read_raw_fif(raw_fname, preload=True)
events = read_events(event_fname)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[2:18:3]
_check_warnings(raw, events, picks)
# still bad
raw.pick_channels([raw.ch_names[ii] for ii in picks])
_check_warnings(raw, events)
# "fixed"
raw.info.normalize_proj() # avoid projection warnings
_check_warnings(raw, events, count=0)
# eeg avg ref is okay
raw = read_raw_fif(raw_fname, preload=True).pick_types(meg=False, eeg=True)
raw.set_eeg_reference(projection=True)
_check_warnings(raw, events, count=0)
raw.info['bads'] = raw.ch_names[:10]
_check_warnings(raw, events, count=0)
raw = read_raw_fif(raw_fname)
pytest.raises(ValueError, raw.del_proj, 'foo')
n_proj = len(raw.info['projs'])
raw.del_proj(0)
assert_equal(len(raw.info['projs']), n_proj - 1)
raw.del_proj()
assert_equal(len(raw.info['projs']), 0)
# Ensure we deal with newer-style Neuromag projs properly; we were getting:
#
# Projection vector "PCA-v2" has magnitude 1.00 (should be unity),
# applying projector with 101/306 of the original channels available
# may be dangerous.
raw = read_raw_fif(raw_fname).crop(0, 1)
raw.set_eeg_reference(projection=True)
raw.info['bads'] = ['MEG 0111']
meg_picks = pick_types(raw.info, meg=True, exclude=())
ch_names = [raw.ch_names[pick] for pick in meg_picks]
for p in raw.info['projs'][:-1]:
data = np.zeros((1, len(ch_names)))
idx = [ch_names.index(ch_name) for ch_name in p['data']['col_names']]
data[:, idx] = p['data']['data']
p['data'].update(ncol=len(meg_picks), col_names=ch_names, data=data)
# smoke test for no warnings during reg
regularize(compute_raw_covariance(raw, verbose='error'), raw.info)
def _check_warnings(raw, events, picks=None, count=3):
"""Count warnings."""
with pytest.warns(None) as w:
Epochs(raw, events, dict(aud_l=1, vis_l=3),
-0.2, 0.5, picks=picks, preload=True, proj=True)
assert len(w) == count
assert all('dangerous' in str(ww.message) for ww in w)
@testing.requires_testing_data
def test_sensitivity_maps():
"""Test sensitivity map computation."""
fwd = read_forward_solution(fwd_fname)
fwd = convert_forward_solution(fwd, surf_ori=True)
projs = read_proj(eog_fname)
projs.extend(read_proj(ecg_fname))
decim = 6
for ch_type in ['eeg', 'grad', 'mag']:
w = read_source_estimate(sensmap_fname % (ch_type, 'lh')).data
stc = sensitivity_map(fwd, projs=None, ch_type=ch_type,
mode='free', exclude='bads')
assert_array_almost_equal(stc.data, w, decim)
assert stc.subject == 'sample'
# let's just make sure the others run
if ch_type == 'grad':
# fixed (2)
w = read_source_estimate(sensmap_fname % (ch_type, '2-lh')).data
stc = sensitivity_map(fwd, projs=None, mode='fixed',
ch_type=ch_type, exclude='bads')
assert_array_almost_equal(stc.data, w, decim)
if ch_type == 'mag':
# ratio (3)
w = read_source_estimate(sensmap_fname % (ch_type, '3-lh')).data
stc = sensitivity_map(fwd, projs=None, mode='ratio',
ch_type=ch_type, exclude='bads')
assert_array_almost_equal(stc.data, w, decim)
if ch_type == 'eeg':
# radiality (4), angle (5), remaining (6), and dampening (7)
modes = ['radiality', 'angle', 'remaining', 'dampening']
ends = ['4-lh', '5-lh', '6-lh', '7-lh']
for mode, end in zip(modes, ends):
w = read_source_estimate(sensmap_fname % (ch_type, end)).data
stc = sensitivity_map(fwd, projs=projs, mode=mode,
ch_type=ch_type, exclude='bads')
assert_array_almost_equal(stc.data, w, decim)
# test corner case for EEG
stc = sensitivity_map(fwd, projs=[make_eeg_average_ref_proj(fwd['info'])],
ch_type='eeg', exclude='bads')
# test corner case for projs being passed but no valid ones (#3135)
pytest.raises(ValueError, sensitivity_map, fwd, projs=None, mode='angle')
pytest.raises(RuntimeError, sensitivity_map, fwd, projs=[], mode='angle')
# test volume source space
fname = op.join(sample_path, 'sample_audvis_trunc-meg-vol-7-fwd.fif')
fwd = read_forward_solution(fname)
sensitivity_map(fwd)
def test_compute_proj_epochs(tmpdir):
"""Test SSP computation on epochs."""
tempdir = str(tmpdir)
event_id, tmin, tmax = 1, -0.2, 0.3
raw = read_raw_fif(raw_fname, preload=True)
events = read_events(event_fname)
bad_ch = 'MEG 2443'
picks = pick_types(raw.info, meg=True, eeg=False, stim=False, eog=False,
exclude=[])
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=None, proj=False)
evoked = epochs.average()
projs = compute_proj_epochs(epochs, n_grad=1, n_mag=1, n_eeg=0, n_jobs=1)
write_proj(op.join(tempdir, 'test-proj.fif.gz'), projs)
for p_fname in [proj_fname, proj_gz_fname,
op.join(tempdir, 'test-proj.fif.gz')]:
projs2 = read_proj(p_fname)
assert len(projs) == len(projs2)
for p1, p2 in zip(projs, projs2):
assert p1['desc'] == p2['desc']
assert p1['data']['col_names'] == p2['data']['col_names']
assert p1['active'] == p2['active']
# compare with sign invariance
p1_data = p1['data']['data'] * np.sign(p1['data']['data'][0, 0])
p2_data = p2['data']['data'] * np.sign(p2['data']['data'][0, 0])
if bad_ch in p1['data']['col_names']:
bad = p1['data']['col_names'].index('MEG 2443')
mask = np.ones(p1_data.size, dtype=bool)
mask[bad] = False
p1_data = p1_data[:, mask]
p2_data = p2_data[:, mask]
corr = np.corrcoef(p1_data, p2_data)[0, 1]
assert_array_almost_equal(corr, 1.0, 5)
if p2['explained_var']:
assert_array_almost_equal(p1['explained_var'],
p2['explained_var'])
# test that you can compute the projection matrix
projs = activate_proj(projs)
proj, nproj, U = make_projector(projs, epochs.ch_names, bads=[])
assert nproj == 2
assert U.shape[1] == 2
# test that you can save them
epochs.info['projs'] += projs
evoked = epochs.average()
evoked.save(op.join(tempdir, 'foo-ave.fif'))
projs = read_proj(proj_fname)
projs_evoked = compute_proj_evoked(evoked, n_grad=1, n_mag=1, n_eeg=0)
assert len(projs_evoked) == 2
# XXX : test something
# test parallelization
projs = compute_proj_epochs(epochs, n_grad=1, n_mag=1, n_eeg=0, n_jobs=1,
desc_prefix='foobar')
assert all('foobar' in x['desc'] for x in projs)
projs = activate_proj(projs)
proj_par, _, _ = make_projector(projs, epochs.ch_names, bads=[])
assert_allclose(proj, proj_par, rtol=1e-8, atol=1e-16)
# test warnings on bad filenames
proj_badname = op.join(tempdir, 'test-bad-name.fif.gz')
with pytest.warns(RuntimeWarning, match='-proj.fif'):
write_proj(proj_badname, projs)
with pytest.warns(RuntimeWarning, match='-proj.fif'):
read_proj(proj_badname)
# bad inputs
fname = op.join(tempdir, 'out-proj.fif')
with pytest.raises(TypeError, match='projs'):
write_proj(fname, 'foo')
with pytest.raises(TypeError, match=r'projs\[0\] must be .*'):
write_proj(fname, ['foo'])
@pytest.mark.slowtest
def test_compute_proj_raw(tmpdir):
"""Test SSP computation on raw."""
tempdir = str(tmpdir)
# Test that the raw projectors work
raw_time = 2.5 # Do shorter amount for speed
raw = read_raw_fif(raw_fname).crop(0, raw_time)
raw.load_data()
for ii in (0.25, 0.5, 1, 2):
with pytest.warns(RuntimeWarning, match='Too few samples'):
projs = compute_proj_raw(raw, duration=ii - 0.1, stop=raw_time,
n_grad=1, n_mag=1, n_eeg=0)
# test that you can compute the projection matrix
projs = activate_proj(projs)
proj, nproj, U = make_projector(projs, raw.ch_names, bads=[])
assert nproj == 2
assert U.shape[1] == 2
# test that you can save them
raw.info['projs'] += projs
raw.save(op.join(tempdir, 'foo_%d_raw.fif' % ii), overwrite=True)
# Test that purely continuous (no duration) raw projection works
with pytest.warns(RuntimeWarning, match='Too few samples'):
projs = compute_proj_raw(raw, duration=None, stop=raw_time,
n_grad=1, n_mag=1, n_eeg=0)
# test that you can compute the projection matrix
projs = activate_proj(projs)
proj, nproj, U = make_projector(projs, raw.ch_names, bads=[])
assert nproj == 2
assert U.shape[1] == 2
# test that you can save them
raw.info['projs'] += projs
raw.save(op.join(tempdir, 'foo_rawproj_continuous_raw.fif'))
# test resampled-data projector, upsampling instead of downsampling
# here to save an extra filtering (raw would have to be LP'ed to be equiv)
raw_resamp = cp.deepcopy(raw)
raw_resamp.resample(raw.info['sfreq'] * 2, n_jobs=2, npad='auto')
projs = compute_proj_raw(raw_resamp, duration=None, stop=raw_time,
n_grad=1, n_mag=1, n_eeg=0)
projs = activate_proj(projs)
proj_new, _, _ = make_projector(projs, raw.ch_names, bads=[])
assert_array_almost_equal(proj_new, proj, 4)
# test with bads
raw.load_bad_channels(bads_fname) # adds 2 bad mag channels
with pytest.warns(RuntimeWarning, match='Too few samples'):
projs = compute_proj_raw(raw, n_grad=0, n_mag=0, n_eeg=1)
assert len(projs) == 1
# test that bad channels can be excluded, and empty support
for projs_ in (projs, []):
proj, nproj, U = make_projector(projs_, raw.ch_names,
bads=raw.ch_names)
assert_array_almost_equal(proj, np.eye(len(raw.ch_names)))
assert nproj == 0 # all channels excluded
assert U.shape == (len(raw.ch_names), nproj)
@pytest.mark.parametrize('duration', [1, np.pi / 2.])
@pytest.mark.parametrize('sfreq', [600.614990234375, 1000.])
def test_proj_raw_duration(duration, sfreq):
"""Test equivalence of `duration` options."""
n_ch, n_dim = 30, 3
rng = np.random.RandomState(0)
signals = rng.randn(n_dim, 10000)
mixing = rng.randn(n_ch, n_dim) + [0, 1, 2]
data = np.dot(mixing, signals)
raw = RawArray(data, create_info(n_ch, sfreq, 'eeg'))
raw.set_eeg_reference(projection=True)
n_eff = int(round(raw.info['sfreq'] * duration))
# crop to an even "duration" number of epochs
stop = ((len(raw.times) // n_eff) * n_eff - 1) / raw.info['sfreq']
raw.crop(0, stop)
proj_def = compute_proj_raw(raw, n_eeg=n_dim)
proj_dur = compute_proj_raw(raw, duration=duration, n_eeg=n_dim)
proj_none = compute_proj_raw(raw, duration=None, n_eeg=n_dim)
assert len(proj_dur) == len(proj_none) == len(proj_def) == n_dim
# proj_def is not in here because it does not necessarily evenly divide
# the signal length:
for pu, pn in zip(proj_dur, proj_none):
assert_allclose(pu['data']['data'], pn['data']['data'])
# but we can test it here since it should still be a small subspace angle:
for proj in (proj_dur, proj_none, proj_def):
computed = np.concatenate([p['data']['data'] for p in proj], 0)
angle = np.rad2deg(linalg.subspace_angles(computed.T, mixing)[0])
assert angle < 1e-5
def test_make_eeg_average_ref_proj():
"""Test EEG average reference projection."""
raw = read_raw_fif(raw_fname, preload=True)
eeg = pick_types(raw.info, meg=False, eeg=True)
# No average EEG reference
assert not np.all(raw._data[eeg].mean(axis=0) < 1e-19)
# Apply average EEG reference
car = make_eeg_average_ref_proj(raw.info)
reref = raw.copy()
reref.add_proj(car)
reref.apply_proj()
assert_array_almost_equal(reref._data[eeg].mean(axis=0), 0, decimal=19)
# Error when custom reference has already been applied
raw.info['custom_ref_applied'] = True
pytest.raises(RuntimeError, make_eeg_average_ref_proj, raw.info)
# test that an average EEG ref is not added when doing proj
raw.set_eeg_reference(projection=True)
assert _has_eeg_average_ref_proj(raw.info['projs'])
raw.del_proj(idx=-1)
assert not _has_eeg_average_ref_proj(raw.info['projs'])
raw.apply_proj()
assert not _has_eeg_average_ref_proj(raw.info['projs'])
def test_has_eeg_average_ref_proj():
"""Test checking whether an EEG average reference exists."""
assert not _has_eeg_average_ref_proj([])
raw = read_raw_fif(raw_fname)
raw.set_eeg_reference(projection=True)
assert _has_eeg_average_ref_proj(raw.info['projs'])
def test_needs_eeg_average_ref_proj():
"""Test checking whether a recording needs an EEG average reference."""
raw = read_raw_fif(raw_fname)
assert _needs_eeg_average_ref_proj(raw.info)
raw.set_eeg_reference(projection=True)
assert not _needs_eeg_average_ref_proj(raw.info)
# No EEG channels
raw = read_raw_fif(raw_fname, preload=True)
eeg = [raw.ch_names[c] for c in pick_types(raw.info, meg=False, eeg=True)]
raw.drop_channels(eeg)
assert not _needs_eeg_average_ref_proj(raw.info)
# Custom ref flag set
raw = read_raw_fif(raw_fname)
raw.info['custom_ref_applied'] = True
assert not _needs_eeg_average_ref_proj(raw.info)
def test_sss_proj():
"""Test `meg` proj option."""
raw = read_raw_fif(raw_fname)
raw.crop(0, 1.0).load_data().pick_types(meg=True, exclude=())
raw.pick_channels(raw.ch_names[:51]).del_proj()
raw_sss = maxwell_filter(raw, int_order=5, ext_order=2)
sss_rank = 21 # really low due to channel picking
assert len(raw_sss.info['projs']) == 0
for meg, n_proj, want_rank in (('separate', 6, sss_rank),
('combined', 3, sss_rank - 3)):
proj = compute_proj_raw(raw_sss, n_grad=3, n_mag=3, meg=meg,
verbose='error')
this_raw = raw_sss.copy().add_proj(proj).apply_proj()
assert len(this_raw.info['projs']) == n_proj
sss_proj_rank = _compute_rank_int(this_raw)
cov = compute_raw_covariance(this_raw, verbose='error')
W, ch_names, rank = compute_whitener(cov, this_raw.info,
return_rank=True)
assert ch_names == this_raw.ch_names
assert want_rank == sss_proj_rank == rank # proper reduction
if meg == 'combined':
assert this_raw.info['projs'][0]['data']['col_names'] == ch_names
else:
mag_names = ch_names[2::3]
assert this_raw.info['projs'][3]['data']['col_names'] == mag_names
run_tests_if_main()
|
from datetime import timedelta
import lupupy.constants as CONST
from homeassistant.components.binary_sensor import DEVICE_CLASSES, BinarySensorEntity
from . import DOMAIN as LUPUSEC_DOMAIN, LupusecDevice
SCAN_INTERVAL = timedelta(seconds=2)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a sensor for an Lupusec device."""
if discovery_info is None:
return
data = hass.data[LUPUSEC_DOMAIN]
device_types = [CONST.TYPE_OPENING]
devices = []
for device in data.lupusec.get_devices(generic_type=device_types):
devices.append(LupusecBinarySensor(data, device))
add_entities(devices)
class LupusecBinarySensor(LupusecDevice, BinarySensorEntity):
"""A binary sensor implementation for Lupusec device."""
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._device.is_on
@property
def device_class(self):
"""Return the class of the binary sensor."""
if self._device.generic_type not in DEVICE_CLASSES:
return None
return self._device.generic_type
|
from datetime import datetime, timedelta
import json
from homeassistant.components.brother.const import UNIT_PAGES
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_TIMESTAMP,
PERCENTAGE,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import UTC, utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed, load_fixture
from tests.components.brother import init_integration
ATTR_REMAINING_PAGES = "remaining_pages"
ATTR_COUNTER = "counter"
async def test_sensors(hass):
"""Test states of the sensors."""
test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=UTC)
with patch(
"homeassistant.components.brother.sensor.utcnow", return_value=test_time
):
await init_integration(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("sensor.hl_l2340dw_status")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer"
assert state.state == "waiting"
entry = registry.async_get("sensor.hl_l2340dw_status")
assert entry
assert entry.unique_id == "0123456789_status"
state = hass.states.get("sensor.hl_l2340dw_black_toner_remaining")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "75"
entry = registry.async_get("sensor.hl_l2340dw_black_toner_remaining")
assert entry
assert entry.unique_id == "0123456789_black_toner_remaining"
state = hass.states.get("sensor.hl_l2340dw_cyan_toner_remaining")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "10"
entry = registry.async_get("sensor.hl_l2340dw_cyan_toner_remaining")
assert entry
assert entry.unique_id == "0123456789_cyan_toner_remaining"
state = hass.states.get("sensor.hl_l2340dw_magenta_toner_remaining")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "8"
entry = registry.async_get("sensor.hl_l2340dw_magenta_toner_remaining")
assert entry
assert entry.unique_id == "0123456789_magenta_toner_remaining"
state = hass.states.get("sensor.hl_l2340dw_yellow_toner_remaining")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d-nozzle"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "2"
entry = registry.async_get("sensor.hl_l2340dw_yellow_toner_remaining")
assert entry
assert entry.unique_id == "0123456789_yellow_toner_remaining"
state = hass.states.get("sensor.hl_l2340dw_drum_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut"
assert state.attributes.get(ATTR_REMAINING_PAGES) == 11014
assert state.attributes.get(ATTR_COUNTER) == 986
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "92"
entry = registry.async_get("sensor.hl_l2340dw_drum_remaining_life")
assert entry
assert entry.unique_id == "0123456789_drum_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_black_drum_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut"
assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389
assert state.attributes.get(ATTR_COUNTER) == 1611
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "92"
entry = registry.async_get("sensor.hl_l2340dw_black_drum_remaining_life")
assert entry
assert entry.unique_id == "0123456789_black_drum_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_cyan_drum_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut"
assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389
assert state.attributes.get(ATTR_COUNTER) == 1611
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "92"
entry = registry.async_get("sensor.hl_l2340dw_cyan_drum_remaining_life")
assert entry
assert entry.unique_id == "0123456789_cyan_drum_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_magenta_drum_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut"
assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389
assert state.attributes.get(ATTR_COUNTER) == 1611
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "92"
entry = registry.async_get("sensor.hl_l2340dw_magenta_drum_remaining_life")
assert entry
assert entry.unique_id == "0123456789_magenta_drum_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_yellow_drum_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:chart-donut"
assert state.attributes.get(ATTR_REMAINING_PAGES) == 16389
assert state.attributes.get(ATTR_COUNTER) == 1611
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "92"
entry = registry.async_get("sensor.hl_l2340dw_yellow_drum_remaining_life")
assert entry
assert entry.unique_id == "0123456789_yellow_drum_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_fuser_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "97"
entry = registry.async_get("sensor.hl_l2340dw_fuser_remaining_life")
assert entry
assert entry.unique_id == "0123456789_fuser_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_belt_unit_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:current-ac"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "97"
entry = registry.async_get("sensor.hl_l2340dw_belt_unit_remaining_life")
assert entry
assert entry.unique_id == "0123456789_belt_unit_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_pf_kit_1_remaining_life")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer-3d"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.state == "98"
entry = registry.async_get("sensor.hl_l2340dw_pf_kit_1_remaining_life")
assert entry
assert entry.unique_id == "0123456789_pf_kit_1_remaining_life"
state = hass.states.get("sensor.hl_l2340dw_page_counter")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES
assert state.state == "986"
entry = registry.async_get("sensor.hl_l2340dw_page_counter")
assert entry
assert entry.unique_id == "0123456789_page_counter"
state = hass.states.get("sensor.hl_l2340dw_duplex_unit_pages_counter")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES
assert state.state == "538"
entry = registry.async_get("sensor.hl_l2340dw_duplex_unit_pages_counter")
assert entry
assert entry.unique_id == "0123456789_duplex_unit_pages_counter"
state = hass.states.get("sensor.hl_l2340dw_b_w_counter")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES
assert state.state == "709"
entry = registry.async_get("sensor.hl_l2340dw_b_w_counter")
assert entry
assert entry.unique_id == "0123456789_b/w_counter"
state = hass.states.get("sensor.hl_l2340dw_color_counter")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:file-document-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UNIT_PAGES
assert state.state == "902"
entry = registry.async_get("sensor.hl_l2340dw_color_counter")
assert entry
assert entry.unique_id == "0123456789_color_counter"
state = hass.states.get("sensor.hl_l2340dw_uptime")
assert state
assert state.attributes.get(ATTR_ICON) is None
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TIMESTAMP
assert state.state == "2019-09-24T12:14:56+00:00"
entry = registry.async_get("sensor.hl_l2340dw_uptime")
assert entry
assert entry.unique_id == "0123456789_uptime"
async def test_availability(hass):
"""Ensure that we mark the entities unavailable correctly when device is offline."""
await init_integration(hass)
state = hass.states.get("sensor.hl_l2340dw_status")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "waiting"
future = utcnow() + timedelta(minutes=5)
with patch("brother.Brother._get_data", side_effect=ConnectionError()):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("sensor.hl_l2340dw_status")
assert state
assert state.state == STATE_UNAVAILABLE
future = utcnow() + timedelta(minutes=10)
with patch(
"brother.Brother._get_data",
return_value=json.loads(load_fixture("brother_printer_data.json")),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("sensor.hl_l2340dw_status")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "waiting"
async def test_manual_update_entity(hass):
"""Test manual update entity via service homeasasistant/update_entity."""
await init_integration(hass)
await async_setup_component(hass, "homeassistant", {})
with patch("homeassistant.components.brother.Brother.async_update") as mock_update:
await hass.services.async_call(
"homeassistant",
"update_entity",
{ATTR_ENTITY_ID: ["sensor.hl_l2340dw_status"]},
blocking=True,
)
assert len(mock_update.mock_calls) == 1
|
import re
from datetime import datetime
from functools import lru_cache, reduce
from dateutil.parser import ParserError, parse
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext as _
from jellyfish import damerau_levenshtein_distance
from pyparsing import (
CaselessKeyword,
Optional,
Regex,
Word,
infixNotation,
oneOf,
opAssoc,
)
from weblate.checks.parser import RawQuotedString
from weblate.trans.util import PLURAL_SEPARATOR
from weblate.utils.state import (
STATE_APPROVED,
STATE_FUZZY,
STATE_NAMES,
STATE_READONLY,
STATE_TRANSLATED,
)
class Comparer:
"""String comparer abstraction.
    This abstraction exists so that the implementation can be swapped easily.
"""
def similarity(self, first, second):
"""Returns string similarity in range 0 - 100%."""
try:
distance = damerau_levenshtein_distance(first, second)
return int(
100 * (1.0 - (float(distance) / max(len(first), len(second), 1)))
)
except MemoryError:
            # Strings too long to compare; treat them as only moderately similar
return 50
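# For illustration of the formula above: "hello" vs "hallo" has a
# Damerau-Levenshtein distance of 1 and a longest length of 5, so
# similarity == int(100 * (1 - 1 / 5)) == 80.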
# Field type definitions
PLAIN_FIELDS = ("source", "target", "context", "note", "location")
NONTEXT_FIELDS = {
"priority": "priority",
"state": "state",
"pending": "pending",
"changed": "change__timestamp",
"change_time": "change__timestamp",
"added": "timestamp",
"change_action": "change__action",
}
STRING_FIELD_MAP = {
"suggestion": "suggestion__target",
"comment": "comment__comment",
"key": "context",
}
EXACT_FIELD_MAP = {
"check": "check__check",
"dismissed_check": "check__check",
"language": "translation__language__code",
"component": "translation__component__slug",
"project": "translation__component__project__slug",
"changed_by": "change__author__username",
"suggestion_author": "suggestion__user__username",
"comment_author": "comment__user__username",
"label": "source_unit__labels__name",
}
OPERATOR_MAP = {
":": "substring",
":=": "iexact",
":<": "lt",
":<=": "lte",
":>": "gt",
":>=": "gte",
}
# Parsing grammar
AND = CaselessKeyword("AND")
OR = Optional(CaselessKeyword("OR"))
NOT = CaselessKeyword("NOT")
# Search operator
OPERATOR = oneOf(OPERATOR_MAP.keys())
# Field name, explicitly exclude URL-like patterns
FIELD = Regex(r"""(?!http|ftp|https|mailto)[a-zA-Z_]+""")
# Match token
WORD = Regex(r"""[^ \(\)]([^ '"]*[^ '"\)])?""")
DATE = Word("0123456789:.-T")
# Date range
RANGE = "[" + DATE + "to" + DATE + "]"
# Match value
REGEX_STRING = "r" + RawQuotedString('"')
STRING = REGEX_STRING | RawQuotedString("'") | RawQuotedString('"') | WORD
# Single term, either field specific or not
TERM = (FIELD + OPERATOR + (RANGE | STRING)) | STRING
# Multi term with or without operator
QUERY = Optional(
infixNotation(
TERM,
[
(
NOT,
1,
opAssoc.RIGHT,
),
(
AND,
2,
opAssoc.LEFT,
),
(
OR,
2,
opAssoc.LEFT,
),
],
)
)
# Helper parsing objects
class RegexExpr:
def __init__(self, tokens):
self.expr = tokens[1]
REGEX_STRING.addParseAction(RegexExpr)
class RangeExpr:
def __init__(self, tokens):
self.start = tokens[1]
self.end = tokens[3]
RANGE.addParseAction(RangeExpr)
class TermExpr:
def __init__(self, tokens):
if len(tokens) == 1:
self.field = None
self.operator = ":"
self.match = tokens[0]
else:
self.field, self.operator, self.match = tokens
self.fixup()
def __repr__(self):
return f"<TermExpr: '{self.field}', '{self.operator}', '{self.match}'>"
def fixup(self):
# Avoid unwanted lt/gt searches on plain text fields
if self.field in PLAIN_FIELDS and self.operator not in (":", ":="):
self.match = self.operator[1:] + self.match
self.operator = ":"
def is_field(self, text):
if text in ("read-only", "readonly"):
return Q(state=STATE_READONLY)
if text == "approved":
return Q(state=STATE_APPROVED)
if text in ("fuzzy", "needs-editing"):
return Q(state=STATE_FUZZY)
if text == "translated":
return Q(state__gte=STATE_TRANSLATED)
if text == "untranslated":
return Q(state__lt=STATE_TRANSLATED)
if text == "pending":
return Q(pending=True)
raise ValueError(f"Unsupported is lookup: {text}")
def has_field(self, text):
if text == "plural":
return Q(source__contains=PLURAL_SEPARATOR)
if text == "suggestion":
return Q(suggestion__isnull=False)
if text == "comment":
return Q(comment__resolved=False)
if text in ("resolved-comment", "resolved_comment"):
return Q(comment__resolved=True)
if text in ("check", "failing-check", "failing_check"):
return Q(check__dismissed=False)
if text in (
"dismissed-check",
"dismissed_check",
"ignored-check",
"ignored_check",
):
return Q(check__dismissed=True)
if text == "translation":
return Q(state__gte=STATE_TRANSLATED)
if text in ("variant", "shaping"):
return Q(variant__isnull=False)
if text == "label":
return Q(source_unit__labels__isnull=False)
if text == "context":
return ~Q(context="")
if text == "screenshot":
return Q(screenshots__isnull=False) | Q(
source_unit__screenshots__isnull=False
)
if text == "flags":
return ~Q(source_unit__extra_flags="")
raise ValueError(f"Unsupported has lookup: {text}")
def field_extra(self, field, query, match):
from weblate.trans.models import Change
if field in {"changed", "changed_by"}:
return query & Q(change__action__in=Change.ACTIONS_CONTENT)
if field == "check":
return query & Q(check__dismissed=False)
if field == "dismissed_check":
return query & Q(check__dismissed=True)
return query
def convert_state(self, text):
if text is None:
return None
if text.isdigit():
return int(text)
try:
return STATE_NAMES[text]
except KeyError:
raise ValueError(_("Unsupported state: {}").format(text))
def convert_bool(self, text):
ltext = text.lower()
if ltext in ("yes", "true", "on", "1"):
return True
if ltext in ("no", "false", "off", "0"):
return False
raise ValueError(f"Invalid boolean value: {text}")
def convert_pending(self, text):
return self.convert_bool(text)
def convert_int(self, text):
return int(text)
def convert_priority(self, text):
return self.convert_int(text)
def convert_datetime(self, text, hour=5, minute=55, second=55, microsecond=0):
if isinstance(text, RangeExpr):
return (
self.convert_datetime(
text.start, hour=0, minute=0, second=0, microsecond=0
),
self.convert_datetime(
text.end, hour=23, minute=59, second=59, microsecond=999999
),
)
if text.isdigit() and len(text) == 4:
year = int(text)
tzinfo = timezone.get_current_timezone()
return (
datetime(
year=year,
month=1,
day=1,
hour=0,
minute=0,
second=0,
microsecond=0,
tzinfo=tzinfo,
),
datetime(
year=year,
month=12,
day=31,
hour=23,
minute=59,
second=59,
microsecond=999999,
tzinfo=tzinfo,
),
)
try:
            # Here we inject a 5:55:55 sentinel time; if it survives parsing
            # unchanged, we assume no time was given in the query and expand
            # the value to a whole-day range below.
result = parse(
text,
default=timezone.now().replace(
hour=hour, minute=minute, second=second, microsecond=microsecond
),
)
except ParserError as error:
raise ValueError(_("Invalid timestamp: {}").format(error))
if result.hour == 5 and result.minute == 55 and result.second == 55:
return (
result.replace(hour=0, minute=0, second=0, microsecond=0),
result.replace(hour=23, minute=59, second=59, microsecond=999999),
)
return result
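    # For illustration: "2019" expands to the whole year, "2019-03-01" (no time
    # given, so the injected 5:55:55 sentinel survives) expands to that whole
    # day, while "2019-03-01 10:00" is treated as a single point in time.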
def convert_change_action(self, text):
from weblate.trans.models import Change
try:
return Change.ACTION_NAMES[text]
except KeyError:
return Change.ACTION_STRINGS[text]
def convert_change_time(self, text):
return self.convert_datetime(text)
def convert_changed(self, text):
return self.convert_datetime(text)
def convert_added(self, text):
return self.convert_datetime(text)
def field_name(self, field, suffix=None):
if suffix is None:
suffix = OPERATOR_MAP[self.operator]
if field in PLAIN_FIELDS:
return f"{field}__{suffix}"
if field in STRING_FIELD_MAP:
return "{}__{}".format(STRING_FIELD_MAP[field], suffix)
if field in EXACT_FIELD_MAP:
            # Change substring lookup to iexact, leave others (for example regex) untouched
if suffix == "substring":
suffix = "iexact"
return "{}__{}".format(EXACT_FIELD_MAP[field], suffix)
if field in NONTEXT_FIELDS:
if suffix not in ("substring", "iexact"):
return "{}__{}".format(NONTEXT_FIELDS[field], suffix)
return NONTEXT_FIELDS[field]
raise ValueError(f"Unsupported field: {field}")
def as_sql(self):
field = self.field
match = self.match
# Simple term based search
if not field:
return (
Q(source__substring=self.match)
| Q(target__substring=self.match)
| Q(context__substring=self.match)
)
# Field specific code
field_method = getattr(self, f"{field}_field", None)
if field_method is not None:
return field_method(match)
# Field conversion
convert_method = getattr(self, f"convert_{field}", None)
if convert_method is not None:
match = convert_method(match)
if isinstance(match, RegexExpr):
            # Regular expression
try:
re.compile(match.expr)
except re.error as error:
raise ValueError(_("Invalid regular expression: {}").format(error))
return Q(**{self.field_name(field, "regex"): match.expr})
if isinstance(match, tuple):
start, end = match
# Ranges
if self.operator in (":", ":="):
query = Q(
**{
self.field_name(field, "gte"): start,
self.field_name(field, "lte"): end,
}
)
elif self.operator in (":>", ":>="):
query = Q(**{self.field_name(field, "gte"): start})
else:
query = Q(**{self.field_name(field, "lte"): end})
else:
# Generic query
query = Q(**{self.field_name(field): match})
return self.field_extra(field, query, match)
TERM.addParseAction(TermExpr)
def parser_to_query(obj):
# Simple lookups
if isinstance(obj, TermExpr):
return obj.as_sql()
# Operators
operator = "AND"
expressions = []
for item in obj:
if isinstance(item, str) and item.upper() in ("OR", "AND", "NOT"):
operator = item.upper()
continue
expressions.append(parser_to_query(item))
if not expressions:
return Q()
if operator == "NOT":
return ~expressions[0]
if operator == "AND":
return reduce(lambda x, y: x & y, expressions)
return reduce(lambda x, y: x | y, expressions)
@lru_cache(maxsize=512)
def parse_query(text):
if "\x00" in text:
raise ValueError("Invalid query string.")
return parser_to_query(QUERY.parseString(text, parseAll=True))
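# Illustrative usage sketch (assumes a working Weblate/Django environment,
# which the imports above already require): parse_query turns a search string
# into a Django Q object ready to filter the unit queryset.
if __name__ == "__main__":
    # A bare word searches source, target and context via substring lookups.
    print(parse_query("hello"))
    # Field lookups go through OPERATOR_MAP and the *_FIELD_MAP tables, e.g.
    # language:cs becomes translation__language__code__iexact and
    # state:>=translated becomes state__gte.
    print(parse_query("state:>=translated AND language:cs"))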
|
import functools
import opt_einsum
# pylint: disable=line-too-long
from tensornetwork.network_operations import check_connected, get_all_edges, get_subgraph_dangling
# pylint: disable=line-too-long
from tensornetwork.network_components import get_all_nondangling, contract_parallel, contract_between
from tensornetwork.network_components import Edge, AbstractNode
from tensornetwork.contractors.opt_einsum_paths import utils
from typing import Any, Optional, Sequence, Iterable, Text, Tuple, List
#TODO (martin): add return types of functions back once TensorNetwork is gone
# remove _base_network
# _base_nodes -> base
def base(nodes: Iterable[AbstractNode],
algorithm: utils.Algorithm,
output_edge_order: Optional[Sequence[Edge]] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Base method for all `opt_einsum` contractors.
Args:
nodes: A collection of connected nodes.
algorithm: `opt_einsum` contraction method to use.
output_edge_order: An optional list of edges. Edges of the
final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
`output_edge_order` must be provided.
ignore_edge_order: An option to ignore the output edge
order.
Returns:
Final node after full contraction.
"""
nodes_set = set(nodes)
edges = get_all_edges(nodes_set)
  # output edge order has to be determined before any contraction
  # (edges are refreshed after contractions)
if not ignore_edge_order:
if output_edge_order is None:
output_edge_order = list(get_subgraph_dangling(nodes))
if len(output_edge_order) > 1:
raise ValueError("The final node after contraction has more than "
"one remaining edge. In this case `output_edge_order` "
"has to be provided.")
if set(output_edge_order) != get_subgraph_dangling(nodes):
raise ValueError("output edges are not equal to the remaining "
"non-contracted edges of the final node.")
for edge in edges:
    if not edge.is_disabled:  # if it's disabled we already contracted it
if edge.is_trace():
nodes_set.remove(edge.node1)
nodes_set.add(contract_parallel(edge))
if len(nodes_set) == 1:
# There's nothing to contract.
if ignore_edge_order:
return list(nodes_set)[0]
return list(nodes_set)[0].reorder_edges(output_edge_order)
# Then apply `opt_einsum`'s algorithm
path, nodes = utils.get_path(nodes_set, algorithm)
for a, b in path:
new_node = contract_between(nodes[a], nodes[b], allow_outer_product=True)
nodes.append(new_node)
nodes = utils.multi_remove(nodes, [a, b])
# if the final node has more than one edge,
# output_edge_order has to be specified
final_node = nodes[0] # nodes were connected, we checked this
if not ignore_edge_order:
final_node.reorder_edges(output_edge_order)
return final_node
def optimal(nodes: Iterable[AbstractNode],
output_edge_order: Optional[Sequence[Edge]] = None,
memory_limit: Optional[int] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Optimal contraction order via `opt_einsum`.
This method will find the truly optimal contraction order via
`opt_einsum`'s depth first search algorithm. Since this search is
exhaustive, if your network is large (n>10), then the search may
take longer than just contracting in a suboptimal way.
Args:
nodes: an iterable of Nodes
output_edge_order: An optional list of edges.
Edges of the final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
`output_edge_order` must be provided.
memory_limit: Maximum number of elements in an array during contractions.
ignore_edge_order: An option to ignore the output edge order.
Returns:
The final node after full contraction.
"""
alg = functools.partial(
opt_einsum.paths.dynamic_programming, memory_limit=memory_limit)
return base(nodes, alg, output_edge_order, ignore_edge_order)
def branch(nodes: Iterable[AbstractNode],
output_edge_order: Optional[Sequence[Edge]] = None,
memory_limit: Optional[int] = None,
nbranch: Optional[int] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Branch contraction path via `opt_einsum`.
This method uses the DFS approach of `optimal` while sorting potential
contractions based on a heuristic cost, in order to reduce time spent
in exploring paths which are unlikely to be optimal.
More details on `branching path`_.
.. _branching path:
https://optimized-einsum.readthedocs.io/en/latest/branching_path.html
Args:
nodes: an iterable of Nodes
output_edge_order: An optional list of edges.
Edges of the final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
`output_edge_order` must be provided.
memory_limit: Maximum number of elements in an array during contractions.
nbranch: Number of best contractions to explore.
If None it explores all inner products starting with those that
have the best cost heuristic.
ignore_edge_order: An option to ignore the output edge order.
Returns:
The final node after full contraction.
"""
alg = functools.partial(
opt_einsum.paths.branch, memory_limit=memory_limit, nbranch=nbranch)
return base(nodes, alg, output_edge_order, ignore_edge_order)
def greedy(nodes: Iterable[AbstractNode],
output_edge_order: Optional[Sequence[Edge]] = None,
memory_limit: Optional[int] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Greedy contraction path via `opt_einsum`.
This provides a more efficient strategy than `optimal` for finding
contraction paths in large networks. First contracts pairs of tensors
by finding the pair with the lowest cost at each step. Then it performs
the outer products. More details on `greedy path`_.
.. _greedy path:
https://optimized-einsum.readthedocs.io/en/latest/greedy_path.html
Args:
nodes: an iterable of Nodes
output_edge_order: An optional list of edges.
Edges of the final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
`output_edge_order` must be provided.
memory_limit: Maximum number of elements in an array during contractions.
ignore_edge_order: An option to ignore the output edge order.
Returns:
The final node after full contraction.
"""
alg = functools.partial(opt_einsum.paths.greedy, memory_limit=memory_limit)
return base(nodes, alg, output_edge_order, ignore_edge_order)
# pylint: disable=too-many-return-statements
def auto(nodes: Iterable[AbstractNode],
output_edge_order: Optional[Sequence[Edge]] = None,
memory_limit: Optional[int] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Chooses one of the above algorithms according to network size.
Default behavior is based on `opt_einsum`'s `auto` contractor.
Args:
nodes: A collection of connected nodes.
output_edge_order: An optional list of edges.
Edges of the final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
`output_edge_order` must be provided.
memory_limit: Maximum number of elements in an array during contractions.
ignore_edge_order: An option to ignore the output edge order.
Returns:
Final node after full contraction.
"""
  n = len(list(nodes))  # materialize so len() works on any iterable (pytype workaround)
_nodes = nodes
if n <= 0:
raise ValueError("Cannot contract empty tensor network.")
if n == 1:
if not ignore_edge_order:
if output_edge_order is None:
output_edge_order = list(
(get_all_edges(_nodes) - get_all_nondangling(_nodes)))
if len(output_edge_order) > 1:
raise ValueError(
"The final node after contraction has more than "
"one dangling edge. In this case `output_edge_order` "
"has to be provided.")
edges = get_all_nondangling(_nodes)
if edges:
final_node = contract_parallel(edges.pop())
else:
final_node = list(_nodes)[0]
    if not ignore_edge_order:
      final_node.reorder_edges(output_edge_order)
return final_node
if n < 5:
return optimal(nodes, output_edge_order, memory_limit, ignore_edge_order)
if n < 7:
return branch(
nodes,
output_edge_order=output_edge_order,
memory_limit=memory_limit,
ignore_edge_order=ignore_edge_order)
if n < 9:
return branch(
nodes,
output_edge_order=output_edge_order,
memory_limit=memory_limit,
nbranch=2,
ignore_edge_order=ignore_edge_order)
if n < 15:
return branch(
nodes,
output_edge_order=output_edge_order,
nbranch=1,
ignore_edge_order=ignore_edge_order)
return greedy(nodes, output_edge_order, memory_limit, ignore_edge_order)
def custom(nodes: Iterable[AbstractNode],
optimizer: Any,
output_edge_order: Sequence[Edge] = None,
memory_limit: Optional[int] = None,
ignore_edge_order: bool = False) -> AbstractNode:
"""Uses a custom path optimizer created by the user to calculate paths.
The custom path optimizer should inherit `opt_einsum`'s `PathOptimizer`.
See `custom paths`_.
.. _custom paths:
https://optimized-einsum.readthedocs.io/en/latest/custom_paths.html
Args:
nodes: an iterable of Nodes
output_edge_order: An optional list of edges.
Edges of the final node in `nodes_set`
are reordered into `output_edge_order`;
if final node has more than one edge,
        `output_edge_order` must be provided.
optimizer: A custom `opt_einsum.PathOptimizer` object.
memory_limit: Maximum number of elements in an array during contractions.
ignore_edge_order: An option to ignore the output edge order.
Returns:
Final node after full contraction.
"""
alg = functools.partial(optimizer, memory_limit=memory_limit)
return base(nodes, alg, output_edge_order, ignore_edge_order)
def path_solver(
algorithm: Text,
nodes: Iterable[AbstractNode],
memory_limit: Optional[int] = None,
nbranch: Optional[int] = None
) -> Tuple[List[Tuple[int, int]], List[AbstractNode]]:
"""Calculates the contraction paths using `opt_einsum` methods.
Args:
algorithm: `opt_einsum` method to use for calculating the contraction path.
nodes: an iterable of `AbstractNode` objects to contract.
memory_limit: Maximum number of elements in an array during contractions.
Only relevant for `algorithm in (optimal, greedy)`
nbranch: Number of best contractions to explore.
If None it explores all inner products starting with those that
have the best cost heuristic. Only relevant for `algorithm=branch`.
Returns:
The optimal contraction path as returned by `opt_einsum`.
"""
if algorithm == "optimal":
alg = functools.partial(
opt_einsum.paths.dynamic_programming, memory_limit=memory_limit)
elif algorithm == "branch":
alg = functools.partial(
opt_einsum.paths.branch, memory_limit=memory_limit, nbranch=nbranch)
elif algorithm == "greedy":
alg = functools.partial(opt_einsum.paths.greedy, memory_limit=memory_limit)
elif algorithm == "auto":
    n = len(list(nodes))  # materialize so len() works on any iterable (pytype workaround)
_nodes = nodes
if n <= 1:
return []
    if n < 5:
      alg = functools.partial(
          opt_einsum.paths.dynamic_programming, memory_limit=memory_limit)
    elif n < 7:
      alg = functools.partial(
          opt_einsum.paths.branch, memory_limit=memory_limit, nbranch=None)
    elif n < 9:
      alg = functools.partial(
          opt_einsum.paths.branch, memory_limit=memory_limit, nbranch=2)
    elif n < 15:
      alg = functools.partial(
          opt_einsum.paths.branch, memory_limit=memory_limit, nbranch=1)
    else:
      alg = functools.partial(
          opt_einsum.paths.greedy, memory_limit=memory_limit)
else:
raise ValueError("algorithm {algorithm} not implemented")
path, _ = utils.get_path(nodes, alg)
return path
def contract_path(path: Tuple[List[Tuple[int, int]]],
nodes: Iterable[AbstractNode],
output_edge_order: Sequence[Edge]) -> AbstractNode:
"""Contract `nodes` using `path`.
Args:
path: The contraction path as returned from `path_solver`.
nodes: A collection of connected nodes.
output_edge_order: A list of edges. Edges of the
final node in `nodes`
are reordered into `output_edge_order`;
Returns:
Final node after full contraction.
"""
if len(path) == 0:
return nodes
for a, b in path:
new_node = contract_between(nodes[a], nodes[b], allow_outer_product=True)
nodes.append(new_node)
nodes = utils.multi_remove(nodes, [a, b])
# if the final node has more than one edge,
# output_edge_order has to be specified
final_node = nodes[0] # nodes were connected, we checked this
final_node.reorder_edges(output_edge_order)
return final_node
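# Illustrative usage sketch (not exercised by the library itself): build a tiny
# two-node network and contract it, once directly via `greedy` and once by
# splitting path finding (`path_solver`) from contraction (`contract_path`).
# `auto` would pick one of the algorithms above based on the network size.
if __name__ == "__main__":
  import numpy as np
  from tensornetwork import Node
  a = Node(np.random.rand(2, 3))
  b = Node(np.random.rand(3, 4))
  a[1] ^ b[0]  # connect the shared edge
  result = greedy([a, b], output_edge_order=[a[0], b[1]])
  print(result.tensor.shape)  # (2, 4)
  c = Node(np.random.rand(2, 3))
  d = Node(np.random.rand(3, 4))
  c[1] ^ d[0]
  path = path_solver("greedy", [c, d])
  final = contract_path(path, [c, d], output_edge_order=[c[0], d[1]])
  print(final.tensor.shape)  # (2, 4)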
|
import functools
import logging
import os
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import cassandra
from perfkitbenchmarker.linux_packages import ycsb
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'cassandra_ycsb'
BENCHMARK_CONFIG = """
cassandra_ycsb:
description: >
Run YCSB against Cassandra. Specify the
Cassandra cluster size with --num_vms. Specify the number
of YCSB VMs with --ycsb_client_vms.
vm_groups:
workers:
vm_spec: *default_single_core
disk_spec: *default_500_gb
clients:
vm_spec: *default_single_core
"""
# TODO: Add flags.
REPLICATION_FACTOR = 3
WRITE_CONSISTENCY = 'QUORUM'
READ_CONSISTENCY = 'QUORUM'
KEYSPACE_NAME = 'usertable'
COLUMN_FAMILY = 'data'
CREATE_TABLE_SCRIPT = 'cassandra/create-ycsb-table.cql.j2'
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
config['vm_groups']['workers']['vm_count'] = FLAGS.num_vms if FLAGS[
'num_vms'].present else 3
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['clients']['vm_count'] = FLAGS.ycsb_client_vms
return config
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
cassandra.CheckPrerequisites()
ycsb.CheckPrerequisites()
data.ResourcePath(CREATE_TABLE_SCRIPT)
def _InstallCassandra(vm, seed_vms):
"""Install and start Cassandra on 'vm'."""
vm.Install('cassandra')
cassandra.Configure(vm, seed_vms=seed_vms)
def _CreateYCSBTable(vm, keyspace=KEYSPACE_NAME, column_family=COLUMN_FAMILY,
replication_factor=REPLICATION_FACTOR):
"""Creates a Cassandra table for use with YCSB."""
template_path = data.ResourcePath(CREATE_TABLE_SCRIPT)
remote_path = os.path.join(
cassandra.CASSANDRA_DIR,
os.path.basename(os.path.splitext(template_path)[0]))
vm.RenderTemplate(template_path, remote_path,
context={'keyspace': keyspace,
'column_family': column_family,
'replication_factor': replication_factor})
cassandra_cli = cassandra.GetCassandraCliPath(vm)
command = '{0} -f {1} -h {2}'.format(cassandra_cli, remote_path,
vm.internal_ip)
vm.RemoteCommand(command, should_log=True)
def _GetVMsByRole(benchmark_spec):
"""Gets a dictionary mapping role to a list of VMs."""
cassandra_vms = benchmark_spec.vm_groups['workers']
if FLAGS.ycsb_client_vms:
clients = benchmark_spec.vm_groups['clients']
else:
clients = cassandra_vms
return {'vms': benchmark_spec.vms,
'cassandra_vms': cassandra_vms,
'seed_vm': cassandra_vms[0],
'non_seed_cassandra_vms': cassandra_vms[1:],
'clients': clients}
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run YCSB against Cassandra.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
by_role = _GetVMsByRole(benchmark_spec)
loaders = by_role['clients']
assert loaders, vms
# Cassandra cluster
cassandra_vms = by_role['cassandra_vms']
assert cassandra_vms, 'No Cassandra VMs: {0}'.format(by_role)
seed_vm = by_role['seed_vm']
assert seed_vm, 'No seed VM: {0}'.format(by_role)
cassandra_install_fns = [functools.partial(_InstallCassandra,
vm, seed_vms=[seed_vm])
for vm in cassandra_vms]
ycsb_install_fns = [functools.partial(vm.Install, 'ycsb')
for vm in loaders]
if FLAGS.ycsb_client_vms:
vm_util.RunThreaded(lambda f: f(), cassandra_install_fns + ycsb_install_fns)
else:
    # If the server and client share a VM, prepare the packages one by one to
    # avoid a race condition.
vm_util.RunThreaded(lambda f: f(), cassandra_install_fns)
vm_util.RunThreaded(lambda f: f(), ycsb_install_fns)
cassandra.StartCluster(seed_vm, by_role['non_seed_cassandra_vms'])
_CreateYCSBTable(
seed_vm, replication_factor=FLAGS.cassandra_replication_factor)
benchmark_spec.executor = ycsb.YCSBExecutor(
'cassandra2-cql',
hosts=','.join(vm.internal_ip for vm in cassandra_vms))
def Run(benchmark_spec):
"""Spawn YCSB and gather the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample instances.
"""
loaders = _GetVMsByRole(benchmark_spec)['clients']
cassandra_vms = _GetVMsByRole(benchmark_spec)['cassandra_vms']
logging.debug('Loaders: %s', loaders)
kwargs = {'hosts': ','.join(vm.internal_ip for vm in cassandra_vms),
'columnfamily': COLUMN_FAMILY,
'cassandra.readconsistencylevel': READ_CONSISTENCY,
'cassandra.scanconsistencylevel': READ_CONSISTENCY,
'cassandra.writeconsistencylevel': WRITE_CONSISTENCY,
'cassandra.deleteconsistencylevel': WRITE_CONSISTENCY}
metadata = {'ycsb_client_vms': FLAGS.ycsb_client_vms,
'num_vms': len(cassandra_vms),
'concurrent_reads': FLAGS.cassandra_concurrent_reads,
'replication_factor': FLAGS.cassandra_replication_factor}
if not FLAGS.ycsb_client_vms:
metadata['ycsb_client_on_server'] = True
samples = list(benchmark_spec.executor.LoadAndRun(
loaders, load_kwargs=kwargs, run_kwargs=kwargs))
for sample in samples:
sample.metadata.update(metadata)
return samples
def Cleanup(benchmark_spec):
"""Cleanup.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
cassandra_vms = _GetVMsByRole(benchmark_spec)['cassandra_vms']
vm_util.RunThreaded(cassandra.Stop, cassandra_vms)
vm_util.RunThreaded(cassandra.CleanNode, cassandra_vms)
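# Example invocation (illustrative; --benchmarks is PerfKitBenchmarker's
# standard selector, the other flags are referenced above):
#   ./pkb.py --benchmarks=cassandra_ycsb --num_vms=3 --ycsb_client_vms=2
# This provisions a 3-node Cassandra cluster plus 2 dedicated YCSB loaders;
# without --ycsb_client_vms the loaders run on the Cassandra VMs themselves.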
|
import asyncio
import os
import pytest
from redbot import _update_event_loop_policy
from redbot.core import drivers, data_manager
_update_event_loop_policy()
@pytest.fixture(scope="session")
def event_loop(request):
"""Create an instance of the default event loop for entire session."""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
yield loop
asyncio.set_event_loop(None)
loop.close()
def _get_backend_type():
if os.getenv("RED_STORAGE_TYPE") == "postgres":
return drivers.BackendType.POSTGRES
else:
return drivers.BackendType.JSON
@pytest.fixture(scope="session", autouse=True)
async def _setup_driver():
backend_type = _get_backend_type()
storage_details = {}
data_manager.storage_type = lambda: backend_type.value
data_manager.storage_details = lambda: storage_details
driver_cls = drivers.get_driver_class(backend_type)
await driver_cls.initialize(**storage_details)
yield
await driver_cls.teardown()
|
import numpy as np
import spacy
from sklearn.linear_model import LogisticRegression
from scattertext import SampleCorpora, produce_scattertext_explorer
from scattertext.CorpusFromPandas import CorpusFromPandas
from scattertext.Scalers import scale
nlp = spacy.load('en')
convention_df = SampleCorpora.ConventionData2012.get_data()
corpus = CorpusFromPandas(convention_df,
category_col='party',
text_col='text',
nlp=nlp).build()
term_freq_df = corpus.get_term_freq_df()
def zero_centered_scale(ar):
ar[ar > 0] = scale(ar[ar > 0])
ar[ar < 0] = -scale(-ar[ar < 0])
return (ar + 1) / 2.
frequencies_scaled = scale(np.log(term_freq_df.sum(axis=1).values))
scores = corpus.get_logreg_coefs('democrat',
LogisticRegression(penalty='l2', C=10, max_iter=10000, n_jobs=-1))
scores_scaled = zero_centered_scale(scores)
html = produce_scattertext_explorer(corpus,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
minimum_term_frequency=5,
pmi_threshold_coefficient=4,
width_in_pixels=1000,
x_coords=frequencies_scaled,
y_coords=scores_scaled,
scores=scores,
sort_by_dist=False,
metadata=convention_df['speaker'],
x_label='Log frequency',
y_label='L2-penalized logistic regression coef')
fn = 'demo_custom_coordinates.html'
open(fn, 'wb').write(html.encode('utf-8'))
print('Open %s in Chrome or Firefox.' % fn)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.linux_packages import memcached_server
from perfkitbenchmarker.linux_packages import memtier
FLAGS = flags.FLAGS
flags.DEFINE_string('memcached_memtier_client_machine_type', None,
'Machine type to use for the memtier client if different '
'from memcached server machine type.')
flags.DEFINE_string('memcached_memtier_server_machine_type', None,
'Machine type to use for the memtier server if different '
'from memcached client machine type.')
BENCHMARK_NAME = 'memcached_memtier'
BENCHMARK_CONFIG = """
memcached_memtier:
description: Run memtier against a memcached installation.
vm_groups:
server:
vm_spec: *default_single_core
vm_count: 1
client:
vm_spec: *default_dual_core
vm_count: 1
"""
def GetConfig(user_config):
"""Load and return benchmark config.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS.memcached_memtier_client_machine_type:
vm_spec = config['vm_groups']['client']['vm_spec']
for cloud in vm_spec:
vm_spec[cloud]['machine_type'] = (
FLAGS.memcached_memtier_client_machine_type)
if FLAGS.memcached_memtier_server_machine_type:
vm_spec = config['vm_groups']['server']['vm_spec']
for cloud in vm_spec:
vm_spec[cloud]['machine_type'] = (
FLAGS.memcached_memtier_server_machine_type)
return config
def _InstallMemtier(vm):
vm.Install('memtier')
def _InstallMemcached(vm):
vm.Install('memcached_server')
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run memtier against memcached.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
client = benchmark_spec.vm_groups['client'][0]
server = benchmark_spec.vm_groups['server'][0]
_InstallMemtier(client)
_InstallMemcached(server)
memcached_server.ConfigureAndStart(server)
memtier.Load(client, server.internal_ip, memcached_server.MEMCACHED_PORT)
def Run(benchmark_spec):
"""Runs memtier against memcached and gathers the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample instances.
"""
client = benchmark_spec.vm_groups['client'][0]
server = benchmark_spec.vm_groups['server'][0]
server_ip = server.internal_ip
metadata = {'memcached_version': memcached_server.GetVersion(server),
'memcached_server_size': FLAGS.memcached_size_mb,
'memcached_server_threads': FLAGS.memcached_num_threads}
samples = memtier.RunOverAllThreadsAndPipelines(
client, server_ip, memcached_server.MEMCACHED_PORT)
for sample in samples:
sample.metadata.update(metadata)
return samples
def Cleanup(unused_benchmark_spec):
pass
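# Example invocation (illustrative; machine types are placeholders): run the
# default one-server/one-client layout, overriding machine types via the flags
# defined above:
#   ./pkb.py --benchmarks=memcached_memtier \
#       --memcached_memtier_server_machine_type=n1-standard-2 \
#       --memcached_memtier_client_machine_type=n1-standard-4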
|
from datetime import timedelta
import logging
import async_timeout
from pyowm.exceptions.api_call_error import APICallError
from pyowm.exceptions.api_response_error import UnauthorizedError
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_API_FORECAST,
ATTR_API_THIS_DAY_FORECAST,
CONDITION_CLASSES,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
FORECAST_UPDATE_INTERVAL = timedelta(minutes=30)
class ForecastUpdateCoordinator(DataUpdateCoordinator):
"""Forecast data update coordinator."""
def __init__(self, owm, latitude, longitude, forecast_mode, hass):
"""Initialize coordinator."""
self._owm_client = owm
self._forecast_mode = forecast_mode
self._latitude = latitude
self._longitude = longitude
self._forecast_limit = 15
super().__init__(
hass, _LOGGER, name=DOMAIN, update_interval=FORECAST_UPDATE_INTERVAL
)
async def _async_update_data(self):
data = {}
with async_timeout.timeout(20):
try:
forecast_response = await self._get_owm_forecast()
data = self._convert_forecast_response(forecast_response)
except (APICallError, UnauthorizedError) as error:
raise UpdateFailed(error) from error
return data
async def _get_owm_forecast(self):
if self._forecast_mode == "daily":
forecast_response = await self.hass.async_add_executor_job(
self._owm_client.daily_forecast_at_coords,
self._latitude,
self._longitude,
self._forecast_limit,
)
else:
forecast_response = await self.hass.async_add_executor_job(
self._owm_client.three_hours_forecast_at_coords,
self._latitude,
self._longitude,
)
return forecast_response.get_forecast()
def _convert_forecast_response(self, forecast_response):
weathers = self._get_weathers(forecast_response)
forecast_entries = self._convert_forecast_entries(weathers)
return {
ATTR_API_FORECAST: forecast_entries,
ATTR_API_THIS_DAY_FORECAST: forecast_entries[0],
}
def _get_weathers(self, forecast_response):
if self._forecast_mode == "freedaily":
return forecast_response.get_weathers()[::8]
return forecast_response.get_weathers()
def _convert_forecast_entries(self, entries):
if self._forecast_mode == "daily":
return list(map(self._convert_daily_forecast, entries))
return list(map(self._convert_forecast, entries))
def _convert_daily_forecast(self, entry):
return {
ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000,
ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("day"),
ATTR_FORECAST_TEMP_LOW: entry.get_temperature("celsius").get("night"),
ATTR_FORECAST_PRECIPITATION: self._calc_daily_precipitation(
entry.get_rain().get("all"), entry.get_snow().get("all")
),
ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"),
ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"),
ATTR_FORECAST_CONDITION: self._get_condition(entry.get_weather_code()),
}
def _convert_forecast(self, entry):
return {
ATTR_FORECAST_TIME: entry.get_reference_time("unix") * 1000,
ATTR_FORECAST_TEMP: entry.get_temperature("celsius").get("temp"),
ATTR_FORECAST_PRECIPITATION: self._calc_precipitation(entry),
ATTR_FORECAST_WIND_SPEED: entry.get_wind().get("speed"),
ATTR_FORECAST_WIND_BEARING: entry.get_wind().get("deg"),
ATTR_FORECAST_CONDITION: self._get_condition(entry.get_weather_code()),
}
@staticmethod
def _calc_daily_precipitation(rain, snow):
"""Calculate the precipitation."""
rain_value = 0 if rain is None else rain
snow_value = 0 if snow is None else snow
if round(rain_value + snow_value, 1) == 0:
return None
return round(rain_value + snow_value, 1)
@staticmethod
def _calc_precipitation(entry):
return (
round(entry.get_rain().get("1h"), 1)
if entry.get_rain().get("1h") is not None
and (round(entry.get_rain().get("1h"), 1) > 0)
else None
)
@staticmethod
def _get_condition(weather_code):
return [k for k, v in CONDITION_CLASSES.items() if weather_code in v][0]
|
import socket
import struct
import select
import sys
from datetime import datetime
from copy import deepcopy
if sys.version_info.major == 2:
# Python 2.7 io.StringIO does not like unicode
from StringIO import StringIO
else:
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
DEFAULT_PORT = 25826
"""Default port"""
DEFAULT_IPv4_GROUP = "239.192.74.66"
"""Default IPv4 multicast group"""
DEFAULT_IPv6_GROUP = "ff18::efc0:4a42"
"""Default IPv6 multicast group"""
HR_TIME_DIV = (2.0 ** 30)
# Message kinds
TYPE_HOST = 0x0000
TYPE_TIME = 0x0001
TYPE_TIME_HR = 0x0008
TYPE_PLUGIN = 0x0002
TYPE_PLUGIN_INSTANCE = 0x0003
TYPE_TYPE = 0x0004
TYPE_TYPE_INSTANCE = 0x0005
TYPE_VALUES = 0x0006
TYPE_INTERVAL = 0x0007
TYPE_INTERVAL_HR = 0x0009
# For notifications
TYPE_MESSAGE = 0x0100
TYPE_SEVERITY = 0x0101
# DS kinds
DS_TYPE_COUNTER = 0
DS_TYPE_GAUGE = 1
DS_TYPE_DERIVE = 2
DS_TYPE_ABSOLUTE = 3
if hasattr(struct, 'Struct'):
header = struct.Struct("!2H")
number = struct.Struct("!Q")
short = struct.Struct("!H")
double = struct.Struct("<d")
def decode_network_values(ptype, plen, buf):
"""Decodes a list of DS values in collectd network format
"""
nvalues = short.unpack_from(buf, header.size)[0]
off = header.size + short.size + nvalues
valskip = double.size
# Check whether our expected packet size is the reported one
assert ((valskip + 1) * nvalues + short.size + header.size) == plen
assert double.size == number.size
result = []
for dstype in [ord(x) for x in buf[header.size + short.size:off]]:
if dstype == DS_TYPE_COUNTER:
result.append((dstype, number.unpack_from(buf, off)[0]))
off += valskip
elif dstype == DS_TYPE_GAUGE:
result.append((dstype, double.unpack_from(buf, off)[0]))
off += valskip
elif dstype == DS_TYPE_DERIVE:
result.append((dstype, number.unpack_from(buf, off)[0]))
off += valskip
elif dstype == DS_TYPE_ABSOLUTE:
result.append((dstype, number.unpack_from(buf, off)[0]))
off += valskip
else:
raise ValueError("DS type %i unsupported" % dstype)
return result
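    # For illustration, a "values" part carrying two gauges is laid out as:
    #   !2H part header (type=0x0006, length) | !H nvalues=2 |
    #   2 x 1-byte DS type | 2 x 8-byte value
    # which is exactly the size the assertion above checks.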
def decode_network_number(ptype, plen, buf):
"""Decodes a number (64-bit unsigned) from collectd network format.
"""
return number.unpack_from(buf, header.size)[0]
def decode_network_string(msgtype, plen, buf):
"""Decodes a string from collectd network format.
"""
return buf[header.size:plen - 1]
# Mapping of message types to decoding functions.
_decoders = {
TYPE_VALUES: decode_network_values,
TYPE_TIME: decode_network_number,
TYPE_TIME_HR: decode_network_number,
TYPE_INTERVAL: decode_network_number,
TYPE_INTERVAL_HR: decode_network_number,
TYPE_HOST: decode_network_string,
TYPE_PLUGIN: decode_network_string,
TYPE_PLUGIN_INSTANCE: decode_network_string,
TYPE_TYPE: decode_network_string,
TYPE_TYPE_INSTANCE: decode_network_string,
TYPE_MESSAGE: decode_network_string,
TYPE_SEVERITY: decode_network_number,
}
def decode_network_packet(buf):
"""Decodes a network packet in collectd format.
"""
off = 0
blen = len(buf)
while off < blen:
ptype, plen = header.unpack_from(buf, off)
if plen > blen - off:
raise ValueError("Packet longer than amount of data in buffer")
if ptype not in _decoders:
raise ValueError("Message type %i not recognized" % ptype)
yield ptype, _decoders[ptype](ptype, plen, buf[off:])
off += plen
class Data(object):
time = 0
host = None
plugin = None
plugininstance = None
type = None
typeinstance = None
def __init__(self, **kw):
[setattr(self, k, v) for k, v in kw.items()]
@property
def datetime(self):
return datetime.fromtimestamp(self.time)
@property
def source(self):
buf = StringIO()
if self.host:
buf.write(str(self.host))
if self.plugin:
buf.write("/")
buf.write(str(self.plugin))
if self.plugininstance:
buf.write("/")
buf.write(str(self.plugininstance))
if self.type:
buf.write("/")
buf.write(str(self.type))
if self.typeinstance:
buf.write("/")
buf.write(str(self.typeinstance))
return buf.getvalue()
def __str__(self):
return "[%i] %s" % (self.time, self.source)
class Notification(Data):
FAILURE = 1
WARNING = 2
OKAY = 4
SEVERITY = {
FAILURE: "FAILURE",
WARNING: "WARNING",
OKAY: "OKAY",
}
__severity = 0
message = ""
def __set_severity(self, value):
if value in (self.FAILURE, self.WARNING, self.OKAY):
self.__severity = value
severity = property(lambda self: self.__severity, __set_severity)
@property
def severitystring(self):
return self.SEVERITY.get(self.severity, "UNKNOWN")
def __str__(self):
return "%s [%s] %s" % (
super(Notification, self).__str__(),
self.severitystring,
self.message)
class Values(Data, list):
def __str__(self):
return "%s %s" % (Data.__str__(self), list.__str__(self))
def interpret_opcodes(iterable):
vl = Values()
nt = Notification()
for kind, data in iterable:
if kind == TYPE_TIME:
vl.time = nt.time = data
elif kind == TYPE_TIME_HR:
vl.time = nt.time = data / HR_TIME_DIV
elif kind == TYPE_INTERVAL:
vl.interval = data
elif kind == TYPE_INTERVAL_HR:
vl.interval = data / HR_TIME_DIV
elif kind == TYPE_HOST:
vl.host = nt.host = data
elif kind == TYPE_PLUGIN:
vl.plugin = nt.plugin = data
elif kind == TYPE_PLUGIN_INSTANCE:
vl.plugininstance = nt.plugininstance = data
elif kind == TYPE_TYPE:
vl.type = nt.type = data
elif kind == TYPE_TYPE_INSTANCE:
vl.typeinstance = nt.typeinstance = data
elif kind == TYPE_SEVERITY:
nt.severity = data
elif kind == TYPE_MESSAGE:
nt.message = data
yield deepcopy(nt)
elif kind == TYPE_VALUES:
vl[:] = data
yield deepcopy(vl)
class Reader(object):
"""Network reader for collectd data.
    Listens on the network at a given address, which can be a multicast
group address, and handles reading data when it arrives.
"""
addr = None
host = None
port = DEFAULT_PORT
BUFFER_SIZE = 16384
def __init__(self, host=None, port=DEFAULT_PORT, multicast=False):
if host is None:
multicast = True
host = DEFAULT_IPv4_GROUP
self.host, self.port = host, port
self.ipv6 = ":" in self.host
if multicast:
hostname = None
else:
hostname = self.host
if self.ipv6:
sock_type = socket.AF_INET6
else:
sock_type = socket.AF_UNSPEC
family, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
hostname,
self.port,
sock_type,
socket.SOCK_DGRAM, 0, socket.AI_PASSIVE)[0]
self._sock = socket.socket(family, socktype, proto)
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self._sock.bind(sockaddr)
if multicast:
if hasattr(socket, "SO_REUSEPORT"):
self._sock.setsockopt(
socket.SOL_SOCKET,
socket.SO_REUSEPORT, 1)
val = None
if family == socket.AF_INET:
assert "." in self.host
val = struct.pack("4sl",
socket.inet_aton(self.host),
socket.INADDR_ANY)
elif family == socket.AF_INET6:
raise NotImplementedError("IPv6 support not ready yet")
else:
raise ValueError("Unsupported network address family")
if self.ipv6:
sock_type = socket.IPPROTO_IPV6
else:
sock_type = socket.IPPROTO_IP
self._sock.setsockopt(
sock_type,
socket.IP_ADD_MEMBERSHIP, val)
self._sock.setsockopt(
sock_type,
socket.IP_MULTICAST_LOOP, 0)
self._readlist = [self._sock]
def receive(self, poll_interval):
"""Receives a single raw collect network packet.
"""
readable, writeable, errored = select.select(self._readlist, [], [],
poll_interval)
for s in readable:
data, addr = s.recvfrom(self.BUFFER_SIZE)
if data:
return data
return None
def decode(self, poll_interval, buf=None):
"""Decodes a given buffer or the next received packet.
"""
if buf is None:
buf = self.receive(poll_interval)
if buf is None:
return None
return decode_network_packet(buf)
def interpret(self, iterable=None, poll_interval=0.2):
"""Interprets a sequence
"""
if iterable is None:
iterable = self.decode(poll_interval)
if iterable is None:
return None
        if isinstance(iterable, (bytes, str)):
iterable = self.decode(poll_interval, iterable)
return interpret_opcodes(iterable)
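# Illustrative usage sketch: listen for unicast collectd traffic on all
# interfaces and print every decoded Values/Notification object. Construct
# Reader() without a host to join the default multicast group instead.
if __name__ == "__main__":
    reader = Reader("0.0.0.0")
    while True:
        decoded = reader.interpret(poll_interval=0.5)
        if decoded is None:
            continue  # nothing arrived within the poll interval
        for obj in decoded:
            print(obj)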
|
import asyncio
import logging
import os
import aionotify
from evdev import InputDevice, categorize, ecodes, list_devices
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEVICE_DESCRIPTOR = "device_descriptor"
DEVICE_ID_GROUP = "Device description"
DEVICE_NAME = "device_name"
DOMAIN = "keyboard_remote"
ICON = "mdi:remote"
KEY_CODE = "key_code"
KEY_VALUE = {"key_up": 0, "key_down": 1, "key_hold": 2}
KEYBOARD_REMOTE_COMMAND_RECEIVED = "keyboard_remote_command_received"
KEYBOARD_REMOTE_CONNECTED = "keyboard_remote_connected"
KEYBOARD_REMOTE_DISCONNECTED = "keyboard_remote_disconnected"
TYPE = "type"
EMULATE_KEY_HOLD = "emulate_key_hold"
EMULATE_KEY_HOLD_DELAY = "emulate_key_hold_delay"
EMULATE_KEY_HOLD_REPEAT = "emulate_key_hold_repeat"
DEVINPUT = "/dev/input"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Exclusive(DEVICE_DESCRIPTOR, DEVICE_ID_GROUP): cv.string,
vol.Exclusive(DEVICE_NAME, DEVICE_ID_GROUP): cv.string,
vol.Optional(TYPE, default=["key_up"]): vol.All(
cv.ensure_list, [vol.In(KEY_VALUE)]
),
vol.Optional(EMULATE_KEY_HOLD, default=False): cv.boolean,
vol.Optional(EMULATE_KEY_HOLD_DELAY, default=0.250): float,
vol.Optional(EMULATE_KEY_HOLD_REPEAT, default=0.033): float,
}
),
cv.has_at_least_one_key(DEVICE_DESCRIPTOR, DEVICE_ID_GROUP),
],
)
},
extra=vol.ALLOW_EXTRA,
)
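# Example configuration.yaml entry matching the schema above (illustrative;
# the device path depends on the host system):
#
# keyboard_remote:
#   - device_descriptor: /dev/input/event0
#     type:
#       - key_up
#     emulate_key_hold: true
#     emulate_key_hold_delay: 0.25
#     emulate_key_hold_repeat: 0.033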
async def async_setup(hass, config):
"""Set up the keyboard_remote."""
config = config.get(DOMAIN)
remote = KeyboardRemote(hass, config)
remote.setup()
return True
class KeyboardRemote:
"""Manage device connection/disconnection using inotify to asynchronously monitor."""
def __init__(self, hass, config):
"""Create handlers and setup dictionaries to keep track of them."""
self.hass = hass
self.handlers_by_name = {}
self.handlers_by_descriptor = {}
self.active_handlers_by_descriptor = {}
self.watcher = None
self.monitor_task = None
for dev_block in config:
handler = self.DeviceHandler(hass, dev_block)
descriptor = dev_block.get(DEVICE_DESCRIPTOR)
if descriptor is not None:
self.handlers_by_descriptor[descriptor] = handler
else:
name = dev_block.get(DEVICE_NAME)
self.handlers_by_name[name] = handler
def setup(self):
"""Listen for Home Assistant start and stop events."""
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, self.async_start_monitoring
)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, self.async_stop_monitoring
)
async def async_start_monitoring(self, event):
"""Start monitoring of events and devices.
Start inotify watching for events, start event monitoring for those already
connected, and start monitoring for device connection/disconnection.
"""
# start watching
self.watcher = aionotify.Watcher()
self.watcher.watch(
alias="devinput",
path=DEVINPUT,
flags=aionotify.Flags.CREATE
| aionotify.Flags.ATTRIB
| aionotify.Flags.DELETE,
)
await self.watcher.setup(self.hass.loop)
# add initial devices (do this AFTER starting watcher in order to
# avoid race conditions leading to missing device connections)
initial_start_monitoring = set()
descriptors = await self.hass.async_add_executor_job(list_devices, DEVINPUT)
for descriptor in descriptors:
dev, handler = await self.hass.async_add_executor_job(
self.get_device_handler, descriptor
)
if handler is None:
continue
self.active_handlers_by_descriptor[descriptor] = handler
initial_start_monitoring.add(handler.async_start_monitoring(dev))
if initial_start_monitoring:
await asyncio.wait(initial_start_monitoring)
self.monitor_task = self.hass.async_create_task(self.async_monitor_devices())
async def async_stop_monitoring(self, event):
"""Stop and cleanup running monitoring tasks."""
_LOGGER.debug("Cleanup on shutdown")
if self.monitor_task is not None:
if not self.monitor_task.done():
self.monitor_task.cancel()
await self.monitor_task
handler_stop_monitoring = set()
for handler in self.active_handlers_by_descriptor.values():
handler_stop_monitoring.add(handler.async_stop_monitoring())
if handler_stop_monitoring:
await asyncio.wait(handler_stop_monitoring)
def get_device_handler(self, descriptor):
"""Find the correct device handler given a descriptor (path)."""
# devices are often added and then correct permissions set after
try:
dev = InputDevice(descriptor)
except (OSError, PermissionError):
return (None, None)
handler = None
if descriptor in self.handlers_by_descriptor:
handler = self.handlers_by_descriptor[descriptor]
elif dev.name in self.handlers_by_name:
handler = self.handlers_by_name[dev.name]
else:
# check for symlinked paths matching descriptor
for test_descriptor, test_handler in self.handlers_by_descriptor.items():
if test_handler.dev is not None:
fullpath = test_handler.dev.path
else:
fullpath = os.path.realpath(test_descriptor)
if fullpath == descriptor:
handler = test_handler
return (dev, handler)
async def async_monitor_devices(self):
"""Monitor asynchronously for device connection/disconnection or permissions changes."""
try:
while True:
event = await self.watcher.get_event()
descriptor = f"{DEVINPUT}/{event.name}"
descriptor_active = descriptor in self.active_handlers_by_descriptor
if (event.flags & aionotify.Flags.DELETE) and descriptor_active:
handler = self.active_handlers_by_descriptor[descriptor]
del self.active_handlers_by_descriptor[descriptor]
await handler.async_stop_monitoring()
elif (
(event.flags & aionotify.Flags.CREATE)
or (event.flags & aionotify.Flags.ATTRIB)
) and not descriptor_active:
dev, handler = await self.hass.async_add_executor_job(
self.get_device_handler, descriptor
)
if handler is None:
continue
self.active_handlers_by_descriptor[descriptor] = handler
await handler.async_start_monitoring(dev)
except asyncio.CancelledError:
return
class DeviceHandler:
"""Manage input events using evdev with asyncio."""
def __init__(self, hass, dev_block):
"""Fill configuration data."""
self.hass = hass
key_types = dev_block.get(TYPE)
self.key_values = set()
for key_type in key_types:
self.key_values.add(KEY_VALUE[key_type])
self.emulate_key_hold = dev_block.get(EMULATE_KEY_HOLD)
self.emulate_key_hold_delay = dev_block.get(EMULATE_KEY_HOLD_DELAY)
self.emulate_key_hold_repeat = dev_block.get(EMULATE_KEY_HOLD_REPEAT)
self.monitor_task = None
self.dev = None
async def async_keyrepeat(self, path, name, code, delay, repeat):
"""Emulate keyboard delay/repeat behaviour by sending key events on a timer."""
await asyncio.sleep(delay)
while True:
self.hass.bus.async_fire(
KEYBOARD_REMOTE_COMMAND_RECEIVED,
{KEY_CODE: code, DEVICE_DESCRIPTOR: path, DEVICE_NAME: name},
)
await asyncio.sleep(repeat)
async def async_start_monitoring(self, dev):
"""Start event monitoring task and issue event."""
if self.monitor_task is None:
self.dev = dev
self.monitor_task = self.hass.async_create_task(
self.async_monitor_input(dev)
)
self.hass.bus.async_fire(
KEYBOARD_REMOTE_CONNECTED,
{DEVICE_DESCRIPTOR: dev.path, DEVICE_NAME: dev.name},
)
_LOGGER.debug("Keyboard (re-)connected, %s", dev.name)
async def async_stop_monitoring(self):
"""Stop event monitoring task and issue event."""
if self.monitor_task is not None:
try:
await self.hass.async_add_executor_job(self.dev.ungrab)
except OSError:
pass
                # removing the device from the event loop and closing it has to
                # happen before cancelling the task to avoid
# triggering unhandled exceptions inside evdev coroutines
asyncio.get_event_loop().remove_reader(self.dev.fileno())
self.dev.close()
if not self.monitor_task.done():
self.monitor_task.cancel()
await self.monitor_task
self.monitor_task = None
self.hass.bus.async_fire(
KEYBOARD_REMOTE_DISCONNECTED,
{DEVICE_DESCRIPTOR: self.dev.path, DEVICE_NAME: self.dev.name},
)
_LOGGER.debug("Keyboard disconnected, %s", self.dev.name)
self.dev = None
async def async_monitor_input(self, dev):
"""Event monitoring loop.
Monitor one device for new events using evdev with asyncio,
start and stop key hold emulation tasks as needed.
"""
repeat_tasks = {}
try:
_LOGGER.debug("Start device monitoring")
await self.hass.async_add_executor_job(dev.grab)
async for event in dev.async_read_loop():
if event.type is ecodes.EV_KEY:
if event.value in self.key_values:
_LOGGER.debug(categorize(event))
self.hass.bus.async_fire(
KEYBOARD_REMOTE_COMMAND_RECEIVED,
{
KEY_CODE: event.code,
DEVICE_DESCRIPTOR: dev.path,
DEVICE_NAME: dev.name,
},
)
if (
event.value == KEY_VALUE["key_down"]
and self.emulate_key_hold
):
repeat_tasks[event.code] = self.hass.async_create_task(
self.async_keyrepeat(
dev.path,
dev.name,
event.code,
self.emulate_key_hold_delay,
self.emulate_key_hold_repeat,
)
)
elif event.value == KEY_VALUE["key_up"]:
if event.code in repeat_tasks:
repeat_tasks[event.code].cancel()
del repeat_tasks[event.code]
except (OSError, PermissionError, asyncio.CancelledError):
# cancel key repeat tasks
for task in repeat_tasks.values():
task.cancel()
if repeat_tasks:
await asyncio.wait(repeat_tasks.values())
|
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import islamic_prayer_times
from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD, DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture(name="mock_setup", autouse=True)
def mock_setup():
"""Mock entry setup."""
with patch(
"homeassistant.components.islamic_prayer_times.async_setup_entry",
return_value=True,
):
yield
async def test_flow_works(hass):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
islamic_prayer_times.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Islamic Prayer Times"
async def test_options(hass):
"""Test updating options."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Islamic Prayer Times",
data={},
options={CONF_CALC_METHOD: "isna"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={CONF_CALC_METHOD: "makkah"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_CALC_METHOD] == "makkah"
async def test_import(hass):
"""Test import step."""
result = await hass.config_entries.flow.async_init(
islamic_prayer_times.DOMAIN,
context={"source": "import"},
data={CONF_CALC_METHOD: "makkah"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Islamic Prayer Times"
assert result["data"][CONF_CALC_METHOD] == "makkah"
async def test_integration_already_configured(hass):
"""Test integration is already configured."""
entry = MockConfigEntry(
domain=DOMAIN,
data={},
options={},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
islamic_prayer_times.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
|
from requests import HTTPError
from twitch.resources import Channel, Follow, Stream, Subscription, User
from homeassistant.components import sensor
from homeassistant.const import CONF_CLIENT_ID
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
ENTITY_ID = "sensor.channel123"
CONFIG = {
sensor.DOMAIN: {
"platform": "twitch",
CONF_CLIENT_ID: "1234",
"channels": ["channel123"],
}
}
CONFIG_WITH_OAUTH = {
sensor.DOMAIN: {
"platform": "twitch",
CONF_CLIENT_ID: "1234",
"channels": ["channel123"],
"token": "9876",
}
}
USER_ID = User({"id": 123, "display_name": "channel123", "logo": "logo.png"})
STREAM_OBJECT_ONLINE = Stream(
{
"channel": {"game": "Good Game", "status": "Title"},
"preview": {"medium": "stream-medium.png"},
}
)
CHANNEL_OBJECT = Channel({"followers": 42, "views": 24})
OAUTH_USER_ID = User({"id": 987})
SUB_ACTIVE = Subscription({"created_at": "2020-01-20T21:22:42", "is_gift": False})
FOLLOW_ACTIVE = Follow({"created_at": "2020-01-20T21:22:42"})
async def test_init(hass):
"""Test initial config."""
channels = MagicMock()
channels.get_by_id.return_value = CHANNEL_OBJECT
streams = MagicMock()
streams.get_stream_by_user.return_value = None
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels = channels
twitch_mock.streams = streams
with patch(
"homeassistant.components.twitch.sensor.TwitchClient", return_value=twitch_mock
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG) is True
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.state == "offline"
assert sensor_state.name == "channel123"
assert sensor_state.attributes["icon"] == "mdi:twitch"
assert sensor_state.attributes["friendly_name"] == "channel123"
assert sensor_state.attributes["views"] == 24
assert sensor_state.attributes["followers"] == 42
async def test_offline(hass):
"""Test offline state."""
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels.get_by_id.return_value = CHANNEL_OBJECT
twitch_mock.streams.get_stream_by_user.return_value = None
with patch(
"homeassistant.components.twitch.sensor.TwitchClient",
return_value=twitch_mock,
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG) is True
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.state == "offline"
assert sensor_state.attributes["entity_picture"] == "logo.png"
async def test_streaming(hass):
"""Test streaming state."""
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels.get_by_id.return_value = CHANNEL_OBJECT
twitch_mock.streams.get_stream_by_user.return_value = STREAM_OBJECT_ONLINE
with patch(
"homeassistant.components.twitch.sensor.TwitchClient",
return_value=twitch_mock,
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG) is True
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.state == "streaming"
assert sensor_state.attributes["entity_picture"] == "stream-medium.png"
assert sensor_state.attributes["game"] == "Good Game"
assert sensor_state.attributes["title"] == "Title"
async def test_oauth_without_sub_and_follow(hass):
"""Test state with oauth."""
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels.get_by_id.return_value = CHANNEL_OBJECT
twitch_mock._oauth_token = True # A replacement for the token
twitch_mock.users.get.return_value = OAUTH_USER_ID
twitch_mock.users.check_subscribed_to_channel.side_effect = HTTPError()
twitch_mock.users.check_follows_channel.side_effect = HTTPError()
with patch(
"homeassistant.components.twitch.sensor.TwitchClient",
return_value=twitch_mock,
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG_WITH_OAUTH)
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.attributes["subscribed"] is False
assert sensor_state.attributes["following"] is False
async def test_oauth_with_sub(hass):
"""Test state with oauth and sub."""
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels.get_by_id.return_value = CHANNEL_OBJECT
twitch_mock._oauth_token = True # A replacement for the token
twitch_mock.users.get.return_value = OAUTH_USER_ID
twitch_mock.users.check_subscribed_to_channel.return_value = SUB_ACTIVE
twitch_mock.users.check_follows_channel.side_effect = HTTPError()
with patch(
"homeassistant.components.twitch.sensor.TwitchClient",
return_value=twitch_mock,
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG_WITH_OAUTH)
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.attributes["subscribed"] is True
assert sensor_state.attributes["subscribed_since"] == "2020-01-20T21:22:42"
assert sensor_state.attributes["subscription_is_gifted"] is False
assert sensor_state.attributes["following"] is False
async def test_oauth_with_follow(hass):
"""Test state with oauth and follow."""
twitch_mock = MagicMock()
twitch_mock.users.translate_usernames_to_ids.return_value = [USER_ID]
twitch_mock.channels.get_by_id.return_value = CHANNEL_OBJECT
twitch_mock._oauth_token = True # A replacement for the token
twitch_mock.users.get.return_value = OAUTH_USER_ID
twitch_mock.users.check_subscribed_to_channel.side_effect = HTTPError()
twitch_mock.users.check_follows_channel.return_value = FOLLOW_ACTIVE
with patch(
"homeassistant.components.twitch.sensor.TwitchClient",
return_value=twitch_mock,
):
assert await async_setup_component(hass, sensor.DOMAIN, CONFIG_WITH_OAUTH)
await hass.async_block_till_done()
sensor_state = hass.states.get(ENTITY_ID)
assert sensor_state.attributes["subscribed"] is False
assert sensor_state.attributes["following"] is True
assert sensor_state.attributes["following_since"] == "2020-01-20T21:22:42"
|
import asyncio
from logging import getLogger
from aiohttp.client_exceptions import ClientResponseError
import async_timeout
from kaiterra_async_client import AQIStandard, KaiterraAPIClient, Units
from homeassistant.const import CONF_API_KEY, CONF_DEVICE_ID, CONF_DEVICES, CONF_TYPE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
AQI_LEVEL,
AQI_SCALE,
CONF_AQI_STANDARD,
CONF_PREFERRED_UNITS,
DISPATCHER_KAITERRA,
)
_LOGGER = getLogger(__name__)
POLLUTANTS = {"rpm25c": "PM2.5", "rpm10c": "PM10", "rtvoc": "TVOC", "rco2": "CO2"}
class KaiterraApiData:
"""Get data from Kaiterra API."""
def __init__(self, hass, config, session):
"""Initialize the API data object."""
api_key = config[CONF_API_KEY]
aqi_standard = config[CONF_AQI_STANDARD]
devices = config[CONF_DEVICES]
units = config[CONF_PREFERRED_UNITS]
self._hass = hass
self._api = KaiterraAPIClient(
session,
api_key=api_key,
aqi_standard=AQIStandard.from_str(aqi_standard),
preferred_units=[Units.from_str(unit) for unit in units],
)
self._devices_ids = [device[CONF_DEVICE_ID] for device in devices]
self._devices = [
f"/{device[CONF_TYPE]}s/{device[CONF_DEVICE_ID]}" for device in devices
]
self._scale = AQI_SCALE[aqi_standard]
self._level = AQI_LEVEL[aqi_standard]
self._update_listeners = []
self.data = {}
async def async_update(self) -> None:
"""Get the data from Kaiterra API."""
try:
with async_timeout.timeout(10):
data = await self._api.get_latest_sensor_readings(self._devices)
except (ClientResponseError, asyncio.TimeoutError):
_LOGGER.debug("Couldn't fetch data from Kaiterra API")
self.data = {}
async_dispatcher_send(self._hass, DISPATCHER_KAITERRA)
return
_LOGGER.debug("New data retrieved: %s", data)
try:
self.data = {}
for i, device in enumerate(data):
if not device:
self.data[self._devices_ids[i]] = {}
continue
aqi, main_pollutant = None, None
for sensor_name, sensor in device.items():
points = sensor.get("points")
if not points:
continue
point = points[0]
sensor["value"] = point.get("value")
if "aqi" not in point:
continue
sensor["aqi"] = point["aqi"]
if not aqi or aqi < point["aqi"]:
aqi = point["aqi"]
main_pollutant = POLLUTANTS.get(sensor_name)
level = None
for j in range(1, len(self._scale)):
if aqi <= self._scale[j]:
level = self._level[j - 1]
break
device["aqi"] = {"value": aqi}
device["aqi_level"] = {"value": level}
device["aqi_pollutant"] = {"value": main_pollutant}
self.data[self._devices_ids[i]] = device
except IndexError as err:
_LOGGER.error("Parsing error %s", err)
async_dispatcher_send(self._hass, DISPATCHER_KAITERRA)
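# Summary of the AQI handling above: the device-level AQI is the highest
# per-pollutant "aqi" reported by the device's sensors, the dominant pollutant
# is the sensor that produced that value, and the level label is the first
# AQI_LEVEL entry whose AQI_SCALE upper bound is not exceeded.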
|
from homeassistant.exceptions import HomeAssistantError
class HmipcException(HomeAssistantError):
"""Base class for HomematicIP Cloud exceptions."""
class HmipcConnectionError(HmipcException):
"""Unable to connect to the HomematicIP Cloud server."""
class HmipcConnectionWait(HmipcException):
"""Wait for registration to the HomematicIP Cloud server."""
class HmipcRegistrationFailed(HmipcException):
"""Registration on HomematicIP Cloud failed."""
class HmipcPressButton(HmipcException):
"""User needs to press the blue button."""
|
import urllib2
import diamond.collector
from diamond.convertor import time as time_convertor
try:
import json
except ImportError:
import simplejson as json
class PuppetDBCollector(diamond.collector.Collector):
PATHS = {
'memory':
"v2/metrics/mbean/java.lang:type=Memory",
'queue':
"v2/metrics/mbean/org.apache.activemq:BrokerName=localhost," +
"Type=Queue,Destination=com.puppetlabs.puppetdb.commands",
'processing-time':
"v2/metrics/mbean/com.puppetlabs.puppetdb.command:" +
"type=global,name=processing-time",
'processed':
"v2/metrics/mbean/com.puppetlabs.puppetdb.command:" +
"type=global,name=processed",
'retried':
"v2/metrics/mbean/com.puppetlabs.puppetdb.command:" +
"type=global,name=retried",
'discarded':
"v2/metrics/mbean/com.puppetlabs.puppetdb.command:" +
"type=global,name=discarded",
'fatal': "v2/metrics/mbean/com.puppetlabs.puppetdb.command:" +
"type=global,name=fatal",
'commands.service-time':
"v2/metrics/mbean/com.puppetlabs.puppetdb." +
"http.server:type=/v3/commands,name=service-time",
'resources.service-time':
"v2/metrics/mbean/com.puppetlabs.puppetdb." +
"http.server:type=/v3/resources,name=service-time",
'gc-time':
"v2/metrics/mbean/com.puppetlabs.puppetdb.scf.storage:" +
"type=default,name=gc-time",
'duplicate-pct':
"v2/metrics/mbean/com.puppetlabs.puppetdb.scf.storage:" +
"type=default,name=duplicate-pct",
'pct-resource-dupes':
"v2/metrics/mbean/com.puppetlabs.puppetdb.query." +
"population:type=default,name=pct-resource-dupes",
'num-nodes':
"v2/metrics/mbean/com.puppetlabs.puppetdb.query." +
"population:type=default,name=num-nodes",
'num-resources':
"v2/metrics/mbean/com.puppetlabs.puppetdb.query." +
"population:type=default,name=num-resources",
}
def get_default_config_help(self):
config_help = super(PuppetDBCollector,
self).get_default_config_help()
config_help.update({
'host': 'Hostname to collect from',
'port': 'Port number to collect from',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(PuppetDBCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 8080,
'path': 'PuppetDB',
})
return config
def fetch_metrics(self, url):
try:
url = "http://%s:%s/%s" % (
self.config['host'], int(self.config['port']), url)
response = urllib2.urlopen(url)
except Exception as e:
self.log.error('Couldn\'t connect to puppetdb: %s -> %s', url, e)
return {}
return json.load(response)
def collect(self):
rawmetrics = {}
for subnode in self.PATHS:
path = self.PATHS[subnode]
rawmetrics[subnode] = self.fetch_metrics(path)
self.publish_gauge('num_resources',
rawmetrics['num-resources']['Value'])
self.publish_gauge('catalog_duplicate_pct',
rawmetrics['duplicate-pct']['Value'])
self.publish_gauge(
'sec_command',
time_convertor.convert(
rawmetrics['processing-time']['50thPercentile'],
rawmetrics['processing-time']['LatencyUnit'],
'seconds'))
self.publish_gauge(
'resources_service_time',
time_convertor.convert(
rawmetrics['resources.service-time']['50thPercentile'],
rawmetrics['resources.service-time']['LatencyUnit'],
'seconds'))
self.publish_gauge(
'enqueueing_service_time',
time_convertor.convert(
rawmetrics['commands.service-time']['50thPercentile'],
rawmetrics['commands.service-time']['LatencyUnit'],
'seconds'))
self.publish_gauge('discarded', rawmetrics['discarded']['Count'])
self.publish_gauge('processed', rawmetrics['processed']['Count'])
self.publish_gauge('rejected', rawmetrics['fatal']['Count'])
self.publish_gauge(
'DB_Compaction',
time_convertor.convert(
rawmetrics['gc-time']['50thPercentile'],
rawmetrics['gc-time']['LatencyUnit'],
'seconds'))
self.publish_gauge('resource_duplicate_pct',
rawmetrics['pct-resource-dupes']['Value'])
self.publish_gauge('num_nodes',
rawmetrics['num-nodes']['Value'])
self.publish_counter('queue.ProducerCount',
rawmetrics['queue']['ProducerCount'])
self.publish_counter('queue.DequeueCount',
rawmetrics['queue']['DequeueCount'])
self.publish_counter('queue.ConsumerCount',
rawmetrics['queue']['ConsumerCount'])
self.publish_gauge('queue.QueueSize',
rawmetrics['queue']['QueueSize'])
self.publish_counter('queue.ExpiredCount',
rawmetrics['queue']['ExpiredCount'])
self.publish_counter('queue.EnqueueCount',
rawmetrics['queue']['EnqueueCount'])
self.publish_counter('queue.InFlightCount',
rawmetrics['queue']['InFlightCount'])
self.publish_gauge('queue.CursorPercentUsage',
rawmetrics['queue']['CursorPercentUsage'])
self.publish_gauge('queue.MemoryUsagePortion',
rawmetrics['queue']['MemoryUsagePortion'])
self.publish_gauge('memory.NonHeapMemoryUsage.used',
rawmetrics['memory']['NonHeapMemoryUsage']['used'])
self.publish_gauge(
'memory.NonHeapMemoryUsage.committed',
rawmetrics['memory']['NonHeapMemoryUsage']['committed'])
self.publish_gauge('memory.HeapMemoryUsage.used',
rawmetrics['memory']['HeapMemoryUsage']['used'])
self.publish_gauge('memory.HeapMemoryUsage.committed',
rawmetrics['memory']['HeapMemoryUsage']['committed'])
|
import logging
import os
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
FLAGS = flags.FLAGS
flags.DEFINE_string(
'aws_credentials_local_path', os.path.join('~', '.aws'),
'Path where the AWS credential files can be found on the local machine.')
flags.DEFINE_string(
'aws_credentials_remote_path', '.aws',
'Path where the AWS credential files will be written on remote machines.')
flags.DEFINE_boolean(
'aws_credentials_overwrite', False,
'When set, if an AWS credential file already exists at the destination '
'specified by --aws_credentials_remote_path, it will be overwritten during '
'AWS credential file installation.')
flags.DEFINE_string('aws_s3_region', None, 'Region for the S3 bucket')
def _GetLocalPath():
"""Gets the expanded local path of the credential files.
Returns:
string. Path to the credential files on the local machine.
"""
return os.path.expanduser(FLAGS.aws_credentials_local_path)
def GetCredentials(credentials_file_name='credentials'):
"""Gets the credentials from the local credential file.
The AWS credentials file is expected to be called 'credentials'.
It looks like this and ends with a newline:
[default]
aws_access_key_id = {access_key}
aws_secret_access_key = {secret_access_key}
Args:
credentials_file_name: String name of the file containing the credentials.
Returns:
A string, string tuple of access_key and secret_access_key
"""
with open(os.path.join(_GetLocalPath(), credentials_file_name)) as fp:
text = fp.read().split('\n')
return (text[1].split(' = ')[1]), (text[2].split(' = ')[1])
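# Usage sketch (hypothetical values; assumes the exact "key = value" layout
# shown in the docstring above):
#
#   access_key, secret_key = GetCredentials()
#   # e.g. ('AKIDEXAMPLE', 'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY')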
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
local_path = _GetLocalPath()
if not os.path.exists(local_path):
raise data.ResourceNotFound(
'AWS credential files were not found at {0}'.format(local_path))
def Install(vm):
"""Copies credential files to the specified VM.
Args:
vm: BaseVirtualMachine. VM that receives the credential files.
Raises:
errors.Error: If the file destination on the VM already exists, and the
overwrite behavior is not specified via --aws_credentials_overwrite.
"""
local_path = _GetLocalPath()
remote_path = FLAGS.aws_credentials_remote_path
overwrite = FLAGS.aws_credentials_overwrite
try:
vm.RemoteCommand('[[ ! -e {0} ]]'.format(remote_path))
except errors.VirtualMachine.RemoteCommandError:
err_msg = 'File {0} already exists on VM {1}.'.format(remote_path, vm)
if overwrite:
logging.info('%s Overwriting.', err_msg)
else:
raise errors.Error(err_msg)
remote_dir = os.path.dirname(remote_path)
if remote_dir:
vm.RemoteCommand('mkdir -p {0}'.format(remote_dir))
vm.PushFile(local_path, remote_path)
def Uninstall(vm):
"""Deletes the credential files from the specified VM.
Args:
vm: BaseVirtualMachine. VM that has the credential files.
"""
vm.RemoveFile(FLAGS.aws_credentials_remote_path)
|
from pygal import Pie
def test_donut():
"""Test a donut pie chart"""
chart = Pie(inner_radius=.3, pretty_print=True)
chart.title = 'Browser usage in February 2012 (in %)'
chart.add('IE', 19.5)
chart.add('Firefox', 36.6)
chart.add('Chrome', 36.3)
chart.add('Safari', 4.5)
chart.add('Opera', 2.3)
assert chart.render()
def test_multiseries_donut():
"""Test a donut pie chart with multiserie"""
# this just demos that the multiseries pie does not respect
# the inner_radius
chart = Pie(inner_radius=.3, pretty_print=True)
chart.title = 'Browser usage by version in February 2012 (in %)'
chart.add('IE', [5.7, 10.2, 2.6, 1])
chart.add('Firefox', [.6, 16.8, 7.4, 2.2, 1.2, 1, 1, 1.1, 4.3, 1])
chart.add('Chrome', [.3, .9, 17.1, 15.3, .6, .5, 1.6])
chart.add('Safari', [4.4, .1])
chart.add('Opera', [.1, 1.6, .1, .5])
assert chart.render()
def test_half_pie():
"""Test a half pie chart"""
pie = Pie()
pie.add('IE', 19.5)
pie.add('Firefox', 36.6)
pie.add('Chrome', 36.3)
pie.add('Safari', 4.5)
pie.add('Opera', 2.3)
half = Pie(half_pie=True)
half.add('IE', 19.5)
half.add('Firefox', 36.6)
half.add('Chrome', 36.3)
half.add('Safari', 4.5)
half.add('Opera', 2.3)
assert pie.render() != half.render()
|
import os
import os.path as op
import shutil
import sys
import numpy as np
import pytest
from pathlib import Path
import mne
from mne.datasets import testing
from mne.io.pick import pick_channels_cov
from mne.utils import (check_random_state, _check_fname, check_fname,
_check_subject, requires_mayavi, traits_test,
_check_mayavi_version, _check_info_inv, _check_option,
check_version, _check_path_like, _validate_type,
_suggest, _on_missing, requires_nibabel, _safe_input)
data_path = testing.data_path(download=False)
base_dir = op.join(data_path, 'MEG', 'sample')
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_event = op.join(base_dir, 'sample_audvis_trunc_raw-eve.fif')
fname_fwd = op.join(base_dir, 'sample_audvis_trunc-meg-vol-7-fwd.fif')
fname_mgz = op.join(data_path, 'subjects', 'sample', 'mri', 'aseg.mgz')
reject = dict(grad=4000e-13, mag=4e-12)
@testing.requires_testing_data
def test_check(tmpdir):
"""Test checking functions."""
pytest.raises(ValueError, check_random_state, 'foo')
pytest.raises(TypeError, _check_fname, 1)
_check_fname(Path('./'))
fname = str(tmpdir.join('foo'))
with open(fname, 'wb'):
pass
assert op.isfile(fname)
_check_fname(fname, overwrite='read', must_exist=True)
orig_perms = os.stat(fname).st_mode
os.chmod(fname, 0)
if not sys.platform.startswith('win'):
with pytest.raises(PermissionError, match='read permissions'):
_check_fname(fname, overwrite='read', must_exist=True)
os.chmod(fname, orig_perms)
os.remove(fname)
assert not op.isfile(fname)
pytest.raises(IOError, check_fname, 'foo', 'tets-dip.x', (), ('.fif',))
pytest.raises(ValueError, _check_subject, None, None)
pytest.raises(TypeError, _check_subject, None, 1)
pytest.raises(TypeError, _check_subject, 1, None)
# smoke tests for permitted types
check_random_state(None).choice(1)
check_random_state(0).choice(1)
check_random_state(np.random.RandomState(0)).choice(1)
if check_version('numpy', '1.17'):
check_random_state(np.random.default_rng(0)).choice(1)
# _meg.fif is a valid ending and should not raise an error
new_fname = str(
tmpdir.join(op.basename(fname_raw).replace('_raw.', '_meg.')))
shutil.copyfile(fname_raw, new_fname)
mne.io.read_raw_fif(new_fname)
@requires_mayavi
@traits_test
def test_check_mayavi():
"""Test mayavi version check."""
pytest.raises(RuntimeError, _check_mayavi_version, '100.0.0')
def _get_data():
"""Read in data used in tests."""
# read forward model
forward = mne.read_forward_solution(fname_fwd)
# read data
raw = mne.io.read_raw_fif(fname_raw, preload=True)
events = mne.read_events(fname_event)
event_id, tmin, tmax = 1, -0.1, 0.15
# decimate for speed
left_temporal_channels = mne.read_selection('Left-temporal')
picks = mne.pick_types(raw.info, meg=True,
selection=left_temporal_channels)
picks = picks[::2]
raw.pick_channels([raw.ch_names[ii] for ii in picks])
del picks
raw.info.normalize_proj() # avoid projection warnings
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
baseline=(None, 0.), preload=True, reject=reject)
noise_cov = mne.compute_covariance(epochs, tmin=None, tmax=0.)
data_cov = mne.compute_covariance(epochs, tmin=0.01, tmax=0.15)
return epochs, data_cov, noise_cov, forward
@testing.requires_testing_data
def test_check_info_inv():
"""Test checks for common channels across fwd model and cov matrices."""
epochs, data_cov, noise_cov, forward = _get_data()
# make sure same channel lists exist in data to make testing life easier
assert epochs.info['ch_names'] == data_cov.ch_names
assert epochs.info['ch_names'] == noise_cov.ch_names
# check whether bad channels get excluded from the channel selection
# info
info_bads = epochs.info.copy()
info_bads['bads'] = info_bads['ch_names'][1:3] # include two bad channels
picks = _check_info_inv(info_bads, forward, noise_cov=noise_cov)
assert [1, 2] not in picks
# covariance matrix
data_cov_bads = data_cov.copy()
data_cov_bads['bads'] = data_cov_bads.ch_names[0]
picks = _check_info_inv(epochs.info, forward, data_cov=data_cov_bads)
assert 0 not in picks
# noise covariance matrix
noise_cov_bads = noise_cov.copy()
noise_cov_bads['bads'] = noise_cov_bads.ch_names[1]
picks = _check_info_inv(epochs.info, forward, noise_cov=noise_cov_bads)
assert 1 not in picks
# test whether reference channels get deleted
info_ref = epochs.info.copy()
info_ref['chs'][0]['kind'] = 301 # pretend to have a ref channel
picks = _check_info_inv(info_ref, forward, noise_cov=noise_cov)
assert 0 not in picks
# pick channels in all inputs and make sure common set is returned
epochs.pick_channels([epochs.ch_names[ii] for ii in range(10)])
data_cov = pick_channels_cov(data_cov, include=[data_cov.ch_names[ii]
for ii in range(5, 20)])
noise_cov = pick_channels_cov(noise_cov, include=[noise_cov.ch_names[ii]
for ii in range(7, 12)])
picks = _check_info_inv(epochs.info, forward, noise_cov=noise_cov,
data_cov=data_cov)
assert list(range(7, 10)) == picks
def test_check_option():
"""Test checking the value of a parameter against a list of options."""
allowed_values = ['valid', 'good', 'ok']
# Value is allowed
assert _check_option('option', 'valid', allowed_values)
assert _check_option('option', 'good', allowed_values)
assert _check_option('option', 'ok', allowed_values)
assert _check_option('option', 'valid', ['valid'])
# Check error message for invalid value
msg = ("Invalid value for the 'option' parameter. Allowed values are "
"'valid', 'good', and 'ok', but got 'bad' instead.")
with pytest.raises(ValueError, match=msg):
assert _check_option('option', 'bad', allowed_values)
# Special error message if only one value is allowed
msg = ("Invalid value for the 'option' parameter. The only allowed value "
"is 'valid', but got 'bad' instead.")
with pytest.raises(ValueError, match=msg):
assert _check_option('option', 'bad', ['valid'])
def test_check_path_like():
"""Test _check_path_like()."""
str_path = str(base_dir)
pathlib_path = Path(base_dir)
no_path = dict(foo='bar')
assert _check_path_like(str_path) is True
assert _check_path_like(pathlib_path) is True
assert _check_path_like(no_path) is False
def test_validate_type():
"""Test _validate_type."""
_validate_type(1, 'int-like')
with pytest.raises(TypeError, match='int-like'):
_validate_type(False, 'int-like')
@requires_nibabel()
@testing.requires_testing_data
def test_suggest():
"""Test suggestions."""
names = mne.get_volume_labels_from_aseg(fname_mgz)
sug = _suggest('', names)
assert sug == '' # nothing
sug = _suggest('Left-cerebellum', names)
assert sug == " Did you mean 'Left-Cerebellum-Cortex'?"
sug = _suggest('Cerebellum-Cortex', names)
assert sug == " Did you mean one of ['Left-Cerebellum-Cortex', 'Right-Cerebellum-Cortex', 'Left-Cerebral-Cortex']?" # noqa: E501
def test_on_missing():
"""Test _on_missing."""
msg = 'test'
with pytest.raises(ValueError, match=msg):
_on_missing('raise', msg)
with pytest.warns(RuntimeWarning, match=msg):
_on_missing('warn', msg)
_on_missing('ignore', msg)
with pytest.raises(ValueError,
match='Invalid value for the \'on_missing\' parameter'):
_on_missing('foo', msg)
def _matlab_input(msg):
raise EOFError()
def test_safe_input(monkeypatch):
"""Test _safe_input."""
monkeypatch.setattr(mne.utils.check, 'input', _matlab_input)
with pytest.raises(RuntimeError, match='Could not use input'):
_safe_input('whatever', alt='nothing')
assert _safe_input('whatever', use='nothing') == 'nothing'
|
from collections import Counter
from dataclasses import dataclass
from typing import Any, Callable, Optional, Union
import aioshelly
from homeassistant.const import TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers import device_registry, entity
from . import ShellyDeviceWrapper
from .const import DATA_CONFIG_ENTRY, DOMAIN
def temperature_unit(block_info: dict) -> str:
"""Detect temperature unit."""
if block_info[aioshelly.BLOCK_VALUE_UNIT] == "F":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
def shelly_naming(self, block, entity_type: str):
"""Naming for switch and sensors."""
entity_name = self.wrapper.name
if not block:
return f"{entity_name} {self.description.name}"
channels = 0
mode = block.type + "s"
if "num_outputs" in self.wrapper.device.shelly:
channels = self.wrapper.device.shelly["num_outputs"]
if (
self.wrapper.model in ["SHSW-21", "SHSW-25"]
and self.wrapper.device.settings["mode"] == "roller"
):
channels = 1
if block.type == "emeter" and "num_emeters" in self.wrapper.device.shelly:
channels = self.wrapper.device.shelly["num_emeters"]
if channels > 1 and block.type != "device":
# Shelly EM (SHEM) with firmware v1.8.1 doesn't have "name" key; will be fixed in next firmware release
if "name" in self.wrapper.device.settings[mode][int(block.channel)]:
entity_name = self.wrapper.device.settings[mode][int(block.channel)]["name"]
else:
entity_name = None
if not entity_name:
if self.wrapper.model == "SHEM-3":
base = ord("A")
else:
base = ord("1")
entity_name = f"{self.wrapper.name} channel {chr(int(block.channel)+base)}"
if entity_type == "switch":
return entity_name
if entity_type == "sensor":
return f"{entity_name} {self.description.name}"
raise ValueError
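# Illustrative example of the rules above (device name is hypothetical): for a
# two-channel relay whose channel has no "name" in its settings, the switch
# entity is named "Living Room channel 1" and a sensor entity
# "Living Room channel 1 Power"; a single-channel device simply reuses the
# wrapper name (plus the sensor description for sensors).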
async def async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, sensors, sensor_class
):
"""Set up entities for block attributes."""
wrapper: ShellyDeviceWrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][
config_entry.entry_id
]
blocks = []
for block in wrapper.device.blocks:
for sensor_id in block.sensor_ids:
description = sensors.get((block.type, sensor_id))
if description is None:
continue
# Filter out non-existing sensors and sensors without a value
if getattr(block, sensor_id, None) in (-1, None):
continue
blocks.append((block, sensor_id, description))
if not blocks:
return
counts = Counter([item[1] for item in blocks])
async_add_entities(
[
sensor_class(wrapper, block, sensor_id, description, counts[sensor_id])
for block, sensor_id, description in blocks
]
)
@dataclass
class BlockAttributeDescription:
"""Class to describe a sensor."""
name: str
# Either a fixed unit string or a callable taking the block info dict and returning the unit
unit: Union[None, str, Callable[[dict], str]] = None
value: Callable[[Any], Any] = lambda val: val
device_class: Optional[str] = None
default_enabled: bool = True
available: Optional[Callable[[aioshelly.Block], bool]] = None
device_state_attributes: Optional[
Callable[[aioshelly.Block], Optional[dict]]
] = None
class ShellyBlockEntity(entity.Entity):
"""Helper class to represent a block."""
def __init__(self, wrapper: ShellyDeviceWrapper, block):
"""Initialize Shelly entity."""
self.wrapper = wrapper
self.block = block
self._name = shelly_naming(self, block, "switch")
@property
def name(self):
"""Name of entity."""
return self._name
@property
def should_poll(self):
"""If device should be polled."""
return False
@property
def device_info(self):
"""Device info."""
return {
"connections": {(device_registry.CONNECTION_NETWORK_MAC, self.wrapper.mac)}
}
@property
def available(self):
"""Available."""
return self.wrapper.last_update_success
@property
def unique_id(self):
"""Return unique ID of entity."""
return f"{self.wrapper.mac}-{self.block.description}"
async def async_added_to_hass(self):
"""When entity is added to HASS."""
self.async_on_remove(self.wrapper.async_add_listener(self._update_callback))
async def async_update(self):
"""Update entity with latest info."""
await self.wrapper.async_request_refresh()
@callback
def _update_callback(self):
"""Handle device update."""
self.async_write_ha_state()
class ShellyBlockAttributeEntity(ShellyBlockEntity, entity.Entity):
"""Switch that controls a relay block on Shelly devices."""
def __init__(
self,
wrapper: ShellyDeviceWrapper,
block: aioshelly.Block,
attribute: str,
description: BlockAttributeDescription,
same_type_count: int,
) -> None:
"""Initialize sensor."""
super().__init__(wrapper, block)
self.attribute = attribute
self.description = description
self.info = block.info(attribute)
unit = self.description.unit
if callable(unit):
unit = unit(self.info)
self._unit = unit
self._unique_id = f"{super().unique_id}-{self.attribute}"
self._name = shelly_naming(self, block, "sensor")
@property
def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id
@property
def name(self):
"""Name of sensor."""
return self._name
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if it should be enabled by default."""
return self.description.default_enabled
@property
def attribute_value(self):
"""Value of sensor."""
value = getattr(self.block, self.attribute)
if value is None:
return None
return self.description.value(value)
@property
def unit_of_measurement(self):
"""Return unit of sensor."""
return self._unit
@property
def device_class(self):
"""Device class of sensor."""
return self.description.device_class
@property
def available(self):
"""Available."""
available = super().available
if not available or not self.description.available:
return available
return self.description.available(self.block)
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.description.device_state_attributes is None:
return None
return self.description.device_state_attributes(self.block)
|
from weblate.trans.management.commands import WeblateLangCommand
class Command(WeblateLangCommand):
help = "updates checks for units"
def handle(self, *args, **options):
translations = {}
for unit in self.iterate_units(*args, **options):
unit.run_checks()
if unit.translation.id not in translations:
translations[unit.translation.id] = unit.translation
for translation in translations.values():
translation.invalidate_cache()
|
import hangups
from common import run_example
async def sync_recent_conversations(client, _):
user_list, conversation_list = (
await hangups.build_user_conversation_list(client)
)
all_users = user_list.get_all()
all_conversations = conversation_list.get_all(include_archived=True)
print('{} known users'.format(len(all_users)))
for user in all_users:
print(' {}: {}'.format(user.full_name, user.id_.gaia_id))
print('{} known conversations'.format(len(all_conversations)))
for conversation in all_conversations:
if conversation.name:
name = conversation.name
else:
name = 'Unnamed conversation ({})'.format(conversation.id_)
print(' {}'.format(name))
if __name__ == '__main__':
run_example(sync_recent_conversations)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from powerdns import PowerDNSCollector
##########################################################################
class TestPowerDNSCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PowerDNSCollector', {
'interval': 1,
'bin': 'true',
'use_sudo': False,
})
self.collector = PowerDNSCollector(config, None)
def test_import(self):
self.assertTrue(PowerDNSCollector)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_fake_data(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture(
'pdns_control-2.9.22.6-1.el6-A'
).getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {})
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('pdns_control-2.9.22.6-1.el6-B').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
metrics = {
'corrupt-packets': 1.0,
'deferred-cache-inserts': 2.0,
'deferred-cache-lookup': 3.0,
'latency': 4.0,
'packetcache-hit': 5.0,
'packetcache-miss': 6.0,
'packetcache-size': 7.0,
'qsize-q': 8.0,
'query-cache-hit': 9.0,
'query-cache-miss': 10.0,
'recursing-answers': 11.0,
'recursing-questions': 12.0,
'servfail-packets': 13.0,
'tcp-answers': 14.0,
'tcp-queries': 15.0,
'timedout-packets': 16.0,
'udp-answers': 17.0,
'udp-queries': 18.0,
'udp4-answers': 19.0,
'udp4-queries': 20.0,
'udp6-answers': 21.0,
'udp6-queries': 22.0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import socket
try:
import json
except ImportError:
import simplejson as json
import diamond.collector
from diamond.collector import str_to_bool
DOTS_TO_UNDERS = {ord(u'.'): u'_'}
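# Example: u'mail.example.com'.translate(DOTS_TO_UNDERS) -> u'mail_example_com'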
class PostfixCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(PostfixCollector,
self).get_default_config_help()
config_help.update({
'host': 'Hostname to connect to',
'port': 'Port to connect to',
'include_clients': 'Include client connection stats',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(PostfixCollector, self).get_default_config()
config.update({
'path': 'postfix',
'host': 'localhost',
'port': 7777,
'include_clients': True,
})
return config
def get_json(self):
json_string = ''
address = (self.config['host'], int(self.config['port']))
s = None
try:
try:
s = socket.create_connection(address, timeout=1)
s.sendall('stats\n')
while 1:
data = s.recv(4096)
if not data:
break
json_string += data
except socket.error:
self.log.exception("Error talking to postfix-stats")
return '{}'
finally:
if s:
s.close()
return json_string or '{}'
def get_data(self):
json_string = self.get_json()
try:
data = json.loads(json_string)
except (ValueError, TypeError):
self.log.exception("Error parsing json from postfix-stats")
return None
return data
def collect(self):
data = self.get_data()
if not data:
return
if str_to_bool(self.config['include_clients']) and u'clients' in data:
for client, value in data['clients'].iteritems():
# translate dots to underscores in client names
metric = u'.'.join(['clients',
client.translate(DOTS_TO_UNDERS)])
dvalue = self.derivative(metric, value)
self.publish(metric, dvalue, precision=4)
for action in (u'in', u'recv', u'send'):
if action not in data:
continue
for sect, stats in data[action].iteritems():
for status, value in stats.iteritems():
metric = '.'.join([action,
sect,
status.translate(DOTS_TO_UNDERS)])
dvalue = self.derivative(metric, value)
self.publish(metric, dvalue, precision=4)
if u'local' in data:
for key, value in data[u'local'].iteritems():
metric = '.'.join(['local', key])
dvalue = self.derivative(metric, value)
self.publish(metric, dvalue, precision=4)
|
import numpy as np
from scipy.sparse import csr_matrix, lil_matrix
class CombineDocsIntoDomains(object):
def __init__(self, term_doc_matrix):
'''
Parameters
----------
term_doc_matrix : TermDocMatrix
'''
self.term_doc_matrix = term_doc_matrix
def get_new_term_doc_mat(self, doc_domains, non_text=False):
'''
Combines documents together that are in the same domain
Parameters
----------
doc_domains : array-like
Returns
-------
scipy.sparse.csr_matrix
'''
assert len(doc_domains) == self.term_doc_matrix.get_num_docs()
doc_domain_set = set(doc_domains)
num_terms = self.term_doc_matrix.get_num_metadata() if non_text else self.term_doc_matrix.get_num_terms()
num_domains = len(doc_domain_set)
domain_mat = lil_matrix((num_domains, num_terms), dtype=int)
X = self.term_doc_matrix.get_metadata_doc_mat() if non_text else self.term_doc_matrix.get_term_doc_mat()
for i, domain in enumerate(doc_domain_set):
domain_mat[i, :] = X[np.array(doc_domains == domain)].sum(axis=0)
return domain_mat.tocsr()
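# Usage sketch (illustrative; ``tdm`` stands in for an existing TermDocMatrix
# with three documents):
#
#   import numpy as np
#   domains = np.array(['news', 'blog', 'news'])
#   domain_mat = CombineDocsIntoDomains(tdm).get_new_term_doc_mat(domains)
#   # -> csr_matrix with one row per unique domain, each row being the sum of
#   #    the term counts of that domain's documents.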
|
from datetime import timedelta
import arrow
from cryptography import x509
from flask import current_app
from idna.core import InvalidCodepoint
from sqlalchemy import (
event,
Integer,
ForeignKey,
String,
DefaultClause,
func,
Column,
Text,
Boolean,
Index,
)
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from sqlalchemy.sql.expression import case, extract
from sqlalchemy_utils.types.arrow import ArrowType
from werkzeug.utils import cached_property
from lemur.common import defaults, utils, validators
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.database import db
from lemur.domains.models import Domain
from lemur.extensions import metrics
from lemur.extensions import sentry
from lemur.models import (
certificate_associations,
certificate_source_associations,
certificate_destination_associations,
certificate_notification_associations,
certificate_replacement_associations,
roles_certificates,
pending_cert_replacement_associations,
)
from lemur.plugins.base import plugins
from lemur.policies.models import RotationPolicy
from lemur.utils import Vault
def get_sequence(name):
if "-" not in name:
return name, None
parts = name.split("-")
# see if we have an int at the end of our name
try:
seq = int(parts[-1])
except ValueError:
return name, None
# we might have a date at the end of our name
if len(parts[-1]) == 8:
return name, None
root = "-".join(parts[:-1])
return root, seq
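# How get_sequence splits names (values follow directly from the rules above;
# the names themselves are hypothetical):
#   get_sequence("www.example.com-3")        -> ("www.example.com", 3)
#   get_sequence("www.example.com-ECA45A")   -> ("www.example.com-ECA45A", None)  # not an int
#   get_sequence("www.example.com-20200101") -> ("www.example.com-20200101", None)  # 8-char date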
def get_or_increase_name(name, serial):
certificates = Certificate.query.filter(Certificate.name == name).all()
if not certificates:
return name
serial_name = "{0}-{1}".format(name, hex(int(serial))[2:].upper())
certificates = Certificate.query.filter(Certificate.name == serial_name).all()
if not certificates:
return serial_name
certificates = Certificate.query.filter(
Certificate.name.ilike("{0}%".format(serial_name))
).all()
ends = [0]
root, end = get_sequence(serial_name)
for cert in certificates:
root, end = get_sequence(cert.name)
if end:
ends.append(end)
return "{0}-{1}".format(root, max(ends) + 1)
class Certificate(db.Model):
__tablename__ = "certificates"
__table_args__ = (
Index(
"ix_certificates_cn",
"cn",
postgresql_ops={"cn": "gin_trgm_ops"},
postgresql_using="gin",
),
Index(
"ix_certificates_name",
"name",
postgresql_ops={"name": "gin_trgm_ops"},
postgresql_using="gin",
),
)
id = Column(Integer, primary_key=True)
ix = Index(
"ix_certificates_id_desc", id.desc(), postgresql_using="btree", unique=True
)
external_id = Column(String(128))
owner = Column(String(128), nullable=False)
name = Column(String(256), unique=True)
description = Column(String(1024))
notify = Column(Boolean, default=True)
body = Column(Text(), nullable=False)
chain = Column(Text())
csr = Column(Text())
private_key = Column(Vault)
issuer = Column(String(128))
serial = Column(String(128))
cn = Column(String(128))
deleted = Column(Boolean, index=True, default=False)
dns_provider_id = Column(
Integer(), ForeignKey("dns_providers.id", ondelete="CASCADE"), nullable=True
)
not_before = Column(ArrowType)
not_after = Column(ArrowType)
not_after_ix = Index("ix_certificates_not_after", not_after.desc())
date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)
signing_algorithm = Column(String(128))
status = Column(String(128))
bits = Column(Integer())
san = Column(String(1024)) # TODO this should be migrated to boolean
rotation = Column(Boolean, default=False)
user_id = Column(Integer, ForeignKey("users.id"))
authority_id = Column(Integer, ForeignKey("authorities.id", ondelete="CASCADE"))
root_authority_id = Column(
Integer, ForeignKey("authorities.id", ondelete="CASCADE")
)
rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id"))
key_type = Column(String(128))
notifications = relationship(
"Notification",
secondary=certificate_notification_associations,
backref="certificate",
)
destinations = relationship(
"Destination",
secondary=certificate_destination_associations,
backref="certificate",
)
sources = relationship(
"Source", secondary=certificate_source_associations, backref="certificate"
)
domains = relationship(
"Domain", secondary=certificate_associations, backref="certificate"
)
roles = relationship("Role", secondary=roles_certificates, backref="certificate")
replaces = relationship(
"Certificate",
secondary=certificate_replacement_associations,
primaryjoin=id == certificate_replacement_associations.c.certificate_id, # noqa
secondaryjoin=id
== certificate_replacement_associations.c.replaced_certificate_id, # noqa
backref="replaced",
)
replaced_by_pending = relationship(
"PendingCertificate",
secondary=pending_cert_replacement_associations,
backref="pending_replace",
)
logs = relationship("Log", backref="certificate")
endpoints = relationship("Endpoint", backref="certificate")
rotation_policy = relationship("RotationPolicy")
sensitive_fields = ("private_key",)
def __init__(self, **kwargs):
self.body = kwargs["body"].strip()
cert = self.parsed_cert
self.issuer = defaults.issuer(cert)
self.cn = defaults.common_name(cert)
self.san = defaults.san(cert)
self.not_before = defaults.not_before(cert)
self.not_after = defaults.not_after(cert)
self.serial = defaults.serial(cert)
# when destinations are appended they require a valid name.
if kwargs.get("name"):
self.name = get_or_increase_name(
defaults.text_to_slug(kwargs["name"]), self.serial
)
else:
self.name = get_or_increase_name(
defaults.certificate_name(
self.cn, self.issuer, self.not_before, self.not_after, self.san
),
self.serial,
)
self.owner = kwargs["owner"]
if kwargs.get("private_key"):
self.private_key = kwargs["private_key"].strip()
if kwargs.get("chain"):
self.chain = kwargs["chain"].strip()
if kwargs.get("csr"):
self.csr = kwargs["csr"].strip()
self.notify = kwargs.get("notify", True)
self.destinations = kwargs.get("destinations", [])
self.notifications = kwargs.get("notifications", [])
self.description = kwargs.get("description")
self.roles = list(set(kwargs.get("roles", [])))
self.replaces = kwargs.get("replaces", [])
self.rotation = kwargs.get("rotation")
self.rotation_policy = kwargs.get("rotation_policy")
self.key_type = kwargs.get("key_type")
self.signing_algorithm = defaults.signing_algorithm(cert)
self.bits = defaults.bitstrength(cert)
self.external_id = kwargs.get("external_id")
self.authority_id = kwargs.get("authority_id")
self.dns_provider_id = kwargs.get("dns_provider_id")
for domain in defaults.domains(cert):
self.domains.append(Domain(name=domain))
# Check integrity before saving anything into the database.
# For user-facing API calls, validation should also be done in schema validators.
self.check_integrity()
def check_integrity(self):
"""
Integrity checks: Does the cert have a valid chain and matching private key?
"""
if self.private_key:
validators.verify_private_key_match(
utils.parse_private_key(self.private_key),
self.parsed_cert,
error_class=AssertionError,
)
if self.chain:
chain = [self.parsed_cert] + utils.parse_cert_chain(self.chain)
validators.verify_cert_chain(chain, error_class=AssertionError)
@cached_property
def parsed_cert(self):
assert self.body, "Certificate body not set"
return utils.parse_certificate(self.body)
@property
def active(self):
return self.notify
@property
def organization(self):
return defaults.organization(self.parsed_cert)
@property
def organizational_unit(self):
return defaults.organizational_unit(self.parsed_cert)
@property
def country(self):
return defaults.country(self.parsed_cert)
@property
def state(self):
return defaults.state(self.parsed_cert)
@property
def location(self):
return defaults.location(self.parsed_cert)
@property
def distinguished_name(self):
return self.parsed_cert.subject.rfc4514_string()
"""
# This property is commented out because key_type is now stored as a column. This code can be removed in the future.
@property
def key_type(self):
if isinstance(self.parsed_cert.public_key(), rsa.RSAPublicKey):
return "RSA{key_size}".format(
key_size=self.parsed_cert.public_key().key_size
)
elif isinstance(self.parsed_cert.public_key(), ec.EllipticCurvePublicKey):
return get_key_type_from_ec_curve(self.parsed_cert.public_key().curve.name)
"""
@property
def validity_remaining(self):
return abs(self.not_after - arrow.utcnow())
@property
def validity_range(self):
return self.not_after - self.not_before
@property
def subject(self):
return self.parsed_cert.subject
@property
def public_key(self):
return self.parsed_cert.public_key()
@hybrid_property
def expired(self):
# can't compare offset-naive and offset-aware datetimes
if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow():
return True
@expired.expression
def expired(cls):
return case([(cls.not_after <= arrow.utcnow(), True)], else_=False)
@hybrid_property
def revoked(self):
if "revoked" == self.status:
return True
@revoked.expression
def revoked(cls):
return case([(cls.status == "revoked", True)], else_=False)
@hybrid_property
def has_private_key(self):
return self.private_key is not None
@has_private_key.expression
def has_private_key(cls):
return case([(cls.private_key.is_(None), True)], else_=False)
@hybrid_property
def in_rotation_window(self):
"""
Determines if a certificate is available for rotation based
on the rotation policy associated.
:return:
"""
now = arrow.utcnow()
end = now + timedelta(days=self.rotation_policy.days)
if self.not_after <= end:
return True
@in_rotation_window.expression
def in_rotation_window(cls):
"""
Determines if a certificate is available for rotation based
on the rotation policy associated.
:return:
"""
return case(
[(extract("day", cls.not_after - func.now()) <= RotationPolicy.days, True)],
else_=False,
)
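# Example of the window above: with a rotation policy of 30 days, a certificate
# whose not_after falls within the next 30 days (or has already passed) is
# considered to be in its rotation window.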
@property
def extensions(self):
# setup default values
return_extensions = {"sub_alt_names": {"names": []}}
try:
for extension in self.parsed_cert.extensions:
value = extension.value
if isinstance(value, x509.BasicConstraints):
return_extensions["basic_constraints"] = value
elif isinstance(value, x509.SubjectAlternativeName):
return_extensions["sub_alt_names"]["names"] = value
elif isinstance(value, x509.ExtendedKeyUsage):
return_extensions["extended_key_usage"] = value
elif isinstance(value, x509.KeyUsage):
return_extensions["key_usage"] = value
elif isinstance(value, x509.SubjectKeyIdentifier):
return_extensions["subject_key_identifier"] = {"include_ski": True}
elif isinstance(value, x509.AuthorityInformationAccess):
return_extensions["certificate_info_access"] = {"include_aia": True}
elif isinstance(value, x509.AuthorityKeyIdentifier):
aki = {"use_key_identifier": False, "use_authority_cert": False}
if value.key_identifier:
aki["use_key_identifier"] = True
if value.authority_cert_issuer:
aki["use_authority_cert"] = True
return_extensions["authority_key_identifier"] = aki
elif isinstance(value, x509.CRLDistributionPoints):
return_extensions["crl_distribution_points"] = {
"include_crl_dp": value
}
# TODO: Not supporting custom OIDs yet. https://github.com/Netflix/lemur/issues/665
else:
current_app.logger.warning(
"Custom OIDs not yet supported for clone operation."
)
except InvalidCodepoint as e:
sentry.captureException()
current_app.logger.warning(
"Unable to parse extensions due to underscore in dns name"
)
except ValueError as e:
sentry.captureException()
current_app.logger.warning("Unable to parse")
current_app.logger.exception(e)
return return_extensions
def __repr__(self):
return "Certificate(name={name})".format(name=self.name)
@event.listens_for(Certificate.destinations, "append")
def update_destinations(target, value, initiator):
"""
Attempt to upload certificate to the new destination
:param target:
:param value:
:param initiator:
:return:
"""
destination_plugin = plugins.get(value.plugin_name)
status = FAILURE_METRIC_STATUS
if target.expired:
return
try:
if target.private_key or not destination_plugin.requires_key:
destination_plugin.upload(
target.name,
target.body,
target.private_key,
target.chain,
value.options,
)
status = SUCCESS_METRIC_STATUS
except Exception as e:
sentry.captureException()
raise
metrics.send(
"destination_upload",
"counter",
1,
metric_tags={
"status": status,
"certificate": target.name,
"destination": value.label,
},
)
@event.listens_for(Certificate.replaces, "append")
def update_replacement(target, value, initiator):
"""
When a certificate is marked as 'replaced' we should not notify.
:param target:
:param value:
:param initiator:
:return:
"""
value.notify = False
|
from datetime import datetime, timedelta
import os
import time
import pytest
import requests
from vcr import VCR
from subliminal import __short_version__
from subliminal.video import Episode
from subliminal.refiners.tvdb import TVDBClient, refine, series_re
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
cassette_library_dir=os.path.realpath(os.path.join('tests', 'cassettes', 'tvdb')))
@pytest.fixture()
def client():
return TVDBClient('2AE5D1E42E7194B9', headers={'User-Agent': 'Subliminal/%s' % __short_version__})
def test_series_re_no_year():
groups = series_re.match('Series Name').groupdict()
assert groups['series'] == 'Series Name'
assert groups['year'] is None
def test_series_re_year_parenthesis():
groups = series_re.match('Series Name (2013)').groupdict()
assert groups['series'] == 'Series Name'
assert groups['year'] == '2013'
assert groups['country'] is None
def test_series_re_text_parenthesis():
groups = series_re.match('Series Name (Rock)').groupdict()
assert groups['series'] == 'Series Name (Rock)'
assert groups['year'] is None
assert groups['country'] is None
def test_series_re_text_unclosed_parenthesis():
groups = series_re.match('Series Name (2013').groupdict()
assert groups['series'] == 'Series Name (2013'
assert groups['year'] is None
assert groups['country'] is None
def test_series_re_country():
groups = series_re.match('Series Name (UK)').groupdict()
assert groups['series'] == 'Series Name'
assert groups['year'] is None
assert groups['country'] == 'UK'
def test_language():
client = TVDBClient()
assert 'Accept-Language' in client.session.headers
assert client.session.headers['Accept-Language'] == 'en'
assert client.language == 'en'
client.language = 'fr'
assert client.session.headers['Accept-Language'] == 'fr'
assert client.language == 'fr'
def test_session():
session = requests.Session()
client = TVDBClient(session=session)
assert client.session is session
def test_headers():
client = TVDBClient(headers={'X-Test': 'Value'})
assert 'X-Test' in client.session.headers
assert client.session.headers['X-Test'] == 'Value'
@pytest.mark.integration
@vcr.use_cassette
def test_login_error():
client = TVDBClient('1234', headers={'User-Agent': 'Subliminal/%s' % __short_version__})
with pytest.raises(requests.HTTPError):
client.login()
@pytest.mark.integration
@vcr.use_cassette
def test_login(client):
assert client.token is None
assert client.token_date <= datetime.utcnow() - timedelta(hours=1)
assert client.token_expired
client.login()
assert client.token is not None
assert client.token_date > datetime.utcnow() - timedelta(seconds=1)
assert client.token_expired is False
@pytest.mark.integration
@vcr.use_cassette
def test_token_needs_refresh(client, monkeypatch):
monkeypatch.setattr(client, 'refresh_token_every', timedelta(milliseconds=100))
assert client.token_needs_refresh
client.login()
assert not client.token_needs_refresh
time.sleep(0.5)
assert client.token_needs_refresh
@pytest.mark.integration
@vcr.use_cassette
def test_refresh_token(client):
client.login()
old_token = client.token
time.sleep(0.5)
client.refresh_token()
assert client.token != old_token
@pytest.mark.integration
@vcr.use_cassette
def test_search_series(client):
data = client.search_series('The Big Bang Theory')
assert len(data) == 1
series = data[0]
assert series['id'] == 80379
assert series['firstAired'] == '2007-09-24'
@pytest.mark.integration
@vcr.use_cassette
def test_search_series_wrong_name(client):
data = client.search_series('The Bing Bag Theory')
assert data is None
@pytest.mark.integration
@vcr.use_cassette
def test_search_series_no_parameter(client):
with pytest.raises(requests.HTTPError):
client.search_series()
@pytest.mark.integration
@vcr.use_cassette
def test_search_series_multiple_parameters(client):
with pytest.raises(requests.HTTPError):
client.search_series('The Big Bang Theory', 'tt0898266')
@pytest.mark.integration
@vcr.use_cassette
def test_get_series(client):
series = client.get_series(80379)
assert series['id'] == 80379
assert series['firstAired'] == '2007-09-24'
assert series['imdbId'] == 'tt0898266'
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_wrong_id(client):
series = client.get_series(999999999)
assert series is None
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_actors(client):
actors = client.get_series_actors(80379)
assert len(actors) == 8
assert 'Jim Parsons' in {a['name'] for a in actors}
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_actors_wrong_id(client):
actors = client.get_series_actors(999999999)
assert actors is None
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_episodes(client):
episodes_data = client.get_series_episodes(80379)
assert episodes_data['links']['first'] == 1
assert episodes_data['links']['last'] == 3
assert episodes_data['links']['next'] == 2
assert episodes_data['links']['prev'] is None
assert len(episodes_data['data']) == 100
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_episodes_page(client):
episodes_data = client.get_series_episodes(80379, page=2)
assert episodes_data['links']['first'] == 1
assert episodes_data['links']['last'] == 3
assert episodes_data['links']['next'] == 3
assert episodes_data['links']['prev'] == 1
assert len(episodes_data['data']) == 100
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_episodes_wrong_id(client):
episodes_data = client.get_series_episodes(999999999)
assert episodes_data is None
@pytest.mark.integration
@vcr.use_cassette
def test_get_series_episodes_wrong_page(client):
episodes_data = client.get_series_episodes(80379, page=10)
assert episodes_data is None
@pytest.mark.integration
@vcr.use_cassette
def test_query_series_episodes(client):
episodes_data = client.query_series_episodes(80379, aired_season=7, aired_episode=5)
assert episodes_data['links']['first'] == 1
assert episodes_data['links']['last'] == 1
assert episodes_data['links']['next'] is None
assert episodes_data['links']['prev'] is None
assert len(episodes_data['data']) == 1
assert episodes_data['data'][0]['episodeName'] == 'The Workplace Proximity'
@pytest.mark.integration
@vcr.use_cassette
def test_query_series_episodes_wrong_season(client):
episodes_data = client.query_series_episodes(80379, aired_season=99)
assert episodes_data is None
@pytest.mark.integration
@vcr.use_cassette
def test_refine(episodes):
video = episodes['bbt_s07e05']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_partial(episodes):
video = episodes['csi_s15e18']
episode = Episode(video.name.lower(), video.series.lower().split(':')[0], video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_ambiguous(episodes):
video = episodes['colony_s01e09']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_ambiguous_2(episodes):
video = episodes['the_100_s03e09']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_year(episodes):
video = episodes['dallas_2012_s01e03']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode, year=video.year,
original_series=video.original_series)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_no_year(episodes):
video = episodes['dallas_s01e03']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_alternative_series(episodes):
video = episodes['turn_s04e03']
episode = Episode(video.name.lower(), video.series.lower(), video.season, video.episode)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
assert episode.alternative_series == video.alternative_series
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_with_comma(episodes):
video = episodes['alex_inc_s01e04']
episode = Episode.fromname(video.name)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
assert episode.alternative_series == video.alternative_series
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_with_country(episodes):
video = episodes['shameless_us_s08e01']
episode = Episode.fromname(video.name)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
assert episode.alternative_series == video.alternative_series
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_with_country_hoc_us(episodes):
video = episodes['house_of_cards_us_s06e01']
episode = Episode.fromname(video.name)
refine(episode)
assert episode.series == video.series
assert episode.year == video.year
assert episode.original_series == video.original_series
assert episode.title == video.title
assert episode.imdb_id == video.imdb_id
assert episode.series_imdb_id == video.series_imdb_id
assert episode.tvdb_id == video.tvdb_id
assert episode.series_tvdb_id == video.series_tvdb_id
assert episode.alternative_series == video.alternative_series
|
from copy import deepcopy
import numpy as np
from scipy import linalg
from .. import pick_channels_forward, EvokedArray, SourceEstimate
from ..io.constants import FIFF
from ..utils import logger, verbose
from ..forward.forward import convert_forward_solution
from ..minimum_norm import apply_inverse
from ..source_estimate import _prepare_label_extraction
from ..label import Label
@verbose
def make_inverse_resolution_matrix(forward, inverse_operator, method='dSPM',
lambda2=1. / 9., verbose=None):
"""Compute resolution matrix for linear inverse operator.
Parameters
----------
forward : instance of Forward
Forward Operator.
inverse_operator : instance of InverseOperator
Inverse operator.
method : 'MNE' | 'dSPM' | 'sLORETA'
Inverse method to use (MNE, dSPM, sLORETA).
lambda2 : float
The regularisation parameter.
%(verbose)s
Returns
-------
    resmat : array, shape (n_orient_inv * n_dipoles, n_orient_fwd * n_dipoles)
Resolution matrix (inverse operator times forward operator).
The result of applying the inverse operator to the forward operator.
If source orientations are not fixed, all source components will be
computed (i.e. for n_orient_inv > 1 or n_orient_fwd > 1).
The columns of the resolution matrix are the point-spread functions
(PSFs) and the rows are the cross-talk functions (CTFs).
"""
# make sure forward and inverse operator match
inv = inverse_operator
fwd = _convert_forward_match_inv(forward, inv)
# don't include bad channels
# only use good channels from inverse operator
bads_inv = inv['info']['bads']
# good channels
ch_names = [c for c in inv['info']['ch_names'] if (c not in bads_inv)]
fwd = pick_channels_forward(fwd, ch_names, ordered=True)
# get leadfield matrix from forward solution
leadfield = fwd['sol']['data']
invmat = _get_matrix_from_inverse_operator(inv, fwd,
method=method, lambda2=lambda2)
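    # resolution matrix: inverse matrix applied to the leadfield (R = M @ L)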
resmat = invmat.dot(leadfield)
logger.info('Dimensions of resolution matrix: %d by %d.' % resmat.shape)
return resmat
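# Illustrative usage sketch, not part of the original module. It assumes `fwd`
# and `inv` are a matching Forward and InverseOperator loaded elsewhere, and
# that inv['src'] is the source space the resolution matrix refers to:
#
#     >>> resmat = make_inverse_resolution_matrix(fwd, inv, method='dSPM',
#     ...                                          lambda2=1. / 9.)
#     >>> stc_psf = get_point_spread(resmat, inv['src'], idx=[0], norm='max')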
@verbose
def _get_psf_ctf(resmat, src, idx, func, mode, n_comp, norm, return_pca_vars,
verbose=None):
"""Get point-spread (PSFs) or cross-talk (CTFs) functions.
Parameters
----------
    resmat : array, shape (n_dipoles, n_dipoles)
        The resolution matrix.
    src : instance of SourceSpaces
        Source space used to compute the resolution matrix.
%(pctf_idx)s
func : str ('psf' | 'ctf')
Whether to produce PSFs or CTFs. Defaults to psf.
%(pctf_mode)s
%(pctf_n_comp)s
%(pctf_norm)s
%(pctf_return_pca_vars)s
%(verbose)s
Returns
-------
%(pctf_stcs)s
%(pctf_pca_vars)s
"""
# check for consistencies in input parameters
_check_get_psf_ctf_params(mode, n_comp, return_pca_vars)
# backward compatibility
if norm is True:
norm = 'max'
# get relevant vertices in source space
verts_all = _vertices_for_get_psf_ctf(idx, src)
# vertices used in forward and inverse operator
vertno_lh = src[0]['vertno']
vertno_rh = src[1]['vertno']
vertno = [vertno_lh, vertno_rh]
# the following will operate on columns of funcs
if func == 'ctf':
resmat = resmat.T
# Functions and variances per label
stcs = []
pca_vars = []
for verts in verts_all:
# get relevant PSFs or CTFs for specified vertices
funcs = resmat[:, verts]
# normalise PSFs/CTFs if requested
if norm is not None:
funcs = _normalise_psf_ctf(funcs, norm)
# summarise PSFs/CTFs across vertices if requested
pca_var = None # variances computed only if return_pca_vars=True
if mode is not None:
funcs, pca_var = _summarise_psf_ctf(funcs, mode, n_comp,
return_pca_vars)
# convert to source estimate
stc = SourceEstimate(funcs, vertno, tmin=0., tstep=1.)
stcs.append(stc)
pca_vars.append(pca_var)
# if just one list or label specified, simplify output
if len(stcs) == 1:
stcs = stc
if len(pca_vars) == 1:
pca_vars = pca_var
if pca_var is not None:
return stcs, pca_vars
else:
return stcs
def _check_get_psf_ctf_params(mode, n_comp, return_pca_vars):
"""Check input parameters of _get_psf_ctf() for consistency."""
if mode in [None, 'sum', 'mean'] and n_comp > 1:
msg = 'n_comp must be 1 for mode=%s.' % mode
raise ValueError(msg)
if mode != 'pca' and return_pca_vars:
        msg = "SVD variances can only be returned if mode='pca'."
raise ValueError(msg)
def _vertices_for_get_psf_ctf(idx, src):
"""Get vertices in source space for PSFs/CTFs in _get_psf_ctf()."""
# idx must be list
# if label(s) specified get the indices, otherwise just carry on
if type(idx[0]) is Label:
# specify without source time courses, gets indices per label
verts_labs, _ = _prepare_label_extraction(
stc=None, labels=idx, src=src, mode='mean', allow_empty=False,
use_sparse=False)
# verts_labs can be list of lists
# concatenate indices per label across hemispheres
# one list item per label
verts = []
for v in verts_labs:
# if two hemispheres present
if type(v) is list:
# indices for both hemispheres in one list
this_verts = np.concatenate((v[0], v[1]))
else:
this_verts = np.array(v)
verts.append(this_verts)
# check if list of list or just list
else:
if type(idx[0]) is list: # if list of list of integers
verts = idx
else: # if list of integers
verts = [idx]
return verts
def _normalise_psf_ctf(funcs, norm):
"""Normalise PSFs/CTFs in _get_psf_ctf()."""
# normalise PSFs/CTFs if specified
if norm == 'max':
maxval = max(-funcs.min(), funcs.max())
funcs = funcs / maxval
elif norm == 'norm': # normalise to maximum norm across columns
norms = np.linalg.norm(funcs, axis=0)
funcs = funcs / norms.max()
return funcs
def _summarise_psf_ctf(funcs, mode, n_comp, return_pca_vars):
"""Summarise PSFs/CTFs across vertices."""
s_var = None # only computed for return_pca_vars=True
if mode == 'maxval': # pick PSF/CTF with maximum absolute value
absvals = np.maximum(-np.min(funcs, axis=0), np.max(funcs, axis=0))
if n_comp > 1: # only keep requested number of sorted PSFs/CTFs
sortidx = np.argsort(absvals)
maxidx = sortidx[-n_comp:]
else: # faster if only one required
maxidx = absvals.argmax()
funcs = funcs[:, maxidx]
elif mode == 'maxnorm': # pick PSF/CTF with maximum norm
norms = np.linalg.norm(funcs, axis=0)
if n_comp > 1: # only keep requested number of sorted PSFs/CTFs
sortidx = np.argsort(norms)
maxidx = sortidx[-n_comp:]
else: # faster if only one required
maxidx = norms.argmax()
funcs = funcs[:, maxidx]
elif mode == 'sum': # sum across PSFs/CTFs
funcs = np.sum(funcs, axis=1)
elif mode == 'mean': # mean of PSFs/CTFs
funcs = np.mean(funcs, axis=1)
elif mode == 'pca': # SVD across PSFs/CTFs
# compute SVD of PSFs/CTFs across vertices
u, s, _ = linalg.svd(funcs, full_matrices=False, compute_uv=True)
funcs = u[:, :n_comp]
# if explained variances for SVD components requested
if return_pca_vars:
# explained variance of individual SVD components
s2 = s * s
s_var = 100 * s2[:n_comp] / s2.sum()
return funcs, s_var
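# Minimal numpy sketch (not from the original source) of the explained-variance
# computation in the 'pca' branch above: each component's variance is its
# squared singular value as a percentage of the total (values are made up):
#
#     >>> import numpy as np
#     >>> s = np.array([3., 2., 1.])
#     >>> 100 * s ** 2 / (s ** 2).sum()
#     array([64.28571429, 28.57142857,  7.14285714])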
@verbose
def get_point_spread(resmat, src, idx, mode=None, n_comp=1, norm=False,
return_pca_vars=False, verbose=None):
"""Get point-spread (PSFs) functions for vertices.
Parameters
----------
resmat : array, shape (n_dipoles, n_dipoles)
        The resolution matrix.
src : instance of SourceSpaces
Source space used to compute resolution matrix.
%(pctf_idx)s
%(pctf_mode)s
%(pctf_n_comp)s
%(pctf_norm)s
%(pctf_return_pca_vars)s
%(verbose)s
Returns
-------
%(pctf_stcs)s
%(pctf_pca_vars)s
"""
return _get_psf_ctf(resmat, src, idx, func='psf', mode=mode, n_comp=n_comp,
norm=norm, return_pca_vars=return_pca_vars)
@verbose
def get_cross_talk(resmat, src, idx, mode=None, n_comp=1, norm=False,
return_pca_vars=False, verbose=None):
"""Get cross-talk (CTFs) function for vertices.
Parameters
----------
resmat : array, shape (n_dipoles, n_dipoles)
        The resolution matrix.
src : instance of SourceSpaces
Source space used to compute resolution matrix.
%(pctf_idx)s
%(pctf_mode)s
%(pctf_n_comp)s
%(pctf_norm)s
%(pctf_return_pca_vars)s
%(verbose)s
Returns
-------
%(pctf_stcs)s
%(pctf_pca_vars)s
"""
return _get_psf_ctf(resmat, src, idx, func='ctf', mode=mode, n_comp=n_comp,
norm=norm, return_pca_vars=return_pca_vars)
def _convert_forward_match_inv(fwd, inv):
"""Ensure forward and inverse operators match.
Inverse operator and forward operator must have same surface orientations,
but can have different source orientation constraints.
"""
# did inverse operator use fixed orientation?
is_fixed_inv = _check_fixed_ori(inv)
# did forward operator use fixed orientation?
is_fixed_fwd = _check_fixed_ori(fwd)
# if inv or fwd fixed: do nothing
# if inv loose: surf_ori must be True
# if inv free: surf_ori must be False
if not is_fixed_inv and not is_fixed_fwd:
is_loose_inv = not (inv['orient_prior']['data'] == 1.).all()
if is_loose_inv:
if not fwd['surf_ori']:
fwd = convert_forward_solution(fwd, surf_ori=True)
elif fwd['surf_ori']: # free orientation, change fwd
fwd = convert_forward_solution(fwd, surf_ori=False)
return fwd
def _prepare_info(inverse_operator):
"""Get a usable dict."""
# in order to convert sub-leadfield matrix to evoked data type (pretending
# it's an epoch, see in loop below), uses 'info' from inverse solution
# because this has all the correct projector information
info = deepcopy(inverse_operator['info'])
    info['sfreq'] = 1000.  # dummy sampling frequency, required by EvokedArray
info['projs'] = inverse_operator['projs']
return info
def _get_matrix_from_inverse_operator(inverse_operator, forward, method='dSPM',
lambda2=1. / 9.):
"""Get inverse matrix from an inverse operator.
    Currently works only for fixed/loose orientation constraints.
    For loose orientation constraints, the inverse matrix is computed for all
    source components (pick_ori='vector') and reshaped to a 2D array afterwards.
Parameters
----------
inverse_operator : instance of InverseOperator
The inverse operator.
forward : instance of Forward
The forward operator.
method : 'MNE' | 'dSPM' | 'sLORETA'
Inverse methods (for apply_inverse).
lambda2 : float
The regularization parameter (for apply_inverse).
Returns
-------
invmat : array, shape (n_dipoles, n_channels)
Inverse matrix associated with inverse operator and specified
parameters.
"""
# make sure forward and inverse operators match with respect to
# surface orientation
_convert_forward_match_inv(forward, inverse_operator)
info_inv = _prepare_info(inverse_operator)
# only use channels that are good for inverse operator and forward sol
ch_names_inv = info_inv['ch_names']
n_chs_inv = len(ch_names_inv)
bads_inv = inverse_operator['info']['bads']
# indices of bad channels
ch_idx_bads = [ch_names_inv.index(ch) for ch in bads_inv]
# create identity matrix as input for inverse operator
# set elements to zero for non-selected channels
id_mat = np.eye(n_chs_inv)
# convert identity matrix to evoked data type (pretending it's an epoch)
ev_id = EvokedArray(id_mat, info=info_inv, tmin=0.)
# apply inverse operator to identity matrix in order to get inverse matrix
# free orientation constraint not possible because apply_inverse would
# combine components
# check if inverse operator uses fixed source orientations
is_fixed_inv = _check_fixed_ori(inverse_operator)
# choose pick_ori according to inverse operator
if is_fixed_inv:
pick_ori = None
else:
pick_ori = 'vector'
# columns for bad channels will be zero
invmat_op = apply_inverse(ev_id, inverse_operator, lambda2=lambda2,
method=method, pick_ori=pick_ori)
# turn source estimate into numpy array
invmat = invmat_op.data
# remove columns for bad channels
# take into account it may be 3D array
invmat = np.delete(invmat, ch_idx_bads, axis=invmat.ndim - 1)
# if 3D array, i.e. multiple values per location (fixed and loose),
# reshape into 2D array
if invmat.ndim == 3:
v0o1 = invmat[0, 1].copy()
v3o2 = invmat[3, 2].copy()
shape = invmat.shape
invmat = invmat.reshape(shape[0] * shape[1], shape[2])
# make sure that reshaping worked
assert np.array_equal(v0o1, invmat[1])
assert np.array_equal(v3o2, invmat[11])
logger.info("Dimension of Inverse Matrix: %s" % str(invmat.shape))
return invmat
def _check_fixed_ori(inst):
"""Check if inverse or forward was computed for fixed orientations."""
is_fixed = inst['source_ori'] != FIFF.FIFFV_MNE_FREE_ORI
return is_fixed
|
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import callback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_BYTES_RECEIVED,
ATTR_BYTES_SENT,
ATTR_SERVER_COUNTRY,
ATTR_SERVER_ID,
ATTR_SERVER_NAME,
ATTRIBUTION,
DEFAULT_NAME,
DOMAIN,
ICON,
SENSOR_TYPES,
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Speedtestdotnet sensors."""
speedtest_coordinator = hass.data[DOMAIN]
entities = []
for sensor_type in SENSOR_TYPES:
entities.append(SpeedtestSensor(speedtest_coordinator, sensor_type))
async_add_entities(entities)
class SpeedtestSensor(CoordinatorEntity, RestoreEntity):
"""Implementation of a speedtest.net sensor."""
def __init__(self, coordinator, sensor_type):
"""Initialize the sensor."""
super().__init__(coordinator)
self._name = SENSOR_TYPES[sensor_type][0]
self.type = sensor_type
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{DEFAULT_NAME} {self._name}"
@property
def unique_id(self):
"""Return sensor unique_id."""
return self.type
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Return icon."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes."""
if not self.coordinator.data:
return None
attributes = {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_SERVER_NAME: self.coordinator.data["server"]["name"],
ATTR_SERVER_COUNTRY: self.coordinator.data["server"]["country"],
ATTR_SERVER_ID: self.coordinator.data["server"]["id"],
}
if self.type == "download":
attributes[ATTR_BYTES_RECEIVED] = self.coordinator.data["bytes_received"]
if self.type == "upload":
attributes[ATTR_BYTES_SENT] = self.coordinator.data["bytes_sent"]
return attributes
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state:
self._state = state.state
@callback
def update():
"""Update state."""
self._update_state()
self.async_write_ha_state()
self.async_on_remove(self.coordinator.async_add_listener(update))
self._update_state()
def _update_state(self):
"""Update sensors state."""
if self.coordinator.data:
if self.type == "ping":
self._state = self.coordinator.data["ping"]
elif self.type == "download":
self._state = round(self.coordinator.data["download"] / 10 ** 6, 2)
elif self.type == "upload":
self._state = round(self.coordinator.data["upload"] / 10 ** 6, 2)
|
import logging
from vine.utils import wraps
from kombu.log import get_logger
__all__ = ('setup_logging', 'Logwrapped')
def setup_logging(loglevel=logging.DEBUG, loggers=None):
"""Setup logging to stdout."""
loggers = ['kombu.connection', 'kombu.channel'] if not loggers else loggers
for logger_name in loggers:
logger = get_logger(logger_name)
logger.addHandler(logging.StreamHandler())
logger.setLevel(loglevel)
class Logwrapped:
"""Wrap all object methods, to log on call."""
__ignore = ('__enter__', '__exit__')
def __init__(self, instance, logger=None, ident=None):
self.instance = instance
self.logger = get_logger(logger)
self.ident = ident
def __getattr__(self, key):
meth = getattr(self.instance, key)
if not callable(meth) or key in self.__ignore:
return meth
@wraps(meth)
def __wrapped(*args, **kwargs):
info = ''
if self.ident:
info += self.ident.format(self.instance)
info += f'{meth.__name__}('
if args:
info += ', '.join(map(repr, args))
if kwargs:
if args:
info += ', '
info += ', '.join(f'{key}={value!r}'
for key, value in kwargs.items())
info += ')'
self.logger.debug(info)
return meth(*args, **kwargs)
return __wrapped
def __repr__(self):
return repr(self.instance)
def __dir__(self):
return dir(self.instance)
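# Illustrative usage sketch, not part of the original module; the Connection
# object and ident string below are assumptions for the example:
#
#     >>> from kombu import Connection
#     >>> wrapped = Logwrapped(Connection('memory://'),
#     ...                      logger='kombu.connection',
#     ...                      ident='[conn: {0}] ')
#     >>> wrapped.connect()   # the call is logged at DEBUG, then executed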
|
from unittest import TestCase
import numpy as np
from scattertext.termscoring.CornerScore import CornerScore
class TestCornerScore(TestCase):
def test_get_scores(self):
cat_counts, not_cat_counts = self._get_counts()
scores = CornerScore.get_scores(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(scores,
np.array([0.1820027, 0.2828427, 0.1820027, 0.5, 0.9292893,
0.2378287, 0.7930882, 0.1845603, 0.1845603, 0.8725245]))
def test_get_scores_for_category(self):
cat_counts, not_cat_counts = self._get_counts()
scores = CornerScore.get_scores_for_category(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(scores,
np.array([0.9300538, 1.0198039, 0.9300538, 0.9055385, 0.2,
0.7433034, 0.585235, 0.9861541, 0.9861541, 0.3605551]))
def test_get_scores_zero_all_same(self):
cat_counts = np.array([0, 0, 0, 0, 0, 0, 1, 2])
not_cat_counts = np.array([1, 1, 2, 1, 1, 1, 1, 2])
scores = CornerScore.get_scores(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(scores,
np.array([0.5, 0.5, 0.15625, 0.5, 0.5,
0.5, 0.8391308, 0.6685437]))
def test_get_scores_zero_median(self):
cat_counts = np.array([0, 0, 0, 0, 0, 0, 1, 2])
not_cat_counts = np.array([1, 1, 2, 1, 1, 1, 1, 3])
CornerScore.get_scores(cat_counts, not_cat_counts)
def get_scores_for_category(self):
cat_counts, not_cat_counts = self._get_counts()
scores = CornerScore.get_scores_for_category(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(scores,
np.array([0.9300538, 1.0198039,
0.9300538, 0.9055385, 0.2,
0.7433034, 0.585235, 0.9861541,
0.9861541, 0.3605551]))
def _get_counts(self):
cat_counts = np.array([1, 5, 1, 9, 100, 1, 1, 0, 0, 2])
not_cat_counts = np.array([100, 510, 100, 199, 0, 1, 0, 1, 1, 0])
return cat_counts, not_cat_counts
|
from collections import Counter
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class UseFullDocAsFeature(FeatsFromSpacyDoc):
def get_feats(self, doc):
'''
Parameters
----------
        doc : spacy.tokens.Doc
Returns
-------
Counter str -> count
'''
return Counter({str(doc):1})
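# Behaviour sketch (not in the original source): for a parsed document whose
# text is "the cat sat", get_feats returns Counter({'the cat sat': 1}), i.e.
# the whole document text becomes the single feature.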
|
from aiohttp import WSMsgType
import voluptuous as vol
from homeassistant.components.websocket_api import const, messages
from tests.async_mock import Mock, patch
async def test_invalid_message_format(websocket_client):
"""Test sending invalid JSON."""
await websocket_client.send_json({"type": 5})
msg = await websocket_client.receive_json()
assert msg["type"] == const.TYPE_RESULT
error = msg["error"]
assert error["code"] == const.ERR_INVALID_FORMAT
assert error["message"].startswith("Message incorrectly formatted")
async def test_invalid_json(websocket_client):
"""Test sending invalid JSON."""
await websocket_client.send_str("this is not JSON")
msg = await websocket_client.receive()
    assert msg.type == WSMsgType.CLOSE
async def test_quitting_hass(hass, websocket_client):
    """Test the websocket connection is closed when Home Assistant stops."""
with patch.object(hass.loop, "stop"):
await hass.async_stop()
msg = await websocket_client.receive()
assert msg.type == WSMsgType.CLOSE
async def test_unknown_command(websocket_client):
"""Test get_panels command."""
await websocket_client.send_json({"id": 5, "type": "unknown_command"})
msg = await websocket_client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == const.ERR_UNKNOWN_COMMAND
async def test_handler_failing(hass, websocket_client):
"""Test a command that raises."""
hass.components.websocket_api.async_register_command(
"bla",
Mock(side_effect=TypeError),
messages.BASE_COMMAND_MESSAGE_SCHEMA.extend({"type": "bla"}),
)
await websocket_client.send_json({"id": 5, "type": "bla"})
msg = await websocket_client.receive_json()
assert msg["id"] == 5
assert msg["type"] == const.TYPE_RESULT
assert not msg["success"]
assert msg["error"]["code"] == const.ERR_UNKNOWN_ERROR
async def test_invalid_vol(hass, websocket_client):
"""Test a command that raises invalid vol error."""
hass.components.websocket_api.async_register_command(
"bla",
Mock(side_effect=TypeError),
messages.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{"type": "bla", vol.Required("test_config"): str}
),
)
await websocket_client.send_json({"id": 5, "type": "bla", "test_config": 5})
msg = await websocket_client.receive_json()
assert msg["id"] == 5
assert msg["type"] == const.TYPE_RESULT
assert not msg["success"]
assert msg["error"]["code"] == const.ERR_INVALID_FORMAT
assert "expected str for dictionary value" in msg["error"]["message"]
|
from __future__ import absolute_import, unicode_literals
import logging
import os
import shlex
import subprocess
import sys
from . import exc
from ._compat import console_to_str
logger = logging.getLogger(__name__)
PY2 = sys.version_info[0] == 2
def run_before_script(script_file, cwd=None):
"""Function to wrap try/except for subprocess.check_call()."""
try:
proc = subprocess.Popen(
shlex.split(str(script_file)),
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
cwd=cwd,
)
for line in iter(proc.stdout.readline, b''):
sys.stdout.write(console_to_str(line))
proc.wait()
if proc.returncode:
stderr = proc.stderr.read()
proc.stderr.close()
stderr = console_to_str(stderr).split('\n')
stderr = '\n'.join(list(filter(None, stderr))) # filter empty
raise exc.BeforeLoadScriptError(
proc.returncode, os.path.abspath(script_file), stderr
)
return proc.returncode
except OSError as e:
if e.errno == 2:
raise exc.BeforeLoadScriptNotExists(e, os.path.abspath(script_file))
else:
raise e
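# Hedged usage sketch (the script path below is hypothetical):
#
#     >>> run_before_script('./bootstrap.sh', cwd='/tmp')
#     0
#
# A non-zero exit status raises exc.BeforeLoadScriptError and a missing file
# raises exc.BeforeLoadScriptNotExists.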
def oh_my_zsh_auto_title():
"""Give warning and offer to fix ``DISABLE_AUTO_TITLE``.
see: https://github.com/robbyrussell/oh-my-zsh/pull/257
"""
if 'SHELL' in os.environ and 'zsh' in os.environ.get('SHELL'):
if os.path.exists(os.path.expanduser('~/.oh-my-zsh')):
# oh-my-zsh exists
if (
'DISABLE_AUTO_TITLE' not in os.environ
or os.environ.get('DISABLE_AUTO_TITLE') == "false"
):
print(
'Please set:\n\n'
'\texport DISABLE_AUTO_TITLE=\'true\'\n\n'
'in ~/.zshrc or where your zsh profile is stored.\n'
'Remember the "export" at the beginning!\n\n'
'Then create a new shell or type:\n\n'
'\t$ source ~/.zshrc'
)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.digitalocean import digitalocean_disk
from perfkitbenchmarker.providers.digitalocean import util
from six.moves import range
# DigitalOcean sets up the root account with a temporary
# password that's set as expired, requiring it to be changed
# immediately. This breaks dpkg postinst scripts, for example
# running adduser will produce errors:
#
# # chfn -f 'RabbitMQ messaging server' rabbitmq
# You are required to change your password immediately (root enforced)
# chfn: PAM: Authentication token is no longer valid; new one required
#
# To avoid this, just disable the root password (we don't need it),
# and remove the forced expiration.
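# The template is rendered with str.format() in _Create() below: {0} is the VM
# user name and {1} is the SSH public key.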
CLOUD_CONFIG_TEMPLATE = '''#cloud-config
users:
- name: {0}
ssh-authorized-keys:
- {1}
sudo: ['ALL=(ALL) NOPASSWD:ALL']
groups: sudo
shell: /bin/bash
runcmd:
- [ passwd, -l, root ]
- [ chage, -d, -1, -I, -1, -E, -1, -M, 999999, root ]
'''
class DigitalOceanVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a DigitalOcean Virtual Machine (Droplet)."""
CLOUD = providers.DIGITALOCEAN
# Subclasses should override the default image.
DEFAULT_IMAGE = None
def __init__(self, vm_spec):
"""Initialize a DigitalOcean virtual machine.
Args:
vm_spec: virtual_machine.BaseVirtualMachineSpec object of the vm.
"""
super(DigitalOceanVirtualMachine, self).__init__(vm_spec)
self.droplet_id = None
self.max_local_disks = 1
self.local_disk_counter = 0
self.image = self.image or self.DEFAULT_IMAGE
def _Create(self):
"""Create a DigitalOcean VM instance (droplet)."""
with open(self.ssh_public_key) as f:
public_key = f.read().rstrip('\n')
response, retcode = util.DoctlAndParse(
['compute', 'droplet', 'create',
self.name,
'--region', self.zone,
'--size', self.machine_type,
'--image', self.image,
'--user-data', CLOUD_CONFIG_TEMPLATE.format(
self.user_name, public_key),
'--enable-private-networking',
'--wait'])
if retcode:
raise errors.Resource.RetryableCreationError('Creation failed: %s' %
(response,))
self.droplet_id = response[0]['id']
@vm_util.Retry()
def _PostCreate(self):
"""Get the instance's data."""
response, retcode = util.DoctlAndParse(
['compute', 'droplet', 'get', self.droplet_id])
for interface in response[0]['networks']['v4']:
if interface['type'] == 'public':
self.ip_address = interface['ip_address']
else:
self.internal_ip = interface['ip_address']
def _Delete(self):
"""Delete a DigitalOcean VM instance."""
response, retcode = util.DoctlAndParse(
['compute', 'droplet', 'delete', self.droplet_id, '--force'])
# The command doesn't return the HTTP status code, and the error
# format is very difficult to parse, so we string
# search. TODO(user): parse the error message.
if retcode and '404' in response['errors'][0]['detail']:
return
elif retcode:
raise errors.Resource.RetryableDeletionError('Deletion failed: %s' %
(response,))
def _Exists(self):
"""Returns true if the VM exists."""
response, retcode = util.DoctlAndParse(
['compute', 'droplet', 'get', self.droplet_id])
return retcode == 0
def CreateScratchDisk(self, disk_spec):
"""Create a VM's scratch disk.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
"""
if disk_spec.disk_type == disk.LOCAL:
if self.scratch_disks and self.scratch_disks[0].disk_type == disk.LOCAL:
raise errors.Error('DigitalOcean does not support multiple local '
'disks.')
if disk_spec.num_striped_disks != 1:
raise ValueError('num_striped_disks=%s, but DigitalOcean VMs can only '
'have one local disk.' % disk_spec.num_striped_disks)
# The single unique local disk on DigitalOcean is also the boot
# disk, so we can't follow the normal procedure of formatting
# and mounting. Instead, create a folder at the "mount point" so
# the rest of PKB will work as expected and deliberately skip
# self._CreateScratchDiskFromDisks.
self.RemoteCommand('sudo mkdir -p {0} && sudo chown -R $USER:$USER {0}'
.format(disk_spec.mount_point))
self.scratch_disks.append(
digitalocean_disk.DigitalOceanLocalDisk(disk_spec))
else:
disks = []
for _ in range(disk_spec.num_striped_disks):
# Disk 0 is the local disk.
data_disk = digitalocean_disk.DigitalOceanBlockStorageDisk(
disk_spec, self.zone)
data_disk.disk_number = self.remote_disk_counter + 1
self.remote_disk_counter += 1
disks.append(data_disk)
self._CreateScratchDiskFromDisks(disk_spec, disks)
|
from collections import defaultdict
import logging
import pytest
from homeassistant.components import logger
from homeassistant.components.logger import LOGSEVERITY
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
HASS_NS = "unused.homeassistant"
COMPONENTS_NS = f"{HASS_NS}.components"
ZONE_NS = f"{COMPONENTS_NS}.zone"
GROUP_NS = f"{COMPONENTS_NS}.group"
CONFIGED_NS = "otherlibx"
UNCONFIG_NS = "unconfigurednamespace"
@pytest.fixture(autouse=True)
def restore_logging_class():
"""Restore logging class."""
klass = logging.getLoggerClass()
yield
logging.setLoggerClass(klass)
async def test_setting_level(hass):
"""Test we set log levels."""
mocks = defaultdict(Mock)
with patch("logging.getLogger", mocks.__getitem__):
assert await async_setup_component(
hass,
"logger",
{
"logger": {
"default": "warning",
"logs": {
"test": "info",
"test.child": "debug",
"test.child.child": "warning",
},
}
},
)
await hass.async_block_till_done()
assert len(mocks) == 4
assert len(mocks[""].orig_setLevel.mock_calls) == 1
assert mocks[""].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["WARNING"]
assert len(mocks["test"].orig_setLevel.mock_calls) == 1
assert mocks["test"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["INFO"]
assert len(mocks["test.child"].orig_setLevel.mock_calls) == 1
assert mocks["test.child"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["DEBUG"]
assert len(mocks["test.child.child"].orig_setLevel.mock_calls) == 1
assert (
mocks["test.child.child"].orig_setLevel.mock_calls[0][1][0]
== LOGSEVERITY["WARNING"]
)
# Test set default level
with patch("logging.getLogger", mocks.__getitem__):
await hass.services.async_call(
"logger", "set_default_level", {"level": "fatal"}, blocking=True
)
assert len(mocks[""].orig_setLevel.mock_calls) == 2
assert mocks[""].orig_setLevel.mock_calls[1][1][0] == LOGSEVERITY["FATAL"]
# Test update other loggers
with patch("logging.getLogger", mocks.__getitem__):
await hass.services.async_call(
"logger",
"set_level",
{"test.child": "info", "new_logger": "notset"},
blocking=True,
)
assert len(mocks) == 5
assert len(mocks["test.child"].orig_setLevel.mock_calls) == 2
assert mocks["test.child"].orig_setLevel.mock_calls[1][1][0] == LOGSEVERITY["INFO"]
assert len(mocks["new_logger"].orig_setLevel.mock_calls) == 1
assert (
mocks["new_logger"].orig_setLevel.mock_calls[0][1][0] == LOGSEVERITY["NOTSET"]
)
async def test_can_set_level(hass):
"""Test logger propagation."""
assert await async_setup_component(
hass,
"logger",
{
"logger": {
"logs": {
CONFIGED_NS: "warning",
f"{CONFIGED_NS}.info": "info",
f"{CONFIGED_NS}.debug": "debug",
HASS_NS: "warning",
COMPONENTS_NS: "info",
ZONE_NS: "debug",
GROUP_NS: "info",
},
}
},
)
assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.CRITICAL) is True
assert (
logging.getLogger(f"{UNCONFIG_NS}.any").isEnabledFor(logging.CRITICAL) is True
)
assert (
logging.getLogger(f"{UNCONFIG_NS}.any.any").isEnabledFor(logging.CRITICAL)
is True
)
assert logging.getLogger(CONFIGED_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(CONFIGED_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(f"{CONFIGED_NS}.any").isEnabledFor(logging.WARNING) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.any.any").isEnabledFor(logging.WARNING)
is True
)
assert logging.getLogger(f"{CONFIGED_NS}.info").isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(f"{CONFIGED_NS}.info").isEnabledFor(logging.INFO) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.info.any").isEnabledFor(logging.DEBUG)
is False
)
assert (
logging.getLogger(f"{CONFIGED_NS}.info.any").isEnabledFor(logging.INFO) is True
)
assert logging.getLogger(f"{CONFIGED_NS}.debug").isEnabledFor(logging.DEBUG) is True
assert (
logging.getLogger(f"{CONFIGED_NS}.debug.any").isEnabledFor(logging.DEBUG)
is True
)
assert logging.getLogger(HASS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(HASS_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.INFO) is True
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.WARNING) is True
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.INFO) is True
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.WARNING) is True
assert logging.getLogger(f"{GROUP_NS}.any").isEnabledFor(logging.INFO) is True
assert logging.getLogger(ZONE_NS).isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(f"{ZONE_NS}.any").isEnabledFor(logging.DEBUG) is True
await hass.services.async_call(
logger.DOMAIN, "set_level", {f"{UNCONFIG_NS}.any": "debug"}, blocking=True
)
    assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
    assert logging.getLogger(f"{UNCONFIG_NS}.any").level == logging.DEBUG
    assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
await hass.services.async_call(
logger.DOMAIN, "set_default_level", {"level": "debug"}, blocking=True
)
assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(f"{UNCONFIG_NS}.any").isEnabledFor(logging.DEBUG) is True
assert (
logging.getLogger(f"{UNCONFIG_NS}.any.any").isEnabledFor(logging.DEBUG) is True
)
assert logging.getLogger("").isEnabledFor(logging.DEBUG) is True
assert logging.getLogger(COMPONENTS_NS).isEnabledFor(logging.DEBUG) is False
assert logging.getLogger(GROUP_NS).isEnabledFor(logging.DEBUG) is False
logging.getLogger(CONFIGED_NS).setLevel(logging.INFO)
assert logging.getLogger(CONFIGED_NS).level == logging.WARNING
logging.getLogger("").setLevel(logging.NOTSET)
|
from homeassistant.const import STATE_ON
from .util import async_init_integration
async def test_create_binary_sensors(hass):
"""Test creation of binary_sensors."""
await async_init_integration(hass)
state = hass.states.get("binary_sensor.happy_place_myq_gateway")
assert state.state == STATE_ON
expected_attributes = {"device_class": "connectivity"}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
|
import unittest
import numpy as np
import pytest
from chainer import testing
from chainer.datasets import TupleDataset
from chainercv.datasets import MixUpSoftLabelDataset
from chainercv.datasets import SiameseDataset
from chainercv.utils.testing.assertions.assert_is_image import assert_is_image
N = 15
@testing.parameterize(
# Positive and negative samples
{'labels_0': np.arange(N, dtype=np.int32) % 3,
'labels_1': np.arange(N, dtype=np.int32) % 3,
'pos_exist': True, 'neg_exist': True,
'alpha': 1.0,
},
# No positive
{'labels_0': np.zeros(N, dtype=np.int32),
'labels_1': np.ones(N, dtype=np.int32),
'pos_exist': False, 'neg_exist': True,
'alpha': 2.0,
},
# No negative
{'labels_0': np.ones(N, dtype=np.int32),
'labels_1': np.ones(N, dtype=np.int32),
'pos_exist': True, 'neg_exist': False,
'alpha': 5.0,
},
)
class TestMixupSoftLabelDataset(unittest.TestCase):
img_shape = (3, 32, 48)
def setUp(self):
np.random.shuffle(self.labels_0)
np.random.shuffle(self.labels_1)
dataset_0 = TupleDataset(
np.random.uniform(size=(N,) + self.img_shape), self.labels_0)
dataset_1 = TupleDataset(
np.random.uniform(size=(N,) + self.img_shape), self.labels_1)
self.n_class = np.max((self.labels_0, self.labels_1)) + 1
self.siamese_dataset = SiameseDataset(dataset_0, dataset_1)
def _check_example(self, example):
assert_is_image(example[0])
assert example[0].shape == self.img_shape
assert example[1].dtype == np.float32
assert example[1].ndim == 1
assert len(example[1]) == self.n_class
np.testing.assert_almost_equal(example[1].sum(), 1.0)
assert (example[1] >= 0.0).all()
def test_mixup(self):
dataset = MixUpSoftLabelDataset(
self.siamese_dataset, self.n_class, alpha=self.alpha)
for i in range(10):
example = dataset[i]
self._check_example(example)
assert len(dataset) == N
def test_invalid_alpha(self):
with pytest.raises(ValueError):
MixUpSoftLabelDataset(
self.siamese_dataset, self.n_class, alpha=self.alpha - 5.0)
testing.run_module(__name__, __file__)
|
import logging
from homeassistant.util import slugify
from . import DOMAIN as BMW_DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_scanner(hass, config, see, discovery_info=None):
"""Set up the BMW tracker."""
accounts = hass.data[BMW_DOMAIN]
_LOGGER.debug("Found BMW accounts: %s", ", ".join([a.name for a in accounts]))
for account in accounts:
for vehicle in account.account.vehicles:
tracker = BMWDeviceTracker(see, vehicle)
account.add_update_listener(tracker.update)
tracker.update()
return True
class BMWDeviceTracker:
"""BMW Connected Drive device tracker."""
def __init__(self, see, vehicle):
"""Initialize the Tracker."""
self._see = see
self.vehicle = vehicle
def update(self) -> None:
"""Update the device info.
Only update the state in Home Assistant if tracking in
the car is enabled.
"""
dev_id = slugify(self.vehicle.name)
if not self.vehicle.state.is_vehicle_tracking_enabled:
_LOGGER.debug("Tracking is disabled for vehicle %s", dev_id)
return
_LOGGER.debug("Updating %s", dev_id)
attrs = {"vin": self.vehicle.vin}
self._see(
dev_id=dev_id,
host_name=self.vehicle.name,
gps=self.vehicle.state.gps_position,
attributes=attrs,
icon="mdi:car",
)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.testing import absltest
from absl.testing import flagsaver
flags.DEFINE_string('flagsaver_test_flag0', 'unchanged0', 'flag to test with')
flags.DEFINE_string('flagsaver_test_flag1', 'unchanged1', 'flag to test with')
flags.DEFINE_string('flagsaver_test_validated_flag', None, 'flag to test with')
flags.register_validator('flagsaver_test_validated_flag', lambda x: not x)
flags.DEFINE_string('flagsaver_test_validated_flag1', None, 'flag to test with')
flags.DEFINE_string('flagsaver_test_validated_flag2', None, 'flag to test with')
INT_FLAG = flags.DEFINE_integer(
'flagsaver_test_int_flag', default=1, help='help')
STR_FLAG = flags.DEFINE_string(
'flagsaver_test_str_flag', default='str default', help='help')
@flags.multi_flags_validator(
('flagsaver_test_validated_flag1', 'flagsaver_test_validated_flag2'))
def validate_test_flags(flag_dict):
return (flag_dict['flagsaver_test_validated_flag1'] ==
flag_dict['flagsaver_test_validated_flag2'])
FLAGS = flags.FLAGS
@flags.validator('flagsaver_test_flag0')
def check_no_upper_case(value):
return value == value.lower()
class _TestError(Exception):
"""Exception class for use in these tests."""
class FlagSaverTest(absltest.TestCase):
def test_context_manager_without_parameters(self):
with flagsaver.flagsaver():
FLAGS.flagsaver_test_flag0 = 'new value'
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_context_manager_with_overrides(self):
with flagsaver.flagsaver(flagsaver_test_flag0='new value'):
self.assertEqual('new value', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag1 = 'another value'
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
self.assertEqual('unchanged1', FLAGS.flagsaver_test_flag1)
def test_context_manager_with_flagholders(self):
with flagsaver.flagsaver((INT_FLAG, 3), (STR_FLAG, 'new value')):
self.assertEqual('new value', STR_FLAG.value)
self.assertEqual(3, INT_FLAG.value)
FLAGS.flagsaver_test_flag1 = 'another value'
self.assertEqual(INT_FLAG.value, INT_FLAG.default)
self.assertEqual(STR_FLAG.value, STR_FLAG.default)
self.assertEqual('unchanged1', FLAGS.flagsaver_test_flag1)
def test_context_manager_with_overrides_and_flagholders(self):
with flagsaver.flagsaver((INT_FLAG, 3), flagsaver_test_flag0='new value'):
self.assertEqual(STR_FLAG.default, STR_FLAG.value)
self.assertEqual(3, INT_FLAG.value)
FLAGS.flagsaver_test_flag0 = 'new value'
self.assertEqual(INT_FLAG.value, INT_FLAG.default)
self.assertEqual(STR_FLAG.value, STR_FLAG.default)
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_context_manager_with_cross_validated_overrides_set_together(self):
# When the flags are set in the same flagsaver call their validators will
# be triggered only once the setting is done.
with flagsaver.flagsaver(
flagsaver_test_validated_flag1='new_value',
flagsaver_test_validated_flag2='new_value'):
self.assertEqual('new_value', FLAGS.flagsaver_test_validated_flag1)
self.assertEqual('new_value', FLAGS.flagsaver_test_validated_flag2)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_context_manager_with_cross_validated_overrides_set_badly(self):
# Different values should violate the validator.
with self.assertRaisesRegex(flags.IllegalFlagValueError,
'Flag validation failed'):
with flagsaver.flagsaver(
flagsaver_test_validated_flag1='new_value',
flagsaver_test_validated_flag2='other_value'):
pass
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_context_manager_with_cross_validated_overrides_set_separately(self):
# Setting just one flag will trip the validator as well.
with self.assertRaisesRegex(flags.IllegalFlagValueError,
'Flag validation failed'):
with flagsaver.flagsaver(flagsaver_test_validated_flag1='new_value'):
pass
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_context_manager_with_exception(self):
with self.assertRaises(_TestError):
with flagsaver.flagsaver(flagsaver_test_flag0='new value'):
self.assertEqual('new value', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag1 = 'another value'
raise _TestError('oops')
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
self.assertEqual('unchanged1', FLAGS.flagsaver_test_flag1)
def test_context_manager_with_validation_exception(self):
with self.assertRaises(flags.IllegalFlagValueError):
with flagsaver.flagsaver(
flagsaver_test_flag0='new value',
flagsaver_test_validated_flag='new value'):
pass
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
self.assertEqual('unchanged1', FLAGS.flagsaver_test_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag)
def test_decorator_without_call(self):
@flagsaver.flagsaver
def mutate_flags(value):
"""Test function that mutates a flag."""
# The undecorated method mutates --flagsaver_test_flag0 to the given value
# and then returns the value of that flag. If the @flagsaver.flagsaver
# decorator works as designed, then this mutation will be reverted after
# this method returns.
FLAGS.flagsaver_test_flag0 = value
return FLAGS.flagsaver_test_flag0
# mutate_flags returns the flag value before it gets restored by
# the flagsaver decorator. So we check that flag value was
# actually changed in the method's scope.
self.assertEqual('new value', mutate_flags('new value'))
# But... notice that the flag is now unchanged0.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_decorator_without_parameters(self):
@flagsaver.flagsaver()
def mutate_flags(value):
FLAGS.flagsaver_test_flag0 = value
return FLAGS.flagsaver_test_flag0
self.assertEqual('new value', mutate_flags('new value'))
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_decorator_with_overrides(self):
@flagsaver.flagsaver(flagsaver_test_flag0='new value')
def mutate_flags():
"""Test function expecting new value."""
      # If the @flagsaver.flagsaver decorator works as designed,
# then the value of the flag should be changed in the scope of
# the method but the change will be reverted after this method
# returns.
return FLAGS.flagsaver_test_flag0
# mutate_flags returns the flag value before it gets restored by
# the flagsaver decorator. So we check that flag value was
# actually changed in the method's scope.
self.assertEqual('new value', mutate_flags())
# But... notice that the flag is now unchanged0.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_decorator_with_cross_validated_overrides_set_together(self):
# When the flags are set in the same flagsaver call their validators will
# be triggered only once the setting is done.
@flagsaver.flagsaver(
flagsaver_test_validated_flag1='new_value',
flagsaver_test_validated_flag2='new_value')
def mutate_flags_together():
return (FLAGS.flagsaver_test_validated_flag1,
FLAGS.flagsaver_test_validated_flag2)
self.assertEqual(('new_value', 'new_value'), mutate_flags_together())
# The flags have not changed outside the context of the function.
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_decorator_with_cross_validated_overrides_set_badly(self):
# Different values should violate the validator.
@flagsaver.flagsaver(
flagsaver_test_validated_flag1='new_value',
flagsaver_test_validated_flag2='other_value')
def mutate_flags_together_badly():
return (FLAGS.flagsaver_test_validated_flag1,
FLAGS.flagsaver_test_validated_flag2)
with self.assertRaisesRegex(flags.IllegalFlagValueError,
'Flag validation failed'):
mutate_flags_together_badly()
# The flags have not changed outside the context of the exception.
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_decorator_with_cross_validated_overrides_set_separately(self):
# Setting the flags sequentially and not together will trip the validator,
# because it will be called at the end of each flagsaver call.
@flagsaver.flagsaver(flagsaver_test_validated_flag1='new_value')
@flagsaver.flagsaver(flagsaver_test_validated_flag2='new_value')
def mutate_flags_separately():
return (FLAGS.flagsaver_test_validated_flag1,
FLAGS.flagsaver_test_validated_flag2)
with self.assertRaisesRegex(flags.IllegalFlagValueError,
'Flag validation failed'):
mutate_flags_separately()
# The flags have not changed outside the context of the exception.
self.assertIsNone(FLAGS.flagsaver_test_validated_flag1)
self.assertIsNone(FLAGS.flagsaver_test_validated_flag2)
def test_save_flag_value(self):
# First save the flag values.
saved_flag_values = flagsaver.save_flag_values()
# Now mutate the flag's value field and check that it changed.
FLAGS.flagsaver_test_flag0 = 'new value'
self.assertEqual('new value', FLAGS.flagsaver_test_flag0)
# Now restore the flag to its original value.
flagsaver.restore_flag_values(saved_flag_values)
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_save_flag_default(self):
# First save the flag.
saved_flag_values = flagsaver.save_flag_values()
# Now mutate the flag's default field and check that it changed.
FLAGS.set_default('flagsaver_test_flag0', 'new_default')
self.assertEqual('new_default', FLAGS['flagsaver_test_flag0'].default)
# Now restore the flag's default field.
flagsaver.restore_flag_values(saved_flag_values)
self.assertEqual('unchanged0', FLAGS['flagsaver_test_flag0'].default)
def test_restore_after_parse(self):
# First save the flag.
saved_flag_values = flagsaver.save_flag_values()
# Sanity check (would fail if called with --flagsaver_test_flag0).
self.assertEqual(0, FLAGS['flagsaver_test_flag0'].present)
# Now populate the flag and check that it changed.
FLAGS['flagsaver_test_flag0'].parse('new value')
self.assertEqual('new value', FLAGS['flagsaver_test_flag0'].value)
self.assertEqual(1, FLAGS['flagsaver_test_flag0'].present)
# Now restore the flag to its original value.
flagsaver.restore_flag_values(saved_flag_values)
self.assertEqual('unchanged0', FLAGS['flagsaver_test_flag0'].value)
self.assertEqual(0, FLAGS['flagsaver_test_flag0'].present)
def test_decorator_with_exception(self):
@flagsaver.flagsaver
def raise_exception():
FLAGS.flagsaver_test_flag0 = 'new value'
# Simulate a failed test.
raise _TestError('something happened')
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
self.assertRaises(_TestError, raise_exception)
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
def test_validator_list_is_restored(self):
self.assertLen(FLAGS['flagsaver_test_flag0'].validators, 1)
original_validators = list(FLAGS['flagsaver_test_flag0'].validators)
@flagsaver.flagsaver
def modify_validators():
def no_space(value):
return ' ' not in value
flags.register_validator('flagsaver_test_flag0', no_space)
self.assertLen(FLAGS['flagsaver_test_flag0'].validators, 2)
modify_validators()
self.assertEqual(original_validators,
FLAGS['flagsaver_test_flag0'].validators)
class FlagSaverDecoratorUsageTest(absltest.TestCase):
@flagsaver.flagsaver
def test_mutate1(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
@flagsaver.flagsaver
def test_mutate2(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
@flagsaver.flagsaver
def test_mutate3(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
@flagsaver.flagsaver
def test_mutate4(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
class FlagSaverSetUpTearDownUsageTest(absltest.TestCase):
def setUp(self):
self.saved_flag_values = flagsaver.save_flag_values()
def tearDown(self):
flagsaver.restore_flag_values(self.saved_flag_values)
def test_mutate1(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
def test_mutate2(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
def test_mutate3(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
def test_mutate4(self):
# Even though other test cases change the flag, it should be
# restored to 'unchanged0' if the flagsaver is working.
self.assertEqual('unchanged0', FLAGS.flagsaver_test_flag0)
FLAGS.flagsaver_test_flag0 = 'changed0'
class FlagSaverBadUsageTest(absltest.TestCase):
"""Tests that certain kinds of improper usages raise errors."""
def test_flag_saver_on_class(self):
with self.assertRaises(TypeError):
# WRONG. Don't do this.
# Consider the correct usage example in FlagSaverSetUpTearDownUsageTest.
@flagsaver.flagsaver
class FooTest(absltest.TestCase):
def test_tautology(self):
pass
del FooTest
def test_flag_saver_call_on_class(self):
with self.assertRaises(TypeError):
# WRONG. Don't do this.
# Consider the correct usage example in FlagSaverSetUpTearDownUsageTest.
@flagsaver.flagsaver()
class FooTest(absltest.TestCase):
def test_tautology(self):
pass
del FooTest
def test_flag_saver_with_overrides_on_class(self):
with self.assertRaises(TypeError):
# WRONG. Don't do this.
# Consider the correct usage example in FlagSaverSetUpTearDownUsageTest.
@flagsaver.flagsaver(foo='bar')
class FooTest(absltest.TestCase):
def test_tautology(self):
pass
del FooTest
def test_multiple_positional_parameters(self):
with self.assertRaises(ValueError):
func_a = lambda: None
func_b = lambda: None
flagsaver.flagsaver(func_a, func_b)
def test_both_positional_and_keyword_parameters(self):
with self.assertRaises(ValueError):
func_a = lambda: None
flagsaver.flagsaver(func_a, flagsaver_test_flag0='new value')
def test_duplicate_holder_parameters(self):
with self.assertRaises(ValueError):
flagsaver.flagsaver((INT_FLAG, 45), (INT_FLAG, 45))
def test_duplicate_holder_and_kw_parameter(self):
with self.assertRaises(ValueError):
flagsaver.flagsaver((INT_FLAG, 45), **{INT_FLAG.name: 45})
def test_both_positional_and_holder_parameters(self):
with self.assertRaises(ValueError):
func_a = lambda: None
flagsaver.flagsaver(func_a, (INT_FLAG, 45))
def test_holder_parameters_wrong_shape(self):
with self.assertRaises(ValueError):
flagsaver.flagsaver(INT_FLAG)
def test_holder_parameters_tuple_too_long(self):
with self.assertRaises(ValueError):
      # A (FlagHolder, value) pair must be exactly a 2-tuple; 3 items is an error.
flagsaver.flagsaver((INT_FLAG, 4, 5))
def test_holder_parameters_tuple_wrong_type(self):
with self.assertRaises(ValueError):
      # The FlagHolder must come first in the (FlagHolder, value) pair.
flagsaver.flagsaver((4, INT_FLAG))
def test_both_wrong_positional_parameters(self):
with self.assertRaises(ValueError):
func_a = lambda: None
flagsaver.flagsaver(func_a, STR_FLAG, '45')
if __name__ == '__main__':
absltest.main()
|
import unittest.mock
import pytest
from PyQt5.QtCore import QUrl
from qutebrowser.browser import urlmarks
@pytest.fixture
def bm_file(config_tmpdir):
bm_dir = config_tmpdir / 'bookmarks'
bm_dir.mkdir()
bm_file = bm_dir / 'urls'
return bm_file
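# Note (added for clarity, not part of the original test file): the bookmarks
# file exercised below is plain text with one bookmark per line, in the form
# "<url> <title>"; the title may be empty, leaving a trailing space on save.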
def test_init(bm_file, fake_save_manager):
bm_file.write('\n'.join([
'http://example.com Example Site',
'http://example.com/foo Foo',
'http://example.com/bar Bar',
'http://example.com/notitle',
]))
bm = urlmarks.BookmarkManager()
fake_save_manager.add_saveable.assert_called_once_with(
'bookmark-manager',
unittest.mock.ANY,
unittest.mock.ANY,
filename=str(bm_file),
)
assert list(bm.marks.items()) == [
('http://example.com', 'Example Site'),
('http://example.com/foo', 'Foo'),
('http://example.com/bar', 'Bar'),
('http://example.com/notitle', ''),
]
def test_add(bm_file, fake_save_manager, qtbot):
bm = urlmarks.BookmarkManager()
with qtbot.wait_signal(bm.changed):
bm.add(QUrl('http://example.com'), 'Example Site')
assert list(bm.marks.items()) == [
('http://example.com', 'Example Site'),
]
with qtbot.wait_signal(bm.changed):
bm.add(QUrl('http://example.com/notitle'), '')
assert list(bm.marks.items()) == [
('http://example.com', 'Example Site'),
('http://example.com/notitle', ''),
]
def test_add_toggle(bm_file, fake_save_manager, qtbot):
bm = urlmarks.BookmarkManager()
with qtbot.wait_signal(bm.changed):
bm.add(QUrl('http://example.com'), '', toggle=True)
assert 'http://example.com' in bm.marks
with qtbot.wait_signal(bm.changed):
bm.add(QUrl('http://example.com'), '', toggle=True)
assert 'http://example.com' not in bm.marks
with qtbot.wait_signal(bm.changed):
bm.add(QUrl('http://example.com'), '', toggle=True)
assert 'http://example.com' in bm.marks
def test_add_dupe(bm_file, fake_save_manager, qtbot):
bm = urlmarks.BookmarkManager()
bm.add(QUrl('http://example.com'), '')
with pytest.raises(urlmarks.AlreadyExistsError):
bm.add(QUrl('http://example.com'), '')
def test_delete(bm_file, fake_save_manager, qtbot):
bm = urlmarks.BookmarkManager()
bm.add(QUrl('http://example.com/foo'), 'Foo')
bm.add(QUrl('http://example.com/bar'), 'Bar')
bm.add(QUrl('http://example.com/baz'), 'Baz')
bm.save()
with qtbot.wait_signal(bm.changed):
bm.delete('http://example.com/bar')
assert list(bm.marks.items()) == [
('http://example.com/foo', 'Foo'),
('http://example.com/baz', 'Baz'),
]
def test_save(bm_file, fake_save_manager, qtbot):
bm = urlmarks.BookmarkManager()
bm.add(QUrl('http://example.com'), 'Example Site')
bm.add(QUrl('http://example.com/notitle'), '')
bm.save()
assert bm_file.read().splitlines() == [
'http://example.com Example Site',
'http://example.com/notitle ',
]
|
import ipaddress
import fnmatch
import urllib.parse
from typing import Any, Optional, Tuple
from PyQt5.QtCore import QUrl
from qutebrowser.utils import utils, qtutils
class ParseError(Exception):
"""Raised when a pattern could not be parsed."""
class UrlPattern:
"""A Chromium-like URL matching pattern.
Class attributes:
_DEFAULT_PORTS: The default ports used for schemes which support ports.
_SCHEMES_WITHOUT_HOST: Schemes which don't need a host.
Attributes:
host: The host to match to, or None for any host.
_pattern: The given pattern as string.
_match_all: Whether the pattern should match all URLs.
_match_subdomains: Whether the pattern should match subdomains of the
given host.
_scheme: The scheme to match to, or None to match any scheme.
Note that with Chromium, '*'/None only matches http/https and
not file/ftp. We deviate from that as per-URL settings aren't
security relevant.
_path: The path to match to, or None for any path.
_port: The port to match to as integer, or None for any port.
"""
_DEFAULT_PORTS = {'https': 443, 'http': 80, 'ftp': 21}
_SCHEMES_WITHOUT_HOST = ['about', 'file', 'data', 'javascript']
def __init__(self, pattern: str) -> None:
# Make sure all attributes are initialized if we exit early.
self._pattern = pattern
self._match_all = False
self._match_subdomains: bool = False
self._scheme: Optional[str] = None
self.host: Optional[str] = None
self._path: Optional[str] = None
self._port: Optional[int] = None
# > The special pattern <all_urls> matches any URL that starts with a
# > permitted scheme.
if pattern == '<all_urls>':
self._match_all = True
return
if '\0' in pattern:
raise ParseError("May not contain NUL byte")
pattern = self._fixup_pattern(pattern)
# We use urllib.parse instead of QUrl here because it can handle
# hosts with * in them.
try:
parsed = urllib.parse.urlparse(pattern)
except ValueError as e:
raise ParseError(str(e))
assert parsed is not None
self._init_scheme(parsed)
self._init_host(parsed)
self._init_path(parsed)
self._init_port(parsed)
def _to_tuple(self) -> Tuple:
"""Get a pattern with information used for __eq__/__hash__."""
return (self._match_all, self._match_subdomains, self._scheme,
self.host, self._path, self._port)
def __hash__(self) -> int:
return hash(self._to_tuple())
def __eq__(self, other: Any) -> bool:
if not isinstance(other, UrlPattern):
return NotImplemented
return self._to_tuple() == other._to_tuple()
def __repr__(self) -> str:
return utils.get_repr(self, pattern=self._pattern, constructor=True)
def __str__(self) -> str:
return self._pattern
def _fixup_pattern(self, pattern: str) -> str:
"""Make sure the given pattern is parseable by urllib.parse."""
if pattern.startswith('*:'): # Any scheme, but *:// is unparseable
pattern = 'any:' + pattern[2:]
schemes = tuple(s + ':' for s in self._SCHEMES_WITHOUT_HOST)
if '://' not in pattern and not pattern.startswith(schemes):
pattern = 'any://' + pattern
# Chromium handles file://foo like file:///foo
# FIXME This doesn't actually strip the hostname correctly.
if (pattern.startswith('file://') and
not pattern.startswith('file:///')):
pattern = 'file:///' + pattern[len("file://"):]
return pattern
def _init_scheme(self, parsed: urllib.parse.ParseResult) -> None:
"""Parse the scheme from the given URL.
Deviation from Chromium:
- We assume * when no scheme has been given.
"""
if not parsed.scheme:
raise ParseError("Missing scheme")
if parsed.scheme == 'any':
self._scheme = None
return
self._scheme = parsed.scheme
def _init_path(self, parsed: urllib.parse.ParseResult) -> None:
"""Parse the path from the given URL.
Deviation from Chromium:
- We assume * when no path has been given.
"""
if self._scheme == 'about' and not parsed.path.strip():
raise ParseError("Pattern without path")
if parsed.path == '/*':
self._path = None
elif not parsed.path:
# When the user doesn't add a trailing slash, we assume the pattern
# matches any path.
self._path = None
else:
self._path = parsed.path
def _init_host(self, parsed: urllib.parse.ParseResult) -> None:
"""Parse the host from the given URL.
Deviation from Chromium:
- http://:1234/ is not a valid URL because it has no host.
- We don't allow patterns for dot/space hosts which QUrl considers
invalid.
"""
if parsed.hostname is None or not parsed.hostname.strip():
if self._scheme not in self._SCHEMES_WITHOUT_HOST:
raise ParseError("Pattern without host")
assert self.host is None
return
if parsed.netloc.startswith('['):
# Using QUrl parsing to minimize ipv6 addresses
url = QUrl()
url.setHost(parsed.hostname)
if not url.isValid():
raise ParseError(url.errorString())
self.host = url.host()
return
if parsed.hostname == '*':
self._match_subdomains = True
hostname = None
elif parsed.hostname.startswith('*.'):
if len(parsed.hostname) == 2:
# We don't allow just '*.' as a host.
raise ParseError("Pattern without host")
self._match_subdomains = True
hostname = parsed.hostname[2:]
        # Hosts consisting only of dots (and spaces) are considered invalid.
        elif set(parsed.hostname) in {frozenset('.'), frozenset('. ')}:
raise ParseError("Invalid host")
else:
hostname = parsed.hostname
if hostname is None:
self.host = None
elif '*' in hostname:
# Only * or *.foo is allowed as host.
raise ParseError("Invalid host wildcard")
else:
self.host = hostname.rstrip('.')
def _init_port(self, parsed: urllib.parse.ParseResult) -> None:
"""Parse the port from the given URL.
Deviation from Chromium:
- We use None instead of "*" if there's no port filter.
"""
if parsed.netloc.endswith(':*'):
# We can't access parsed.port as it tries to run int()
self._port = None
elif parsed.netloc.endswith(':'):
raise ParseError("Invalid port: Port is empty")
else:
try:
self._port = parsed.port
except ValueError as e:
raise ParseError("Invalid port: {}".format(e))
        scheme_has_port = (self._scheme in self._DEFAULT_PORTS or
                           self._scheme is None)
if self._port is not None and not scheme_has_port:
raise ParseError("Ports are unsupported with {} scheme".format(
self._scheme))
def _matches_scheme(self, scheme: str) -> bool:
return self._scheme is None or self._scheme == scheme
def _matches_host(self, host: str) -> bool:
# FIXME what about multiple dots?
host = host.rstrip('.')
# If we have no host in the match pattern, that means that we're
# matching all hosts, which means we have a match no matter what the
# test host is.
# Contrary to Chromium, we don't need to check for
# self._match_subdomains, as we want to return True here for e.g.
# file:// as well.
if self.host is None:
return True
# If the hosts are exactly equal, we have a match.
if host == self.host:
return True
# Otherwise, we can only match if our match pattern matches subdomains.
if not self._match_subdomains:
return False
# We don't do subdomain matching against IP addresses, so we can give
# up now if the test host is an IP address.
if not utils.raises(ValueError, ipaddress.ip_address, host):
return False
# Check if the test host is a subdomain of our host.
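        # For example (added comment), with self.host = 'example.com':
        #   host = 'www.example.com' -> suffix matches and the character just
        #                               before it is '.', so this is a match.
        #   host = 'badexample.com'  -> suffix matches, but the character just
        #                               before it is 'd', so this is no match.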
if len(host) <= (len(self.host) + 1):
return False
if not host.endswith(self.host):
return False
return host[len(host) - len(self.host) - 1] == '.'
def _matches_port(self, scheme: str, port: int) -> bool:
if port == -1 and scheme in self._DEFAULT_PORTS:
port = self._DEFAULT_PORTS[scheme]
return self._port is None or self._port == port
def _matches_path(self, path: str) -> bool:
"""Match the URL's path.
Deviations from Chromium:
- Chromium only matches <all_urls> with "javascript:" (pathless); but
we also match *://*/* and friends.
"""
if self._path is None:
return True
# Match 'google.com' with 'google.com/'
if path + '/*' == self._path:
return True
# FIXME Chromium seems to have a more optimized glob matching which
# doesn't rely on regexes. Do we need that too?
return fnmatch.fnmatchcase(path, self._path)
def matches(self, qurl: QUrl) -> bool:
"""Check if the pattern matches the given QUrl."""
qtutils.ensure_valid(qurl)
if self._match_all:
return True
if not self._matches_scheme(qurl.scheme()):
return False
# FIXME ignore for file:// like Chromium?
if not self._matches_host(qurl.host()):
return False
if not self._matches_port(qurl.scheme(), qurl.port()):
return False
if not self._matches_path(qurl.path()):
return False
return True
|