from functools import wraps
import boto3
from botocore.config import Config
from flask import current_app
config = Config(retries=dict(max_attempts=20))
def sts_client(service, service_type="client"):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
sts = boto3.client("sts", config=config)
arn = "arn:aws:iam::{0}:role/{1}".format(
kwargs.pop("account_number"),
current_app.config.get("LEMUR_INSTANCE_PROFILE", "Lemur"),
)
# TODO add user specific information to RoleSessionName
role = sts.assume_role(RoleArn=arn, RoleSessionName="lemur")
if service_type == "client":
client = boto3.client(
service,
region_name=kwargs.pop("region", "us-east-1"),
aws_access_key_id=role["Credentials"]["AccessKeyId"],
aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
aws_session_token=role["Credentials"]["SessionToken"],
config=config,
)
kwargs["client"] = client
elif service_type == "resource":
resource = boto3.resource(
service,
region_name=kwargs.pop("region", "us-east-1"),
aws_access_key_id=role["Credentials"]["AccessKeyId"],
aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
aws_session_token=role["Credentials"]["SessionToken"],
config=config,
)
kwargs["resource"] = resource
return f(*args, **kwargs)
return decorated_function
return decorator
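# --- Illustrative usage sketch (not part of the original module) ---
# A minimal consumer of the decorator above; ``list_server_certs`` and the
# account number below are hypothetical. The decorator pops ``account_number``
# and ``region`` from kwargs and injects an assumed-role boto3 client as the
# ``client`` keyword argument.
@sts_client("iam")
def list_server_certs(**kwargs):
    """Example only: list IAM server certificates via the injected client."""
    return kwargs["client"].list_server_certificates()
# Calling it assumes the configured role in the target account, e.g.:
#     list_server_certs(account_number="123456789012", region="us-east-1")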
|
import logging
import requests
from ritassist import API
import voluptuous as vol
from homeassistant.components.device_tracker import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_PASSWORD,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_utc_time_change
_LOGGER = logging.getLogger(__name__)
CONF_INCLUDE = "include"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_INCLUDE, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
def setup_scanner(hass, config: dict, see, discovery_info=None):
"""Set up the DeviceScanner and check if login is valid."""
scanner = FleetGoDeviceScanner(config, see)
if not scanner.login(hass):
_LOGGER.error("FleetGO authentication failed")
return False
return True
class FleetGoDeviceScanner:
"""Define a scanner for the FleetGO platform."""
def __init__(self, config, see):
"""Initialize FleetGoDeviceScanner."""
self._include = config.get(CONF_INCLUDE)
self._see = see
self._api = API(
config.get(CONF_CLIENT_ID),
config.get(CONF_CLIENT_SECRET),
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD),
)
def setup(self, hass):
"""Set up a timer and start gathering devices."""
self._refresh()
track_utc_time_change(
hass, lambda now: self._refresh(), second=range(0, 60, 30)
)
def login(self, hass):
"""Perform a login on the FleetGO API."""
if self._api.login():
self.setup(hass)
return True
return False
def _refresh(self) -> None:
"""Refresh device information from the platform."""
try:
devices = self._api.get_devices()
for device in devices:
if not self._include or device.license_plate in self._include:
if device.active or device.current_address is None:
device.get_map_details()
self._see(
dev_id=device.plate_as_id,
gps=(device.latitude, device.longitude),
attributes=device.state_attributes,
icon="mdi:car",
)
except requests.exceptions.ConnectionError:
_LOGGER.error("ConnectionError: Could not connect to FleetGO")
|
from PyQt5.QtCore import pyqtSlot, Qt
from qutebrowser.mainwindow.statusbar import textbase
from qutebrowser.misc import throttle
from qutebrowser.utils import utils
class Percentage(textbase.TextBase):
"""Reading percentage displayed in the statusbar."""
def __init__(self, parent=None):
"""Constructor. Set percentage to 0%."""
super().__init__(parent, elidemode=Qt.ElideNone)
self._strings = self._calc_strings()
self._set_text = throttle.Throttle(self.setText, 100, parent=self)
self.set_perc(0, 0)
def set_raw(self):
self._strings = self._calc_strings(raw=True)
def _calc_strings(self, raw=False):
"""Pre-calculate strings for the statusbar."""
fmt = '[{:02}]' if raw else '[{:02}%]'
strings = {i: fmt.format(i) for i in range(1, 100)}
strings.update({0: '[top]', 100: '[bot]'})
return strings
@pyqtSlot(int, int)
def set_perc(self, x, y):
"""Setter to be used as a Qt slot.
Args:
x: The x percentage (int), currently ignored.
y: The y percentage (int)
"""
utils.unused(x)
self._set_text(self._strings.get(y, '[???]'))
def on_tab_changed(self, tab):
"""Update scroll position when tab changed."""
self.set_perc(*tab.scroller.pos_perc())
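# Illustrative sketch (assumption): what _calc_strings() precomputes, so that
# set_perc() only does a dict lookup per scroll event instead of formatting:
#     self._strings      -> {0: '[top]', 1: '[01%]', ..., 99: '[99%]', 100: '[bot]'}
#     after set_raw()    -> {0: '[top]', 1: '[01]',  ..., 99: '[99]',  100: '[bot]'}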
|
import re
import base64
import os.path
import ipaddress
import posixpath
import urllib.parse
from typing import Optional, Tuple, Union
from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import QHostInfo, QHostAddress, QNetworkProxy
from qutebrowser.api import cmdutils
from qutebrowser.config import config
from qutebrowser.utils import log, qtutils, message, utils
from qutebrowser.browser.network import pac
# FIXME: we probably could raise some exceptions on invalid URLs
# https://github.com/qutebrowser/qutebrowser/issues/108
# URL schemes supported by QtWebEngine
WEBENGINE_SCHEMES = [
'about',
'data',
'file',
'filesystem',
'ftp',
'http',
'https',
'javascript',
'ws',
'wss',
]
class Error(Exception):
"""Base class for errors in this module."""
class InvalidUrlError(Error):
"""Error raised if a function got an invalid URL."""
def __init__(self, url: QUrl) -> None:
if url.isValid():
raise ValueError("Got valid URL {}!".format(url.toDisplayString()))
self.url = url
self.msg = get_errstring(url)
super().__init__(self.msg)
def _parse_search_term(s: str) -> Tuple[Optional[str], Optional[str]]:
"""Get a search engine name and search term from a string.
Args:
s: The string to get a search engine for.
Return:
A (engine, term) tuple, where engine is None for the default engine.
"""
s = s.strip()
split = s.split(maxsplit=1)
if not split:
raise ValueError("Empty search term!")
if len(split) == 2:
if split[0] in config.val.url.searchengines:
engine: Optional[str] = split[0]
term: Optional[str] = split[1]
else:
engine = None
term = s
else:
if config.val.url.open_base_url and s in config.val.url.searchengines:
engine = s
term = None
else:
engine = None
term = s
log.url.debug("engine {}, term {!r}".format(engine, term))
return (engine, term)
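# Illustrative examples (assumption: a search engine named "wiki" is configured
# in url.searchengines, and url.open_base_url is enabled for the last case):
#     _parse_search_term("wiki grep")    -> ("wiki", "grep")
#     _parse_search_term("grep manual")  -> (None, "grep manual")
#     _parse_search_term("wiki")         -> ("wiki", None)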
def _get_search_url(txt: str) -> QUrl:
"""Get a search engine URL for a text.
Args:
txt: Text to search for.
Return:
The search URL as a QUrl.
"""
log.url.debug("Finding search engine for {!r}".format(txt))
engine, term = _parse_search_term(txt)
if not engine:
engine = 'DEFAULT'
if term:
template = config.val.url.searchengines[engine]
semiquoted_term = urllib.parse.quote(term)
quoted_term = urllib.parse.quote(term, safe='')
evaluated = template.format(semiquoted_term,
unquoted=term,
quoted=quoted_term,
semiquoted=semiquoted_term)
url = QUrl.fromUserInput(evaluated)
else:
url = QUrl.fromUserInput(config.val.url.searchengines[engine])
url.setPath(None) # type: ignore[arg-type]
url.setFragment(None) # type: ignore[arg-type]
url.setQuery(None) # type: ignore[call-overload]
qtutils.ensure_valid(url)
return url
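# Illustrative sketch (assumption): with a url.searchengines entry such as
#     DEFAULT: "https://example.com/search?q={}"
# the input "foo bar" resolves to https://example.com/search?q=foo%20bar, since
# the bare {} placeholder receives the semiquoted term; {quoted}, {semiquoted}
# and {unquoted} are also available, as formatted above.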
def _is_url_naive(urlstr: str) -> bool:
"""Naive check if given URL is really a URL.
Args:
urlstr: The URL to check for, as string.
Return:
True if the URL really is a URL, False otherwise.
"""
url = QUrl.fromUserInput(urlstr)
assert url.isValid()
host = url.host()
# Valid IPv4/IPv6 address. Qt converts things like "23.42" or "1337" or
# "0xDEAD" to IP addresses, which we don't like, so we check if the host
# from Qt is part of the input.
if (not utils.raises(ValueError, ipaddress.ip_address, host) and
host in urlstr):
return True
tld = r'\.([^.0-9_-]+|xn--[a-z0-9-]+)$'
forbidden = r'[\u0000-\u002c\u002f\u003a-\u0060\u007b-\u00b6]'
return bool(re.search(tld, host) and not re.search(forbidden, host))
def _is_url_dns(urlstr: str) -> bool:
"""Check if a URL is really a URL via DNS.
Args:
urlstr: The URL to check, as a string.
Return:
True if the URL really is a URL, False otherwise.
"""
url = QUrl.fromUserInput(urlstr)
assert url.isValid()
if (utils.raises(ValueError, ipaddress.ip_address, urlstr) and
not QHostAddress(urlstr).isNull()):
log.url.debug("Bogus IP URL -> False")
# Qt treats things like "23.42" or "1337" or "0xDEAD" as valid URLs
# which we don't want.
return False
host = url.host()
if not host:
log.url.debug("URL has no host -> False")
return False
log.url.debug("Doing DNS request for {}".format(host))
info = QHostInfo.fromName(host)
return not info.error()
def fuzzy_url(urlstr: str,
cwd: str = None,
relative: bool = False,
do_search: bool = True,
force_search: bool = False) -> QUrl:
"""Get a QUrl based on a user input which is URL or search term.
Args:
urlstr: URL to load as a string.
cwd: The current working directory, or None.
relative: Whether to resolve relative files.
do_search: Whether to perform a search on non-URLs.
force_search: Whether to force a search even if the content can be
interpreted as a URL or a path.
Return:
A target QUrl to a search page or the original URL.
"""
urlstr = urlstr.strip()
path = get_path_if_valid(urlstr, cwd=cwd, relative=relative,
check_exists=True)
if not force_search and path is not None:
url = QUrl.fromLocalFile(path)
elif force_search or (do_search and not is_url(urlstr)):
# probably a search term
log.url.debug("URL is a fuzzy search term")
try:
url = _get_search_url(urlstr)
except ValueError: # invalid search engine
url = QUrl.fromUserInput(urlstr)
else: # probably an address
log.url.debug("URL is a fuzzy address")
url = QUrl.fromUserInput(urlstr)
log.url.debug("Converting fuzzy term {!r} to URL -> {}".format(
urlstr, url.toDisplayString()))
ensure_valid(url)
return url
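# Illustrative behavior sketch (assumption: default settings, no matching local file
# for the first two inputs; the path in the last line is hypothetical):
#     fuzzy_url("example.com")   -> QUrl("http://example.com")
#     fuzzy_url("foo bar baz")   -> search URL built by _get_search_url("foo bar baz")
#     fuzzy_url("~/notes.txt")   -> file:// URL, if that path exists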
def _has_explicit_scheme(url: QUrl) -> bool:
"""Check if a url has an explicit scheme given.
Args:
url: The URL as QUrl.
"""
# Note that generic URI syntax actually would allow a second colon
# after the scheme delimiter. Since we don't know of any URIs
# using this and want to support e.g. searching for scoped C++
# symbols, we treat this as not a URI anyways.
return bool(url.isValid() and url.scheme() and
(url.host() or url.path()) and
not url.path().startswith(':'))
def is_special_url(url: QUrl) -> bool:
"""Return True if url is an about:... or other special URL.
Args:
url: The URL as QUrl.
"""
if not url.isValid():
return False
special_schemes = ('about', 'qute', 'file')
return url.scheme() in special_schemes
def is_url(urlstr: str) -> bool:
"""Check if url seems to be a valid URL.
Args:
urlstr: The URL as string.
Return:
True if it is a valid URL, False otherwise.
"""
autosearch = config.val.url.auto_search
log.url.debug("Checking if {!r} is a URL (autosearch={}).".format(
urlstr, autosearch))
urlstr = urlstr.strip()
qurl = QUrl(urlstr)
qurl_userinput = QUrl.fromUserInput(urlstr)
if autosearch == 'never':
# no autosearch, so everything is a URL unless it has an explicit
# search engine.
try:
engine, _term = _parse_search_term(urlstr)
except ValueError:
return False
else:
return engine is None
if not qurl_userinput.isValid():
# This will also catch non-URLs containing spaces.
return False
if _has_explicit_scheme(qurl) and ' ' not in urlstr:
# URLs with explicit schemes are always URLs
log.url.debug("Contains explicit scheme")
url = True
elif (autosearch == 'schemeless' and
(not _has_explicit_scheme(qurl) or ' ' in urlstr)):
# When autosearch=schemeless, URLs must contain schemes to be valid
log.url.debug("No explicit scheme in given URL, treating as non-URL")
url = False
elif qurl_userinput.host() in ['localhost', '127.0.0.1', '::1']:
log.url.debug("Is localhost.")
url = True
elif is_special_url(qurl):
# Special URLs are always URLs, even with autosearch=never
log.url.debug("Is a special URL.")
url = True
elif autosearch == 'dns':
log.url.debug("Checking via DNS check")
# We want to use QUrl.fromUserInput here, as the user might enter
# "foo.de" and that should be treated as URL here.
url = ' ' not in qurl_userinput.userName() and _is_url_dns(urlstr)
elif autosearch == 'naive':
log.url.debug("Checking via naive check")
url = ' ' not in qurl_userinput.userName() and _is_url_naive(urlstr)
else: # pragma: no cover
raise ValueError("Invalid autosearch value")
log.url.debug("url = {}".format(url))
return url
def ensure_valid(url: QUrl) -> None:
if not url.isValid():
raise InvalidUrlError(url)
def invalid_url_error(url: QUrl, action: str) -> None:
"""Display an error message for a URL.
Args:
action: The action which was interrupted by the error.
"""
if url.isValid():
raise ValueError("Calling invalid_url_error with valid URL {}".format(
url.toDisplayString()))
errstring = get_errstring(
url, "Trying to {} with invalid URL".format(action))
message.error(errstring)
def raise_cmdexc_if_invalid(url: QUrl) -> None:
"""Check if the given QUrl is invalid, and if so, raise a CommandError."""
try:
ensure_valid(url)
except InvalidUrlError as e:
raise cmdutils.CommandError(str(e))
def get_path_if_valid(pathstr: str,
cwd: str = None,
relative: bool = False,
check_exists: bool = False) -> Optional[str]:
"""Check if path is a valid path.
Args:
pathstr: The path as string.
cwd: The current working directory, or None.
relative: Whether to resolve relative files.
check_exists: Whether to check if the file
actually exists on the filesystem.
Return:
The path if it is a valid path, None otherwise.
"""
pathstr = pathstr.strip()
log.url.debug("Checking if {!r} is a path".format(pathstr))
expanded = os.path.expanduser(pathstr)
if os.path.isabs(expanded):
path: Optional[str] = expanded
elif relative and cwd:
path = os.path.join(cwd, expanded)
elif relative:
try:
path = os.path.abspath(expanded)
except OSError:
path = None
else:
path = None
if check_exists:
if path is not None:
try:
if os.path.exists(path):
log.url.debug("URL is a local file")
else:
path = None
except UnicodeEncodeError:
log.url.debug(
"URL contains characters which are not present in the "
"current locale")
path = None
return path
def filename_from_url(url: QUrl) -> Optional[str]:
"""Get a suitable filename from a URL.
Args:
url: The URL to parse, as a QUrl.
Return:
The suggested filename as a string, or None.
"""
if not url.isValid():
return None
pathname = posixpath.basename(url.path())
if pathname:
return pathname
elif url.host():
return url.host() + '.html'
else:
return None
HostTupleType = Tuple[str, str, int]
def host_tuple(url: QUrl) -> HostTupleType:
"""Get a (scheme, host, port) tuple from a QUrl.
This is suitable to identify a connection, e.g. for SSL errors.
"""
ensure_valid(url)
scheme, host, port = url.scheme(), url.host(), url.port()
assert scheme
if not host:
raise ValueError("Got URL {} without host.".format(
url.toDisplayString()))
if port == -1:
port_mapping = {
'http': 80,
'https': 443,
'ftp': 21,
}
try:
port = port_mapping[scheme]
except KeyError:
raise ValueError("Got URL {} with unknown port.".format(
url.toDisplayString()))
return scheme, host, port
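# Illustrative examples (assumption):
#     host_tuple(QUrl("https://example.com/index.html"))  -> ('https', 'example.com', 443)
#     host_tuple(QUrl("http://example.com:8080/"))        -> ('http', 'example.com', 8080)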
def get_errstring(url: QUrl, base: str = "Invalid URL") -> str:
"""Get an error string for a URL.
Args:
url: The URL as a QUrl.
base: The base error string.
Return:
A new string with url.errorString() is appended if available.
"""
url_error = url.errorString()
if url_error:
return base + " - {}".format(url_error)
else:
return base
def same_domain(url1: QUrl, url2: QUrl) -> bool:
"""Check if url1 and url2 belong to the same website.
This will use a "public suffix list" to determine what a "top level domain"
is. All further domains are ignored.
For example, example.com and www.example.com are considered the same, but
example.co.uk and test.co.uk are not.
Return:
True if the domains are the same, False otherwise.
"""
ensure_valid(url1)
ensure_valid(url2)
suffix1 = url1.topLevelDomain()
suffix2 = url2.topLevelDomain()
if not suffix1:
return url1.host() == url2.host()
if suffix1 != suffix2:
return False
domain1 = url1.host()[:-len(suffix1)].split('.')[-1]
domain2 = url2.host()[:-len(suffix2)].split('.')[-1]
return domain1 == domain2
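# Illustrative examples (assumption), matching the docstring above:
#     same_domain(QUrl("http://example.com"), QUrl("http://www.example.com"))  -> True
#     same_domain(QUrl("http://example.co.uk"), QUrl("http://test.co.uk"))     -> False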
def encoded_url(url: QUrl) -> str:
"""Return the fully encoded url as string.
Args:
url: The url to encode as QUrl.
"""
return url.toEncoded().data().decode('ascii')
def file_url(path: str) -> str:
"""Return a file:// url (as string) to the given local path.
Arguments:
path: The absolute path to the local file
"""
url = QUrl.fromLocalFile(path)
return url.toString(QUrl.FullyEncoded) # type: ignore[arg-type]
def data_url(mimetype: str, data: bytes) -> QUrl:
"""Get a data: QUrl for the given data."""
b64 = base64.b64encode(data).decode('ascii')
url = QUrl('data:{};base64,{}'.format(mimetype, b64))
qtutils.ensure_valid(url)
return url
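# Illustrative example (assumption):
#     data_url('text/plain', b'hello')  -> QUrl('data:text/plain;base64,aGVsbG8=')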
def safe_display_string(qurl: QUrl) -> str:
"""Get a IDN-homograph phishing safe form of the given QUrl.
If we're dealing with a Punycode-encoded URL, this prepends the hostname in
its encoded form, to make sure those URLs are distinguishable.
See https://github.com/qutebrowser/qutebrowser/issues/2547
and https://bugreports.qt.io/browse/QTBUG-60365
"""
ensure_valid(qurl)
host = qurl.host(QUrl.FullyEncoded)
assert '..' not in host, qurl # https://bugreports.qt.io/browse/QTBUG-60364
for part in host.split('.'):
url_host = qurl.host(QUrl.FullyDecoded)
if part.startswith('xn--') and host != url_host:
return '({}) {}'.format(host, qurl.toDisplayString())
return qurl.toDisplayString()
class InvalidProxyTypeError(Exception):
"""Error raised when proxy_from_url gets an unknown proxy type."""
def __init__(self, typ: str) -> None:
super().__init__("Invalid proxy type {}!".format(typ))
def proxy_from_url(url: QUrl) -> Union[QNetworkProxy, pac.PACFetcher]:
"""Create a QNetworkProxy from QUrl and a proxy type.
Args:
url: URL of a proxy (possibly with credentials).
Return:
New QNetworkProxy.
"""
ensure_valid(url)
scheme = url.scheme()
if scheme in ['pac+http', 'pac+https', 'pac+file']:
fetcher = pac.PACFetcher(url)
fetcher.fetch()
return fetcher
types = {
'http': QNetworkProxy.HttpProxy,
'socks': QNetworkProxy.Socks5Proxy,
'socks5': QNetworkProxy.Socks5Proxy,
'direct': QNetworkProxy.NoProxy,
}
if scheme not in types:
raise InvalidProxyTypeError(scheme)
proxy = QNetworkProxy(types[scheme], url.host())
if url.port() != -1:
proxy.setPort(url.port())
if url.userName():
proxy.setUser(url.userName())
if url.password():
proxy.setPassword(url.password())
return proxy
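# Illustrative examples (assumption):
#     proxy_from_url(QUrl("socks5://user:pass@localhost:9050"))
#         -> QNetworkProxy(Socks5Proxy) with host, port and credentials set
#     proxy_from_url(QUrl("pac+https://example.com/proxy.pac"))
#         -> pac.PACFetcher with the fetch already started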
def parse_javascript_url(url: QUrl) -> str:
"""Get JavaScript source from the given URL.
See https://wiki.whatwg.org/wiki/URL_schemes#javascript:_URLs
and https://github.com/whatwg/url/issues/385
"""
ensure_valid(url)
if url.scheme() != 'javascript':
raise Error("Expected a javascript:... URL")
if url.authority():
raise Error("URL contains unexpected components: {}"
.format(url.authority()))
urlstr = url.toString(QUrl.FullyEncoded) # type: ignore[arg-type]
urlstr = urllib.parse.unquote(urlstr)
code = urlstr[len('javascript:'):]
if not code:
raise Error("Resulted in empty JavaScript code")
return code
|
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Raspberry Pi Power Supply Checker component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Raspberry Pi Power Supply Checker from a config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "binary_sensor")
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
return await hass.config_entries.async_forward_entry_unload(entry, "binary_sensor")
|
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_EMAIL,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from . import PoolSenseEntity
from .const import ATTRIBUTION, DOMAIN
SENSORS = {
"Chlorine": {
"unit": "mV",
"icon": "mdi:pool",
"name": "Chlorine",
"device_class": None,
},
"pH": {"unit": None, "icon": "mdi:pool", "name": "pH", "device_class": None},
"Battery": {
"unit": PERCENTAGE,
"icon": None,
"name": "Battery",
"device_class": DEVICE_CLASS_BATTERY,
},
"Water Temp": {
"unit": TEMP_CELSIUS,
"icon": "mdi:coolant-temperature",
"name": "Temperature",
"device_class": DEVICE_CLASS_TEMPERATURE,
},
"Last Seen": {
"unit": None,
"icon": "mdi:clock",
"name": "Last Seen",
"device_class": DEVICE_CLASS_TIMESTAMP,
},
"Chlorine High": {
"unit": "mV",
"icon": "mdi:pool",
"name": "Chlorine High",
"device_class": None,
},
"Chlorine Low": {
"unit": "mV",
"icon": "mdi:pool",
"name": "Chlorine Low",
"device_class": None,
},
"pH High": {
"unit": None,
"icon": "mdi:pool",
"name": "pH High",
"device_class": None,
},
"pH Low": {
"unit": None,
"icon": "mdi:pool",
"name": "pH Low",
"device_class": None,
},
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Defer sensor setup to the shared sensor module."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
sensors_list = []
for sensor in SENSORS:
sensors_list.append(
PoolSenseSensor(coordinator, config_entry.data[CONF_EMAIL], sensor)
)
async_add_entities(sensors_list, False)
class PoolSenseSensor(PoolSenseEntity, Entity):
"""Sensor representing poolsense data."""
@property
def name(self):
"""Return the name of the particular component."""
return f"PoolSense {SENSORS[self.info_type]['name']}"
@property
def state(self):
"""State of the sensor."""
return self.coordinator.data[self.info_type]
@property
def device_class(self):
"""Return the device class."""
return SENSORS[self.info_type]["device_class"]
@property
def icon(self):
"""Return the icon."""
return SENSORS[self.info_type]["icon"]
@property
def unit_of_measurement(self):
"""Return unit of measurement."""
return SENSORS[self.info_type]["unit"]
@property
def device_state_attributes(self):
"""Return device attributes."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
|
from __future__ import division
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainercv.links.model.fpn.misc import scale_img
class FasterRCNN(chainer.Chain):
"""Base class of Faster R-CNN with FPN.
This is a base class of Faster R-CNN with FPN.
Args:
extractor (Link): A link that extracts feature maps.
This link must have :obj:`scales`, :obj:`mean` and
:meth:`forward`.
rpn (Link): A link that has the same interface as
:class:`~chainercv.links.model.fpn.RPN`.
Please refer to the documentation found there.
bbox_head (Link): A link that has the same interface as
:class:`~chainercv.links.model.fpn.BboxHead`.
Please refer to the documentation found there.
mask_head (Link): A link that has the same interface as
:class:`~chainercv.links.model.fpn.MaskHead`.
Please refer to the documentation found there.
return_values (list of strings): Determines the values
returned by :meth:`predict`.
min_size (int): A preprocessing parameter for :meth:`prepare`. Please
refer to the docstring of :meth:`prepare`.
max_size (int): A preprocessing parameter for :meth:`prepare`. Note
that the result of :meth:`prepare` can exceed this size due to
alignment with stride.
Parameters:
nms_thresh (float): The threshold value
for :func:`~chainercv.utils.non_maximum_suppression`.
The default value is :obj:`0.45`.
This value can be changed directly or by using :meth:`use_preset`.
score_thresh (float): The threshold value for confidence score.
A bounding box whose confidence score is lower than this value
will be suppressed.
The default value is :obj:`0.6`.
This value can be changed directly or by using :meth:`use_preset`.
"""
stride = 32
_acceptable_return_values = ('rois', 'bboxes', 'labels', 'scores', 'masks')
def __init__(self, extractor, rpn, bbox_head,
mask_head, return_values,
min_size=800, max_size=1333):
for value_name in return_values:
if value_name not in self._acceptable_return_values:
raise ValueError(
'{} is not included in accepted value names {}'.format(
value_name, self._acceptable_return_values))
self._return_values = return_values
self._store_rpn_outputs = 'rois' in self._return_values
self._run_bbox = bool(
set(self._return_values) & {'bboxes', 'labels', 'scores', 'masks'})
self._run_mask = 'masks' in self._return_values
super(FasterRCNN, self).__init__()
with self.init_scope():
self.extractor = extractor
self.rpn = rpn
if self._run_bbox:
self.bbox_head = bbox_head
if self._run_mask:
self.mask_head = mask_head
self._min_size = min_size
self._max_size = max_size
self.use_preset('visualize')
def use_preset(self, preset):
"""Use the given preset during prediction.
This method changes values of :obj:`nms_thresh` and
:obj:`score_thresh`. These values are a threshold value
used for non maximum suppression and a threshold value
to discard low confidence proposals in :meth:`predict`,
respectively.
If the attributes need to be changed to something
other than the values provided in the presets, please modify
them by directly accessing the public attributes.
Args:
preset ({'visualize', 'evaluate'}): A string to determine the
preset to use.
"""
if preset == 'visualize':
self.nms_thresh = 0.5
self.score_thresh = 0.7
elif preset == 'evaluate':
self.nms_thresh = 0.5
self.score_thresh = 0.05
else:
raise ValueError('preset must be visualize or evaluate')
def forward(self, x):
assert not chainer.config.train
hs = self.extractor(x)
rpn_locs, rpn_confs = self.rpn(hs)
anchors = self.rpn.anchors(h.shape[2:] for h in hs)
rois, roi_indices = self.rpn.decode(
rpn_locs, rpn_confs, anchors, x.shape)
return hs, rois, roi_indices
def predict(self, imgs):
"""Conduct inference on the given images.
The value returned by this method is decided based on
the argument :obj:`return_values` of :meth:`__init__`.
Examples:
>>> from chainercv.links import FasterRCNNFPNResNet50
>>> model = FasterRCNNFPNResNet50(
... pretrained_model='coco',
... return_values=['rois', 'bboxes', 'labels', 'scores'])
>>> rois, bboxes, labels, scores = model.predict(imgs)
Args:
imgs (iterable of numpy.ndarray): Inputs.
Returns:
tuple of lists:
The table below shows the input and possible outputs.
.. csv-table::
:header: Input name, shape, dtype, format
:obj:`imgs`, ":math:`[(3, H, W)]`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
.. csv-table::
:header: Output name, shape, dtype, format
:obj:`rois`, ":math:`[(R', 4)]`", :obj:`float32`, \
":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
:obj:`bboxes`, ":math:`[(R, 4)]`", :obj:`float32`, \
":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
:obj:`scores`, ":math:`[(R,)]`", :obj:`float32`, \
--
:obj:`labels`, ":math:`[(R,)]`", :obj:`int32`, \
":math:`[0, \#fg\_class - 1]`"
:obj:`masks`, ":math:`[(R, H, W)]`", :obj:`bool`, --
"""
outputs = {}
sizes = [img.shape[1:] for img in imgs]
x, scales = self.prepare(imgs)
with chainer.using_config('train', False), chainer.no_backprop_mode():
hs, rpn_rois, rpn_roi_indices = self(x)
if self._store_rpn_outputs:
rpn_rois_cpu = [
chainer.backends.cuda.to_cpu(rpn_roi) / scale
for rpn_roi, scale in
zip(_flat_to_list(rpn_rois, rpn_roi_indices, len(imgs)),
scales)]
outputs.update({'rois': rpn_rois_cpu})
if self._run_bbox:
bbox_rois, bbox_roi_indices = self.bbox_head.distribute(
rpn_rois, rpn_roi_indices)
with chainer.using_config(
'train', False), chainer.no_backprop_mode():
head_locs, head_confs = self.bbox_head(
hs, bbox_rois, bbox_roi_indices)
bboxes, labels, scores = self.bbox_head.decode(
bbox_rois, bbox_roi_indices, head_locs, head_confs,
scales, sizes, self.nms_thresh, self.score_thresh)
bboxes_cpu = [
chainer.backends.cuda.to_cpu(bbox) for bbox in bboxes]
labels_cpu = [
chainer.backends.cuda.to_cpu(label) for label in labels]
scores_cpu = [cuda.to_cpu(score) for score in scores]
outputs.update({'bboxes': bboxes_cpu, 'labels': labels_cpu,
'scores': scores_cpu})
if self._run_mask:
rescaled_bboxes = [bbox * scale
for scale, bbox in zip(scales, bboxes)]
# Change bboxes to RoI and RoI indices format
mask_rois_before_reordering, mask_roi_indices_before_reordering =\
_list_to_flat(rescaled_bboxes)
mask_rois, mask_roi_indices, order = self.mask_head.distribute(
mask_rois_before_reordering,
mask_roi_indices_before_reordering)
with chainer.using_config(
'train', False), chainer.no_backprop_mode():
segms = F.sigmoid(
self.mask_head(hs, mask_rois, mask_roi_indices)).data
# Put the order of proposals back to the one used by bbox head.
segms = segms[order]
segms = _flat_to_list(
segms, mask_roi_indices_before_reordering, len(imgs))
segms = [segm if segm is not None else
self.xp.zeros(
(0, self.mask_head.segm_size,
self.mask_head.segm_size), dtype=np.float32)
for segm in segms]
segms = [chainer.backends.cuda.to_cpu(segm) for segm in segms]
# Currently MaskHead only supports numpy inputs
masks_cpu = self.mask_head.decode(
segms, bboxes_cpu, labels_cpu, sizes)
outputs.update({'masks': masks_cpu})
return tuple([outputs[key] for key in self._return_values])
def prepare(self, imgs):
"""Preprocess images.
Args:
imgs (iterable of numpy.ndarray): Arrays holding images.
All images are in CHW and RGB format
and the range of their value is :math:`[0, 255]`.
Returns:
Two arrays: preprocessed images and \
scales that were calculated in preprocessing.
"""
scales = []
resized_imgs = []
for img in imgs:
img, scale = scale_img(
img, self._min_size, self._max_size)
img -= self.extractor.mean
scales.append(scale)
resized_imgs.append(img)
pad_size = np.array(
[im.shape[1:] for im in resized_imgs]).max(axis=0)
pad_size = (
np.ceil(pad_size / self.stride) * self.stride).astype(int)
x = np.zeros(
(len(imgs), 3, pad_size[0], pad_size[1]), dtype=np.float32)
for i, im in enumerate(resized_imgs):
_, H, W = im.shape
x[i, :, :H, :W] = im
x = self.xp.array(x)
return x, scales
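# Illustrative sketch of prepare() (assumption): with stride = 32, resized images
# of shapes (3, 800, 1067) and (3, 750, 1333) are padded into a single batch of
# shape (2, 3, 800, 1344); each spatial size of the batch is the per-axis maximum
# rounded up to a multiple of the stride.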
def _list_to_flat(array_list):
xp = chainer.backends.cuda.get_array_module(array_list[0])
indices = xp.concatenate(
[i * xp.ones((len(array),), dtype=np.int32) for
i, array in enumerate(array_list)], axis=0)
flat = xp.concatenate(array_list, axis=0)
return flat, indices
def _flat_to_list(flat, indices, B):
array_list = []
for i in range(B):
array = flat[indices == i]
if len(array) > 0:
array_list.append(array)
else:
array_list.append(None)
return array_list
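# Illustrative sketch (assumption, plain NumPy arrays): how the two helpers above relate.
#     arrays = [np.zeros((2, 4), dtype=np.float32), np.zeros((3, 4), dtype=np.float32)]
#     flat, indices = _list_to_flat(arrays)   # flat.shape == (5, 4); indices == [0, 0, 1, 1, 1]
#     _flat_to_list(flat, indices, B=2)       # -> two arrays of shape (2, 4) and (3, 4) again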
|
from datetime import timedelta
import lupupy.constants as CONST
from homeassistant.components.switch import SwitchEntity
from . import DOMAIN as LUPUSEC_DOMAIN, LupusecDevice
SCAN_INTERVAL = timedelta(seconds=2)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Lupusec switch devices."""
if discovery_info is None:
return
data = hass.data[LUPUSEC_DOMAIN]
devices = []
for device in data.lupusec.get_devices(generic_type=CONST.TYPE_SWITCH):
devices.append(LupusecSwitch(data, device))
add_entities(devices)
class LupusecSwitch(LupusecDevice, SwitchEntity):
"""Representation of a Lupusec switch."""
def turn_on(self, **kwargs):
"""Turn on the device."""
self._device.switch_on()
def turn_off(self, **kwargs):
"""Turn off the device."""
self._device.switch_off()
@property
def is_on(self):
"""Return true if device is on."""
return self._device.is_on
|
import datetime
from homeassistant.components import geo_location
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.components.qld_bushfire.geo_location import (
ATTR_CATEGORY,
ATTR_EXTERNAL_ID,
ATTR_PUBLICATION_DATE,
ATTR_STATUS,
ATTR_UPDATED_DATE,
SCAN_INTERVAL,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, call, patch
from tests.common import assert_setup_component, async_fire_time_changed
CONFIG = {geo_location.DOMAIN: [{"platform": "qld_bushfire", CONF_RADIUS: 200}]}
CONFIG_WITH_CUSTOM_LOCATION = {
geo_location.DOMAIN: [
{
"platform": "qld_bushfire",
CONF_RADIUS: 200,
CONF_LATITUDE: 40.4,
CONF_LONGITUDE: -3.7,
}
]
}
def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
category=None,
attribution=None,
published=None,
updated=None,
status=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.category = category
feed_entry.attribution = attribution
feed_entry.published = published
feed_entry.updated = updated
feed_entry.status = status
return feed_entry
async def test_setup(hass):
"""Test the general setup of the platform."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(38.0, -3.0),
category="Category 1",
attribution="Attribution 1",
published=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
updated=datetime.datetime(2018, 9, 22, 8, 10, tzinfo=datetime.timezone.utc),
status="Status 1",
)
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (38.1, -3.1))
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (38.2, -3.2))
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (38.3, -3.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"georss_qld_bushfire_alert_client.QldBushfireAlertFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_2, mock_entry_3],
)
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_CATEGORY: "Category 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_PUBLICATION_DATE: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_UPDATED_DATE: datetime.datetime(
2018, 9, 22, 8, 10, tzinfo=datetime.timezone.utc
),
ATTR_STATUS: "Status 1",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "qld_bushfire",
ATTR_ICON: "mdi:fire",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "qld_bushfire",
ATTR_ICON: "mdi:fire",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "qld_bushfire",
ATTR_ICON: "mdi:fire",
}
assert float(state.state) == 25.5
# Simulate an update - one existing, one new entry,
# one outdated entry
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_4, mock_entry_3],
)
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate a failed update, which removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234", "Title 1", 20.5, (38.1, -3.1), category="Category 1"
)
with patch("georss_qld_bushfire_alert_client.QldBushfireAlertFeed") as mock_feed:
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert mock_feed.call_args == call(
(40.4, -3.7), filter_categories=[], filter_radius=200.0
)
|
import numpy as np
import six
from chainercv.utils.testing.assertions.assert_is_image import assert_is_image
from chainercv.utils.testing.assertions.assert_is_point import assert_is_point
def assert_is_point_dataset(dataset, n_point=None, n_example=None,
no_visible=False):
"""Checks if a dataset satisfies the point dataset API.
This function checks if a given dataset satisfies the point dataset
API or not.
If the dataset does not satisfy the API, this function raises an
:class:`AssertionError`.
Args:
dataset: A dataset to be checked.
n_point (int): The number of expected points per image.
If this is :obj:`None`, the number of points per image can be
arbitrary.
n_example (int): The number of examples to be checked.
If this argument is specified, this function picks
examples randomly and checks them. Otherwise,
this function checks all examples.
no_visible (bool): If :obj:`True`, we assume that
:obj:`visible` is never contained.
If :obj:`False`, :obj:`visible` may or may not be contained.
"""
assert len(dataset) > 0, 'The length of dataset must be greater than zero.'
if n_example:
for _ in six.moves.range(n_example):
i = np.random.randint(0, len(dataset))
_check_example(dataset[i], n_point, no_visible)
else:
for i in six.moves.range(len(dataset)):
_check_example(dataset[i], n_point, no_visible)
def _check_example(example, n_point=None, no_visible=False):
assert len(example) >= 2, \
'Each example must have at least two elements: ' \
'img, point (visible is optional).'
if len(example) == 2 or no_visible:
img, point = example[:2]
visible = None
elif len(example) >= 3:
img, point, visible = example[:3]
assert_is_image(img, color=True)
assert_is_point(point, visible, img.shape[1:], n_point)
|
import mne
from mne.bem import convert_flash_mris, make_flash_bem
def run():
"""Run command."""
from mne.commands.utils import get_optparser
parser = get_optparser(__file__)
parser.add_option("-s", "--subject", dest="subject",
help="Subject name", default=None)
parser.add_option("-d", "--subjects-dir", dest="subjects_dir",
help="Subjects directory", default=None)
parser.add_option("-3", "--noflash30", dest="noflash30",
action="store_true", default=False,
help=("Skip the 30-degree flip angle data"),)
parser.add_option("-n", "--noconvert", dest="noconvert",
action="store_true", default=False,
help=("Assume that the Flash MRI images have already "
"been converted to mgz files"))
parser.add_option("-u", "--unwarp", dest="unwarp",
action="store_true", default=False,
help=("Run grad_unwarp with -unwarp <type> option on "
"each of the converted data sets"))
parser.add_option("-o", "--overwrite", dest="overwrite",
action="store_true", default=False,
help="Write over existing .surf files in bem folder")
parser.add_option("-v", "--view", dest="show", action="store_true",
help="Show BEM model in 3D for visual inspection",
default=False)
parser.add_option("--copy", dest="copy",
help="Use copies instead of symlinks for surfaces",
action="store_true")
parser.add_option("-p", "--flash-path", dest="flash_path",
default=None,
help="The directory containing flash05.mgz and "
"flash30.mgz files (defaults to "
"$SUBJECTS_DIR/$SUBJECT/mri/flash/parameter_maps")
options, args = parser.parse_args()
subject = options.subject
subjects_dir = options.subjects_dir
flash30 = not options.noflash30
convert = not options.noconvert
unwarp = options.unwarp
overwrite = options.overwrite
show = options.show
flash_path = options.flash_path
copy = options.copy
if options.subject is None:
parser.print_help()
raise RuntimeError('The subject argument must be set')
convert_flash_mris(subject=subject, subjects_dir=subjects_dir,
flash30=flash30, convert=convert, unwarp=unwarp,
verbose=True)
make_flash_bem(subject=subject, subjects_dir=subjects_dir,
overwrite=overwrite, show=show, flash_path=flash_path,
copy=copy, verbose=True)
mne.utils.run_command_if_main()
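# Illustrative command-line usage sketch (assumption), using the options defined above:
#     mne flash_bem --subject sample --subjects-dir /data/subjects --view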
|
from datetime import timedelta
import socket
import ssl
from homeassistant.components.cert_expiry.const import DOMAIN
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY
from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.util.dt import utcnow
from .const import HOST, PORT
from .helpers import future_timestamp, static_datetime
from tests.async_mock import patch
from tests.common import MockConfigEntry, async_fire_time_changed
@patch("homeassistant.util.dt.utcnow", return_value=static_datetime())
async def test_async_setup_entry(mock_now, hass):
"""Test async_setup_entry."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
)
timestamp = future_timestamp(100)
with patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=timestamp,
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "100"
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
state = hass.states.get("sensor.cert_expiry_timestamp_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == timestamp.isoformat()
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
async def test_async_setup_entry_bad_cert(hass):
"""Test async_setup_entry with a bad/expired cert."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
)
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=ssl.SSLError("some error"),
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "0"
assert state.attributes.get("error") == "some error"
assert not state.attributes.get("is_valid")
async def test_async_setup_entry_host_unavailable(hass):
"""Test async_setup_entry when host is unavailable."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
)
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=socket.gaierror,
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id) is False
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_SETUP_RETRY
next_update = utcnow() + timedelta(seconds=45)
async_fire_time_changed(hass, next_update)
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=socket.gaierror,
):
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is None
async def test_update_sensor(hass):
"""Test async_update for sensor."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
)
starting_time = static_datetime()
timestamp = future_timestamp(100)
with patch("homeassistant.util.dt.utcnow", return_value=starting_time), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=timestamp,
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "100"
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
state = hass.states.get("sensor.cert_expiry_timestamp_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == timestamp.isoformat()
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
next_update = starting_time + timedelta(hours=24)
with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=timestamp,
):
async_fire_time_changed(hass, utcnow() + timedelta(hours=24))
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "99"
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
state = hass.states.get("sensor.cert_expiry_timestamp_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == timestamp.isoformat()
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
async def test_update_sensor_network_errors(hass):
"""Test async_update for sensor."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
)
starting_time = static_datetime()
timestamp = future_timestamp(100)
with patch("homeassistant.util.dt.utcnow", return_value=starting_time), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=timestamp,
):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "100"
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
state = hass.states.get("sensor.cert_expiry_timestamp_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == timestamp.isoformat()
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
next_update = starting_time + timedelta(hours=24)
with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=socket.gaierror,
):
async_fire_time_changed(hass, utcnow() + timedelta(hours=24))
await hass.async_block_till_done()
next_update = starting_time + timedelta(hours=48)
state = hass.states.get("sensor.cert_expiry_example_com")
assert state.state == STATE_UNAVAILABLE
with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=timestamp,
):
async_fire_time_changed(hass, utcnow() + timedelta(hours=48))
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "98"
assert state.attributes.get("error") == "None"
assert state.attributes.get("is_valid")
next_update = starting_time + timedelta(hours=72)
with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=ssl.SSLError("something bad"),
):
async_fire_time_changed(hass, utcnow() + timedelta(hours=72))
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "0"
assert state.attributes.get("error") == "something bad"
assert not state.attributes.get("is_valid")
state = hass.states.get("sensor.cert_expiry_timestamp_example_com")
assert state is not None
assert state.state == STATE_UNKNOWN
assert state.attributes.get("error") == "something bad"
assert not state.attributes.get("is_valid")
next_update = starting_time + timedelta(hours=96)
with patch("homeassistant.util.dt.utcnow", return_value=next_update), patch(
"homeassistant.components.cert_expiry.helper.get_cert", side_effect=Exception()
):
async_fire_time_changed(hass, utcnow() + timedelta(hours=96))
await hass.async_block_till_done()
state = hass.states.get("sensor.cert_expiry_example_com")
assert state.state == STATE_UNAVAILABLE
|
import posixpath
from absl import flags
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import vm_util
VM_TMP_DIR = vm_util.VM_TMP_DIR
TF_SERVING_BASE_DIRECTORY = posixpath.join(linux_packages.INSTALL_DIR,
'serving')
FLAGS = flags.FLAGS
# Supported versions include TF Serving 1.x.
flags.DEFINE_string('tf_serving_branch', 'r1.15', 'GitHub branch to pull from')
def InstallTensorFlowServingAPI(vm):
"""Installs TF Serving API on the vm.
Currently this is only useful so that the clients can run python
scripts that import tensorflow_serving. The server vms make no use
of it.
Args:
vm: VM to operate on.
"""
pip_package_output_dir = posixpath.join(VM_TMP_DIR, 'tf_serving_pip_package')
pip_package = posixpath.join(pip_package_output_dir,
'tensorflow_serving_api*.whl')
vm.Install('pip')
# Build the pip package from the same source as the serving binary
vm.RemoteCommand('sudo docker run --rm -v {0}:{0} '
'benchmarks/tensorflow-serving-devel '
'bash -c "bazel build --config=nativeopt '
'tensorflow_serving/tools/pip_package:build_pip_package && '
'bazel-bin/tensorflow_serving/tools/pip_package/'
'build_pip_package {0}"'.format(pip_package_output_dir))
vm.RemoteCommand('sudo pip install {0}'.format(pip_package))
def BuildDockerImages(vm):
"""Builds the Docker images from source Dockerfiles for a pre-built env."""
vm.InstallPackages('git')
vm.RemoteHostCommand('cd {0} && git clone -b {1} '
'https://github.com/tensorflow/serving'.format(
linux_packages.INSTALL_DIR, FLAGS.tf_serving_branch))
setup_script = posixpath.join(
linux_packages.INSTALL_DIR,
'serving/tensorflow_serving/tools/docker/Dockerfile.devel')
# Changes the TensorFlow git branch to tf_serving_branch
vm_util.ReplaceText(vm, 'ARG TF_SERVING_VERSION_GIT_BRANCH=master',
'ARG TF_SERVING_VERSION_GIT_BRANCH={}'
.format(FLAGS.tf_serving_branch), setup_script)
# Build an optimized binary for TF Serving, and keep all the build artifacts
vm.RemoteHostCommand(
'sudo docker build --target binary_build '
'-t benchmarks/tensorflow-serving-devel '
'-f {0}/tensorflow_serving/tools/docker/Dockerfile.devel '
'{0}/tensorflow_serving/tools/docker/'.format(TF_SERVING_BASE_DIRECTORY))
# Create a serving image with the optimized model_server binary
vm.RemoteHostCommand(
'sudo docker build '
'-t benchmarks/tensorflow-serving '
'--build-arg '
'TF_SERVING_BUILD_IMAGE=benchmarks/tensorflow-serving-devel '
'-f {0}/tensorflow_serving/tools/docker/Dockerfile '
'{0}/tensorflow_serving/tools/docker/'.format(TF_SERVING_BASE_DIRECTORY))
def InstallFromDocker(vm):
"""Installs Docker and TF Serving."""
vm.Install('docker')
BuildDockerImages(vm)
def AptInstall(vm):
"""Installs TensorFlow Serving on the VM."""
InstallFromDocker(vm)
InstallTensorFlowServingAPI(vm)
def Uninstall(vm):
"""Uninstalls TensorFlow Serving on the VM."""
vm.RemoteCommand(
'sudo pip uninstall -y tensorflow_serving_api', should_log=True)
vm.RemoteHostCommand(
'sudo docker rmi benchmarks/tensorflow-serving', should_log=True)
vm.RemoteHostCommand(
'sudo docker rmi benchmarks/tensorflow-serving-devel', should_log=True)
del vm
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from squid import SquidCollector
##########################################################################
class TestSquidCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SquidCollector', {
'interval': 1,
})
self.collector = SquidCollector(config, None)
def test_import(self):
self.assertTrue(SquidCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_fake_data(self, publish_mock):
_getData_mock = patch.object(
SquidCollector,
'_getData',
Mock(
return_value=self.getFixture('fake_counters_1').getvalue()))
_getData_mock.start()
self.collector.collect()
_getData_mock.stop()
self.assertPublishedMany(publish_mock, {})
_getData_mock = patch.object(
SquidCollector,
'_getData',
Mock(
return_value=self.getFixture('fake_counters_2').getvalue()))
_getData_mock.start()
self.collector.collect()
_getData_mock.stop()
metrics = {
'3128.client_http.requests': 1,
'3128.client_http.hits': 2,
'3128.client_http.errors': 3,
'3128.client_http.kbytes_in': 4,
'3128.client_http.kbytes_out': 5,
'3128.client_http.hit_kbytes_out': 6,
'3128.server.all.requests': 7,
'3128.server.all.errors': 8,
'3128.server.all.kbytes_in': 9,
'3128.server.all.kbytes_out': 10,
'3128.server.http.requests': 1,
'3128.server.http.errors': 12,
'3128.server.http.kbytes_in': 13,
'3128.server.http.kbytes_out': 14,
'3128.server.ftp.requests': 15,
'3128.server.ftp.errors': 16,
'3128.server.ftp.kbytes_in': 17,
'3128.server.ftp.kbytes_out': 18,
'3128.server.other.requests': 19,
'3128.server.other.errors': 20,
'3128.server.other.kbytes_in': 21,
'3128.server.other.kbytes_out': 22,
'3128.icp.pkts_sent': 23,
'3128.icp.pkts_recv': 24,
'3128.icp.queries_sent': 25,
'3128.icp.replies_sent': 26,
'3128.icp.queries_recv': 27,
'3128.icp.replies_recv': 28,
'3128.icp.query_timeouts': 29,
'3128.icp.replies_queued': 30,
'3128.icp.kbytes_sent': 31,
'3128.icp.kbytes_recv': 32,
'3128.icp.q_kbytes_sent': 33,
'3128.icp.r_kbytes_sent': 34,
'3128.icp.q_kbytes_recv': 35,
'3128.icp.r_kbytes_recv': 36,
'3128.icp.times_used': 37,
'3128.cd.times_used': 38,
'3128.cd.msgs_sent': 39,
'3128.cd.msgs_recv': 40,
'3128.cd.memory': 41,
'3128.cd.local_memory': 42,
'3128.cd.kbytes_sent': 43,
'3128.cd.kbytes_recv': 44,
'3128.unlink.requests': 45,
'3128.page_faults': 46,
'3128.select_loops': 47,
'3128.cpu_time': 48.1234567890,
'3128.wall_time': 49.1234567890,
'3128.swap.outs': 50,
'3128.swap.ins': 51,
'3128.swap.files_cleaned': 52,
'3128.aborted_requests': 53
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
_getData_mock = patch.object(
SquidCollector,
'_getData',
Mock(
return_value=self.getFixture('counters_1').getvalue()))
_getData_mock.start()
self.collector.collect()
_getData_mock.stop()
self.assertPublishedMany(publish_mock, {})
_getData_mock = patch.object(
SquidCollector,
'_getData',
Mock(
return_value=self.getFixture('counters_2').getvalue()))
_getData_mock.start()
self.collector.collect()
_getData_mock.stop()
metrics = {
'3128.client_http.requests': 2,
'3128.client_http.hits': 1,
'3128.client_http.errors': 0,
'3128.client_http.kbytes_in': 1,
'3128.client_http.kbytes_out': 12.0,
'3128.client_http.hit_kbytes_out': 10,
'3128.server.all.requests': 0,
'3128.server.all.errors': 0,
'3128.server.all.kbytes_in': 0,
'3128.server.all.kbytes_out': 0,
'3128.server.http.requests': 0,
'3128.server.http.errors': 0,
'3128.server.http.kbytes_in': 0,
'3128.server.http.kbytes_out': 0,
'3128.server.ftp.requests': 0,
'3128.server.ftp.errors': 0,
'3128.server.ftp.kbytes_in': 0,
'3128.server.ftp.kbytes_out': 0,
'3128.server.other.requests': 0,
'3128.server.other.errors': 0,
'3128.server.other.kbytes_in': 0,
'3128.server.other.kbytes_out': 0,
'3128.icp.pkts_sent': 0,
'3128.icp.pkts_recv': 0,
'3128.icp.queries_sent': 0,
'3128.icp.replies_sent': 0,
'3128.icp.queries_recv': 0,
'3128.icp.replies_recv': 0,
'3128.icp.query_timeouts': 0,
'3128.icp.replies_queued': 0,
'3128.icp.kbytes_sent': 0,
'3128.icp.kbytes_recv': 0,
'3128.icp.q_kbytes_sent': 0,
'3128.icp.r_kbytes_sent': 0,
'3128.icp.q_kbytes_recv': 0,
'3128.icp.r_kbytes_recv': 0,
'3128.icp.times_used': 0,
'3128.cd.times_used': 0,
'3128.cd.msgs_sent': 0,
'3128.cd.msgs_recv': 0,
'3128.cd.memory': 0,
'3128.cd.local_memory': 0,
'3128.cd.kbytes_sent': 0,
'3128.cd.kbytes_recv': 0,
'3128.unlink.requests': 0,
'3128.page_faults': 0,
'3128.select_loops': 10827.0,
'3128.cpu_time': 0,
'3128.wall_time': 10,
'3128.swap.outs': 0,
'3128.swap.ins': 2,
'3128.swap.files_cleaned': 0,
'3128.aborted_requests': 0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import cherrypy
from cherrypy._helper import expose
from cherrypy.lib import cptools, encoding, static, jsontools
from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc
from cherrypy.lib import caching as _caching
from cherrypy.lib import auth_basic, auth_digest
def _getargs(func):
"""Return the names of all static arguments to the given function."""
# Use this instead of importing inspect for less mem overhead.
import types
if isinstance(func, types.MethodType):
func = func.__func__
co = func.__code__
return co.co_varnames[:co.co_argcount]
_attr_error = (
'CherryPy Tools cannot be turned on directly. Instead, turn them '
'on via config, or use them as decorators on your page handlers.'
)
class Tool(object):
"""A registered function for use with CherryPy request-processing hooks.
help(tool.callable) should give you more information about this Tool.
"""
namespace = 'tools'
def __init__(self, point, callable, name=None, priority=50):
self._point = point
self.callable = callable
self._name = name
self._priority = priority
self.__doc__ = self.callable.__doc__
self._setargs()
@property
def on(self):
raise AttributeError(_attr_error)
@on.setter
def on(self, value):
raise AttributeError(_attr_error)
def _setargs(self):
"""Copy func parameter names to obj attributes."""
try:
for arg in _getargs(self.callable):
setattr(self, arg, None)
except (TypeError, AttributeError):
if hasattr(self.callable, '__call__'):
for arg in _getargs(self.callable.__call__):
setattr(self, arg, None)
# IronPython 1.0 raises NotImplementedError because
# inspect.getargspec tries to access Python bytecode
# in co_code attribute.
except NotImplementedError:
pass
# IronPython 1B1 may raise IndexError in some cases,
# but if we trap it here it doesn't prevent CP from working.
except IndexError:
pass
def _merged_args(self, d=None):
"""Return a dict of configuration entries for this Tool."""
if d:
conf = d.copy()
else:
conf = {}
tm = cherrypy.serving.request.toolmaps[self.namespace]
if self._name in tm:
conf.update(tm[self._name])
if 'on' in conf:
del conf['on']
return conf
def __call__(self, *args, **kwargs):
"""Compile-time decorator (turn on the tool in config).
For example::
@expose
@tools.proxy()
def whats_my_base(self):
return cherrypy.request.base
"""
if args:
raise TypeError('The %r Tool does not accept positional '
'arguments; you must use keyword arguments.'
% self._name)
def tool_decorator(f):
if not hasattr(f, '_cp_config'):
f._cp_config = {}
subspace = self.namespace + '.' + self._name + '.'
f._cp_config[subspace + 'on'] = True
for k, v in kwargs.items():
f._cp_config[subspace + k] = v
return f
return tool_decorator
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
cherrypy.serving.request.hooks.attach(self._point, self.callable,
priority=p, **conf)
class HandlerTool(Tool):
"""Tool which is called 'before main', that may skip normal handlers.
If the tool successfully handles the request (by setting response.body),
    it should return True. This will cause CherryPy to skip any 'normal' page
handler. If the tool did not handle the request, it should return False
to tell CherryPy to continue on and call the normal page handler. If the
tool is declared AS a page handler (see the 'handler' method), returning
False will raise NotFound.
"""
def __init__(self, callable, name=None):
Tool.__init__(self, 'before_handler', callable, name)
def handler(self, *args, **kwargs):
"""Use this tool as a CherryPy page handler.
For example::
class Root:
nav = tools.staticdir.handler(section="/nav", dir="nav",
root=absDir)
"""
@expose
def handle_func(*a, **kw):
handled = self.callable(*args, **self._merged_args(kwargs))
if not handled:
raise cherrypy.NotFound()
return cherrypy.serving.response.body
return handle_func
def _wrapper(self, **kwargs):
if self.callable(**kwargs):
cherrypy.serving.request.handler = None
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
cherrypy.serving.request.hooks.attach(self._point, self._wrapper,
priority=p, **conf)
class HandlerWrapperTool(Tool):
"""Tool which wraps request.handler in a provided wrapper function.
The 'newhandler' arg must be a handler wrapper function that takes a
'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all
page handler
functions, it must return an iterable for use as cherrypy.response.body.
For example, to allow your 'inner' page handlers to return dicts
which then get interpolated into a template::
def interpolator(next_handler, *args, **kwargs):
filename = cherrypy.request.config.get('template')
cherrypy.response.template = env.get_template(filename)
response_dict = next_handler(*args, **kwargs)
return cherrypy.response.template.render(**response_dict)
cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
"""
def __init__(self, newhandler, point='before_handler', name=None,
priority=50):
self.newhandler = newhandler
self._point = point
self._name = name
self._priority = priority
def callable(self, *args, **kwargs):
innerfunc = cherrypy.serving.request.handler
def wrap(*args, **kwargs):
return self.newhandler(innerfunc, *args, **kwargs)
cherrypy.serving.request.handler = wrap
class ErrorTool(Tool):
"""Tool which is used to replace the default request.error_response."""
def __init__(self, callable, name=None):
Tool.__init__(self, None, callable, name)
def _wrapper(self):
self.callable(**self._merged_args())
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
cherrypy.serving.request.error_response = self._wrapper
# Builtin tools #
class SessionTool(Tool):
"""Session Tool for CherryPy.
sessions.locking
When 'implicit' (the default), the session will be locked for you,
just before running the page handler.
When 'early', the session will be locked before reading the request
body. This is off by default for safety reasons; for example,
a large upload would block the session, denying an AJAX
progress meter
(`issue <https://github.com/cherrypy/cherrypy/issues/630>`_).
When 'explicit' (or any other value), you need to call
cherrypy.session.acquire_lock() yourself before using
session data.
"""
def __init__(self):
# _sessions.init must be bound after headers are read
Tool.__init__(self, 'before_request_body', _sessions.init)
def _lock_session(self):
cherrypy.serving.session.acquire_lock()
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
hooks = cherrypy.serving.request.hooks
conf = self._merged_args()
p = conf.pop('priority', None)
if p is None:
p = getattr(self.callable, 'priority', self._priority)
hooks.attach(self._point, self.callable, priority=p, **conf)
locking = conf.pop('locking', 'implicit')
if locking == 'implicit':
hooks.attach('before_handler', self._lock_session)
elif locking == 'early':
# Lock before the request body (but after _sessions.init runs!)
hooks.attach('before_request_body', self._lock_session,
priority=60)
else:
# Don't lock
pass
hooks.attach('before_finalize', _sessions.save)
hooks.attach('on_end_request', _sessions.close)
def regenerate(self):
"""Drop the current session and make a new one (with a new id)."""
sess = cherrypy.serving.session
sess.regenerate()
# Grab cookie-relevant tool args
relevant = 'path', 'path_header', 'name', 'timeout', 'domain', 'secure'
conf = dict(
(k, v)
for k, v in self._merged_args().items()
if k in relevant
)
_sessions.set_response_cookie(**conf)
class XMLRPCController(object):
"""A Controller (page handler collection) for XML-RPC.
To use it, have your controllers subclass this base class (it will
turn on the tool for you).
You can also supply the following optional config entries::
tools.xmlrpc.encoding: 'utf-8'
tools.xmlrpc.allow_none: 0
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
appropriate handler must first be performed according to the URL, and
then a second dispatch step must take place according to the RPC method
specified in the request body. It also allows a superfluous "/RPC2"
prefix in the URL, supplies its own handler args in the body, and
requires a 200 OK "Fault" response instead of 404 when the desired
method is not found.
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
This Controller acts as the dispatch target for the first half (based
on the URL); it then reads the RPC method from the request body and
does its own second dispatch step based on that method. It also reads
body params, and returns a Fault on error.
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
    in your URLs, you can safely skip turning on the XMLRPCDispatcher.
    Otherwise, you need to declare it in config::
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
"""
# Note we're hard-coding this into the 'tools' namespace. We could do
# a huge amount of work to make it relocatable, but the only reason why
# would be if someone actually disabled the default_toolbox. Meh.
_cp_config = {'tools.xmlrpc.on': True}
@expose
def default(self, *vpath, **params):
rpcparams, rpcmethod = _xmlrpc.process_body()
subhandler = self
for attr in str(rpcmethod).split('.'):
subhandler = getattr(subhandler, attr, None)
if subhandler and getattr(subhandler, 'exposed', False):
body = subhandler(*(vpath + rpcparams), **params)
else:
# https://github.com/cherrypy/cherrypy/issues/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpcutil.on_error
raise Exception('method "%s" is not supported' % attr)
conf = cherrypy.serving.request.toolmaps['tools'].get('xmlrpc', {})
_xmlrpc.respond(body,
conf.get('encoding', 'utf-8'),
conf.get('allow_none', 0))
return cherrypy.serving.response.body
class SessionAuthTool(HandlerTool):
pass
class CachingTool(Tool):
"""Caching Tool for CherryPy."""
def _wrapper(self, **kwargs):
request = cherrypy.serving.request
if _caching.get(**kwargs):
request.handler = None
else:
if request.cacheable:
# Note the devious technique here of adding hooks on the fly
request.hooks.attach('before_finalize', _caching.tee_output,
priority=100)
_wrapper.priority = 90
def _setup(self):
"""Hook caching into cherrypy.request."""
conf = self._merged_args()
p = conf.pop('priority', None)
cherrypy.serving.request.hooks.attach('before_handler', self._wrapper,
priority=p, **conf)
class Toolbox(object):
"""A collection of Tools.
This object also functions as a config namespace handler for itself.
Custom toolboxes should be added to each Application's toolboxes dict.
"""
def __init__(self, namespace):
self.namespace = namespace
def __setattr__(self, name, value):
# If the Tool._name is None, supply it from the attribute name.
if isinstance(value, Tool):
if value._name is None:
value._name = name
value.namespace = self.namespace
object.__setattr__(self, name, value)
def __enter__(self):
"""Populate request.toolmaps from tools specified in config."""
cherrypy.serving.request.toolmaps[self.namespace] = map = {}
def populate(k, v):
toolname, arg = k.split('.', 1)
bucket = map.setdefault(toolname, {})
bucket[arg] = v
return populate
def __exit__(self, exc_type, exc_val, exc_tb):
"""Run tool._setup() for each tool in our toolmap."""
map = cherrypy.serving.request.toolmaps.get(self.namespace)
if map:
for name, settings in map.items():
if settings.get('on', False):
tool = getattr(self, name)
tool._setup()
def register(self, point, **kwargs):
"""
Return a decorator which registers the function
at the given hook point.
"""
def decorator(func):
attr_name = kwargs.get('name', func.__name__)
tool = Tool(point, func, **kwargs)
setattr(self, attr_name, tool)
return func
return decorator
default_toolbox = _d = Toolbox('tools')
_d.session_auth = SessionAuthTool(cptools.session_auth)
_d.allow = Tool('on_start_resource', cptools.allow)
_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
_d.response_headers = Tool('on_start_resource', cptools.response_headers)
_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
_d.err_redirect = ErrorTool(cptools.redirect)
_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
_d.decode = Tool('before_request_body', encoding.decode)
# the order of encoding, gzip, caching is important
_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70)
_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
_d.staticdir = HandlerTool(static.staticdir)
_d.staticfile = HandlerTool(static.staticfile)
_d.sessions = SessionTool()
_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
_d.caching = CachingTool('before_handler', _caching.get, 'caching')
_d.expires = Tool('before_finalize', _caching.expires)
_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
_d.referer = Tool('before_request_body', cptools.referer)
_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
_d.flatten = Tool('before_finalize', cptools.flatten)
_d.accept = Tool('on_start_resource', cptools.accept)
_d.redirect = Tool('on_start_resource', cptools.redirect)
_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0)
_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30)
_d.json_out = Tool('before_handler', jsontools.json_out, priority=30)
_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1)
_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1)
_d.params = Tool('before_handler', cptools.convert_params, priority=15)
del _d, cptools, encoding, static
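# Illustrative usage sketch (comment only; not part of the original module).
# The tool name `log_user_agent` is made up. A function can be registered as a
# Tool on the default toolbox via the `register` decorator defined above, and
# then enabled per handler or via config, e.g.:
#
#     @cherrypy.tools.register('before_handler')
#     def log_user_agent():
#         cherrypy.log(cherrypy.request.headers.get('User-Agent', ''))
#
#     class Root:
#         @cherrypy.expose
#         @cherrypy.tools.log_user_agent()
#         def index(self):
#             return 'hello'
#
# or equivalently in config: {'/': {'tools.log_user_agent.on': True}}.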
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from network import NetworkCollector
##########################################################################
class TestNetworkCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NetworkCollector', {
'interfaces': ['eth', 'em', 'bond', 'veth', 'br-lxc'],
'interval': 10,
'byte_unit': ['bit', 'megabit', 'megabyte'],
})
self.collector = NetworkCollector(config, None)
def test_import(self):
self.assertTrue(NetworkCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_net_dev(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/net/dev')
@patch.object(Collector, 'publish')
def test_should_work_with_virtual_interfaces_and_bridges(self,
publish_mock):
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_2')
self.collector.collect()
metrics = {
'eth0.rx_megabyte': (2.504, 2),
'eth0.tx_megabyte': (4.707, 2),
'eth1.rx_megabyte': (0.0, 2),
'eth1.tx_megabyte': (0.0, 2),
'em2.rx_megabyte': (2.504, 2),
'em2.tx_megabyte': (4.707, 2),
'bond3.rx_megabyte': (2.504, 2),
'bond3.tx_megabyte': (4.707, 2),
'vethmR3i5e.tx_megabyte': (0.223, 2),
'vethmR3i5e.rx_megabyte': (0.033, 2),
'br-lxc-247.tx_megabyte': (0.307, 2),
'br-lxc-247.rx_megabyte': (0.032, 2)
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('proc_net_dev_2')
self.collector.collect()
metrics = {
'eth0.rx_megabyte': (2.504, 2),
'eth0.tx_megabyte': (4.707, 2),
'eth1.rx_megabyte': (0.0, 2),
'eth1.tx_megabyte': (0.0, 2),
'em2.rx_megabyte': (2.504, 2),
'em2.tx_megabyte': (4.707, 2),
'bond3.rx_megabyte': (2.504, 2),
'bond3.tx_megabyte': (4.707, 2)
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
# Named test_z_* to run after test_should_open_proc_net_dev
@patch.object(Collector, 'publish')
def test_z_issue_208_a(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('208-a_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('208-a_2')
self.collector.collect()
metrics = {
'bond0.rx_bit': 2687979419428.0,
'bond0.rx_compressed': 0.0,
'bond0.rx_drop': 0.0,
'bond0.rx_errors': 0.0,
'bond0.rx_fifo': 0.0,
'bond0.rx_frame': 0.0,
'bond0.rx_multicast': 8481087.9,
'bond0.rx_packets': 264585067.9,
'bond0.tx_bit': 1569889402921.6,
'bond0.tx_carrier': 0.0,
'bond0.tx_drop': 0.0,
'bond0.tx_errors': 0.0,
'bond0.tx_fifo': 0.0,
'bond0.tx_colls': 0.0,
'bond0.tx_compressed': 0.0,
'bond0.tx_packets': 200109891.6,
'bond1.rx_bit': 16933606875970.4,
'bond1.rx_compressed': 0.0,
'bond1.rx_drop': 0.0,
'bond1.rx_errors': 0.0,
'bond1.rx_fifo': 0.0,
'bond1.rx_frame': 0.0,
'bond1.rx_multicast': 7.8,
'bond1.rx_packets': 2419703159.9,
'bond1.tx_bit': 17842573410005.6,
'bond1.tx_carrier': 0.0,
'bond1.tx_drop': 0.0,
'bond1.tx_errors': 0.0,
'bond1.tx_fifo': 0.0,
'bond1.tx_colls': 0.0,
'bond1.tx_compressed': 0.0,
'bond1.tx_packets': 2654259261.0,
'em1.rx_bit': 2687881969344.8,
'em1.rx_compressed': 0.0,
'em1.rx_drop': 0.0,
'em1.rx_errors': 0.0,
'em1.rx_fifo': 0.0,
'em1.rx_frame': 0.0,
'em1.rx_multicast': 8471878.8,
'em1.rx_packets': 264382058.1,
'em1.tx_bit': 1569889402921.6,
'em1.tx_carrier': 0.0,
'em1.tx_drop': 0.0,
'em1.tx_errors': 0.0,
'em1.tx_fifo': 0.0,
'em1.tx_colls': 0.0,
'em1.tx_compressed': 0.0,
'em1.tx_packets': 200109891.6,
'em2.rx_bit': 97450083.2,
'em2.rx_compressed': 0.0,
'em2.rx_drop': 0.0,
'em2.rx_errors': 0.0,
'em2.rx_fifo': 0.0,
'em2.rx_frame': 0.0,
'em2.rx_multicast': 9209.1,
'em2.rx_packets': 203009.8,
'em2.tx_bit': 0,
'em2.tx_carrier': 0.0,
'em2.tx_drop': 0.0,
'em2.tx_errors': 0.0,
'em2.tx_fifo': 0.0,
'em2.tx_colls': 0.0,
'em2.tx_compressed': 0.0,
'em2.tx_packets': 0.0,
'em3.rx_bit': 514398.4,
'em3.rx_compressed': 0.0,
'em3.rx_drop': 0.0,
'em3.rx_errors': 0.0,
'em3.rx_fifo': 0.0,
'em3.rx_frame': 0.0,
'em3.rx_multicast': 0.0,
'em3.rx_packets': 1071.6,
'em3.tx_bit': 0.0,
'em3.tx_carrier': 0.0,
'em3.tx_drop': 0.0,
'em3.tx_errors': 0.0,
'em3.tx_fifo': 0.0,
'em3.tx_colls': 0.0,
'em3.tx_compressed': 0.0,
'em3.tx_packets': 0.0,
'em4.rx_bit': 16933606361572.0,
'em4.rx_compressed': 0.0,
'em4.rx_drop': 0.0,
'em4.rx_errors': 0.0,
'em4.rx_fifo': 0.0,
'em4.rx_frame': 0.0,
'em4.rx_multicast': 7.8,
'em4.rx_packets': 2419702088.3,
'em4.tx_bit': 17842573410005.6,
'em4.tx_carrier': 0.0,
'em4.tx_drop': 0.0,
'em4.tx_errors': 0.0,
'em4.tx_fifo': 0.0,
'em4.tx_colls': 0.0,
'em4.tx_compressed': 0.0,
'em4.tx_packets': 2654259261.0,
}
self.assertPublishedMany(publish_mock, metrics)
# Named test_z_* to run after test_should_open_proc_net_dev
@patch.object(Collector, 'publish')
def test_z_issue_208_b(self, publish_mock):
NetworkCollector.PROC = self.getFixturePath('208-b_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
NetworkCollector.PROC = self.getFixturePath('208-b_2')
self.collector.collect()
metrics = {
'bond0.rx_bit': 12754357408.8,
'bond0.rx_compressed': 0.0,
'bond0.rx_drop': 0.0,
'bond0.rx_errors': 0.0,
'bond0.rx_fifo': 0.0,
'bond0.rx_frame': 0.0,
'bond0.rx_multicast': 8483853.6,
'bond0.rx_packets': 13753449.5,
'bond0.tx_bit': 51593345279.2,
'bond0.tx_carrier': 0.0,
'bond0.tx_drop': 0.0,
'bond0.tx_errors': 0.0,
'bond0.tx_fifo': 0.0,
'bond0.tx_colls': 0.0,
'bond0.tx_compressed': 0.0,
'bond0.tx_packets': 58635426.6,
'bond1.rx_bit': 48298217736175.2,
'bond1.rx_compressed': 0.0,
'bond1.rx_drop': 0.0,
'bond1.rx_errors': 0.0,
'bond1.rx_fifo': 473.8,
'bond1.rx_frame': 0.0,
'bond1.rx_multicast': 2.9,
'bond1.rx_packets': 4869871086.2,
'bond1.tx_bit': 23149038213964.0,
'bond1.tx_carrier': 0.0,
'bond1.tx_drop': 0.0,
'bond1.tx_errors': 0.0,
'bond1.tx_fifo': 0.0,
'bond1.tx_colls': 0.0,
'bond1.tx_compressed': 0.0,
'bond1.tx_packets': 2971941537.3,
'em1.rx_bit': 12657057999.2,
'em1.rx_compressed': 0.0,
'em1.rx_drop': 0.0,
'em1.rx_errors': 0.0,
'em1.rx_fifo': 0.0,
'em1.rx_frame': 0.0,
'em1.rx_multicast': 8474644.4,
'em1.rx_packets': 13550781.5,
'em1.tx_bit': 51593345279.2,
'em1.tx_carrier': 0.0,
'em1.tx_drop': 0.0,
'em1.tx_errors': 0.0,
'em1.tx_fifo': 0.0,
'em1.tx_colls': 0.0,
'em1.tx_compressed': 0.0,
'em1.tx_packets': 58635426.6,
'em2.rx_bit': 97299409.6,
'em2.rx_compressed': 0.0,
'em2.rx_drop': 0.0,
'em2.rx_errors': 0.0,
'em2.rx_fifo': 0.0,
'em2.rx_frame': 0.0,
'em2.rx_multicast': 9209.2,
'em2.rx_packets': 202668.0,
'em2.tx_bit': 0,
'em2.tx_carrier': 0.0,
'em2.tx_drop': 0.0,
'em2.tx_errors': 0.0,
'em2.tx_fifo': 0.0,
'em2.tx_colls': 0.0,
'em2.tx_compressed': 0.0,
'em2.tx_packets': 0.0,
'em3.rx_bit': 48298184648012.0,
'em3.rx_compressed': 0.0,
'em3.rx_drop': 0.0,
'em3.rx_errors': 0.0,
'em3.rx_fifo': 473.8,
'em3.rx_frame': 0.0,
'em3.rx_multicast': 2.9,
'em3.rx_packets': 4869866440.5,
'em3.tx_bit': 23149038213964.0,
'em3.tx_carrier': 0.0,
'em3.tx_drop': 0.0,
'em3.tx_errors': 0.0,
'em3.tx_fifo': 0.0,
'em3.tx_colls': 0.0,
'em3.tx_compressed': 0.0,
'em3.tx_packets': 2971941537.3,
'em4.rx_bit': 33088163.2,
'em4.rx_compressed': 0.0,
'em4.rx_drop': 0.0,
'em4.rx_errors': 0.0,
'em4.rx_fifo': 0.0,
'em4.rx_frame': 0.0,
'em4.rx_multicast': 0.0,
'em4.rx_packets': 4645.7,
'em4.tx_bit': 0,
'em4.tx_carrier': 0.0,
'em4.tx_drop': 0.0,
'em4.tx_errors': 0.0,
'em4.tx_fifo': 0.0,
'em4.tx_colls': 0.0,
'em4.tx_compressed': 0.0,
'em4.tx_packets': 0.0,
}
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from PyQt5.QtNetwork import QNetworkReply, QNetworkRequest
from PyQt5.QtCore import pyqtSlot, QIODevice, QByteArray, QTimer
class FixedDataNetworkReply(QNetworkReply):
"""QNetworkReply subclass for fixed data."""
def __init__(self, request, fileData, mimeType, parent=None): # noqa: N803
"""Constructor.
Args:
request: reference to the request object (QNetworkRequest)
fileData: reference to the data buffer (QByteArray)
mimeType: for the reply (string)
parent: reference to the parent object (QObject)
"""
super().__init__(parent)
self._data = fileData
self.setRequest(request)
self.setUrl(request.url())
self.setOpenMode(QIODevice.ReadOnly)
self.setHeader(QNetworkRequest.ContentTypeHeader, mimeType)
self.setHeader(QNetworkRequest.ContentLengthHeader,
QByteArray.number(len(fileData)))
self.setAttribute(QNetworkRequest.HttpStatusCodeAttribute, 200)
self.setAttribute(QNetworkRequest.HttpReasonPhraseAttribute, 'OK')
# For some reason, a segfault will be triggered if these lambdas aren't
# there.
# pylint: disable=unnecessary-lambda
QTimer.singleShot(
0,
lambda: self.metaDataChanged.emit()) # type: ignore[attr-defined]
QTimer.singleShot(
0,
lambda: self.readyRead.emit()) # type: ignore[attr-defined]
QTimer.singleShot(
0,
lambda: self.finished.emit()) # type: ignore[attr-defined]
@pyqtSlot()
def abort(self):
"""Abort the operation."""
def bytesAvailable(self):
"""Determine the bytes available for being read.
Return:
bytes available (int)
"""
return len(self._data) + super().bytesAvailable()
def readData(self, maxlen):
"""Retrieve data from the reply object.
Args:
maxlen maximum number of bytes to read (int)
Return:
bytestring containing the data
"""
len_ = min(maxlen, len(self._data))
buf = bytes(self._data[:len_])
self._data = self._data[len_:]
return buf
def isFinished(self):
return True
def isRunning(self):
return False
class ErrorNetworkReply(QNetworkReply):
"""QNetworkReply which always returns an error."""
def __init__(self, req, errorstring, error, parent=None):
"""Constructor.
Args:
req: The QNetworkRequest associated with this reply.
errorstring: The error string to print.
error: The numerical error value.
parent: The parent to pass to QNetworkReply.
"""
super().__init__(parent)
self.setRequest(req)
self.setUrl(req.url())
# We don't actually want to read anything, but we still need to open
# the device to avoid getting a warning.
self.setOpenMode(QIODevice.ReadOnly)
self.setError(error, errorstring)
QTimer.singleShot(0, lambda:
self.error.emit(error)) # type: ignore[attr-defined]
QTimer.singleShot(0, lambda:
self.finished.emit()) # type: ignore[attr-defined]
def abort(self):
"""Do nothing since it's a fake reply."""
def bytesAvailable(self):
"""We always have 0 bytes available."""
return 0
def readData(self, _maxlen):
"""No data available."""
return bytes()
def isFinished(self):
return True
def isRunning(self):
return False
class RedirectNetworkReply(QNetworkReply):
"""A reply which redirects to the given URL."""
def __init__(self, new_url, parent=None):
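        """Constructor.
        Args:
            new_url: The URL to redirect to.
            parent: The parent to pass to QNetworkReply.
        """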
super().__init__(parent)
self.setAttribute(QNetworkRequest.RedirectionTargetAttribute, new_url)
QTimer.singleShot(0, lambda:
self.finished.emit()) # type: ignore[attr-defined]
def abort(self):
"""Called when there's e.g. a redirection limit."""
def readData(self, _maxlen):
return bytes()
|
import voluptuous as vol
from homeassistant.components import rpi_gpio
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.reload import setup_reload_service
from . import DOMAIN, PLATFORMS
CONF_BOUNCETIME = "bouncetime"
CONF_INVERT_LOGIC = "invert_logic"
CONF_PORTS = "ports"
CONF_PULL_MODE = "pull_mode"
DEFAULT_BOUNCETIME = 50
DEFAULT_INVERT_LOGIC = False
DEFAULT_PULL_MODE = "UP"
_SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PORTS): _SENSORS_SCHEMA,
vol.Optional(CONF_BOUNCETIME, default=DEFAULT_BOUNCETIME): cv.positive_int,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): cv.string,
}
)
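# A minimal configuration sketch derived from the schema above; the GPIO port
# numbers and names are illustrative only:
#
#   binary_sensor:
#     - platform: rpi_gpio
#       ports:
#         17: Door
#         18: Window
#       pull_mode: "DOWN"
#       invert_logic: true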
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Raspberry PI GPIO devices."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
pull_mode = config.get(CONF_PULL_MODE)
bouncetime = config.get(CONF_BOUNCETIME)
invert_logic = config.get(CONF_INVERT_LOGIC)
binary_sensors = []
    ports = config.get(CONF_PORTS)
for port_num, port_name in ports.items():
binary_sensors.append(
RPiGPIOBinarySensor(
port_name, port_num, pull_mode, bouncetime, invert_logic
)
)
add_entities(binary_sensors, True)
class RPiGPIOBinarySensor(BinarySensorEntity):
"""Represent a binary sensor that uses Raspberry Pi GPIO."""
def __init__(self, name, port, pull_mode, bouncetime, invert_logic):
"""Initialize the RPi binary sensor."""
self._name = name or DEVICE_DEFAULT_NAME
self._port = port
self._pull_mode = pull_mode
self._bouncetime = bouncetime
self._invert_logic = invert_logic
self._state = None
rpi_gpio.setup_input(self._port, self._pull_mode)
def read_gpio(port):
"""Read state from GPIO."""
self._state = rpi_gpio.read_input(self._port)
self.schedule_update_ha_state()
rpi_gpio.edge_detect(self._port, read_gpio, self._bouncetime)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state != self._invert_logic
def update(self):
"""Update the GPIO state."""
self._state = rpi_gpio.read_input(self._port)
|
import sys
from flask import Flask
from httpobs.conf import DEVELOPMENT_MODE, API_PORT, API_PROPAGATE_EXCEPTIONS
from httpobs.website import add_response_headers
from httpobs.website.api import api
from httpobs.website.monitoring import monitoring_api
def __exit_with(msg: str) -> None:
print(msg)
sys.exit(1)
# Register the application with flask
app = Flask('http-observatory')
app.config['PROPAGATE_EXCEPTIONS'] = API_PROPAGATE_EXCEPTIONS
app.register_blueprint(api)
app.register_blueprint(monitoring_api)
@app.route('/')
@add_response_headers()
def main() -> str:
return 'Welcome to the HTTP Observatory!'
if __name__ == '__main__':
app.run(debug=DEVELOPMENT_MODE,
port=API_PORT)
|
import diamond.collector
import os
class VMSFSCollector(diamond.collector.Collector):
SYSFS = '/sys/fs/vmsfs'
VMSFS_STATS = {
'resident': ('cur_resident', 4096),
'allocated': ('cur_allocated', 4096)
}
def vmsfs_stats_read(self, filename):
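        """Read the vmsfs stats file and return a dict of counter values.
        Each line is split on whitespace; the first token (minus its trailing
        character) becomes the key and the second token the numeric value.
        """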
stats = {}
# Open vmsfs sys info.
stats_fd = None
        try:
            stats_fd = open(filename)
            for line in stats_fd:
                tokens = line.split()
                stats[tokens[0][0:-1]] = long(tokens[1])
        except Exception:
            # Ignore unreadable or malformed stats files and return what we have.
            pass
        finally:
            # Close the descriptor on the success path too, not only on errors.
            if stats_fd:
                stats_fd.close()
        return stats
def vmsfs_stats_dispatch(self, filename, prefix=''):
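        """Read stats from filename and publish every counter listed in
        VMSFS_STATS, multiplied by its configured scale factor.
        """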
stats = self.vmsfs_stats_read(filename)
for stat in self.VMSFS_STATS:
name = self.VMSFS_STATS[stat][0]
scale = self.VMSFS_STATS[stat][1]
if name in stats:
self.publish(prefix + name, stats[name] * scale)
def get_default_config_help(self):
config_help = super(VMSFSCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(VMSFSCollector, self).get_default_config()
config.update({
'path': 'vmsfs'
})
return config
def collect(self):
if not os.access(self.SYSFS, os.R_OK | os.X_OK):
return None
# Dispatch total stats.
self.vmsfs_stats_dispatch(os.path.join(self.SYSFS, 'stats'))
# Dispatch per-generation stats.
# NOTE: We do not currently report the per-generation statistics to
# diamond. This is because we do not have a good strategy for
# aggregating generation data and exposing it in a sensible way. There
# are three strategies:
# 1) Collect everything at the host level.
# The problem here is that the number of metrics will explode for
# that individual host (and keep growing).
# 2) Collect at the top-level (one virtual host per generation).
# Then the problem is finding the generation through UI tools, etc.
# 3) Figure out some way to put the stats in each instance associated
# with that generation.
# We favor (2) currently, but there's not much value in implementing it
# until it can be exposed to the user.
if False:
TO_IGNORE = ('stats', 'version',
'00000000-0000-0000-0000-000000000000')
files = os.listdir(self.SYSFS)
for f in files:
if f not in TO_IGNORE:
self.vmsfs_stats_dispatch('/sys/fs/vmsfs/' + f,
prefix=('%s.' % f))
|
import logging
from plumbum import cli, local
from plumbum.path.utils import delete, copy
logger = logging.getLogger("FileCopier")
class FileCopier(cli.Application):
overwrite = cli.Flag("-o", help = "If given, overwrite existing files")
@cli.switch(["-l", "--log-to-file"], argtype = str)
def log_to_file(self, filename):
"""logs all output to the given file"""
handler = logging.FileHandler(filename)
logger.addHandler(handler)
@cli.switch(["--verbose"], requires=["--log-to-file"])
def set_debug(self):
"""Sets verbose mode"""
logger.setLevel(logging.DEBUG)
def main(self, src, dst):
if local.path(dst).exists():
if not self.overwrite:
logger.debug("Oh no! That's terrible")
raise ValueError("Destination already exists")
else:
delete(dst)
logger.debug("I'm going to copy %s to %s", src, dst)
copy(src, dst)
logger.debug("Great success")
if __name__ == "__main__":
FileCopier.run()
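# Example invocations, assuming this script is saved as filecopier.py (paths
# are illustrative):
#   python filecopier.py src.txt dst.txt
#   python filecopier.py -o --log-to-file copy.log --verbose src.txt dst.txt
# Note that --verbose is only accepted together with --log-to-file, because of
# the `requires` argument on the switch above.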
|
from datetime import timedelta
import logging
from pystiebeleltron import pystiebeleltron
import voluptuous as vol
from homeassistant.components.modbus.const import CONF_HUB, DEFAULT_HUB, MODBUS_DOMAIN
from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
DOMAIN = "stiebel_eltron"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string,
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
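# A minimal configuration.yaml sketch matching the schema above; the hub name
# is illustrative and must refer to a configured modbus hub:
#
#   stiebel_eltron:
#     name: Heat pump
#     hub: modbus_hub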
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
def setup(hass, config):
"""Set up the STIEBEL ELTRON unit.
Will automatically load climate platform.
"""
name = config[DOMAIN][CONF_NAME]
modbus_client = hass.data[MODBUS_DOMAIN][config[DOMAIN][CONF_HUB]]
hass.data[DOMAIN] = {
"name": name,
"ste_data": StiebelEltronData(name, modbus_client),
}
discovery.load_platform(hass, "climate", DOMAIN, {}, config)
return True
class StiebelEltronData:
"""Get the latest data and update the states."""
def __init__(self, name, modbus_client):
"""Init the STIEBEL ELTRON data object."""
self.api = pystiebeleltron.StiebelEltronAPI(modbus_client, 1)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Update unit data."""
if not self.api.update():
_LOGGER.warning("Modbus read failed")
else:
_LOGGER.debug("Data updated successfully")
|
from flexx import flx
class MyVBox(flx.VFix):
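    """Vertical layout box with a default spacing of 15 and no padding."""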
def __init__(self, **kwargs):
kwargs['spacing'] = kwargs.get('spacing', 15)
kwargs['padding'] = 0
kwargs['orientation'] = 'vertical'
super().__init__(**kwargs)
class MyHBox(flx.HFix):
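    """Horizontal layout box with a default spacing of 15 and no padding."""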
def __init__(self, **kwargs):
kwargs['spacing'] = kwargs.get('spacing', 15)
kwargs['padding'] = 0
super().__init__(**kwargs)
class Mondriaan(flx.Widget):
CSS = """
.flx-Mondriaan {background: #000;}
.flx-Mondriaan .edge {background:none;}
.flx-Mondriaan .white {background:#fff;}
.flx-Mondriaan .red {background:#f23;}
.flx-Mondriaan .blue {background:#249;}
.flx-Mondriaan .yellow {background:#ff7;}
"""
def init(self):
with MyHBox():
with MyVBox(flex=2):
with MyVBox(flex=4, spacing=30):
flx.Widget(flex=1, css_class='white')
flx.Widget(flex=1, css_class='white')
with MyVBox(flex=2, css_class='blue'):
flx.Widget(flex=1, css_class='edge')
flx.Widget(flex=1, css_class='edge')
with MyVBox(flex=6):
with MyVBox(flex=4, spacing=30, css_class='red'):
flx.Widget(flex=1, css_class='edge')
flx.Widget(flex=1, css_class='edge')
with MyHBox(flex=2):
flx.Widget(flex=6, css_class='white')
with MyVBox(flex=1):
flx.Widget(flex=1, css_class='white')
flx.Widget(flex=1, css_class='yellow')
if __name__ == '__main__':
m = flx.launch(Mondriaan, 'app')
flx.run()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
from absl import app
from absl import flags
from absl.flags import argparse_flags
FLAGS = flags.FLAGS
flags.DEFINE_string('absl_echo', None, 'The echo message from absl.flags.')
def parse_flags_simple(argv):
"""Simple example for absl.flags + argparse."""
parser = argparse_flags.ArgumentParser(
description='A simple example of argparse_flags.')
parser.add_argument(
'--argparse_echo', help='The echo message from argparse_flags')
return parser.parse_args(argv[1:])
def main_simple(args):
print('--absl_echo is', FLAGS.absl_echo)
print('--argparse_echo is', args.argparse_echo)
def roll_dice(args):
print('Rolled a dice:', random.randint(1, args.num_faces))
def shuffle(args):
inputs = list(args.inputs)
random.shuffle(inputs)
print('Shuffled:', ' '.join(inputs))
def parse_flags_subcommands(argv):
"""Subcommands example for absl.flags + argparse."""
parser = argparse_flags.ArgumentParser(
description='A subcommands example of argparse_flags.')
parser.add_argument('--argparse_echo',
help='The echo message from argparse_flags')
subparsers = parser.add_subparsers(help='The command to execute.')
roll_dice_parser = subparsers.add_parser(
'roll_dice', help='Roll a dice.')
roll_dice_parser.add_argument('--num_faces', type=int, default=6)
roll_dice_parser.set_defaults(command=roll_dice)
shuffle_parser = subparsers.add_parser(
'shuffle', help='Shuffle inputs.')
shuffle_parser.add_argument(
'inputs', metavar='I', nargs='+', help='Inputs to shuffle.')
shuffle_parser.set_defaults(command=shuffle)
return parser.parse_args(argv[1:])
def main_subcommands(args):
main_simple(args)
args.command(args)
if __name__ == '__main__':
main_func_name = os.environ['MAIN_FUNC']
flags_parser_func_name = os.environ['FLAGS_PARSER_FUNC']
app.run(main=globals()[main_func_name],
flags_parser=globals()[flags_parser_func_name])
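# Example invocations (the script name argparse_flags_example.py is made up):
#   MAIN_FUNC=main_simple FLAGS_PARSER_FUNC=parse_flags_simple \
#       python argparse_flags_example.py --absl_echo=hi --argparse_echo=hello
#   MAIN_FUNC=main_subcommands FLAGS_PARSER_FUNC=parse_flags_subcommands \
#       python argparse_flags_example.py roll_dice --num_faces=12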
|
from functools import reduce
import operator
from urllib.parse import urljoin
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.cache import cache
from django.db import models
from django.db.models.aggregates import Sum
from django.db.models.functions import Coalesce
from django.utils import timezone
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
try:
from django_elasticsearch_dsl.registries import registry as elasticsearch_registry
except ImportError:
elasticsearch_registry = type('DocumentRegistry', (), {'get_documents': lambda *args: []})()
from polymorphic.managers import PolymorphicManager
from polymorphic.models import PolymorphicModel
from shop import deferred
from shop.conf import app_settings
from shop.exceptions import ProductNotAvailable
class Availability:
"""
Contains the currently available quantity for a given product and period.
"""
def __init__(self, **kwargs):
"""
:param earliest:
Point in time from when on this product will be available.
:param latest:
Point in time until this product will be available.
:param quantity:
Number of available items. The type of this value is the same as the type of ``quantity``
in :class:`shop.models.cart.CartItemModel`.
:param sell_short:
If ``True``, sell the product even though it's not in stock. It then will be shipped
at the point in time specified by ``earliest``.
:param limited_offer:
If ``True``, sell the product until the point in time specified by ``latest``. After
that period, the product will not be available anymore.
"""
tzinfo = timezone.get_current_timezone()
self.earliest = kwargs.get('earliest', timezone.datetime.min.replace(tzinfo=tzinfo))
self.latest = kwargs.get('latest', timezone.datetime.max.replace(tzinfo=tzinfo))
quantity = kwargs.get('quantity', app_settings.MAX_PURCHASE_QUANTITY)
self.quantity = min(quantity, app_settings.MAX_PURCHASE_QUANTITY)
self.sell_short = bool(kwargs.get('sell_short', False))
self.limited_offer = bool(kwargs.get('limited_offer', False))
self.inventory = bool(kwargs.get('inventory', None))
class AvailableProductMixin:
"""
    Add this mixin class to product model declarations that need to keep track of the
    current amount of products in stock. In comparison to
    :class:`shop.models.product.ReserveProductMixin`, this mixin does not reserve items in pending
    carts, at the risk of overselling. It is thus suited for products kept in the cart
    for a long period.
The product class must implement a field named ``quantity`` accepting numerical values.
"""
def get_availability(self, request, **kwargs):
"""
Returns the current available quantity for this product.
        If other customers have pending carts containing this same product, the quantity
        is not adjusted. This may result in a situation where someone adds a product
        to the cart, but then is unable to purchase it, because someone else bought it in the
        meantime.
"""
return Availability(quantity=self.quantity)
def deduct_from_stock(self, quantity, **kwargs):
if quantity > self.quantity:
raise ProductNotAvailable(self)
self.quantity -= quantity
self.save(update_fields=['quantity'])
def managed_availability(self):
return True
@classmethod
def check(cls, **kwargs):
from shop.models.cart import CartItemModel
errors = super().check(**kwargs)
for cart_field in CartItemModel._meta.fields:
if cart_field.attname == 'quantity':
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(CartItemModel.__name__)))
for field in cls._meta.fields:
if field.attname == 'quantity':
if field.get_internal_type() != cart_field.get_internal_type():
msg = "Field `{}.quantity` must be of same type as `{}.quantity`."
errors.append(checks.Error(msg.format(cls.__name__, CartItemModel.__name__)))
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(cls.__name__)))
return errors
class BaseReserveProductMixin:
def get_availability(self, request, **kwargs):
"""
Returns the current available quantity for this product.
If other customers have pending carts containing this same product, the quantity
        is adjusted accordingly. Therefore make sure to invalidate carts which were not
        converted into an order after a determined period of time. Otherwise the quantity
        returned by this function might be considerably lower than what it could be.
"""
from shop.models.cart import CartItemModel
availability = super().get_availability(request, **kwargs)
cart_items = CartItemModel.objects.filter(product=self).values('quantity')
availability.quantity -= cart_items.aggregate(sum=Coalesce(Sum('quantity'), 0))['sum']
return availability
class ReserveProductMixin(BaseReserveProductMixin, AvailableProductMixin):
"""
    Add this mixin class to product model declarations that need to keep track of the
    current amount of products in stock. In comparison to
    :class:`shop.models.product.AvailableProductMixin`, this mixin reserves items in pending
    carts, without the risk of overselling. On the other hand, the shop may run out of sellable
    items if customers keep products in the cart for a long period without proceeding to checkout.
Use this mixin for products kept for a short period until checking out the cart, for
instance for ticket sales. Ensure that pending carts are flushed regularly.
The product class must implement a field named ``quantity`` accepting numerical values.
"""
class BaseProductManager(PolymorphicManager):
"""
A base ModelManager for all non-object manipulation needs, mostly statistics and querying.
"""
def select_lookup(self, search_term):
"""
        Return a queryset containing the products matching the declared lookup fields together
with the given search term. Each product can define its own lookup fields using the
member list or tuple `lookup_fields`.
"""
filter_by_term = (models.Q((sf, search_term)) for sf in self.model.lookup_fields)
queryset = self.get_queryset().filter(reduce(operator.or_, filter_by_term))
return queryset
def indexable(self):
"""
Return a queryset of indexable Products.
"""
queryset = self.get_queryset().filter(active=True)
return queryset
class PolymorphicProductMetaclass(deferred.PolymorphicForeignKeyBuilder):
@classmethod
def perform_meta_model_check(cls, Model):
"""
Perform some safety checks on the ProductModel being created.
"""
if not isinstance(Model.objects, BaseProductManager):
msg = "Class `{}.objects` must provide ModelManager inheriting from BaseProductManager"
raise NotImplementedError(msg.format(Model.__name__))
if not isinstance(getattr(Model, 'lookup_fields', None), (list, tuple)):
msg = "Class `{}` must provide a tuple of `lookup_fields` so that we can easily lookup for Products"
raise NotImplementedError(msg.format(Model.__name__))
if not callable(getattr(Model, 'get_price', None)):
msg = "Class `{}` must provide a method implementing `get_price(request)`"
            raise NotImplementedError(msg.format(Model.__name__))
class BaseProduct(PolymorphicModel, metaclass=PolymorphicProductMetaclass):
"""
An abstract basic product model for the shop. It is intended to be overridden by one or
more polymorphic models, adding all the fields and relations, required to describe this
type of product.
    Some attributes for this class are mandatory. They shall be implemented as property methods.
The following fields MUST be implemented by the inheriting class:
``product_name``: Return the pronounced name for this product in its localized language.
Additionally the inheriting class MUST implement the following methods ``get_absolute_url()``
and ``get_price()``. See below for details.
Unless each product variant offers its own product code, it is strongly recommended to add
a field ``product_code = models.CharField(_("Product code"), max_length=255, unique=True)``
to the class implementing the product.
"""
created_at = models.DateTimeField(
_("Created at"),
auto_now_add=True,
)
updated_at = models.DateTimeField(
_("Updated at"),
auto_now=True,
)
active = models.BooleanField(
_("Active"),
default=True,
help_text=_("Is this product publicly visible."),
)
class Meta:
abstract = True
verbose_name = _("Product")
verbose_name_plural = _("Products")
def product_type(self):
"""
Returns the polymorphic type of the product.
"""
return force_str(self.polymorphic_ctype)
product_type.short_description = _("Product type")
@property
def product_model(self):
"""
Returns the polymorphic model name of the product's class.
"""
return self.polymorphic_ctype.model
def get_absolute_url(self):
"""
Hook for returning the canonical Django URL of this product.
"""
msg = "Method get_absolute_url() must be implemented by subclass: `{}`"
raise NotImplementedError(msg.format(self.__class__.__name__))
def get_price(self, request):
"""
Hook for returning the current price of this product.
The price shall be of type Money. Read the appropriate section on how to create a Money
type for the chosen currency.
Use the `request` object to vary the price according to the logged in user,
its country code or the language.
"""
msg = "Method get_price() must be implemented by subclass: `{}`"
raise NotImplementedError(msg.format(self.__class__.__name__))
def get_product_variant(self, **kwargs):
"""
Hook for returning the variant of a product using parameters passed in by **kwargs.
If the product has no variants, then return the product itself.
:param **kwargs: A dictionary describing the product's variations.
"""
return self
def get_product_variants(self):
"""
Hook for returning a queryset of variants for the given product.
If the product has no variants, then the queryset contains just itself.
"""
return self._meta.model.objects.filter(pk=self.pk)
def get_availability(self, request, **kwargs):
"""
Hook for checking the availability of a product.
:param request:
Optionally used to vary the availability according to the logged in user,
its country code or language.
:param **kwargs:
Extra arguments passed to the underlying method. Useful for products with
variations.
:return: An object of type :class:`shop.models.product.Availability`.
"""
return Availability()
def managed_availability(self):
"""
:return True: If this product has its quantity managed by some inventory functionality.
"""
return False
def is_in_cart(self, cart, watched=False, **kwargs):
"""
Checks if the current product is already in the given cart, and if so, returns the
corresponding cart_item.
:param watched (bool): This is used to determine if this check shall only be performed
for the watch-list.
:param **kwargs: Optionally one may pass arbitrary information about the product being looked
up. This can be used to determine if a product with variations shall be considered
            equal to the same cart item, resulting in an increase of its quantity, or if it
shall be considered as a separate cart item, resulting in the creation of a new item.
:returns: The cart item (of type CartItem) containing the product considered as equal to the
current one, or ``None`` if no product matches in the cart.
"""
from shop.models.cart import CartItemModel
cart_item_qs = CartItemModel.objects.filter(cart=cart, product=self)
return cart_item_qs.first()
def deduct_from_stock(self, quantity, **kwargs):
"""
Hook to deduct a number of items of the current product from the stock's inventory.
:param quantity: Number of items to deduct.
:param **kwargs:
Extra arguments passed to the underlying method. Useful for products with
variations.
"""
def get_weight(self):
"""
Optional hook to return the product's gross weight in kg. This information is required to
estimate the shipping costs. The merchants product model shall override this method.
"""
return 0
@classmethod
def check(cls, **kwargs):
"""
Internal method to check consistency of Product model declaration on bootstrapping
application.
"""
errors = super().check(**kwargs)
try:
cls.product_name
except AttributeError:
msg = "Class `{}` must provide a model field implementing `product_name`"
errors.append(checks.Error(msg.format(cls.__name__)))
return errors
def update_search_index(self):
"""
Update the Document inside the Elasticsearch index after changing relevant parts
of the product.
"""
documents = elasticsearch_registry.get_documents([ProductModel])
if settings.USE_I18N:
for language, _ in settings.LANGUAGES:
try:
document = next(doc for doc in documents if doc._language == language)
except StopIteration:
document = next(doc for doc in documents if doc._language is None)
document().update(self)
else:
document = next(doc for doc in documents)
document().update(self)
def invalidate_cache(self):
"""
Method ``ProductCommonSerializer.render_html()`` caches the rendered HTML snippets.
Invalidate this HTML snippet after changing relevant parts of the product.
"""
shop_app = apps.get_app_config('shop')
if shop_app.cache_supporting_wildcard:
cache.delete_pattern('product:{}|*'.format(self.id))
ProductModel = deferred.MaterializedModel(BaseProduct)
class CMSPageReferenceMixin:
"""
    Products which refer to CMS pages in order to emulate categories normally need a method for
    being accessed directly through a canonical URL. Add this mixin class to add a
    ``get_absolute_url()`` method to any product model.
"""
category_fields = ['cms_pages'] # used by ProductIndex to fill the categories
def get_absolute_url(self):
"""
Return the absolute URL of a product
"""
# sorting by highest level, so that the canonical URL
# associates with the most generic category
cms_page = self.cms_pages.order_by('node__path').last()
if cms_page is None:
return urljoin('/category-not-assigned/', self.slug)
return urljoin(cms_page.get_absolute_url(), self.slug)
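# Illustrative sketch of a merchant product model satisfying the checks above.
# All names (SmartCard, unit_price) are made up, and MoneyField would need to
# be imported; kept as a comment so that no concrete model is registered with
# Django by this module:
#
#     class SmartCard(AvailableProductMixin, BaseProduct):
#         product_name = models.CharField(_("Product name"), max_length=255)
#         product_code = models.CharField(_("Product code"), max_length=255, unique=True)
#         unit_price = MoneyField(_("Unit price"), decimal_places=3)
#         quantity = models.PositiveIntegerField(_("Quantity"), default=0)
#         lookup_fields = ['product_code__startswith', 'product_name__icontains']
#         objects = BaseProductManager()
#
#         def get_price(self, request):
#             return self.unit_price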
|
import unittest
import theano
from theano import tensor
class TestTheano(unittest.TestCase):
def test_addition(self):
# Declare two symbolic floating-point scalars.
a = tensor.dscalar()
b = tensor.dscalar()
# Create a simple expression.
c = a + b
# Convert the expression into a callable object that takes (a,b)
# values as input and computes a value for 'c'.
f = theano.function([a,b], c)
# Bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c'.
self.assertEqual(4.0, f(1.5, 2.5))
|
from Handler import Handler
import logging
try:
import statsd
except ImportError:
statsd = None
class StatsdHandler(Handler):
def __init__(self, config=None):
"""
Create a new instance of the StatsdHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
logging.debug("Initialized statsd handler.")
if not statsd:
self.log.error('statsd import failed. Handler disabled')
self.enabled = False
return
if not hasattr(statsd, 'StatsClient'):
self.log.warn('python-statsd support is deprecated '
'and will be removed in the future. '
'Please use https://pypi.python.org/pypi/statsd/')
# Initialize Options
self.host = self.config['host']
self.port = int(self.config['port'])
self.batch_size = int(self.config['batch'])
self.metrics = []
self.old_values = {}
# Connect
self._connect()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(StatsdHandler, self).get_default_config_help()
config.update({
'host': '',
'port': '',
'batch': '',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(StatsdHandler, self).get_default_config()
config.update({
'host': '',
'port': 1234,
'batch': 1,
})
return config
def process(self, metric):
"""
Process a metric by sending it to statsd
"""
self.metrics.append(metric)
if len(self.metrics) >= self.batch_size:
self._send()
def _send(self):
"""
Send data to statsd. Fire and forget. Cross fingers and it'll arrive.
"""
if not statsd:
return
for metric in self.metrics:
# Split the path into a prefix and a name
# to work with the statsd module's view of the world.
# It will get re-joined by the python-statsd module.
#
# For the statsd module, you specify prefix in the constructor
# so we just use the full metric path.
(prefix, name) = metric.path.rsplit(".", 1)
logging.debug("Sending %s %s|g", name, metric.value)
if metric.metric_type == 'GAUGE':
if hasattr(statsd, 'StatsClient'):
self.connection.gauge(metric.path, metric.value)
else:
statsd.Gauge(prefix, self.connection).send(
name, metric.value)
else:
# To send a counter, we need to just send the delta
# but without any time delta changes
value = metric.raw_value
if metric.path in self.old_values:
value = value - self.old_values[metric.path]
self.old_values[metric.path] = metric.raw_value
if hasattr(statsd, 'StatsClient'):
self.connection.incr(metric.path, value)
else:
statsd.Counter(prefix, self.connection).increment(
name, value)
if hasattr(statsd, 'StatsClient'):
self.connection.send()
self.metrics = []
def flush(self):
"""Flush metrics in queue"""
self._send()
def _connect(self):
"""
Connect to the statsd server
"""
if not statsd:
return
if hasattr(statsd, 'StatsClient'):
self.connection = statsd.StatsClient(
host=self.host,
port=self.port
).pipeline()
else:
# Create socket
self.connection = statsd.Connection(
host=self.host,
port=self.port,
sample_rate=1.0
)
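# Example handler configuration sketch; the section layout follows the usual
# diamond.conf handler style and the values are illustrative:
#
#   [[StatsdHandler]]
#   host = 127.0.0.1
#   port = 8125
#   batch = 10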
|
import sys
from contextlib import contextmanager
from unittest.mock import patch
from kombu.utils.encoding import (
get_default_encoding_file, safe_str,
set_default_encoding_file, default_encoding,
)
@contextmanager
def clean_encoding():
old_encoding = sys.modules.pop('kombu.utils.encoding', None)
import kombu.utils.encoding
try:
yield kombu.utils.encoding
finally:
if old_encoding:
sys.modules['kombu.utils.encoding'] = old_encoding
class test_default_encoding:
def test_set_default_file(self):
prev = get_default_encoding_file()
try:
set_default_encoding_file('/foo.txt')
assert get_default_encoding_file() == '/foo.txt'
finally:
set_default_encoding_file(prev)
@patch('sys.getfilesystemencoding')
def test_default(self, getdefaultencoding):
getdefaultencoding.return_value = 'ascii'
with clean_encoding() as encoding:
enc = encoding.default_encoding()
if sys.platform.startswith('java'):
assert enc == 'utf-8'
else:
assert enc == 'ascii'
getdefaultencoding.assert_called_with()
class newbytes(bytes):
"""Mock class to simulate python-future newbytes class"""
def __repr__(self):
return 'b' + super().__repr__()
def __str__(self):
return 'b' + f"'{super().__str__()}'"
class newstr(str):
"""Mock class to simulate python-future newstr class"""
def encode(self, encoding=None, errors=None):
return newbytes(super().encode(encoding, errors))
class test_safe_str:
def setup(self):
self._encoding = self.patching('sys.getfilesystemencoding')
self._encoding.return_value = 'ascii'
def test_when_bytes(self):
assert safe_str('foo') == 'foo'
def test_when_newstr(self):
"""Simulates using python-future package under 2.7"""
assert str(safe_str(newstr('foo'))) == 'foo'
def test_when_unicode(self):
assert isinstance(safe_str('foo'), str)
def test_when_encoding_utf8(self):
self._encoding.return_value = 'utf-8'
assert default_encoding() == 'utf-8'
s = 'The quiæk fåx jømps øver the lazy dåg'
res = safe_str(s)
assert isinstance(res, str)
def test_when_containing_high_chars(self):
self._encoding.return_value = 'ascii'
s = 'The quiæk fåx jømps øver the lazy dåg'
res = safe_str(s)
assert isinstance(res, str)
assert len(s) == len(res)
def test_when_not_string(self):
o = object()
assert safe_str(o) == repr(o)
def test_when_unrepresentable(self):
class UnrepresentableObject:
def __repr__(self):
raise KeyError('foo')
assert '<Unrepresentable' in safe_str(UnrepresentableObject())
|
from unittest import mock
import homeassistant.components.datadog as datadog
from homeassistant.const import (
EVENT_LOGBOOK_ENTRY,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
from tests.common import assert_setup_component
async def test_invalid_config(hass):
"""Test invalid configuration."""
with assert_setup_component(0):
assert not await async_setup_component(
hass, datadog.DOMAIN, {datadog.DOMAIN: {"host1": "host1"}}
)
async def test_datadog_setup_full(hass):
"""Test setup with all data."""
config = {datadog.DOMAIN: {"host": "host", "port": 123, "rate": 1, "prefix": "foo"}}
hass.bus.listen = MagicMock()
with patch("homeassistant.components.datadog.initialize") as mock_init, patch(
"homeassistant.components.datadog.statsd"
):
assert await async_setup_component(hass, datadog.DOMAIN, config)
assert mock_init.call_count == 1
assert mock_init.call_args == mock.call(statsd_host="host", statsd_port=123)
assert hass.bus.listen.called
assert EVENT_LOGBOOK_ENTRY == hass.bus.listen.call_args_list[0][0][0]
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[1][0][0]
async def test_datadog_setup_defaults(hass):
"""Test setup with defaults."""
hass.bus.listen = mock.MagicMock()
with patch("homeassistant.components.datadog.initialize") as mock_init, patch(
"homeassistant.components.datadog.statsd"
):
assert await async_setup_component(
hass,
datadog.DOMAIN,
{
datadog.DOMAIN: {
"host": "host",
"port": datadog.DEFAULT_PORT,
"prefix": datadog.DEFAULT_PREFIX,
}
},
)
assert mock_init.call_count == 1
assert mock_init.call_args == mock.call(statsd_host="host", statsd_port=8125)
assert hass.bus.listen.called
async def test_logbook_entry(hass):
"""Test event listener."""
hass.bus.listen = mock.MagicMock()
with patch("homeassistant.components.datadog.initialize"), patch(
"homeassistant.components.datadog.statsd"
) as mock_statsd:
assert await async_setup_component(
hass,
datadog.DOMAIN,
{datadog.DOMAIN: {"host": "host", "rate": datadog.DEFAULT_RATE}},
)
assert hass.bus.listen.called
handler_method = hass.bus.listen.call_args_list[0][0][1]
event = {
"domain": "automation",
"entity_id": "sensor.foo.bar",
"message": "foo bar biz",
"name": "triggered something",
}
handler_method(mock.MagicMock(data=event))
assert mock_statsd.event.call_count == 1
assert mock_statsd.event.call_args == mock.call(
title="Home Assistant",
text="%%% \n **{}** {} \n %%%".format(event["name"], event["message"]),
tags=["entity:sensor.foo.bar", "domain:automation"],
)
mock_statsd.event.reset_mock()
async def test_state_changed(hass):
"""Test event listener."""
hass.bus.listen = mock.MagicMock()
with patch("homeassistant.components.datadog.initialize"), patch(
"homeassistant.components.datadog.statsd"
) as mock_statsd:
assert await async_setup_component(
hass,
datadog.DOMAIN,
{
datadog.DOMAIN: {
"host": "host",
"prefix": "ha",
"rate": datadog.DEFAULT_RATE,
}
},
)
assert hass.bus.listen.called
handler_method = hass.bus.listen.call_args_list[1][0][1]
valid = {"1": 1, "1.0": 1.0, STATE_ON: 1, STATE_OFF: 0}
attributes = {"elevation": 3.2, "temperature": 5.0, "up": True, "down": False}
for in_, out in valid.items():
state = mock.MagicMock(
domain="sensor",
entity_id="sensor.foo.bar",
state=in_,
attributes=attributes,
)
handler_method(mock.MagicMock(data={"new_state": state}))
assert mock_statsd.gauge.call_count == 5
for attribute, value in attributes.items():
value = int(value) if isinstance(value, bool) else value
mock_statsd.gauge.assert_has_calls(
[
mock.call(
f"ha.sensor.{attribute}",
value,
sample_rate=1,
tags=[f"entity:{state.entity_id}"],
)
]
)
assert mock_statsd.gauge.call_args == mock.call(
"ha.sensor",
out,
sample_rate=1,
tags=[f"entity:{state.entity_id}"],
)
mock_statsd.gauge.reset_mock()
for invalid in ("foo", "", object):
handler_method(
mock.MagicMock(data={"new_state": ha.State("domain.test", invalid, {})})
)
assert not mock_statsd.gauge.called
|
import numpy as np
import chainer
from chainer.backends import cuda
from chainercv.links.model.faster_rcnn.utils.bbox2loc import bbox2loc
from chainercv.utils.bbox.bbox_iou import bbox_iou
class AnchorTargetCreator(object):
"""Assign the ground truth bounding boxes to anchors.
Assigns the ground truth bounding boxes to anchors for training Region
Proposal Networks introduced in Faster R-CNN [#]_.
Offsets and scales to match anchors to the ground truth are
calculated using the encoding scheme of
:func:`~chainercv.links.model.faster_rcnn.bbox2loc`.
.. [#] Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun. \
Faster R-CNN: Towards Real-Time Object Detection with \
Region Proposal Networks. NIPS 2015.
Args:
n_sample (int): The number of regions to produce.
pos_iou_thresh (float): Anchors with IoU above this
threshold will be assigned as positive.
neg_iou_thresh (float): Anchors with IoU below this
threshold will be assigned as negative.
pos_ratio (float): Ratio of positive regions in the
sampled regions.
"""
def __init__(self,
n_sample=256,
pos_iou_thresh=0.7, neg_iou_thresh=0.3,
pos_ratio=0.5):
self.n_sample = n_sample
self.pos_iou_thresh = pos_iou_thresh
self.neg_iou_thresh = neg_iou_thresh
self.pos_ratio = pos_ratio
def __call__(self, bbox, anchor, img_size):
"""Assign ground truth supervision to sampled subset of anchors.
        The types of the input arrays and the output arrays are the same.
        The following notation is used.
* :math:`S` is the number of anchors.
* :math:`R` is the number of bounding boxes.
Args:
bbox (array): Coordinates of bounding boxes. Its shape is
:math:`(R, 4)`.
anchor (array): Coordinates of anchors. Its shape is
:math:`(S, 4)`.
img_size (tuple of ints): A tuple :obj:`H, W`, which
is a tuple of height and width of an image.
Returns:
(array, array):
* **loc**: Offsets and scales to match the anchors to \
the ground truth bounding boxes. Its shape is :math:`(S, 4)`.
* **label**: Labels of anchors with values \
:obj:`(1=positive, 0=negative, -1=ignore)`. Its shape \
is :math:`(S,)`.
"""
xp = cuda.get_array_module(bbox)
bbox = cuda.to_cpu(bbox)
anchor = cuda.to_cpu(anchor)
img_H, img_W = img_size
n_anchor = len(anchor)
inside_index = _get_inside_index(anchor, img_H, img_W)
anchor = anchor[inside_index]
argmax_ious, label = self._create_label(
inside_index, anchor, bbox)
# compute bounding box regression targets
loc = bbox2loc(anchor, bbox[argmax_ious])
# map up to original set of anchors
label = _unmap(label, n_anchor, inside_index, fill=-1)
loc = _unmap(loc, n_anchor, inside_index, fill=0)
if xp != np:
loc = chainer.backends.cuda.to_gpu(loc)
label = chainer.backends.cuda.to_gpu(label)
return loc, label
def _create_label(self, inside_index, anchor, bbox):
        # label: 1 is positive, 0 is negative, -1 is don't care
label = np.empty((len(inside_index), ), dtype=np.int32)
label.fill(-1)
argmax_ious, max_ious, gt_argmax_ious = \
self._calc_ious(anchor, bbox, inside_index)
# assign negative labels first so that positive labels can clobber them
label[max_ious < self.neg_iou_thresh] = 0
# positive label: for each gt, anchor with highest iou
label[gt_argmax_ious] = 1
# positive label: above threshold IOU
label[max_ious >= self.pos_iou_thresh] = 1
# subsample positive labels if we have too many
n_pos = int(self.pos_ratio * self.n_sample)
pos_index = np.where(label == 1)[0]
if len(pos_index) > n_pos:
disable_index = np.random.choice(
pos_index, size=(len(pos_index) - n_pos), replace=False)
label[disable_index] = -1
# subsample negative labels if we have too many
n_neg = self.n_sample - np.sum(label == 1)
neg_index = np.where(label == 0)[0]
if len(neg_index) > n_neg:
disable_index = np.random.choice(
neg_index, size=(len(neg_index) - n_neg), replace=False)
label[disable_index] = -1
return argmax_ious, label
def _calc_ious(self, anchor, bbox, inside_index):
# ious between the anchors and the gt boxes
ious = bbox_iou(anchor, bbox)
argmax_ious = ious.argmax(axis=1)
max_ious = ious[np.arange(len(inside_index)), argmax_ious]
gt_argmax_ious = ious.argmax(axis=0)
gt_max_ious = ious[gt_argmax_ious, np.arange(ious.shape[1])]
gt_argmax_ious = np.where(ious == gt_max_ious)[0]
return argmax_ious, max_ious, gt_argmax_ious
def _unmap(data, count, index, fill=0):
    # Unmap a subset of items (data) back to the original set of items (of
    # size count)
if len(data.shape) == 1:
ret = np.empty((count,), dtype=data.dtype)
ret.fill(fill)
ret[index] = data
else:
ret = np.empty((count,) + data.shape[1:], dtype=data.dtype)
ret.fill(fill)
ret[index, :] = data
return ret
def _get_inside_index(anchor, H, W):
    # Calculate indices of anchors which are located completely inside the
    # image whose size is specified.
xp = cuda.get_array_module(anchor)
index_inside = xp.where(
(anchor[:, 0] >= 0) &
(anchor[:, 1] >= 0) &
(anchor[:, 2] <= H) &
(anchor[:, 3] <= W)
)[0]
return index_inside
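# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a minimal call to
# AnchorTargetCreator with toy CPU arrays. Shapes follow the docstring above:
# bbox is (R, 4), anchor is (S, 4) in chainercv's (y_min, x_min, y_max, x_max)
# order, and img_size is (H, W). The names `toy_bbox`/`toy_anchor` are
# illustrative only.
if __name__ == '__main__':
    toy_bbox = np.array([[10, 10, 50, 50]], dtype=np.float32)       # (R=1, 4)
    toy_anchor = np.array([[0, 0, 40, 40],
                           [20, 20, 60, 60],
                           [-10, -10, 30, 30]], dtype=np.float32)   # (S=3, 4)
    creator = AnchorTargetCreator(n_sample=4)
    loc, label = creator(toy_bbox, toy_anchor, img_size=(100, 100))
    # loc has shape (S, 4); label has shape (S,) with values 1/0/-1, where
    # anchors extending outside the image (the third one here) stay at -1.
    print(loc.shape, label)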
|
import argparse
import os
import sys
from paasta_tools.envoy_tools import are_services_up_in_pod as is_envoy_ready
from paasta_tools.smartstack_tools import (
are_services_up_on_ip_port as is_smartstack_ready,
)
from paasta_tools.utils import load_system_paasta_config
system_paasta_config = load_system_paasta_config()
synapse_port = system_paasta_config.get_synapse_port()
synapse_host = "169.254.255.254"
synapse_haproxy_url_format = system_paasta_config.get_synapse_haproxy_url_format()
envoy_host = os.environ["PAASTA_HOST"]
envoy_admin_port = system_paasta_config.get_envoy_admin_port()
envoy_admin_endpoint_format = system_paasta_config.get_envoy_admin_endpoint_format()
pod_ip = os.environ["PAASTA_POD_IP"]
def get_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument(
"--enable-smartstack",
dest="smartstack_readiness_check_enabled",
action="store_true",
help="Check smartstack readiness",
)
parser.add_argument(
"--enable-envoy",
action="store_true",
dest="envoy_readiness_check_enabled",
help="Check envoy readiness",
)
parser.add_argument(
"pod_port", help="Pod Port", type=int,
)
parser.add_argument(
"services", nargs="+", help="List of service.instance names",
)
return parser
def main() -> None:
args = get_parser().parse_args()
if args.smartstack_readiness_check_enabled:
smartstack_ready = is_smartstack_ready(
synapse_host=synapse_host,
synapse_port=synapse_port,
synapse_haproxy_url_format=synapse_haproxy_url_format,
services=args.services,
host_ip=pod_ip,
host_port=args.pod_port,
)
else:
smartstack_ready = True
if args.envoy_readiness_check_enabled:
envoy_ready = is_envoy_ready(
envoy_host=envoy_host,
envoy_admin_port=envoy_admin_port,
envoy_admin_endpoint_format=envoy_admin_endpoint_format,
registrations=args.services,
pod_ip=pod_ip,
pod_port=args.pod_port,
)
else:
envoy_ready = True
if smartstack_ready and envoy_ready:
sys.exit(0)
else:
if not smartstack_ready:
print(
f"Could not find backend {pod_ip}:{args.pod_port} for service {args.services} "
f"on Haproxy at {synapse_host}:{synapse_port}"
)
if not envoy_ready:
print(
f"Could not find backend {pod_ip}:{args.pod_port} for service {args.services} "
f"on Envoy at {envoy_host}:{envoy_admin_port}"
)
sys.exit(1)
if __name__ == "__main__":
main()
|
import numpy as np
import chainer
class ConstantStubLink(chainer.Link):
"""A chainer.Link that returns constant value(s).
This is a :obj:`chainer.Link` that returns constant
:obj:`chainer.Variable` (s) when :meth:`forward` method is called.
Args:
        outputs (~numpy.ndarray or tuple of ~numpy.ndarray):
The value(s) of variable(s) returned by :meth:`forward`.
If an array is specified, :meth:`forward` returns
a :obj:`chainer.Variable`. Otherwise, it returns a tuple of
:obj:`chainer.Variable`.
"""
def __init__(self, outputs):
super(ConstantStubLink, self).__init__()
if isinstance(outputs, tuple):
self._tuple = True
else:
self._tuple = False
outputs = outputs,
self._outputs = []
for output in outputs:
if not isinstance(output, np.ndarray):
raise ValueError(
'output must be numpy.ndarray or tuple of numpy.ndarray')
self._outputs.append(chainer.Variable(output))
self._outputs = tuple(self._outputs)
def to_cpu(self):
super(ConstantStubLink, self).to_cpu()
for output in self._outputs:
output.to_cpu()
def to_gpu(self, device=None):
super(ConstantStubLink, self).to_gpu(device)
for output in self._outputs:
output.to_gpu(device)
def forward(self, *_):
"""Returns value(s).
Args:
This method can take any values as its arguments.
This function returns values independent of the arguments.
Returns:
chainer.Variable or tuple of chainer.Variable:
If :obj:`outputs` is an array, this method returns
a :obj:`chainer.Variable`. Otherwise, this returns a
tuple of :obj:`chainer.Variable`.
"""
if self._tuple:
return self._outputs
else:
return self._outputs[0]
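# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): ConstantStubLink with
# a single array output, as described in the class docstring above.
if __name__ == '__main__':
    stub = ConstantStubLink(np.zeros((1, 3), dtype=np.float32))
    out = stub.forward('these', 'arguments', 'are', 'ignored')
    assert isinstance(out, chainer.Variable)
    assert out.shape == (1, 3)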
|
import matplotlib
from pylatex import Document, Section, Figure, NoEscape
matplotlib.use('Agg') # Not to use X server. For TravisCI.
import matplotlib.pyplot as plt # noqa
def main(fname, width, *args, **kwargs):
geometry_options = {"right": "2cm", "left": "2cm"}
doc = Document(fname, geometry_options=geometry_options)
doc.append('Introduction.')
with doc.create(Section('I am a section')):
doc.append('Take a look at this beautiful plot:')
with doc.create(Figure(position='htbp')) as plot:
plot.add_plot(width=NoEscape(width), *args, **kwargs)
plot.add_caption('I am a caption.')
doc.append('Created using matplotlib.')
doc.append('Conclusion.')
doc.generate_pdf(clean_tex=False)
if __name__ == '__main__':
x = [0, 1, 2, 3, 4, 5, 6]
y = [15, 2, 7, 1, 5, 6, 9]
plt.plot(x, y)
main('matplotlib_ex-dpi', r'1\textwidth', dpi=300)
main('matplotlib_ex-facecolor', r'0.5\textwidth', facecolor='b')
|
import tensorflow as tf
from tensorflow.keras.layers import Layer # type: ignore
from tensorflow.keras import activations
from tensorflow.keras import initializers
from typing import List, Optional, Text, Tuple
import tensornetwork as tn
from tensornetwork import Node
import numpy as np
import math
# pytype: disable=module-attr
@tf.keras.utils.register_keras_serializable(package='tensornetwork')
# pytype: enable=module-attr
class DenseExpander(Layer):
"""Expander TN layer. Greatly expands dimensionality of input.
Used in conjunction with DenseEntangler to achieve very large hidden layers.
This layer can take an input shape of arbitrary dimension, with the first
dimension expected to be a batch dimension. The weight matrix will be
constructed from and applied to the last input dimension.
Example:
::
# as first layer in a sequential model:
model = Sequential()
model.add(
        DenseExpander(exp_base=2,
num_nodes=3,
use_bias=True,
activation='relu',
input_shape=(128,)))
# now the model will take as input arrays of shape (*, 128)
# and output arrays of shape (*, 1024).
# After the first layer, you don't need to specify
# the size of the input anymore:
model.add(
DenseExpander(exp_base=2,
num_nodes=2,
use_bias=True,
activation='relu'))
Args:
exp_base: Positive integer, base of the dimensionality expansion term.
num_nodes: Positive integer, number of nodes in expander.
Note: the output dim will be input_shape[-1] * (exp_base**num_nodes)
so increasing num_nodes will increase the output dim exponentially.
activation: Activation function to use.
If you don't specify anything, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the two weight matrices.
bias_initializer: Initializer for the bias vector.
Input shape:
N-D tensor with shape: `(batch_size, ..., input_dim)`.
Output shape:
N-D tensor with shape: `(batch_size, ..., input_shape[-1] *
(exp_base**num_nodes))`.
"""
def __init__(self,
exp_base: int,
num_nodes: int,
use_bias: Optional[bool] = True,
activation: Optional[Text] = None,
kernel_initializer: Optional[Text] = 'glorot_uniform',
bias_initializer: Optional[Text] = 'zeros',
**kwargs) -> None:
if 'input_shape' not in kwargs and 'input_dim' in kwargs:
kwargs['input_shape'] = (kwargs.pop('input_dim'),)
super().__init__(**kwargs)
self.exp_base = exp_base
self.num_nodes = num_nodes
self.nodes = []
self.use_bias = use_bias
self.activation = activations.get(activation)
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
def build(self, input_shape: List[int]) -> None:
# Disable the attribute-defined-outside-init violations in this function
# pylint: disable=attribute-defined-outside-init
if input_shape[-1] is None:
      raise ValueError('The last dimension of the inputs to `DenseExpander` '
                       'should be defined. Found `None`.')
super().build(input_shape)
self.output_dim = input_shape[-1] * (self.exp_base**self.num_nodes)
for i in range(self.num_nodes):
self.nodes.append(
self.add_weight(name=f'node_{i}',
shape=(input_shape[-1], self.exp_base,
input_shape[-1]),
trainable=True,
initializer=self.kernel_initializer))
self.bias_var = self.add_weight(
name='bias',
shape=(self.output_dim,),
trainable=True,
initializer=self.bias_initializer) if self.use_bias else None
def call(self, inputs: tf.Tensor, **kwargs) -> tf.Tensor: # pylint: disable=unused-argument
def f(x: tf.Tensor, nodes: List[Node], num_nodes: int, use_bias: bool,
bias_var: tf.Tensor) -> tf.Tensor:
state_node = tn.Node(x, name='xnode', backend="tensorflow")
operating_edge = state_node[0]
# The TN will be connected like this:
# | | | |
# | | 33333
# | | |
# | 22222
# | |
# 11111
# |
# xxxxxxx
for i in range(num_nodes):
op = tn.Node(nodes[i], name=f'node_{i}', backend="tensorflow")
tn.connect(operating_edge, op[0])
operating_edge = op[2]
state_node = tn.contract_between(state_node, op)
result = tf.reshape(state_node.tensor, (-1,))
if use_bias:
result += bias_var
return result
input_shape = list(inputs.shape)
inputs = tf.reshape(inputs, (-1, input_shape[-1]))
result = tf.vectorized_map(
lambda vec: f(vec, self.nodes, self.num_nodes, self.use_bias, self.
bias_var), inputs)
if self.activation is not None:
result = self.activation(result)
result = tf.reshape(result, [-1] + input_shape[1:-1] + [self.output_dim,])
return result
def compute_output_shape(self, input_shape: List[int]) -> Tuple[int, int]:
return tuple(input_shape[0:-1]) + (self.output_dim,)
def get_config(self) -> dict:
"""Returns the config of the layer.
The same layer can be reinstantiated later
(without its trained weights) from this configuration.
Returns:
Python dictionary containing the configuration of the layer.
"""
config = {}
# Include the Expander-specific arguments
args = ['exp_base', 'num_nodes', 'use_bias']
for arg in args:
config[arg] = getattr(self, arg)
# Serialize the activation
config['activation'] = activations.serialize(getattr(self, 'activation'))
# Serialize the initializers
initializers_list = ['kernel_initializer', 'bias_initializer']
for initializer_arg in initializers_list:
config[initializer_arg] = initializers.serialize(
getattr(self, initializer_arg))
# Get base config
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))
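# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a tiny Sequential
# model using DenseExpander, mirroring the docstring example above. With an
# input of width 8, exp_base=2 and num_nodes=3 give an output dimension of
# 8 * 2**3 = 64.
if __name__ == '__main__':
  demo_model = tf.keras.Sequential([
      DenseExpander(exp_base=2, num_nodes=3, use_bias=True,
                    activation='relu', input_shape=(8,)),
  ])
  print(demo_model.output_shape)  # expected: (None, 64)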
|
import asyncio
import logging
from roonapi import RoonApi
from homeassistant.const import CONF_API_KEY, CONF_HOST
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.dt import utcnow
from .const import ROON_APPINFO
_LOGGER = logging.getLogger(__name__)
FULL_SYNC_INTERVAL = 30
class RoonServer:
"""Manages a single Roon Server."""
def __init__(self, hass, config_entry):
"""Initialize the system."""
self.config_entry = config_entry
self.hass = hass
self.roonapi = None
self.all_player_ids = set()
self.all_playlists = []
self.offline_devices = set()
self._exit = False
self._roon_name_by_id = {}
@property
def host(self):
"""Return the host of this server."""
return self.config_entry.data[CONF_HOST]
async def async_setup(self, tries=0):
"""Set up a roon server based on host parameter."""
host = self.host
hass = self.hass
token = self.config_entry.data[CONF_API_KEY]
_LOGGER.debug("async_setup: %s %s", token, host)
self.roonapi = RoonApi(ROON_APPINFO, token, host, blocking_init=False)
self.roonapi.register_state_callback(
self.roonapi_state_callback, event_filter=["zones_changed"]
)
# initialize media_player platform
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(
self.config_entry, "media_player"
)
)
# Initialize Roon background polling
asyncio.create_task(self.async_do_loop())
return True
async def async_reset(self):
"""Reset this connection to default state.
Will cancel any scheduled setup retry and will unload
the config entry.
"""
self.stop_roon()
return True
@property
def zones(self):
"""Return list of zones."""
return self.roonapi.zones
def add_player_id(self, entity_id, roon_name):
"""Register a roon player."""
self._roon_name_by_id[entity_id] = roon_name
def roon_name(self, entity_id):
"""Get the name of the roon player from entity_id."""
return self._roon_name_by_id.get(entity_id)
def stop_roon(self):
"""Stop background worker."""
self.roonapi.stop()
self._exit = True
def roonapi_state_callback(self, event, changed_zones):
"""Callbacks from the roon api websockets."""
self.hass.add_job(self.async_update_changed_players(changed_zones))
async def async_do_loop(self):
"""Background work loop."""
self._exit = False
while not self._exit:
await self.async_update_players()
# await self.async_update_playlists()
await asyncio.sleep(FULL_SYNC_INTERVAL)
async def async_update_changed_players(self, changed_zones_ids):
"""Update the players which were reported as changed by the Roon API."""
for zone_id in changed_zones_ids:
if zone_id not in self.roonapi.zones:
# device was removed ?
continue
zone = self.roonapi.zones[zone_id]
for device in zone["outputs"]:
dev_name = device["display_name"]
if dev_name == "Unnamed" or not dev_name:
# ignore unnamed devices
continue
player_data = await self.async_create_player_data(zone, device)
dev_id = player_data["dev_id"]
player_data["is_available"] = True
if dev_id in self.offline_devices:
# player back online
self.offline_devices.remove(dev_id)
async_dispatcher_send(self.hass, "roon_media_player", player_data)
self.all_player_ids.add(dev_id)
async def async_update_players(self):
"""Periodic full scan of all devices."""
zone_ids = self.roonapi.zones.keys()
await self.async_update_changed_players(zone_ids)
# check for any removed devices
all_devs = {}
for zone in self.roonapi.zones.values():
for device in zone["outputs"]:
player_data = await self.async_create_player_data(zone, device)
dev_id = player_data["dev_id"]
all_devs[dev_id] = player_data
for dev_id in self.all_player_ids:
if dev_id in all_devs:
continue
# player was removed!
player_data = {"dev_id": dev_id}
player_data["is_available"] = False
async_dispatcher_send(self.hass, "roon_media_player", player_data)
self.offline_devices.add(dev_id)
async def async_update_playlists(self):
"""Store lists in memory with all playlists - could be used by a custom lovelace card."""
all_playlists = []
roon_playlists = self.roonapi.playlists()
if roon_playlists and "items" in roon_playlists:
all_playlists += [item["title"] for item in roon_playlists["items"]]
roon_playlists = self.roonapi.internet_radio()
if roon_playlists and "items" in roon_playlists:
all_playlists += [item["title"] for item in roon_playlists["items"]]
self.all_playlists = all_playlists
async def async_create_player_data(self, zone, output):
"""Create player object dict by combining zone with output."""
new_dict = zone.copy()
new_dict.update(output)
new_dict.pop("outputs")
new_dict["host"] = self.host
new_dict["is_synced"] = len(zone["outputs"]) > 1
new_dict["zone_name"] = zone["display_name"]
new_dict["display_name"] = output["display_name"]
new_dict["last_changed"] = utcnow()
        # we don't use the zone_id or output_id as unique id for now, as I've seen cases where it changes for some reason
new_dict["dev_id"] = f"roon_{self.host}_{output['display_name']}"
return new_dict
|
from collections import Counter
from scattertext.external.phrasemachine import phrasemachine
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class PhraseMachinePhrases(FeatsFromSpacyDoc):
'''
Returns unigrams and phrase machine phrases
'''
def get_feats(self, doc):
'''
Parameters
----------
doc, Spacy Doc
Returns
-------
Counter noun chunk -> count
'''
ngram_counter = Counter()
for sent in doc.sents:
ngram_counter += _phrase_counts(sent)
return ngram_counter
class PhraseMachinePhrasesAndUnigrams(FeatsFromSpacyDoc):
'''
Returns unigrams and phrase machine phrases
'''
def get_feats(self, doc):
'''
Parameters
----------
doc, Spacy Doc
Returns
-------
Counter noun chunk -> count
'''
# ngram_counter = phrasemachine.get_phrases(str(doc), tagger='spacy')['counts']
ngram_counter = Counter()
for sent in doc.sents:
unigrams = self._get_unigram_feats(sent)
ngram_counter += Counter(unigrams) + _phrase_counts(sent)
return ngram_counter
def _phrase_counts(sent):
pos_seq = [w.tag_ for w in sent]
tokens = [w.lower_ for w in sent]
counts = Counter()
for (start, end) in phrasemachine.extract_ngram_filter(pos_seq, minlen=2, maxlen=8):
phrase = phrasemachine.safejoin([tokens[i] for i in range(start, end)])
phrase = phrase.lower()
counts[phrase] += 1
return counts
|
import argparse
import glob
import os
import struct
import sys
def clamp_to_min_max(value, min, max):
if value > max:
value = max
elif value < min:
value = min
return value
def clamp_to_u8(value):
return clamp_to_min_max(value, 0, 255)
def parse_args():
parser = argparse.ArgumentParser(description="Set the mouse DPI")
parser.add_argument('-d', '--device', type=str, help="Device string like \"0003:1532:0045.000C\"")
parser.add_argument('--dpi_x', required=True, type=int, help="DPI on the X axis (100, 20000)")
parser.add_argument('--dpi_y', type=int, help="DPI on the Y axis (if omitted, dpi_x is used), (100, 20000)")
args = parser.parse_args()
return args
def run():
args = parse_args()
if args.device is None:
mouse_dirs = glob.glob(os.path.join('/sys/bus/hid/drivers/razermouse/', "*:*:*.*"))
if len(mouse_dirs) > 1:
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
if len(mouse_dirs) < 1:
print("No mouse directories found. Make sure the driver is binded", file=sys.stderr)
sys.exit(1)
mouse_dir = mouse_dirs[0]
else:
mouse_dir = os.path.join('/sys/bus/hid/drivers/razermouse/', args.device)
if not os.path.isdir(mouse_dir):
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
dpi_x = clamp_to_min_max(args.dpi_x, 100, 20000)
if args.dpi_y is None:
byte_string = struct.pack(">H", dpi_x)
else:
dpi_y = clamp_to_min_max(args.dpi_y, 100, 20000)
byte_string = struct.pack(">HH", dpi_x, dpi_y)
set_mouse_dpi_filepath = os.path.join(mouse_dir, "dpi")
with open(set_mouse_dpi_filepath, 'wb') as set_mouse_dpi_file:
set_mouse_dpi_file.write(byte_string)
print("Done")
if __name__ == '__main__':
run()
|
import os
import tempfile
import unittest
import mock
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hpcc
FAKE_HPCC_MAIN_SOURCE = """/* comments redacted */
#include <redacted.h>
int
main(int argc, char *argv[]) {
int redacted;
MPI_Init( &argc, &argv );
if (redacted)
goto hpcc_end;
/* -------------------------------------------------- */
/* MPI RandomAccess */
/* -------------------------------------------------- */
MPI RandomAccess line 1 redacted.
MPI RandomAccess line 2 redacted.
/* -------------------------------------------------- */
/* StarRandomAccess */
/* -------------------------------------------------- */
StarRandomAccess line 1 redacted.
StarRandomAccess line 2 redacted.
/* -------------------------------------------------- */
/* SingleRandomAccess */
/* -------------------------------------------------- */
SingleRandomAccess line 1 redacted.
SingleRandomAccess line 2 redacted.
hpcc_end:
redacted();
return 0;
}
"""
class HpccTest(unittest.TestCase):
def _RunLimitBenchmarksToRun(self, selected_hpcc_benchmarks):
"""Calls _LimitBenchmarksToRun for tests."""
self._temp_dir = tempfile.mkdtemp()
with open(os.path.join(self._temp_dir, 'hpcc.c'), 'w+') as f:
f.write(FAKE_HPCC_MAIN_SOURCE)
with mock.patch(vm_util.__name__ + '.GetTempDir') as mock_gettempdir:
mock_gettempdir.return_value = self._temp_dir
hpcc._LimitBenchmarksToRun(mock.Mock(), selected_hpcc_benchmarks)
def _ValidateRunLimitBenchmarksToRun(self, expected_lines):
"""Validates that expected lines are found in the modified hpcc.c file.
Args:
expected_lines: An iterable of expected lines of code.
"""
expected_lines = set(expected_lines)
with open(os.path.join(self._temp_dir, 'hpcc.c')) as f:
for line in f:
expected_lines.discard(line.rstrip())
if expected_lines:
self.fail('Unexpected lines in hpcc.c: %s' % expected_lines)
def testLimitBenchmarksToRunToFirstBenchmark(self):
"""Tests limiting the benchmarks to run to the first benchmark."""
self._RunLimitBenchmarksToRun(set(['MPI RandomAccess']))
self._ValidateRunLimitBenchmarksToRun([
' MPI_Init( &argc, &argv );',
' if (redacted)',
' goto hpcc_end;',
' MPI RandomAccess line 1 redacted.',
' MPI RandomAccess line 2 redacted.',
'// StarRandomAccess line 1 redacted.',
'// StarRandomAccess line 2 redacted.',
'// SingleRandomAccess line 1 redacted.',
'// SingleRandomAccess line 2 redacted.',
' hpcc_end:',
])
def testLimitBenchmarksToRunToLastBenchmark(self):
"""Tests limiting the benchmarks to run to the first benchmark."""
self._RunLimitBenchmarksToRun(set(['SingleRandomAccess']))
self._ValidateRunLimitBenchmarksToRun([
' MPI_Init( &argc, &argv );',
' if (redacted)',
' goto hpcc_end;',
'// MPI RandomAccess line 1 redacted.',
'// MPI RandomAccess line 2 redacted.',
'// StarRandomAccess line 1 redacted.',
'// StarRandomAccess line 2 redacted.',
' SingleRandomAccess line 1 redacted.',
' SingleRandomAccess line 2 redacted.',
' hpcc_end:',
])
def testLimitBenchmarksToRunToMultipleBenchmarks(self):
"""Tests limiting the benchmarks to run to the first benchmark."""
self._RunLimitBenchmarksToRun(
set(['StarRandomAccess', 'SingleRandomAccess']))
self._ValidateRunLimitBenchmarksToRun([
' MPI_Init( &argc, &argv );',
' if (redacted)',
' goto hpcc_end;',
'// MPI RandomAccess line 1 redacted.',
'// MPI RandomAccess line 2 redacted.',
' StarRandomAccess line 1 redacted.',
' StarRandomAccess line 2 redacted.',
' SingleRandomAccess line 1 redacted.',
' SingleRandomAccess line 2 redacted.',
' hpcc_end:',
])
if __name__ == '__main__':
unittest.main()
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Field, Layout
from django import forms
from django.http import QueryDict
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from weblate.formats.models import FILE_FORMATS
from weblate.trans.discovery import ComponentDiscovery
from weblate.trans.forms import AutoForm, BulkEditForm
from weblate.utils.forms import ContextDiv
from weblate.utils.render import validate_render, validate_render_component
from weblate.utils.validators import validate_filename, validate_re
class AddonFormMixin:
def serialize_form(self):
return self.cleaned_data
def save(self):
self._addon.configure(self.serialize_form())
return self._addon.instance
class BaseAddonForm(forms.Form, AddonFormMixin):
def __init__(self, user, addon, instance=None, *args, **kwargs):
self._addon = addon
self.user = user
super().__init__(*args, **kwargs)
class GenerateMoForm(BaseAddonForm):
path = forms.CharField(
label=_("Path of generated MO file"),
required=False,
initial="{{ filename|stripext }}.mo",
help_text=_("If not specified, the location of the PO file will be used."),
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.layout = Layout(
Field("path"),
ContextDiv(
template="addons/generatemo_help.html", context={"user": self.user}
),
)
def test_render(self, value):
validate_render_component(value, translation=True)
def clean_path(self):
self.test_render(self.cleaned_data["path"])
validate_filename(self.cleaned_data["path"])
return self.cleaned_data["path"]
class GenerateForm(BaseAddonForm):
filename = forms.CharField(label=_("Name of generated file"), required=True)
template = forms.CharField(
widget=forms.Textarea(), label=_("Content of generated file"), required=True
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.layout = Layout(
Field("filename"),
Field("template"),
ContextDiv(
template="addons/generate_help.html", context={"user": self.user}
),
)
def test_render(self, value):
validate_render_component(value, translation=True)
def clean_filename(self):
self.test_render(self.cleaned_data["filename"])
validate_filename(self.cleaned_data["filename"])
return self.cleaned_data["filename"]
def clean_template(self):
self.test_render(self.cleaned_data["template"])
return self.cleaned_data["template"]
class GettextCustomizeForm(BaseAddonForm):
width = forms.ChoiceField(
label=_("Long lines wrapping"),
choices=[
(77, _("Wrap lines at 77 chars and at newlines")),
(65535, _("Only wrap lines at newlines")),
(-1, _("No line wrapping")),
],
required=True,
initial=77,
help_text=_(
"By default gettext wraps lines at 77 chars and newlines. "
"With --no-wrap parameter, it wraps only at newlines."
),
)
class MsgmergeForm(BaseAddonForm):
previous = forms.BooleanField(
label=_("Keep previous msgids of translated strings"),
required=False,
initial=True,
)
no_location = forms.BooleanField(
label=_("Remove locations of translated strings"),
required=False,
initial=False,
)
fuzzy = forms.BooleanField(
label=_("Use fuzzy matching"), required=False, initial=True
)
class GitSquashForm(BaseAddonForm):
squash = forms.ChoiceField(
label=_("Commit squashing"),
widget=forms.RadioSelect,
choices=(
("all", _("All commits into one")),
("language", _("Per language")),
("file", _("Per file")),
("author", _("Per author")),
),
initial="all",
required=True,
)
append_trailers = forms.BooleanField(
label=_("Append trailers to squashed commit message"),
required=False,
initial=True,
help_text=_(
"Trailer lines are lines that look similar to RFC 822 e-mail "
"headers, at the end of the otherwise free-form part of a commit "
"message, such as 'Co-authored-by: ...'."
),
)
commit_message = forms.CharField(
widget=forms.Textarea(),
required=False,
help_text=_(
"This commit message will be used instead of the combined commit "
"messages from the squashed commits."
),
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.layout = Layout(
Field("squash"),
Field("append_trailers"),
Field("commit_message"),
ContextDiv(template="addons/squash_help.html", context={"user": self.user}),
)
class JSONCustomizeForm(BaseAddonForm):
sort_keys = forms.BooleanField(label=_("Sort JSON keys"), required=False)
indent = forms.IntegerField(
label=_("JSON indentation"), min_value=0, initial=4, required=True
)
class YAMLCustomizeForm(BaseAddonForm):
indent = forms.IntegerField(
label=_("YAML indentation"), min_value=1, max_value=10, initial=2, required=True
)
width = forms.ChoiceField(
label=_("Long lines wrapping"),
choices=[
("80", _("Wrap lines at 80 chars")),
("100", _("Wrap lines at 100 chars")),
("120", _("Wrap lines at 120 chars")),
("180", _("Wrap lines at 180 chars")),
("65535", _("No line wrapping")),
],
required=True,
initial=80,
)
line_break = forms.ChoiceField(
label=_("Line breaks"),
choices=[
("dos", _("DOS (\\r\\n)")),
("unix", _("UNIX (\\n)")),
("mac", _("MAC (\\r)")),
],
required=True,
initial="unix",
)
class RemoveForm(BaseAddonForm):
age = forms.IntegerField(
label=_("Days to keep"), min_value=0, initial=30, required=True
)
class RemoveSuggestionForm(RemoveForm):
votes = forms.IntegerField(
label=_("Voting threshold"),
initial=0,
required=True,
help_text=_(
"Threshold for removal. This field has no effect with " "voting turned off."
),
)
class DiscoveryForm(BaseAddonForm):
match = forms.CharField(
label=_("Regular expression to match translation files against"), required=True
)
file_format = forms.ChoiceField(
label=_("File format"),
choices=FILE_FORMATS.get_choices(empty=True),
initial="",
required=True,
)
name_template = forms.CharField(
label=_("Customize the component name"),
initial="{{ component }}",
required=True,
)
base_file_template = forms.CharField(
label=_("Define the monolingual base filename"),
initial="",
required=False,
help_text=_("Leave empty for bilingual translation files."),
)
new_base_template = forms.CharField(
label=_("Define the base file for new translations"),
initial="",
required=False,
help_text=_(
"Filename of file used for creating new translations. "
"For gettext choose .pot file."
),
)
language_regex = forms.CharField(
label=_("Language filter"),
max_length=200,
initial="^[^.]+$",
validators=[validate_re],
help_text=_(
"Regular expression to filter "
"translation files against when scanning for filemask."
),
)
copy_addons = forms.BooleanField(
label=_("Clone addons from the main component to the newly created ones"),
required=False,
initial=True,
)
remove = forms.BooleanField(
label=_("Remove components for inexistant files"), required=False
)
confirm = forms.BooleanField(
label=_("I confirm the above matches look correct"),
required=False,
widget=forms.HiddenInput,
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.layout = Layout(
Field("match"),
Field("file_format"),
Field("name_template"),
Field("base_file_template"),
Field("new_base_template"),
Field("language_regex"),
Field("copy_addons"),
Field("remove"),
ContextDiv(
template="addons/discovery_help.html", context={"user": self.user}
),
)
if self.is_bound:
# Perform form validation
self.full_clean()
# Show preview if form was submitted
if self.cleaned_data["preview"]:
self.fields["confirm"].widget = forms.CheckboxInput()
self.helper.layout.insert(0, Field("confirm"))
created, matched, deleted = self.discovery.perform(
preview=True, remove=self.cleaned_data["remove"]
)
self.helper.layout.insert(
0,
ContextDiv(
template="addons/discovery_preview.html",
context={
"matches_created": created,
"matches_matched": matched,
"matches_deleted": deleted,
"user": self.user,
},
),
)
@cached_property
def discovery(self):
return ComponentDiscovery(
self._addon.instance.component,
**ComponentDiscovery.extract_kwargs(self.cleaned_data)
)
def clean(self):
self.cleaned_data["preview"] = False
# There are some other errors or the form was loaded from db
if self.errors or not isinstance(self.data, QueryDict):
return
self.cleaned_data["preview"] = True
if not self.cleaned_data["confirm"]:
raise forms.ValidationError(
_("Please review and confirm the matched components.")
)
def clean_match(self):
match = self.cleaned_data["match"]
validate_re(match, ("component", "language"))
return match
@staticmethod
def test_render(value):
return validate_render(value, component="test")
def template_clean(self, name):
result = self.test_render(self.cleaned_data[name])
if result and result == self.cleaned_data[name]:
raise forms.ValidationError(
_("Please include component markup in the template.")
)
return self.cleaned_data[name]
def clean_name_template(self):
return self.template_clean("name_template")
def clean_base_file_template(self):
return self.template_clean("base_file_template")
def clean_new_base_template(self):
return self.template_clean("new_base_template")
class AutoAddonForm(AutoForm, AddonFormMixin):
def __init__(self, user, addon, instance=None, *args, **kwargs):
self.user = user
self._addon = addon
super().__init__(obj=addon.instance.component, *args, **kwargs)
class BulkEditAddonForm(BulkEditForm, AddonFormMixin):
def __init__(self, user, addon, instance=None, *args, **kwargs):
self.user = user
self._addon = addon
component = addon.instance.component
super().__init__(
obj=component, project=component.project, user=None, *args, **kwargs
)
def serialize_form(self):
result = dict(self.cleaned_data)
# Need to convert to JSON serializable objects
result["add_labels"] = list(result["add_labels"].values_list("name", flat=True))
result["remove_labels"] = list(
result["remove_labels"].values_list("name", flat=True)
)
return result
class CDNJSForm(BaseAddonForm):
threshold = forms.IntegerField(
label=_("Translation threshold"),
initial=0,
max_value=100,
min_value=0,
required=True,
help_text=_("Threshold for inclusion of translations."),
)
css_selector = forms.CharField(
label=_("CSS selector"),
required=True,
initial=".l10n",
help_text=_("CSS selector to detect localizable elements."),
)
cookie_name = forms.CharField(
label=_("Language cookie name"),
required=False,
initial="",
help_text=_("Name of cookie which stores language preference."),
)
files = forms.CharField(
widget=forms.Textarea(),
label=_("Extract strings from HTML files"),
required=False,
help_text=_(
"List of filenames in current repository or remote URLs to parse "
"for translatable strings."
),
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.layout = Layout(
Field("threshold"),
Field("css_selector"),
Field("cookie_name"),
Field("files"),
)
if self.is_bound and self._addon.instance.pk:
self.helper.layout.insert(
0,
ContextDiv(
template="addons/cdnjs.html",
context={"url": self._addon.cdn_js_url, "user": self.user},
),
)
|
from datetime import timedelta
import pytest
from transmissionrpc.error import TransmissionError
from homeassistant import data_entry_flow
from homeassistant.components import transmission
from homeassistant.components.transmission import config_flow
from homeassistant.components.transmission.const import (
CONF_LIMIT,
CONF_ORDER,
DEFAULT_LIMIT,
DEFAULT_NAME,
DEFAULT_ORDER,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
NAME = "Transmission"
HOST = "192.168.1.100"
USERNAME = "username"
PASSWORD = "password"
PORT = 9091
SCAN_INTERVAL = 10
MOCK_ENTRY = {
CONF_NAME: NAME,
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_PORT: PORT,
}
@pytest.fixture(name="api")
def mock_transmission_api():
"""Mock an api."""
with patch("transmissionrpc.Client"):
yield
@pytest.fixture(name="auth_error")
def mock_api_authentication_error():
"""Mock an api."""
with patch(
"transmissionrpc.Client", side_effect=TransmissionError("401: Unauthorized")
):
yield
@pytest.fixture(name="conn_error")
def mock_api_connection_error():
"""Mock an api."""
with patch(
"transmissionrpc.Client",
side_effect=TransmissionError("111: Connection refused"),
):
yield
@pytest.fixture(name="unknown_error")
def mock_api_unknown_error():
"""Mock an api."""
with patch("transmissionrpc.Client", side_effect=TransmissionError):
yield
@pytest.fixture(name="transmission_setup", autouse=True)
def transmission_setup_fixture():
"""Mock transmission entry setup."""
with patch(
"homeassistant.components.transmission.async_setup_entry", return_value=True
):
yield
def init_config_flow(hass):
"""Init a configuration flow."""
flow = config_flow.TransmissionFlowHandler()
flow.hass = hass
return flow
async def test_flow_user_config(hass, api):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_flow_required_fields(hass, api):
"""Test with required fields only."""
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN,
context={"source": "user"},
data={CONF_NAME: NAME, CONF_HOST: HOST, CONF_PORT: PORT},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == NAME
assert result["data"][CONF_NAME] == NAME
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
async def test_flow_all_provided(hass, api):
"""Test with all provided."""
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}, data=MOCK_ENTRY
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == NAME
assert result["data"][CONF_NAME] == NAME
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_PORT] == PORT
async def test_options(hass):
"""Test updating options."""
entry = MockConfigEntry(
domain=transmission.DOMAIN,
title=CONF_NAME,
data=MOCK_ENTRY,
options={CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL},
)
flow = init_config_flow(hass)
options_flow = flow.async_get_options_flow(entry)
result = await options_flow.async_step_init()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await options_flow.async_step_init({CONF_SCAN_INTERVAL: 10})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_SCAN_INTERVAL] == 10
async def test_import(hass, api):
"""Test import step."""
flow = init_config_flow(hass)
# import with minimum fields only
result = await flow.async_step_import(
{
CONF_NAME: DEFAULT_NAME,
CONF_HOST: HOST,
CONF_PORT: DEFAULT_PORT,
CONF_SCAN_INTERVAL: timedelta(seconds=DEFAULT_SCAN_INTERVAL),
CONF_LIMIT: DEFAULT_LIMIT,
CONF_ORDER: DEFAULT_ORDER,
}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == DEFAULT_NAME
assert result["data"][CONF_NAME] == DEFAULT_NAME
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == DEFAULT_PORT
assert result["data"][CONF_SCAN_INTERVAL] == DEFAULT_SCAN_INTERVAL
# import with all
result = await flow.async_step_import(
{
CONF_NAME: NAME,
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_PORT: PORT,
CONF_SCAN_INTERVAL: timedelta(seconds=SCAN_INTERVAL),
CONF_LIMIT: DEFAULT_LIMIT,
CONF_ORDER: DEFAULT_ORDER,
}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == NAME
assert result["data"][CONF_NAME] == NAME
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_PORT] == PORT
assert result["data"][CONF_SCAN_INTERVAL] == SCAN_INTERVAL
async def test_host_already_configured(hass, api):
"""Test host is already configured."""
entry = MockConfigEntry(
domain=transmission.DOMAIN,
data=MOCK_ENTRY,
options={CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL},
)
entry.add_to_hass(hass)
mock_entry_unique_name = MOCK_ENTRY.copy()
mock_entry_unique_name[CONF_NAME] = "Transmission 1"
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}, data=mock_entry_unique_name
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
mock_entry_unique_port = MOCK_ENTRY.copy()
mock_entry_unique_port[CONF_PORT] = 9092
mock_entry_unique_port[CONF_NAME] = "Transmission 2"
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}, data=mock_entry_unique_port
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
mock_entry_unique_host = MOCK_ENTRY.copy()
mock_entry_unique_host[CONF_HOST] = "192.168.1.101"
mock_entry_unique_host[CONF_NAME] = "Transmission 3"
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}, data=mock_entry_unique_host
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_name_already_configured(hass, api):
"""Test name is already configured."""
entry = MockConfigEntry(
domain=transmission.DOMAIN,
data=MOCK_ENTRY,
options={CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL},
)
entry.add_to_hass(hass)
mock_entry = MOCK_ENTRY.copy()
mock_entry[CONF_HOST] = "0.0.0.0"
result = await hass.config_entries.flow.async_init(
transmission.DOMAIN, context={"source": "user"}, data=mock_entry
)
assert result["type"] == "form"
assert result["errors"] == {CONF_NAME: "name_exists"}
async def test_error_on_wrong_credentials(hass, auth_error):
"""Test with wrong credentials."""
flow = init_config_flow(hass)
result = await flow.async_step_user(
{
CONF_NAME: NAME,
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_PORT: PORT,
}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {
CONF_USERNAME: "invalid_auth",
CONF_PASSWORD: "invalid_auth",
}
async def test_error_on_connection_failure(hass, conn_error):
"""Test when connection to host fails."""
flow = init_config_flow(hass)
result = await flow.async_step_user(
{
CONF_NAME: NAME,
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_PORT: PORT,
}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_error_on_unknown_error(hass, unknown_error):
    """Test on an unknown error from the client."""
flow = init_config_flow(hass)
result = await flow.async_step_user(
{
CONF_NAME: NAME,
CONF_HOST: HOST,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_PORT: PORT,
}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
|
import sys
import docker_registry.storage as storage
# Copy/Pasted from old models
from sqlalchemy import create_engine, ForeignKey, UniqueConstraint
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Table, Column, Integer, String, DateTime, func
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
created_at = Column(DateTime, nullable=False, default=func.now())
updated_at = Column(DateTime, onupdate=func.now())
username = Column(String(256), nullable=False, unique=True)
email = Column(String(256), nullable=False, unique=True)
password = Column(String(64), nullable=False)
repositories = relationship(
'Repository', order_by='Repository.name', backref='user'
)
repositories_revisions = Table(
'repositories_revisions',
Base.metadata,
Column('repository_id', Integer, ForeignKey('repositories.id')),
Column('revision_id', String(64), ForeignKey('revisions.id'))
)
class Tag(Base):
__tablename__ = 'tags'
__table_args__ = (
UniqueConstraint('name', 'repository_id'),
)
id = Column(Integer, primary_key=True)
name = Column(String(256), nullable=False)
revision_id = Column(String(64), ForeignKey('revisions.id'))
repository_id = Column(Integer, ForeignKey('repositories.id'))
revision = relationship('ImageRevision')
class ImageRevision(Base):
__tablename__ = 'revisions'
id = Column(String(64), primary_key=True, autoincrement=False, unique=True)
parent_id = Column(String(64), index=True, nullable=True)
layer_url = Column(String(256), index=False, nullable=True)
created_at = Column(DateTime, nullable=False)
class Repository(Base):
__tablename__ = 'repositories'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'))
name = Column(String(64), index=True, nullable=False)
revisions = relationship(
ImageRevision,
secondary=repositories_revisions,
order_by=ImageRevision.created_at.desc(),
backref='repositories'
)
tags = relationship('Tag', order_by='Tag.name', backref='repository')
def import_tags(sess, store):
for tag in sess.query(Tag).all():
try:
repos_name = tag.repository.name
tag_name = tag.name
repos_namespace = tag.repository.user.username
image_id = tag.revision.id
path = store.tag_path(repos_namespace, repos_name, tag_name)
if store.exists(path):
continue
dest = store.put_content(path, image_id)
print('{0} -> {1}'.format(dest, image_id))
except AttributeError as e:
print('# Warning: {0}'.format(e))
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: {0} URL'.format(sys.argv[0]))
sys.exit(0)
url = sys.argv[1]
Session = sessionmaker(bind=create_engine(url))
store = storage.load()
sess = Session()
import_tags(sess, store)
|
from numpy.testing import assert_array_equal, assert_allclose
import numpy as np
from scipy import stats, sparse
from mne.stats import permutation_cluster_1samp_test
from mne.stats.permutations import (permutation_t_test, _ci,
bootstrap_confidence_interval)
from mne.utils import run_tests_if_main, check_version
def test_permutation_t_test():
"""Test T-test based on permutations."""
# 1 sample t-test
np.random.seed(10)
n_samples, n_tests = 30, 5
X = np.random.randn(n_samples, n_tests)
X[:, :2] += 1
t_obs, p_values, H0 = permutation_t_test(
X, n_permutations=999, tail=0, seed=0)
assert (p_values > 0).all()
assert len(H0) == 999
is_significant = p_values < 0.05
assert_array_equal(is_significant, [True, True, False, False, False])
t_obs, p_values, H0 = permutation_t_test(
X, n_permutations=999, tail=1, seed=0)
assert (p_values > 0).all()
assert len(H0) == 999
is_significant = p_values < 0.05
assert_array_equal(is_significant, [True, True, False, False, False])
t_obs, p_values, H0 = permutation_t_test(
X, n_permutations=999, tail=-1, seed=0)
is_significant = p_values < 0.05
assert_array_equal(is_significant, [False, False, False, False, False])
X *= -1
t_obs, p_values, H0 = permutation_t_test(
X, n_permutations=999, tail=-1, seed=0)
assert (p_values > 0).all()
assert len(H0) == 999
is_significant = p_values < 0.05
assert_array_equal(is_significant, [True, True, False, False, False])
# check equivalence with spatio_temporal_cluster_test
for adjacency in (sparse.eye(n_tests), False):
t_obs_clust, _, p_values_clust, _ = permutation_cluster_1samp_test(
X, n_permutations=999, seed=0, adjacency=adjacency,
out_type='mask')
# the cluster tests drop any clusters that don't get thresholded
keep = p_values < 1
assert_allclose(t_obs_clust, t_obs)
assert_allclose(p_values_clust, p_values[keep], atol=1e-2)
X = np.random.randn(18, 1)
t_obs, p_values, H0 = permutation_t_test(X, n_permutations='all')
t_obs_scipy, p_values_scipy = stats.ttest_1samp(X[:, 0], 0)
assert_allclose(t_obs[0], t_obs_scipy, 8)
assert_allclose(p_values[0], p_values_scipy, rtol=1e-2)
def test_ci():
"""Test confidence intervals."""
# isolated test of CI functions
arr = np.linspace(0, 1, 1000)[..., np.newaxis]
assert_allclose(_ci(arr, method="parametric"),
_ci(arr, method="bootstrap"), rtol=.005)
assert_allclose(bootstrap_confidence_interval(arr, stat_fun="median",
random_state=0),
bootstrap_confidence_interval(arr, stat_fun="mean",
random_state=0),
rtol=.1)
# smoke test for new API
if check_version('numpy', '1.17'):
random_state = np.random.default_rng(0)
bootstrap_confidence_interval(arr, random_state=random_state)
run_tests_if_main()
|
from PyQt5.QtCore import pyqtSignal, Qt, QUrl
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtWebKitWidgets import QWebView, QWebPage
from qutebrowser.config import config, stylesheet
from qutebrowser.keyinput import modeman
from qutebrowser.utils import log, usertypes, utils, objreg, debug
from qutebrowser.browser.webkit import webpage
class WebView(QWebView):
"""Custom QWebView subclass with qutebrowser-specific features.
Attributes:
tab: The WebKitTab object for this WebView
hintmanager: The HintManager instance for this view.
scroll_pos: The current scroll position as (x%, y%) tuple.
win_id: The window ID of the view.
_tab_id: The tab ID of the view.
_old_scroll_pos: The old scroll position.
Signals:
scroll_pos_changed: Scroll percentage of current tab changed.
arg 1: x-position in %.
arg 2: y-position in %.
shutting_down: Emitted when the view is shutting down.
"""
STYLESHEET = """
WebView {
{% if conf.colors.webpage.bg %}
background-color: {{ qcolor_to_qsscolor(conf.colors.webpage.bg) }};
{% endif %}
}
"""
scroll_pos_changed = pyqtSignal(int, int)
shutting_down = pyqtSignal()
def __init__(self, *, win_id, tab_id, tab, private, parent=None):
super().__init__(parent)
# FIXME:qtwebengine this is only used to set the zoom factor from
# the QWebPage - we should get rid of it somehow (signals?)
self.tab = tab
self._tabdata = tab.data
self.win_id = win_id
self.scroll_pos = (-1, -1)
self._old_scroll_pos = (-1, -1)
self._tab_id = tab_id
page = webpage.BrowserPage(win_id=self.win_id, tab_id=self._tab_id,
tabdata=tab.data, private=private,
parent=self)
page.setVisibilityState(
QWebPage.VisibilityStateVisible if self.isVisible()
else QWebPage.VisibilityStateHidden)
self.setPage(page)
stylesheet.set_register(self)
def __repr__(self):
flags = QUrl.EncodeUnicode
urlstr = self.url().toDisplayString(flags) # type: ignore[arg-type]
url = utils.elide(urlstr, 100)
return utils.get_repr(self, tab_id=self._tab_id, url=url)
def __del__(self):
# Explicitly releasing the page here seems to prevent some segfaults
# when quitting.
# Copied from:
# https://code.google.com/p/webscraping/source/browse/webkit.py#325
try:
self.setPage(None) # type: ignore[arg-type]
except RuntimeError:
# It seems sometimes Qt has already deleted the QWebView and we
# get: RuntimeError: wrapped C/C++ object of type WebView has been
# deleted
pass
def shutdown(self):
"""Shut down the webview."""
self.shutting_down.emit()
# We disable javascript because that prevents some segfaults when
# quitting it seems.
log.destroy.debug("Shutting down {!r}.".format(self))
settings = self.settings()
settings.setAttribute(QWebSettings.JavascriptEnabled, False)
self.stop()
self.page().shutdown()
def createWindow(self, wintype):
"""Called by Qt when a page wants to create a new window.
This function is called from the createWindow() method of the
associated QWebPage, each time the page wants to create a new window of
the given type. This might be the result, for example, of a JavaScript
request to open a document in a new window.
Args:
wintype: This enum describes the types of window that can be
created by the createWindow() function.
QWebPage::WebBrowserWindow: The window is a regular web
browser window.
QWebPage::WebModalDialog: The window acts as modal dialog.
Return:
The new QWebView object.
"""
debug_type = debug.qenum_key(QWebPage, wintype)
log.webview.debug("createWindow with type {}".format(debug_type))
if wintype == QWebPage.WebModalDialog:
log.webview.warning("WebModalDialog requested, but we don't "
"support that!")
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=self.win_id)
# pylint: disable=protected-access
return tabbed_browser.tabopen(background=False)._widget
def paintEvent(self, e):
"""Extend paintEvent to emit a signal if the scroll position changed.
        This is a bit of a hack: we listen to repaint requests here, in the
        hope that a repaint will always be requested when scrolling, and if
        the scroll position actually changed, we emit a signal.
QtWebEngine has a scrollPositionChanged signal, so it's not needed
there.
Args:
e: The QPaintEvent.
Return:
The superclass event return value.
"""
frame = self.page().mainFrame()
new_pos = (frame.scrollBarValue(Qt.Horizontal),
frame.scrollBarValue(Qt.Vertical))
if self._old_scroll_pos != new_pos:
self._old_scroll_pos = new_pos
m = (frame.scrollBarMaximum(Qt.Horizontal),
frame.scrollBarMaximum(Qt.Vertical))
perc = (round(100 * new_pos[0] / m[0]) if m[0] != 0 else 0,
round(100 * new_pos[1] / m[1]) if m[1] != 0 else 0)
self.scroll_pos = perc
self.scroll_pos_changed.emit(*perc)
# Let superclass handle the event
super().paintEvent(e)
def contextMenuEvent(self, e):
"""Save a reference to the context menu so we can close it.
This is not needed for QtWebEngine, so it's in here.
"""
menu = self.page().createStandardContextMenu()
self.shutting_down.connect(menu.close)
mm = modeman.instance(self.win_id)
mm.entered.connect(menu.close)
menu.exec_(e.globalPos())
def showEvent(self, e):
"""Extend showEvent to set the page visibility state to visible.
Args:
e: The QShowEvent.
Return:
The superclass event return value.
"""
super().showEvent(e)
self.page().setVisibilityState(QWebPage.VisibilityStateVisible)
def hideEvent(self, e):
"""Extend hideEvent to set the page visibility state to hidden.
Args:
e: The QHideEvent.
Return:
The superclass event return value.
"""
super().hideEvent(e)
self.page().setVisibilityState(QWebPage.VisibilityStateHidden)
def mousePressEvent(self, e):
"""Set the tabdata ClickTarget on a mousepress.
This is implemented here as we don't need it for QtWebEngine.
"""
if e.button() == Qt.MidButton or e.modifiers() & Qt.ControlModifier:
background = config.val.tabs.background
if e.modifiers() & Qt.ShiftModifier:
background = not background
if background:
target = usertypes.ClickTarget.tab_bg
else:
target = usertypes.ClickTarget.tab
self._tabdata.open_target = target
log.mouse.debug("Ctrl/Middle click, setting target: {}".format(
target))
else:
self._tabdata.open_target = usertypes.ClickTarget.normal
log.mouse.debug("Normal click, setting normal target")
super().mousePressEvent(e)
|
import tensorflow as tf
from examples.wavefunctions import wavefunctions
def ising_hamiltonian(N, dtype):
X = tf.convert_to_tensor([[0.0, 1.0], [1.0, 0.0]], dtype=dtype)
Z = tf.convert_to_tensor([[1.0, 0.0], [0.0, -1.0]], dtype=dtype)
I = tf.eye(2, dtype=dtype)
h = -tf.tensordot(X, X, axes=0) - tf.tensordot(Z, I, axes=0)
h_last = h - tf.tensordot(I, Z, axes=0)
h = tf.transpose(h, (0, 2, 1, 3))
h_last = tf.transpose(h_last, (0, 2, 1, 3))
H = [h] * (N - 2) + [h_last]
return H
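# Illustrative sanity-check sketch (not part of the original example): each
# element of H returned by ising_hamiltonian is a rank-4 (2, 2, 2, 2) tensor
# acting on a pair of neighbouring spins, with one term per bond.
def check_hamiltonian_shapes(N=4, dtype=tf.complex128):
    H = ising_hamiltonian(N, dtype)
    assert len(H) == N - 1  # one two-site term per nearest-neighbour bond
    for term in H:
        assert tuple(term.shape) == (2, 2, 2, 2)  # (out1, out2, in1, in2)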
def random_state(N, d, dtype):
psi = tf.cast(tf.random.uniform([d for n in range(N)]), dtype)
psi = tf.divide(psi, tf.norm(psi))
return psi
def callback(psi, t, i):
print(i,
tf.norm(psi).numpy().real,
wavefunctions.expval(psi, X, 0).numpy().real)
if __name__ == "__main__":
N = 16
dtype = tf.complex128
build_graph = True
dt = 0.1
num_steps = 100
euclidean_evolution = False
print("----------------------------------------------------")
print("Evolving a random state by the Ising Hamiltonian.")
print("----------------------------------------------------")
print("System size:", N)
print("Trotter step size:", dt)
print("Euclidean?:", euclidean_evolution)
X = tf.convert_to_tensor([[0.0, 1.0], [1.0, 0.0]], dtype=dtype)
H = ising_hamiltonian(N, dtype)
psi = random_state(N, 2, dtype)
if build_graph:
f = wavefunctions.evolve_trotter_defun
else:
f = wavefunctions.evolve_trotter
print("----------------------------------------------------")
print("step\tnorm\t<X_0>")
print("----------------------------------------------------")
psi_t, t = f(
psi, H, dt, num_steps, euclidean=euclidean_evolution, callback=callback)
print("Final norm:", tf.norm(psi_t).numpy().real)
print("<psi | psi_t>:", wavefunctions.inner(psi, psi_t).numpy())
|
import logging
import requests
from homeassistant.components.water_heater import (
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, TEMP_CELSIUS
from . import (
DOMAIN as VICARE_DOMAIN,
PYVICARE_ERROR,
VICARE_API,
VICARE_HEATING_TYPE,
VICARE_NAME,
)
_LOGGER = logging.getLogger(__name__)
VICARE_MODE_DHW = "dhw"
VICARE_MODE_DHWANDHEATING = "dhwAndHeating"
VICARE_MODE_FORCEDREDUCED = "forcedReduced"
VICARE_MODE_FORCEDNORMAL = "forcedNormal"
VICARE_MODE_OFF = "standby"
VICARE_TEMP_WATER_MIN = 10
VICARE_TEMP_WATER_MAX = 60
OPERATION_MODE_ON = "on"
OPERATION_MODE_OFF = "off"
SUPPORT_FLAGS_HEATER = SUPPORT_TARGET_TEMPERATURE
VICARE_TO_HA_HVAC_DHW = {
VICARE_MODE_DHW: OPERATION_MODE_ON,
VICARE_MODE_DHWANDHEATING: OPERATION_MODE_ON,
VICARE_MODE_FORCEDREDUCED: OPERATION_MODE_OFF,
VICARE_MODE_FORCEDNORMAL: OPERATION_MODE_ON,
VICARE_MODE_OFF: OPERATION_MODE_OFF,
}
HA_TO_VICARE_HVAC_DHW = {
OPERATION_MODE_OFF: VICARE_MODE_OFF,
OPERATION_MODE_ON: VICARE_MODE_DHW,
}
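# For example, an appliance reporting "dhwAndHeating" is shown as operation
# "on" in Home Assistant, while setting the entity's operation to "off" maps
# back to the ViCare "standby" mode.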
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare water_heater devices."""
if discovery_info is None:
return
vicare_api = hass.data[VICARE_DOMAIN][VICARE_API]
heating_type = hass.data[VICARE_DOMAIN][VICARE_HEATING_TYPE]
add_entities(
[
ViCareWater(
f"{hass.data[VICARE_DOMAIN][VICARE_NAME]} Water",
vicare_api,
heating_type,
)
]
)
class ViCareWater(WaterHeaterEntity):
"""Representation of the ViCare domestic hot water device."""
def __init__(self, name, api, heating_type):
"""Initialize the DHW water_heater device."""
self._name = name
self._state = None
self._api = api
self._attributes = {}
self._target_temperature = None
self._current_temperature = None
self._current_mode = None
self._heating_type = heating_type
def update(self):
"""Let HA know there has been an update from the ViCare API."""
try:
current_temperature = self._api.getDomesticHotWaterStorageTemperature()
if current_temperature != PYVICARE_ERROR:
self._current_temperature = current_temperature
else:
self._current_temperature = None
self._target_temperature = (
self._api.getDomesticHotWaterConfiguredTemperature()
)
self._current_mode = self._api.getActiveMode()
except requests.exceptions.ConnectionError:
_LOGGER.error("Unable to retrieve data from ViCare server")
except ValueError:
_LOGGER.error("Unable to decode data from ViCare server")
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_HEATER
@property
def name(self):
"""Return the name of the water_heater device."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
def set_temperature(self, **kwargs):
"""Set new target temperatures."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is not None:
self._api.setDomesticHotWaterTemperature(temp)
self._target_temperature = temp
@property
def min_temp(self):
"""Return the minimum temperature."""
return VICARE_TEMP_WATER_MIN
@property
def max_temp(self):
"""Return the maximum temperature."""
return VICARE_TEMP_WATER_MAX
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_WHOLE
@property
def current_operation(self):
"""Return current operation ie. heat, cool, idle."""
return VICARE_TO_HA_HVAC_DHW.get(self._current_mode)
@property
def operation_list(self):
"""Return the list of available operation modes."""
return list(HA_TO_VICARE_HVAC_DHW)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import sys
from absl import flags
import object_storage_api_tests
import six
from six.moves import range
FLAGS = flags.FLAGS
def ValidateService(service):
"""Sanity test for the a storage service."""
object_names = ['object_' + str(i) for i in range(10)]
payload = object_storage_api_tests.GenerateWritePayload(100)
handle = six.StringIO(payload)
logging.info('Starting test.')
# Write objects
for name in object_names:
service.WriteObjectFromBuffer(FLAGS.bucket, name, handle, 100)
logging.info('Wrote 10 100B objects to %s.', FLAGS.bucket)
# Read the objects back
for name in object_names:
service.ReadObject(FLAGS.bucket, name)
logging.info('Read objects back.')
# List the objects
names = service.ListObjects(FLAGS.bucket, 'object_')
if sorted(names) != object_names:
logging.error('ListObjects returned %s, but should have returned %s',
names, object_names)
logging.info('Listed object names.')
# Delete the objects
deleted = []
service.DeleteObjects(FLAGS.bucket, object_names, objects_deleted=deleted)
if sorted(deleted) != object_names:
logging.error('DeleteObjects returned %s, but should have returned %s',
deleted, object_names)
logging.info('Deleted objects. Test is complete.')
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
FLAGS(sys.argv)
service = (
object_storage_api_tests._STORAGE_TO_SERVICE_DICT[FLAGS.storage_provider]()) # noqa
ValidateService(service)
|
import logging
import re
from configparser import RawConfigParser, ParsingError
from functools import reduce
import pkg_resources
import yaml
from yandextank.common.util import recursive_dict_update
from yandextank.validator.validator import load_plugin_schema, load_yaml_schema
logger = logging.getLogger(__name__)
CORE_SCHEMA = load_yaml_schema(pkg_resources.resource_filename('yandextank.core', 'config/schema.yaml'))['core']['schema']
DEPRECATED_SECTIONS = ['lunaport', 'aggregator']
def old_plugin_mapper(package):
MAP = {'Overload': 'DataUploader'}
return MAP.get(package, package)
def parse_package_name(package_path):
if package_path.startswith("Tank/Plugins/"):
package = package_path.split('/')[-1].split('.')[0]
else:
package = package_path.split('.')[-1].split()[0]
return old_plugin_mapper(package)
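# Illustrative examples of the mapping above (hypothetical inputs):
#   parse_package_name('Tank/Plugins/Overload.py')   -> 'DataUploader'
#   parse_package_name('yandextank.plugins.Phantom') -> 'Phantom'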
SECTIONS_PATTERNS = {
'tank': 'core|tank',
'Aggregator': 'aggregator',
'Android': 'android',
'Appium': 'appium',
'Autostop': 'autostop',
'BatteryHistorian': 'battery_historian',
'Bfg': 'bfg|ultimate_gun|http_gun|custom_gun|scenario_gun',
'Phantom': 'phantom(-.*)?',
'DataUploader': 'meta|overload|uploader|datauploader',
'Telegraf': 'telegraf|monitoring',
'JMeter': 'jmeter',
'ResourceCheck': 'rcheck',
'ShellExec': 'shell_?exec',
'ShootExec': 'shoot_?exec',
'Console': 'console',
'TipsAndTricks': 'tips',
'RCAssert': 'rcassert',
'JsonReport': 'json_report|jsonreport',
'Pandora': 'pandora',
'InfluxUploader': 'influx',
'OpenTSDBUploader': 'opentsdb',
}
class ConversionError(Exception):
MSG = 'ConversionError:\n{}\nCheck your file format'
def __init__(self, message=''):
self.message = self.MSG.format(message)
class OptionsConflict(ConversionError):
pass
class UnrecognizedSection(ConversionError):
pass
def guess_plugin(section):
for plugin, section_name_pattern in SECTIONS_PATTERNS.items():
if re.match(section_name_pattern, section):
return plugin
else:
raise UnrecognizedSection('Section {} did not match any plugin'.format(section))
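# e.g. guess_plugin('phantom-1') -> 'Phantom', guess_plugin('shellexec') -> 'ShellExec';
# a section name that matches no pattern raises UnrecognizedSection.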
def convert_rps_schedule(key, value):
return {'load_profile': {
'load_type': 'rps',
'schedule': value
}}
def convert_instances_schedule(key, value):
return {'load_profile': {
'load_type': 'instances',
'schedule': value
}}
def convert_stpd_schedule(key, value):
return {'load_profile': {
'load_type': 'stpd_file',
'schedule': value
}}
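# The three converters above wrap an old-style ini schedule value into the new
# nested load_profile layout, e.g. (hypothetical schedule string):
#   convert_rps_schedule('rps_schedule', 'line(1, 100, 10m)')
#   -> {'load_profile': {'load_type': 'rps', 'schedule': 'line(1, 100, 10m)'}}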
def to_bool(value):
try:
return bool(int(value))
except ValueError:
        return value.lower() == 'true'
def is_option_deprecated(plugin, option_name):
DEPRECATED = {
'Aggregator': [
'time_periods',
'precise_cumulative'
],
'DataUploader': [
'copy_config_to'
]
}
if option_name in DEPRECATED.get(plugin, []):
logger.warning('Deprecated option {} in plugin {}, omitting'.format(option_name, plugin))
return True
else:
return False
def check_options(plugin, options):
CONFLICT_OPTS = {
'Phantom': [{'rps_schedule', 'instances_schedule', 'stpd_file'}]
}
for conflict_options in CONFLICT_OPTS.get(plugin, []):
intersect = {option[0] for option in options} & conflict_options
if len(intersect) > 1:
raise OptionsConflict('Conflicting options: {}: {}'.format(plugin, list(intersect)))
return plugin, options
def without_deprecated(plugin, options):
"""
:type options: list of tuple
"""
return filter(lambda option: not is_option_deprecated(plugin, option[0]), options)
def old_section_name_mapper(name):
MAP = {
'monitoring': 'telegraf',
}
return MAP.get(name, name)
def rename(name):
MAP = {
'meta': 'uploader'
}
return MAP.get(name, name)
class Package(object):
def __init__(self, package_path):
if package_path.startswith("Tank/Plugins/"):
self.package = package_path.split('.')[0].replace('Tank/Plugins/', 'yandextank.plugins.')
else:
self.package = package_path
self.plugin_name = old_plugin_mapper(self.package.split('.')[-1])
class UnknownOption(ConversionError):
def __init__(self, option):
self.message = 'Unknown option: {}'.format(option)
def empty_to_none(func):
def new_func(k, v):
        if v == '':
return {k: None}
else:
return func(k, v)
return new_func
class Option(object):
TYPE_CASTERS = {
'boolean': empty_to_none(lambda k, v: {k: to_bool(v)}),
'integer': empty_to_none(lambda k, v: {k: int(v)}),
'list': empty_to_none(lambda k, v: {k: [_.strip() for _ in v.strip().split()]}),
'float': empty_to_none(lambda k, v: {k: float(v)})
}
SPECIAL_CONVERTERS = {
'Phantom': {
'rps_schedule': convert_rps_schedule,
'instances_schedule': convert_instances_schedule,
'stpd_file': convert_stpd_schedule,
'autocases': TYPE_CASTERS['integer'],
'headers': lambda key, value: {key: re.compile(r'\[(.*?)\]').findall(value)}
},
'Bfg': {
'rps_schedule': convert_rps_schedule,
'instances_schedule': convert_instances_schedule,
'headers': lambda key, value: {key: re.compile(r'\[(.*?)\]').findall(value)}
},
'JMeter': {
'exclude_markers': lambda key, value: {key: value.strip().split(' ')}
},
'Pandora': {
'config_content': lambda key, value: {key: yaml.load(value, Loader=yaml.FullLoader)} # works for json as well
},
'Autostop': {
'autostop': lambda k, v: {k: re.findall(r'\w+\(.+?\)', v)}
},
'DataUploader': {
'lock_targets': lambda k, v: {k: v.strip().split() if v != 'auto' else v}
},
'core': {
'ignore_locks': lambda k, v: {'ignore_lock': to_bool(v)}
}
}
CONVERTERS_FOR_UNKNOWN = {
'DataUploader': lambda k, v: {'meta': {k: v}},
'JMeter': lambda k, v: {'variables': {k: v}}
}
def __init__(self, plugin_name, key, value, schema=None):
self.dummy_converter = lambda k, v: {k: v}
self.plugin = plugin_name
self._schema = schema
if '.' in key:
self.name, rest = key.split('.', 1)
self.value = Option(plugin_name, rest, value, schema=self.schema[self.name]).converted
self._converter = self.dummy_converter
else:
self.name = key
self.value = value
self._converter = None
self._converted = None
self._as_tuple = None
@property
def schema(self):
if self._schema is None:
module_paths = {
'tank': 'yandextank.core'
}
def default_path(plugin):
                return 'yandextank.plugins.{}'.format(plugin)
self._schema = load_plugin_schema(module_paths.get(self.plugin, default_path(self.plugin)))
return self._schema
@property
def converted(self):
"""
:rtype: {str: object}
"""
if self._converted is None:
self._converted = self.converter(self.name, self.value)
return self._converted
@property
def as_tuple(self):
"""
:rtype: (str, object)
"""
if self._as_tuple is None:
self._as_tuple = next(iter(self.converted.items()))
return self._as_tuple
@property
def converter(self):
"""
:rtype: callable
"""
if self._converter is None:
try:
self._converter = self.SPECIAL_CONVERTERS[self.plugin][self.name]
except KeyError:
try:
self._converter = self._get_scheme_converter()
except UnknownOption:
self._converter = self.CONVERTERS_FOR_UNKNOWN.get(self.plugin, self.dummy_converter)
return self._converter
def _get_scheme_converter(self):
if self.name == 'enabled':
return self.TYPE_CASTERS['boolean']
if self.schema.get(self.name) is None:
logger.warning('Unknown option {}:{}'.format(self.plugin, self.name))
raise UnknownOption('{}:{}'.format(self.plugin, self.name))
_type = self.schema[self.name].get('type', None)
if _type is None:
logger.warning('Option {}:{}: no type specified in schema'.format(self.plugin, self.name))
return self.dummy_converter
return self.TYPE_CASTERS.get(_type, self.dummy_converter)
class Section(object):
def __init__(self, name, plugin, options, enabled=None):
self.name = old_section_name_mapper(name)
self.new_name = rename(self.name)
self.plugin = plugin
self._schema = None
self.options = [Option(plugin, *option, schema=self.schema) for option in without_deprecated(*check_options(plugin, options))]
self.enabled = enabled
self._merged_options = None
@property
def schema(self):
if self._schema is None:
self._schema = load_plugin_schema('yandextank.plugins.' + self.plugin)
return self._schema
def get_cfg_dict(self, with_meta=True):
options_dict = self.merged_options
if with_meta:
if self.plugin:
options_dict.update({'package': 'yandextank.plugins.{}'.format(self.plugin)})
if self.enabled is not None:
options_dict.update({'enabled': self.enabled})
return options_dict
@property
def merged_options(self):
if self._merged_options is None:
self._merged_options = reduce(lambda acc, upd: recursive_dict_update(acc, upd),
[opt.converted for opt in self.options],
{})
return self._merged_options
@classmethod
def from_multiple(cls, sections, parent_name=None, child_name=None, is_list=True):
"""
:type parent_name: str
:type sections: list of Section
"""
if len(sections) == 1:
return sections[0]
if parent_name:
master_section = next(filter(lambda section: section.name == parent_name, sections))
rest = filter(lambda section: section is not master_section, sections)
else:
master_section = sections[0]
parent_name = master_section.name
rest = sections[1:]
child = {'multi': [section.get_cfg_dict(with_meta=False) for section in rest]} if is_list \
else {child_name: cls._select_one(master_section, rest).get_cfg_dict(with_meta=False)}
master_section.merged_options.update(child)
return master_section
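    # Illustrative sketch: old-style ini sections [phantom], [phantom-1] and
    # [phantom-2] are merged by from_multiple into a single 'phantom' config,
    # with the extra sections collected under the 'multi' key, e.g.
    #   {'address': ..., 'multi': [{...}, {...}]}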
def __repr__(self):
return '{}/{}'.format(self.name, self.plugin)
@classmethod
def _select_one(cls, master_section, rest):
MAP = {
'bfg': lambda section: section.name == '{}_gun'.format(master_section.get_cfg_dict()['gun_type'])
}
return next(filter(MAP.get(master_section.name, lambda x: True), rest))
# return filter(lambda section: section.name == MAP.get(master_section.name, ), rest)[0]
def without_defaults(cfg_ini, section):
"""
:rtype: (str, str)
:type cfg_ini: ConfigParser
"""
defaults = cfg_ini.defaults()
options = cfg_ini.items(section) if cfg_ini.has_section(section) else []
return [(key, value) for key, value in options if key not in defaults]
PLUGIN_PREFIX = 'plugin_'
CORE_SECTION_PATTERN = 'tank|core'
CORE_SECTION_OLD = 'tank'
CORE_SECTION_NEW = 'core'
def parse_sections(cfg_ini):
"""
:type cfg_ini: ConfigParser
"""
return [Section(section.lower(),
guess_plugin(section.lower()),
without_defaults(cfg_ini, section))
for section in cfg_ini.sections()
if not re.match(CORE_SECTION_PATTERN, section.lower()) and section.lower() not in DEPRECATED_SECTIONS]
class PluginInstance(object):
def __init__(self, name, package_and_section):
self.name = name
self.enabled = len(package_and_section) > 0
try:
package_path, self.section_name = package_and_section.split()
self.package = Package(package_path)
except ValueError:
self.package = Package(package_and_section)
self.section_name = self._guess_section_name()
self.plugin_name = self.package.plugin_name
def __repr__(self):
return self.name
def _guess_section_name(self):
package_map = {
'Aggregator': 'aggregator',
'Autostop': 'autostop',
'BatteryHistorian': 'battery_historian',
'Bfg': 'bfg',
'Console': 'console',
'DataUploader': 'meta',
'JMeter': 'jmeter',
'JsonReport': 'json_report',
'Maven': 'maven',
'Monitoring': 'monitoring',
'Pandora': 'pandora',
'Phantom': 'phantom',
'RCAssert': 'rcassert',
'ResourceCheck': 'rcheck',
'ShellExec': 'shellexec',
'ShootExec': 'shootexec',
'SvgReport': 'svgreport',
'Telegraf': 'telegraf',
'TipsAndTricks': 'tips'
}
name_map = {
'aggregate': 'aggregator',
'overload': 'overload',
'jsonreport': 'json_report'
}
return name_map.get(self.name, package_map.get(self.package.plugin_name, self.name))
def enable_sections(sections, core_opts):
"""
:type sections: list of Section
"""
DEPRECATED_PLUGINS = ['yandextank.plugins.Aggregator', 'Tank/Plugins/Aggregator.py']
plugin_instances = [PluginInstance(key.split('_')[1], value) for key, value in core_opts if
key.startswith(PLUGIN_PREFIX) and value not in DEPRECATED_PLUGINS]
enabled_instances = {instance.section_name: instance for instance in plugin_instances if instance.enabled}
disabled_instances = {instance.section_name: instance for instance in plugin_instances if not instance.enabled}
for section in sections:
if enabled_instances.pop(section.name, None) is not None:
section.enabled = True
elif disabled_instances.pop(section.name, None) is not None:
section.enabled = False
# add leftovers
leftovers = set(enabled_instances.keys()) | set(disabled_instances.keys())
for plugin_instance in filter(
lambda lo: lo.section_name in leftovers,
plugin_instances,
):
sections.append(Section(plugin_instance.section_name, plugin_instance.plugin_name, [], plugin_instance.enabled))
return sections
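# Illustrative sketch: an old-style core option such as
#   plugin_autostop = yandextank.plugins.Autostop
# enables the 'autostop' section, while an empty value (plugin_autostop =)
# marks the corresponding section as disabled.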
def combine_sections(sections):
"""
:type sections: list of Section
:rtype: list of Section
"""
PLUGINS_TO_COMBINE = {
'Phantom': ('phantom', 'multi', True),
'Bfg': ('bfg', 'gun_config', False)
}
plugins = {}
ready_sections = []
for section in sections:
if section.plugin in PLUGINS_TO_COMBINE:
try:
plugins[section.plugin].append(section)
except KeyError:
plugins[section.plugin] = [section]
else:
ready_sections.append(section)
for plugin_name, _sections in plugins.items():
if isinstance(_sections, list):
parent_name, child_name, is_list = PLUGINS_TO_COMBINE[plugin_name]
ready_sections.append(Section.from_multiple(_sections, parent_name, child_name, is_list))
return ready_sections
def core_options(cfg_ini):
return cfg_ini.items(CORE_SECTION_OLD) if cfg_ini.has_section(CORE_SECTION_OLD) else []
def convert_ini(ini_file):
cfg_ini = RawConfigParser(strict=False)
try:
if isinstance(ini_file, str):
cfg_ini.read(ini_file)
else:
cfg_ini.read_file(ini_file)
except ParsingError as e:
raise ConversionError(e.message)
ready_sections = enable_sections(combine_sections(parse_sections(cfg_ini)), core_options(cfg_ini))
plugins_cfg_dict = {section.new_name: section.get_cfg_dict() for section in ready_sections}
plugins_cfg_dict.update({
'core': dict([Option('core', key, value, CORE_SCHEMA).as_tuple
for key, value in without_defaults(cfg_ini, CORE_SECTION_OLD)
if not key.startswith(PLUGIN_PREFIX)])
})
logger.info('Converted config:\n{}'.format(yaml.dump(plugins_cfg_dict)))
return plugins_cfg_dict
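# Minimal usage sketch (hypothetical file name):
#   with open('load.ini') as ini:
#       cfg = convert_ini(ini)  # dict ready to be dumped as new-style YAML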
def convert_single_option(key, value):
"""
:type value: str
:type key: str
:rtype: {str: obj}
"""
section_name, option_name = key.strip().split('.', 1)
if not re.match(CORE_SECTION_PATTERN, section_name):
section = Section(section_name,
guess_plugin(section_name),
[(option_name, value)])
return {section.new_name: section.get_cfg_dict()}
else:
if option_name.startswith(PLUGIN_PREFIX):
return {section.new_name: section.get_cfg_dict() for section in enable_sections([], [(option_name, value)])}
else:
return {'core': Option('core', option_name, value, CORE_SCHEMA).converted}
|
import os
import pathlib
import numpy as np
from .. import coding, conventions
from ..core import indexing
from ..core.pycompat import integer_types
from ..core.utils import FrozenDict, HiddenKeyDict, close_on_error
from ..core.variable import Variable
from .common import (
AbstractWritableDataStore,
BackendArray,
BackendEntrypoint,
_encode_variable_name,
)
from .store import open_backend_dataset_store
# need some special secret attributes to tell us the dimensions
DIMENSION_KEY = "_ARRAY_DIMENSIONS"
def encode_zarr_attr_value(value):
"""
    Encode an attribute value as something that can be serialized as json
Many xarray datasets / variables have numpy arrays and values. This
function handles encoding / decoding of such items.
ndarray -> list
scalar array -> scalar
other -> other (no change)
"""
if isinstance(value, np.ndarray):
encoded = value.tolist()
# this checks if it's a scalar number
elif isinstance(value, np.generic):
encoded = value.item()
else:
encoded = value
return encoded
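# Illustrative examples of the rules above:
#   encode_zarr_attr_value(np.array([1, 2, 3])) -> [1, 2, 3]
#   encode_zarr_attr_value(np.float64(0.5))     -> 0.5
#   encode_zarr_attr_value("metres")            -> "metres"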
class ZarrArrayWrapper(BackendArray):
__slots__ = ("datastore", "dtype", "shape", "variable_name")
def __init__(self, variable_name, datastore):
self.datastore = datastore
self.variable_name = variable_name
array = self.get_array()
self.shape = array.shape
dtype = array.dtype
self.dtype = dtype
def get_array(self):
return self.datastore.ds[self.variable_name]
def __getitem__(self, key):
array = self.get_array()
if isinstance(key, indexing.BasicIndexer):
return array[key.tuple]
elif isinstance(key, indexing.VectorizedIndexer):
return array.vindex[
indexing._arrayize_vectorized_indexer(key, self.shape).tuple
]
else:
assert isinstance(key, indexing.OuterIndexer)
return array.oindex[key.tuple]
# if self.ndim == 0:
# could possibly have a work-around for 0d data here
def _determine_zarr_chunks(enc_chunks, var_chunks, ndim, name):
"""
    Given encoding chunks (possibly None) and variable chunks (possibly None),
    determine the chunks that the corresponding zarr array should use.
"""
# zarr chunk spec:
# chunks : int or tuple of ints, optional
# Chunk shape. If not provided, will be guessed from shape and dtype.
# if there are no chunks in encoding and the variable data is a numpy
# array, then we let zarr use its own heuristics to pick the chunks
if var_chunks is None and enc_chunks is None:
return None
# if there are no chunks in encoding but there are dask chunks, we try to
# use the same chunks in zarr
# However, zarr chunks needs to be uniform for each array
# http://zarr.readthedocs.io/en/latest/spec/v1.html#chunks
# while dask chunks can be variable sized
# http://dask.pydata.org/en/latest/array-design.html#chunks
if var_chunks and enc_chunks is None:
if any(len(set(chunks[:-1])) > 1 for chunks in var_chunks):
raise ValueError(
"Zarr requires uniform chunk sizes except for final chunk. "
f"Variable named {name!r} has incompatible dask chunks: {var_chunks!r}. "
"Consider rechunking using `chunk()`."
)
if any((chunks[0] < chunks[-1]) for chunks in var_chunks):
raise ValueError(
"Final chunk of Zarr array must be the same size or smaller "
f"than the first. Variable named {name!r} has incompatible Dask chunks {var_chunks!r}."
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
# return the first chunk for each dimension
return tuple(chunk[0] for chunk in var_chunks)
# from here on, we are dealing with user-specified chunks in encoding
# zarr allows chunks to be an integer, in which case it uses the same chunk
# size on each dimension.
# Here we re-implement this expansion ourselves. That makes the logic of
# checking chunk compatibility easier
if isinstance(enc_chunks, integer_types):
enc_chunks_tuple = ndim * (enc_chunks,)
else:
enc_chunks_tuple = tuple(enc_chunks)
if len(enc_chunks_tuple) != ndim:
# throw away encoding chunks, start over
return _determine_zarr_chunks(None, var_chunks, ndim, name)
for x in enc_chunks_tuple:
if not isinstance(x, int):
raise TypeError(
"zarr chunk sizes specified in `encoding['chunks']` "
"must be an int or a tuple of ints. "
f"Instead found encoding['chunks']={enc_chunks_tuple!r} "
f"for variable named {name!r}."
)
# if there are chunks in encoding and the variable data is a numpy array,
# we use the specified chunks
if var_chunks is None:
return enc_chunks_tuple
# the hard case
# DESIGN CHOICE: do not allow multiple dask chunks on a single zarr chunk
# this avoids the need to get involved in zarr synchronization / locking
# From zarr docs:
# "If each worker in a parallel computation is writing to a separate
# region of the array, and if region boundaries are perfectly aligned
# with chunk boundaries, then no synchronization is required."
# TODO: incorporate synchronizer to allow writes from multiple dask
# threads
if var_chunks and enc_chunks_tuple:
for zchunk, dchunks in zip(enc_chunks_tuple, var_chunks):
if len(dchunks) == 1:
continue
for dchunk in dchunks[:-1]:
if dchunk % zchunk:
raise NotImplementedError(
f"Specified zarr chunks encoding['chunks']={enc_chunks_tuple!r} for "
f"variable named {name!r} would overlap multiple dask chunks {var_chunks!r}. "
"This is not implemented in xarray yet. "
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
if dchunks[-1] > zchunk:
raise ValueError(
"Final chunk of Zarr array must be the same size or "
"smaller than the first. "
f"Specified Zarr chunk encoding['chunks']={enc_chunks_tuple}, "
f"for variable named {name!r} "
f"but {dchunks} in the variable's Dask chunks {var_chunks} is "
"incompatible with this encoding. "
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
return enc_chunks_tuple
raise AssertionError("We should never get here. Function logic must be wrong.")
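# Illustrative sketch of the rules above (hypothetical 1-D inputs):
#   _determine_zarr_chunks(None, ((10, 10, 5),), 1, "x")  -> (10,)  # reuse dask chunks
#   _determine_zarr_chunks(5, ((10, 10, 5),), 1, "x")     -> (5,)   # 10 % 5 == 0, allowed
#   _determine_zarr_chunks(3, ((10, 10, 5),), 1, "x")     # raises NotImplementedError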
def _get_zarr_dims_and_attrs(zarr_obj, dimension_key):
    # Zarr arrays do not have dimensions. To get around this problem, we add
# an attribute that specifies the dimension. We have to hide this attribute
# when we send the attributes to the user.
# zarr_obj can be either a zarr group or zarr array
try:
dimensions = zarr_obj.attrs[dimension_key]
except KeyError:
raise KeyError(
"Zarr object is missing the attribute `%s`, which is "
"required for xarray to determine variable dimensions." % (dimension_key)
)
attributes = HiddenKeyDict(zarr_obj.attrs, [dimension_key])
return dimensions, attributes
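# For example, a zarr array written by xarray typically carries an attribute
#   {"_ARRAY_DIMENSIONS": ["time", "lat", "lon"]}
# which this helper extracts and then hides from the user-visible attributes.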
def extract_zarr_variable_encoding(variable, raise_on_invalid=False, name=None):
"""
Extract zarr encoding dictionary from xarray Variable
Parameters
----------
variable : Variable
raise_on_invalid : bool, optional
Returns
-------
encoding : dict
Zarr encoding for `variable`
"""
encoding = variable.encoding.copy()
valid_encodings = {"chunks", "compressor", "filters", "cache_metadata"}
if raise_on_invalid:
invalid = [k for k in encoding if k not in valid_encodings]
if invalid:
raise ValueError(
"unexpected encoding parameters for zarr backend: %r" % invalid
)
else:
for k in list(encoding):
if k not in valid_encodings:
del encoding[k]
chunks = _determine_zarr_chunks(
encoding.get("chunks"), variable.chunks, variable.ndim, name
)
encoding["chunks"] = chunks
return encoding
# Function below is copied from conventions.encode_cf_variable.
# The only change is to raise an error for object dtypes.
def encode_zarr_variable(var, needs_copy=True, name=None):
"""
    Converts a Variable into a Variable which follows some
of the CF conventions:
- Nans are masked using _FillValue (or the deprecated missing_value)
- Rescaling via: scale_factor and add_offset
- datetimes are converted to the CF 'units since time' format
- dtype encodings are enforced.
Parameters
----------
var : Variable
A variable holding un-encoded data.
Returns
-------
out : Variable
A variable which has been encoded as described above.
"""
var = conventions.encode_cf_variable(var, name=name)
# zarr allows unicode, but not variable-length strings, so it's both
# simpler and more compact to always encode as UTF-8 explicitly.
# TODO: allow toggling this explicitly via dtype in encoding.
coder = coding.strings.EncodedStringCoder(allows_unicode=True)
var = coder.encode(var, name=name)
var = coding.strings.ensure_fixed_length_bytes(var)
return var
class ZarrStore(AbstractWritableDataStore):
"""Store for reading and writing data via zarr"""
__slots__ = (
"ds",
"_append_dim",
"_consolidate_on_close",
"_group",
"_read_only",
"_synchronizer",
"_write_region",
)
@classmethod
def open_group(
cls,
store,
mode="r",
synchronizer=None,
group=None,
consolidated=False,
consolidate_on_close=False,
chunk_store=None,
append_dim=None,
write_region=None,
):
import zarr
# zarr doesn't support pathlib.Path objects yet. zarr-python#601
if isinstance(store, pathlib.Path):
store = os.fspath(store)
open_kwargs = dict(mode=mode, synchronizer=synchronizer, path=group)
if chunk_store:
open_kwargs["chunk_store"] = chunk_store
if consolidated:
# TODO: an option to pass the metadata_key keyword
zarr_group = zarr.open_consolidated(store, **open_kwargs)
else:
zarr_group = zarr.open_group(store, **open_kwargs)
return cls(zarr_group, consolidate_on_close, append_dim, write_region)
def __init__(
self, zarr_group, consolidate_on_close=False, append_dim=None, write_region=None
):
self.ds = zarr_group
self._read_only = self.ds.read_only
self._synchronizer = self.ds.synchronizer
self._group = self.ds.path
self._consolidate_on_close = consolidate_on_close
self._append_dim = append_dim
self._write_region = write_region
def open_store_variable(self, name, zarr_array):
data = indexing.LazilyOuterIndexedArray(ZarrArrayWrapper(name, self))
dimensions, attributes = _get_zarr_dims_and_attrs(zarr_array, DIMENSION_KEY)
attributes = dict(attributes)
encoding = {
"chunks": zarr_array.chunks,
"preferred_chunks": dict(zip(dimensions, zarr_array.chunks)),
"compressor": zarr_array.compressor,
"filters": zarr_array.filters,
}
# _FillValue needs to be in attributes, not encoding, so it will get
# picked up by decode_cf
if getattr(zarr_array, "fill_value") is not None:
attributes["_FillValue"] = zarr_array.fill_value
return Variable(dimensions, data, attributes, encoding)
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(k, v)) for k, v in self.ds.arrays()
)
def get_attrs(self):
attributes = dict(self.ds.attrs.asdict())
return attributes
def get_dimensions(self):
dimensions = {}
for k, v in self.ds.arrays():
try:
for d, s in zip(v.attrs[DIMENSION_KEY], v.shape):
if d in dimensions and dimensions[d] != s:
raise ValueError(
"found conflicting lengths for dimension %s "
"(%d != %d)" % (d, s, dimensions[d])
)
dimensions[d] = s
except KeyError:
raise KeyError(
"Zarr object is missing the attribute `%s`, "
"which is required for xarray to determine "
"variable dimensions." % (DIMENSION_KEY)
)
return dimensions
def set_dimensions(self, variables, unlimited_dims=None):
if unlimited_dims is not None:
raise NotImplementedError(
"Zarr backend doesn't know how to handle unlimited dimensions"
)
def set_attributes(self, attributes):
self.ds.attrs.put(attributes)
def encode_variable(self, variable):
variable = encode_zarr_variable(variable)
return variable
def encode_attribute(self, a):
return encode_zarr_attr_value(a)
def store(
self,
variables,
attributes,
check_encoding_set=frozenset(),
writer=None,
unlimited_dims=None,
):
"""
Top level method for putting data on this store, this method:
- encodes variables/attributes
- sets dimensions
- sets variables
Parameters
----------
variables : dict-like
Dictionary of key/value (variable name / xr.Variable) pairs
attributes : dict-like
Dictionary of key/value (attribute name / attribute) pairs
check_encoding_set : list-like
List of variables that should be checked for invalid encoding
values
writer : ArrayWriter
unlimited_dims : list-like
List of dimension names that should be treated as unlimited
dimensions.
"""
import zarr
existing_variables = {
vn for vn in variables if _encode_variable_name(vn) in self.ds
}
new_variables = set(variables) - existing_variables
variables_without_encoding = {vn: variables[vn] for vn in new_variables}
variables_encoded, attributes = self.encode(
variables_without_encoding, attributes
)
if len(existing_variables) > 0:
# there are variables to append
# their encoding must be the same as in the store
ds = open_zarr(self.ds.store, group=self.ds.path, chunks=None)
variables_with_encoding = {}
for vn in existing_variables:
variables_with_encoding[vn] = variables[vn].copy(deep=False)
variables_with_encoding[vn].encoding = ds[vn].encoding
variables_with_encoding, _ = self.encode(variables_with_encoding, {})
variables_encoded.update(variables_with_encoding)
if self._write_region is None:
self.set_attributes(attributes)
self.set_dimensions(variables_encoded, unlimited_dims=unlimited_dims)
self.set_variables(
variables_encoded, check_encoding_set, writer, unlimited_dims=unlimited_dims
)
if self._consolidate_on_close:
zarr.consolidate_metadata(self.ds.store)
def sync(self):
pass
def set_variables(self, variables, check_encoding_set, writer, unlimited_dims=None):
"""
This provides a centralized method to set the variables on the data
store.
Parameters
----------
variables : dict-like
Dictionary of key/value (variable name / xr.Variable) pairs
check_encoding_set : list-like
List of variables that should be checked for invalid encoding
values
writer :
unlimited_dims : list-like
List of dimension names that should be treated as unlimited
dimensions.
"""
for vn, v in variables.items():
name = _encode_variable_name(vn)
check = vn in check_encoding_set
attrs = v.attrs.copy()
dims = v.dims
dtype = v.dtype
shape = v.shape
fill_value = attrs.pop("_FillValue", None)
if v.encoding == {"_FillValue": None} and fill_value is None:
v.encoding = {}
if name in self.ds:
# existing variable
zarr_array = self.ds[name]
else:
# new variable
encoding = extract_zarr_variable_encoding(
v, raise_on_invalid=check, name=vn
)
encoded_attrs = {}
# the magic for storing the hidden dimension data
encoded_attrs[DIMENSION_KEY] = dims
for k2, v2 in attrs.items():
encoded_attrs[k2] = self.encode_attribute(v2)
if coding.strings.check_vlen_dtype(dtype) == str:
dtype = str
zarr_array = self.ds.create(
name, shape=shape, dtype=dtype, fill_value=fill_value, **encoding
)
zarr_array.attrs.put(encoded_attrs)
write_region = self._write_region if self._write_region is not None else {}
write_region = {dim: write_region.get(dim, slice(None)) for dim in dims}
if self._append_dim is not None and self._append_dim in dims:
# resize existing variable
append_axis = dims.index(self._append_dim)
assert write_region[self._append_dim] == slice(None)
write_region[self._append_dim] = slice(
zarr_array.shape[append_axis], None
)
new_shape = list(zarr_array.shape)
new_shape[append_axis] += v.shape[append_axis]
zarr_array.resize(new_shape)
region = tuple(write_region[dim] for dim in dims)
writer.add(v.data, zarr_array, region)
def close(self):
pass
def open_zarr(
store,
group=None,
synchronizer=None,
chunks="auto",
decode_cf=True,
mask_and_scale=True,
decode_times=True,
concat_characters=True,
decode_coords=True,
drop_variables=None,
consolidated=False,
overwrite_encoded_chunks=False,
chunk_store=None,
decode_timedelta=None,
use_cftime=None,
**kwargs,
):
"""Load and decode a dataset from a Zarr store.
.. note:: Experimental
The Zarr backend is new and experimental. Please report any
unexpected behavior via github issues.
The `store` object should be a valid store for a Zarr group. `store`
variables must contain dimension metadata encoded in the
`_ARRAY_DIMENSIONS` attribute.
Parameters
----------
store : MutableMapping or str
A MutableMapping where a Zarr Group has been stored or a path to a
directory in file system where a Zarr DirectoryStore has been stored.
synchronizer : object, optional
Array synchronizer provided to zarr
group : str, optional
Group path. (a.k.a. `path` in zarr terminology.)
chunks : int or dict or tuple or {None, 'auto'}, optional
Chunk sizes along each dimension, e.g., ``5`` or
``{'x': 5, 'y': 5}``. If `chunks='auto'`, dask chunks are created
based on the variable's zarr chunks. If `chunks=None`, zarr array
data will lazily convert to numpy arrays upon access. This accepts
all the chunk specifications as Dask does.
    overwrite_encoded_chunks : bool, optional
Whether to drop the zarr chunks encoded for each variable when a
dataset is loaded with specified chunk sizes (default: False)
decode_cf : bool, optional
Whether to decode these variables, assuming they were saved according
to CF conventions.
mask_and_scale : bool, optional
If True, replace array values equal to `_FillValue` with NA and scale
values according to the formula `original_values * scale_factor +
add_offset`, where `_FillValue`, `scale_factor` and `add_offset` are
taken from variable attributes (if they exist). If the `_FillValue` or
`missing_value` attribute contains multiple values a warning will be
issued and all array values matching one of the multiple values will
be replaced by NA.
decode_times : bool, optional
If True, decode times encoded in the standard NetCDF datetime format
into datetime objects. Otherwise, leave them encoded as numbers.
concat_characters : bool, optional
If True, concatenate along the last dimension of character arrays to
form string arrays. Dimensions will only be concatenated over (and
removed) if they have no corresponding variable and if they are only
used as the last dimension of character arrays.
decode_coords : bool, optional
If True, decode the 'coordinates' attribute to identify coordinates in
the resulting dataset.
drop_variables : str or iterable, optional
A variable or list of variables to exclude from being parsed from the
dataset. This may be useful to drop variables with problems or
inconsistent values.
consolidated : bool, optional
Whether to open the store using zarr's consolidated metadata
capability. Only works for stores that have already been consolidated.
chunk_store : MutableMapping, optional
A separate Zarr store only for chunk data.
decode_timedelta : bool, optional
If True, decode variables and coordinates with time units in
{'days', 'hours', 'minutes', 'seconds', 'milliseconds', 'microseconds'}
into timedelta objects. If False, leave them encoded as numbers.
If None (default), assume the same value of decode_time.
    use_cftime : bool, optional
Only relevant if encoded dates come from a standard calendar
(e.g. "gregorian", "proleptic_gregorian", "standard", or not
specified). If None (default), attempt to decode times to
``np.datetime64[ns]`` objects; if this is not possible, decode times to
``cftime.datetime`` objects. If True, always decode times to
``cftime.datetime`` objects, regardless of whether or not they can be
represented using ``np.datetime64[ns]`` objects. If False, always
decode times to ``np.datetime64[ns]`` objects; if this is not possible
raise an error.
Returns
-------
dataset : Dataset
The newly created dataset.
See Also
--------
open_dataset
References
----------
http://zarr.readthedocs.io/
"""
from .api import open_dataset
if chunks == "auto":
try:
import dask.array # noqa
chunks = {}
except ImportError:
chunks = None
if kwargs:
raise TypeError(
"open_zarr() got unexpected keyword arguments " + ",".join(kwargs.keys())
)
backend_kwargs = {
"synchronizer": synchronizer,
"consolidated": consolidated,
"overwrite_encoded_chunks": overwrite_encoded_chunks,
"chunk_store": chunk_store,
}
ds = open_dataset(
filename_or_obj=store,
group=group,
decode_cf=decode_cf,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
engine="zarr",
chunks=chunks,
drop_variables=drop_variables,
backend_kwargs=backend_kwargs,
decode_timedelta=decode_timedelta,
use_cftime=use_cftime,
)
return ds
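# Minimal usage sketch (hypothetical store path):
#   ds = open_zarr("example_store.zarr", consolidated=False, chunks="auto")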
def open_backend_dataset_zarr(
filename_or_obj,
mask_and_scale=True,
decode_times=None,
concat_characters=None,
decode_coords=None,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
group=None,
mode="r",
synchronizer=None,
consolidated=False,
consolidate_on_close=False,
chunk_store=None,
):
store = ZarrStore.open_group(
filename_or_obj,
group=group,
mode=mode,
synchronizer=synchronizer,
consolidated=consolidated,
consolidate_on_close=consolidate_on_close,
chunk_store=chunk_store,
)
with close_on_error(store):
ds = open_backend_dataset_store(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
return ds
zarr_backend = BackendEntrypoint(open_dataset=open_backend_dataset_zarr)
|
from homeassistant.components import zwave
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
)
from homeassistant.components.zwave import const, light
from tests.async_mock import MagicMock, patch
from tests.mock.zwave import MockEntityValues, MockNode, MockValue, value_changed
class MockLightValues(MockEntityValues):
"""Mock Z-Wave light values."""
def __init__(self, **kwargs):
"""Initialize the mock zwave values."""
self.dimming_duration = None
self.color = None
self.color_channels = None
super().__init__(**kwargs)
def test_get_device_detects_dimmer(mock_openzwave):
"""Test get_device returns a normal dimmer."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveDimmer)
assert device.supported_features == SUPPORT_BRIGHTNESS
def test_get_device_detects_colorlight(mock_openzwave):
"""Test get_device returns a color light."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveColorLight)
assert device.supported_features == SUPPORT_BRIGHTNESS | SUPPORT_COLOR
def test_get_device_detects_zw098(mock_openzwave):
"""Test get_device returns a zw098 color light."""
node = MockNode(
manufacturer_id="0086",
product_id="0062",
command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
)
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert isinstance(device, light.ZwaveColorLight)
assert device.supported_features == (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_COLOR_TEMP
)
def test_get_device_detects_rgbw_light(mock_openzwave):
"""Test get_device returns a color light."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
color_channels = MockValue(data=0x1D, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
device.value_added()
assert isinstance(device, light.ZwaveColorLight)
assert device.supported_features == (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_WHITE_VALUE
)
def test_dimmer_turn_on(mock_openzwave):
"""Test turning on a dimmable Z-Wave light."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
device.turn_on()
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 255
node.reset_mock()
device.turn_on(**{ATTR_BRIGHTNESS: 224})
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 87 # round(224 / 255 * 99)
node.reset_mock()
device.turn_on(**{ATTR_BRIGHTNESS: 120})
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 47 # round(120 / 255 * 99)
with patch.object(light, "_LOGGER", MagicMock()) as mock_logger:
device.turn_on(**{ATTR_TRANSITION: 35})
assert mock_logger.debug.called
assert node.set_dimmer.called
msg, entity_id = mock_logger.debug.mock_calls[0][1]
assert entity_id == device.entity_id
def test_dimmer_min_brightness(mock_openzwave):
"""Test turning on a dimmable Z-Wave light to its minimum brightness."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert not device.is_on
device.turn_on(**{ATTR_BRIGHTNESS: 1})
assert device.is_on
assert device.brightness == 1
device.turn_on(**{ATTR_BRIGHTNESS: 0})
assert device.is_on
assert device.brightness == 0
def test_dimmer_transitions(mock_openzwave):
"""Test dimming transition on a dimmable Z-Wave light."""
node = MockNode()
value = MockValue(data=0, node=node)
duration = MockValue(data=0, node=node)
values = MockLightValues(primary=value, dimming_duration=duration)
device = light.get_device(node=node, values=values, node_config={})
assert device.supported_features == SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
# Test turn_on
# Factory Default
device.turn_on()
assert duration.data == 0xFF
# Seconds transition
device.turn_on(**{ATTR_TRANSITION: 45})
assert duration.data == 45
# Minutes transition
device.turn_on(**{ATTR_TRANSITION: 245})
assert duration.data == 0x83
# Clipped transition
device.turn_on(**{ATTR_TRANSITION: 10000})
assert duration.data == 0xFE
# Test turn_off
# Factory Default
device.turn_off()
assert duration.data == 0xFF
# Seconds transition
device.turn_off(**{ATTR_TRANSITION: 45})
assert duration.data == 45
# Minutes transition
device.turn_off(**{ATTR_TRANSITION: 245})
assert duration.data == 0x83
# Clipped transition
device.turn_off(**{ATTR_TRANSITION: 10000})
assert duration.data == 0xFE
def test_dimmer_turn_off(mock_openzwave):
"""Test turning off a dimmable Z-Wave light."""
node = MockNode()
value = MockValue(data=46, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
device.turn_off()
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 0
def test_dimmer_value_changed(mock_openzwave):
"""Test value changed for dimmer lights."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert not device.is_on
value.data = 46
value_changed(value)
assert device.is_on
assert device.brightness == 118
def test_dimmer_refresh_value(mock_openzwave):
"""Test value changed for dimmer lights."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(
node=node,
values=values,
node_config={zwave.CONF_REFRESH_VALUE: True, zwave.CONF_REFRESH_DELAY: 5},
)
assert not device.is_on
with patch.object(light, "Timer") as mock_timer:
value.data = 46
value_changed(value)
assert not device.is_on
assert mock_timer.called
assert len(mock_timer.mock_calls) == 2
timeout, callback = mock_timer.mock_calls[0][1][:2]
assert timeout == 5
assert mock_timer().start.called
assert len(mock_timer().start.mock_calls) == 1
with patch.object(light, "Timer") as mock_timer_2:
value_changed(value)
assert not device.is_on
assert mock_timer().cancel.called
assert len(mock_timer_2.mock_calls) == 2
timeout, callback = mock_timer_2.mock_calls[0][1][:2]
assert timeout == 5
assert mock_timer_2().start.called
assert len(mock_timer_2().start.mock_calls) == 1
callback()
assert device.is_on
assert device.brightness == 118
def test_set_hs_color(mock_openzwave):
"""Test setting zwave light color."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB only
color_channels = MockValue(data=0x1C, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert color.data == "#0000000000"
device.turn_on(**{ATTR_HS_COLOR: (30, 50)})
assert color.data == "#ffbf7f0000"
def test_set_white_value(mock_openzwave):
"""Test setting zwave light color."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGBW
color_channels = MockValue(data=0x1D, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert color.data == "#0000000000"
device.turn_on(**{ATTR_WHITE_VALUE: 200})
assert color.data == "#ffffffc800"
def test_disable_white_if_set_color(mock_openzwave):
"""
Test that _white is set to 0 if turn_on with ATTR_HS_COLOR.
See Issue #13930 - many RGBW ZWave bulbs will only activate the RGB LED to
produce color if _white is set to zero.
"""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB only
color_channels = MockValue(data=0x1C, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
device._white = 234
assert color.data == "#0000000000"
assert device.white_value == 234
device.turn_on(**{ATTR_HS_COLOR: (30, 50)})
assert device.white_value == 0
assert color.data == "#ffbf7f0000"
def test_zw098_set_color_temp(mock_openzwave):
"""Test setting zwave light color."""
node = MockNode(
manufacturer_id="0086",
product_id="0062",
command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
)
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB, warm white, cold white
color_channels = MockValue(data=0x1F, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert color.data == "#0000000000"
device.turn_on(**{ATTR_COLOR_TEMP: 200})
assert color.data == "#00000000ff"
device.turn_on(**{ATTR_COLOR_TEMP: 400})
assert color.data == "#000000ff00"
def test_rgb_not_supported(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports color temperature only
color_channels = MockValue(data=0x01, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color is None
def test_no_color_value(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
values = MockLightValues(primary=value)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color is None
def test_no_color_channels_value(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
values = MockLightValues(primary=value, color=color)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color is None
def test_rgb_value_changed(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB only
color_channels = MockValue(data=0x1C, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color == (0, 0)
color.data = "#ffbf800000"
value_changed(color)
assert device.hs_color == (29.764, 49.804)
def test_rgbww_value_changed(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB, Warm White
color_channels = MockValue(data=0x1D, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color == (0, 0)
assert device.white_value == 0
color.data = "#c86400c800"
value_changed(color)
assert device.hs_color == (30, 100)
assert device.white_value == 200
def test_rgbcw_value_changed(mock_openzwave):
"""Test value changed for rgb lights."""
node = MockNode(command_classes=[const.COMMAND_CLASS_SWITCH_COLOR])
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB, Cold White
color_channels = MockValue(data=0x1E, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert device.hs_color == (0, 0)
assert device.white_value == 0
color.data = "#c86400c800"
value_changed(color)
assert device.hs_color == (30, 100)
assert device.white_value == 200
def test_ct_value_changed(mock_openzwave):
"""Test value changed for zw098 lights."""
node = MockNode(
manufacturer_id="0086",
product_id="0062",
command_classes=[const.COMMAND_CLASS_SWITCH_COLOR],
)
value = MockValue(data=0, node=node)
color = MockValue(data="#0000000000", node=node)
# Supports RGB, Cold White
color_channels = MockValue(data=0x1F, node=node)
values = MockLightValues(primary=value, color=color, color_channels=color_channels)
device = light.get_device(node=node, values=values, node_config={})
assert device.color_temp == light.TEMP_MID_HASS
color.data = "#000000ff00"
value_changed(color)
assert device.color_temp == light.TEMP_WARM_HASS
color.data = "#00000000ff"
value_changed(color)
assert device.color_temp == light.TEMP_COLD_HASS
|
import logging
import random
from lightify import Lightify
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_HS_COLOR,
ATTR_TRANSITION,
EFFECT_RANDOM,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
CONF_ALLOW_LIGHTIFY_NODES = "allow_lightify_nodes"
CONF_ALLOW_LIGHTIFY_GROUPS = "allow_lightify_groups"
CONF_ALLOW_LIGHTIFY_SENSORS = "allow_lightify_sensors"
CONF_ALLOW_LIGHTIFY_SWITCHES = "allow_lightify_switches"
CONF_INTERVAL_LIGHTIFY_STATUS = "interval_lightify_status"
CONF_INTERVAL_LIGHTIFY_CONF = "interval_lightify_conf"
DEFAULT_ALLOW_LIGHTIFY_NODES = True
DEFAULT_ALLOW_LIGHTIFY_GROUPS = True
DEFAULT_ALLOW_LIGHTIFY_SENSORS = True
DEFAULT_ALLOW_LIGHTIFY_SWITCHES = True
DEFAULT_INTERVAL_LIGHTIFY_STATUS = 5
DEFAULT_INTERVAL_LIGHTIFY_CONF = 3600
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(
CONF_ALLOW_LIGHTIFY_NODES, default=DEFAULT_ALLOW_LIGHTIFY_NODES
): cv.boolean,
vol.Optional(
CONF_ALLOW_LIGHTIFY_GROUPS, default=DEFAULT_ALLOW_LIGHTIFY_GROUPS
): cv.boolean,
vol.Optional(
CONF_ALLOW_LIGHTIFY_SENSORS, default=DEFAULT_ALLOW_LIGHTIFY_SENSORS
): cv.boolean,
vol.Optional(
CONF_ALLOW_LIGHTIFY_SWITCHES, default=DEFAULT_ALLOW_LIGHTIFY_SWITCHES
): cv.boolean,
vol.Optional(
CONF_INTERVAL_LIGHTIFY_STATUS, default=DEFAULT_INTERVAL_LIGHTIFY_STATUS
): cv.positive_int,
vol.Optional(
CONF_INTERVAL_LIGHTIFY_CONF, default=DEFAULT_INTERVAL_LIGHTIFY_CONF
): cv.positive_int,
}
)
DEFAULT_BRIGHTNESS = 2
DEFAULT_KELVIN = 2700
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Osram Lightify lights."""
host = config[CONF_HOST]
try:
bridge = Lightify(host, log_level=logging.NOTSET)
except OSError as err:
_LOGGER.exception("Error connecting to bridge: %s due to: %s", host, err)
return
setup_bridge(bridge, add_entities, config)
def setup_bridge(bridge, add_entities, config):
"""Set up the Lightify bridge."""
lights = {}
groups = {}
groups_last_updated = [0]
def update_lights():
"""Update the lights objects with the latest info from the bridge."""
try:
new_lights = bridge.update_all_light_status(
config[CONF_INTERVAL_LIGHTIFY_STATUS]
)
lights_changed = bridge.lights_changed()
except TimeoutError:
_LOGGER.error("Timeout during updating of lights")
return 0
except OSError:
_LOGGER.error("OSError during updating of lights")
return 0
if new_lights and config[CONF_ALLOW_LIGHTIFY_NODES]:
new_entities = []
for addr, light in new_lights.items():
if (
light.devicetype().name == "SENSOR"
and not config[CONF_ALLOW_LIGHTIFY_SENSORS]
) or (
light.devicetype().name == "SWITCH"
and not config[CONF_ALLOW_LIGHTIFY_SWITCHES]
):
continue
if addr not in lights:
osram_light = OsramLightifyLight(
light, update_lights, lights_changed
)
lights[addr] = osram_light
new_entities.append(osram_light)
else:
lights[addr].update_luminary(light)
add_entities(new_entities)
return lights_changed
def update_groups():
"""Update the groups objects with the latest info from the bridge."""
lights_changed = update_lights()
try:
bridge.update_scene_list(config[CONF_INTERVAL_LIGHTIFY_CONF])
new_groups = bridge.update_group_list(config[CONF_INTERVAL_LIGHTIFY_CONF])
groups_updated = bridge.groups_updated()
except TimeoutError:
_LOGGER.error("Timeout during updating of scenes/groups")
return 0
except OSError:
_LOGGER.error("OSError during updating of scenes/groups")
return 0
if new_groups:
new_groups = {group.idx(): group for group in new_groups.values()}
new_entities = []
for idx, group in new_groups.items():
if idx not in groups:
osram_group = OsramLightifyGroup(
group, update_groups, groups_updated
)
groups[idx] = osram_group
new_entities.append(osram_group)
else:
groups[idx].update_luminary(group)
add_entities(new_entities)
if groups_updated > groups_last_updated[0]:
groups_last_updated[0] = groups_updated
for idx, osram_group in groups.items():
if idx not in new_groups:
osram_group.update_static_attributes()
return max(lights_changed, groups_updated)
update_lights()
if config[CONF_ALLOW_LIGHTIFY_GROUPS]:
update_groups()
class Luminary(LightEntity):
"""Representation of Luminary Lights and Groups."""
def __init__(self, luminary, update_func, changed):
"""Initialize a Luminary Light."""
self.update_func = update_func
self._luminary = luminary
self._changed = changed
self._unique_id = None
self._supported_features = []
self._effect_list = []
self._is_on = False
self._available = True
self._min_mireds = None
self._max_mireds = None
self._brightness = None
self._color_temp = None
self._rgb_color = None
self._device_attributes = None
self.update_static_attributes()
self.update_dynamic_attributes()
def _get_unique_id(self):
"""Get a unique ID (not implemented)."""
raise NotImplementedError
def _get_supported_features(self):
"""Get list of supported features."""
features = 0
if "lum" in self._luminary.supported_features():
features = features | SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
if "temp" in self._luminary.supported_features():
features = features | SUPPORT_COLOR_TEMP | SUPPORT_TRANSITION
if "rgb" in self._luminary.supported_features():
features = features | SUPPORT_COLOR | SUPPORT_TRANSITION | SUPPORT_EFFECT
return features
def _get_effect_list(self):
"""Get list of supported effects."""
effects = []
if "rgb" in self._luminary.supported_features():
effects.append(EFFECT_RANDOM)
return effects
@property
def name(self):
"""Return the name of the luminary."""
return self._luminary.name()
@property
def hs_color(self):
"""Return last hs color value set."""
return color_util.color_RGB_to_hs(*self._rgb_color)
@property
def color_temp(self):
"""Return the color temperature."""
return self._color_temp
@property
def brightness(self):
"""Return brightness of the luminary (0..255)."""
return self._brightness
@property
def is_on(self):
"""Return True if the device is on."""
return self._is_on
@property
def supported_features(self):
"""List of supported features."""
return self._supported_features
@property
def effect_list(self):
"""List of supported effects."""
return self._effect_list
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._max_mireds
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return self._device_attributes
@property
def available(self):
"""Return True if entity is available."""
return self._available
def play_effect(self, effect, transition):
"""Play selected effect."""
if effect == EFFECT_RANDOM:
self._rgb_color = (
random.randrange(0, 256),
random.randrange(0, 256),
random.randrange(0, 256),
)
self._luminary.set_rgb(*self._rgb_color, transition)
self._luminary.set_onoff(True)
return True
return False
def turn_on(self, **kwargs):
"""Turn the device on."""
transition = int(kwargs.get(ATTR_TRANSITION, 0) * 10)
if ATTR_EFFECT in kwargs:
self.play_effect(kwargs[ATTR_EFFECT], transition)
return
if ATTR_HS_COLOR in kwargs:
self._rgb_color = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR])
self._luminary.set_rgb(*self._rgb_color, transition)
if ATTR_COLOR_TEMP in kwargs:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
self._luminary.set_temperature(
int(color_util.color_temperature_mired_to_kelvin(self._color_temp)),
transition,
)
self._is_on = True
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
self._luminary.set_luminance(int(self._brightness / 2.55), transition)
else:
self._luminary.set_onoff(True)
def turn_off(self, **kwargs):
"""Turn the device off."""
self._is_on = False
if ATTR_TRANSITION in kwargs:
transition = int(kwargs[ATTR_TRANSITION] * 10)
self._brightness = DEFAULT_BRIGHTNESS
self._luminary.set_luminance(0, transition)
else:
self._luminary.set_onoff(False)
def update_luminary(self, luminary):
"""Update internal luminary object."""
self._luminary = luminary
self.update_static_attributes()
def update_static_attributes(self):
"""Update static attributes of the luminary."""
self._unique_id = self._get_unique_id()
self._supported_features = self._get_supported_features()
self._effect_list = self._get_effect_list()
if self._supported_features & SUPPORT_COLOR_TEMP:
self._min_mireds = color_util.color_temperature_kelvin_to_mired(
self._luminary.max_temp() or DEFAULT_KELVIN
)
self._max_mireds = color_util.color_temperature_kelvin_to_mired(
self._luminary.min_temp() or DEFAULT_KELVIN
)
def update_dynamic_attributes(self):
"""Update dynamic attributes of the luminary."""
self._is_on = self._luminary.on()
self._available = self._luminary.reachable() and not self._luminary.deleted()
if self._supported_features & SUPPORT_BRIGHTNESS:
self._brightness = int(self._luminary.lum() * 2.55)
if self._supported_features & SUPPORT_COLOR_TEMP:
self._color_temp = color_util.color_temperature_kelvin_to_mired(
self._luminary.temp() or DEFAULT_KELVIN
)
if self._supported_features & SUPPORT_COLOR:
self._rgb_color = self._luminary.rgb()
def update(self):
"""Synchronize state with bridge."""
changed = self.update_func()
if changed > self._changed:
self._changed = changed
self.update_dynamic_attributes()
class OsramLightifyLight(Luminary):
"""Representation of an Osram Lightify Light."""
def _get_unique_id(self):
"""Get a unique ID."""
return self._luminary.addr()
def update_static_attributes(self):
"""Update static attributes of the luminary."""
super().update_static_attributes()
attrs = {
"device_type": f"{self._luminary.type_id()} ({self._luminary.devicename()})",
"firmware_version": self._luminary.version(),
}
if self._luminary.devicetype().name == "SENSOR":
attrs["sensor_values"] = self._luminary.raw_values()
self._device_attributes = attrs
class OsramLightifyGroup(Luminary):
"""Representation of an Osram Lightify Group."""
def _get_unique_id(self):
"""Get a unique ID for the group."""
        # Actually, this is a poor choice for a unique ID, because a combination
        # of lights is NOT unique (Osram Lightify allows different groups to be
        # created with the same lights). A combination of lights may also easily
        # change, while the group remains the same from the user's perspective.
        # It should be something like "<gateway host>-<group.idx()>"
        # (see the hedged sketch after this method).
        # For now it is kept as is for backward compatibility with existing
        # users.
return f"{self._luminary.lights()}"
def _get_supported_features(self):
"""Get list of supported features."""
features = super()._get_supported_features()
if self._luminary.scenes():
features = features | SUPPORT_EFFECT
return features
def _get_effect_list(self):
"""Get list of supported effects."""
effects = super()._get_effect_list()
effects.extend(self._luminary.scenes())
return sorted(effects)
def play_effect(self, effect, transition):
"""Play selected effect."""
if super().play_effect(effect, transition):
return True
if effect in self._luminary.scenes():
self._luminary.activate_scene(effect)
return True
return False
def update_static_attributes(self):
"""Update static attributes of the luminary."""
super().update_static_attributes()
self._device_attributes = {"lights": self._luminary.light_names()}
|
import unittest
from common import gpu_test
class TestCupy(unittest.TestCase):
@gpu_test
def test_kernel(self):
import cupy as cp
x = cp.arange(6, dtype='f').reshape(2, 3)
y = cp.arange(3, dtype='f')
kernel = cp.ElementwiseKernel(
'float32 x, float32 y', 'float32 z',
'''if (x - 2 > y) {
z = x * y;
} else {
z = x + y;
}''',
'my_kernel')
r = kernel(x, y)
self.assertEqual((2, 3), r.shape)
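        # A hedged extra check: with the inputs above, the first row never
        # satisfies x - 2 > y (so z = x + y) and the second row always does
        # (so z = x * y), giving the values below.
        self.assertEqual([[0., 2., 4.], [0., 4., 10.]], r.tolist())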
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
import posixpath
from absl import flags
from perfkitbenchmarker import context
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import kubernetes_helper
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine, linux_virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import aws_virtual_machine
from perfkitbenchmarker.providers.azure import azure_virtual_machine
from perfkitbenchmarker.providers.gcp import gce_virtual_machine
from perfkitbenchmarker.providers.kubernetes import kubernetes_disk
from perfkitbenchmarker.vm_util import OUTPUT_STDOUT as STDOUT
import six
FLAGS = flags.FLAGS
SELECTOR_PREFIX = 'pkb'
class KubernetesVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a Kubernetes POD."""
CLOUD = providers.KUBERNETES
DEFAULT_IMAGE = None
CONTAINER_COMMAND = None
HOME_DIR = '/root'
IS_REBOOTABLE = False
def __init__(self, vm_spec):
"""Initialize a Kubernetes virtual machine.
Args:
vm_spec: KubernetesPodSpec object of the vm.
"""
super(KubernetesVirtualMachine, self).__init__(vm_spec)
self.num_scratch_disks = 0
self.name = self.name.replace('_', '-')
self.user_name = FLAGS.username
self.image = self.image or self.DEFAULT_IMAGE
self.resource_limits = vm_spec.resource_limits
self.resource_requests = vm_spec.resource_requests
def GetResourceMetadata(self):
metadata = super(KubernetesVirtualMachine, self).GetResourceMetadata()
if self.resource_limits:
metadata.update({
'pod_cpu_limit': self.resource_limits.cpus,
'pod_memory_limit_mb': self.resource_limits.memory,
})
if self.resource_requests:
metadata.update({
'pod_cpu_request': self.resource_requests.cpus,
'pod_memory_request_mb': self.resource_requests.memory,
})
return metadata
def _CreateDependencies(self):
self._CheckPrerequisites()
self._CreateVolumes()
def _DeleteDependencies(self):
self._DeleteVolumes()
def _Create(self):
self._CreatePod()
self._WaitForPodBootCompletion()
@vm_util.Retry()
def _PostCreate(self):
self._GetInternalIp()
self._ConfigureProxy()
self._SetupDevicesPaths()
def _Delete(self):
self._DeletePod()
def _CheckPrerequisites(self):
"""Exits if any of the prerequisites is not met."""
if not FLAGS.kubectl:
raise Exception('Please provide path to kubectl tool using --kubectl '
'flag. Exiting.')
if not FLAGS.kubeconfig:
raise Exception('Please provide path to kubeconfig using --kubeconfig '
'flag. Exiting.')
if self.disk_specs and self.disk_specs[0].disk_type == disk.STANDARD:
if not FLAGS.ceph_monitors:
raise Exception('Please provide a list of Ceph Monitors using '
'--ceph_monitors flag.')
def _CreatePod(self):
"""Creates a POD (Docker container with optional volumes)."""
create_rc_body = self._BuildPodBody()
logging.info('About to create a pod with the following configuration:')
logging.info(create_rc_body)
kubernetes_helper.CreateResource(create_rc_body)
@vm_util.Retry(poll_interval=10, max_retries=100, log_errors=False)
def _WaitForPodBootCompletion(self):
"""
    Wait for the POD to come up - PODs are created with a slight delay.
"""
exists_cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig, 'get',
'pod', '-o=json', self.name]
logging.info('Waiting for POD %s', self.name)
pod_info, _, _ = vm_util.IssueCommand(exists_cmd, suppress_warning=True,
raise_on_failure=False)
if pod_info:
pod_info = json.loads(pod_info)
containers = pod_info['spec']['containers']
if len(containers) == 1:
pod_status = pod_info['status']['phase']
if (containers[0]['name'].startswith(self.name)
and pod_status == 'Running'):
logging.info('POD is up and running.')
return
raise Exception('POD %s is not running. Retrying to check status.' %
self.name)
def _DeletePod(self):
"""Deletes a POD."""
delete_pod = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig,
'delete', 'pod', self.name]
output = vm_util.IssueCommand(delete_pod, raise_on_failure=False)
logging.info(output[STDOUT].rstrip())
@vm_util.Retry(poll_interval=10, max_retries=20)
def _Exists(self):
"""POD should have been already created but this is a double check."""
exists_cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig, 'get',
'pod', '-o=json', self.name]
pod_info, _, _ = vm_util.IssueCommand(
exists_cmd, suppress_warning=True, raise_on_failure=False)
if pod_info:
return True
return False
def _CreateVolumes(self):
"""
    Creates volumes for scratch disks. These volumes have to be created
    BEFORE container creation because Kubernetes doesn't allow attaching
    volumes to already running containers.
"""
self.scratch_disks = kubernetes_disk.CreateDisks(self.disk_specs, self.name)
@vm_util.Retry(poll_interval=10, max_retries=20, log_errors=False)
def _DeleteVolumes(self):
"""Deletes volumes."""
for scratch_disk in self.scratch_disks[:]:
scratch_disk.Delete()
self.scratch_disks.remove(scratch_disk)
  def DeleteScratchDisks(self):
    """No-op: scratch disk volume deletion is handled by _DeleteVolumes()."""
    pass
def _GetInternalIp(self):
"""Gets the POD's internal ip address."""
pod_ip = kubernetes_helper.Get(
'pods', self.name, '', '.status.podIP')
if not pod_ip:
raise Exception('Internal POD IP address not found. Retrying.')
self.internal_ip = pod_ip
self.ip_address = pod_ip
def _ConfigureProxy(self):
"""
    In Docker containers, environment variables from /etc/environment
    are not sourced - this results in connection problems when running
    behind a proxy. Prepending the proxy environment variables to bashrc
    solves the problem. Note: APPENDING to bashrc will not work because
    the script exits when it is NOT executed in an interactive shell.
"""
if FLAGS.http_proxy:
http_proxy = 'sed -i \'1i export http_proxy=%s\' /etc/bash.bashrc'
self.RemoteCommand(http_proxy % FLAGS.http_proxy)
if FLAGS.https_proxy:
https_proxy = 'sed -i \'1i export https_proxy=%s\' /etc/bash.bashrc'
      self.RemoteCommand(https_proxy % FLAGS.https_proxy)
if FLAGS.ftp_proxy:
ftp_proxy = 'sed -i \'1i export ftp_proxy=%s\' /etc/bash.bashrc'
self.RemoteCommand(ftp_proxy % FLAGS.ftp_proxy)
def _SetupDevicesPaths(self):
"""Sets the path to each scratch disk device."""
for scratch_disk in self.scratch_disks:
scratch_disk.SetDevicePath(self)
def _BuildPodBody(self):
"""
    Builds the JSON body of the POST request that is sent to the
    Kubernetes API in order to create a POD.
"""
container = self._BuildContainerBody()
volumes = self._BuildVolumesBody()
template = {
'kind': 'Pod',
'apiVersion': 'v1',
'metadata': {
'name': self.name,
'labels': {
SELECTOR_PREFIX: self.name
}
},
'spec': {
'volumes': volumes,
'containers': [container],
'dnsPolicy': 'ClusterFirst',
}
}
if FLAGS.kubernetes_anti_affinity:
template['spec']['affinity'] = {
'podAntiAffinity': {
'requiredDuringSchedulingIgnoredDuringExecution': [{
'labelSelector': {
'matchExpressions': [{
'key': 'pkb_anti_affinity',
'operator': 'In',
'values': [''],
}],
},
'topologyKey': 'kubernetes.io/hostname',
}],
},
}
template['metadata']['labels']['pkb_anti_affinity'] = ''
return json.dumps(template)
def _BuildVolumesBody(self):
"""Constructs volumes-related part of POST request to create POD."""
volumes = []
for scratch_disk in self.scratch_disks:
scratch_disk.AttachVolumeInfo(volumes)
return volumes
def _BuildContainerBody(self):
"""Constructs containers-related part of POST request to create POD."""
registry = getattr(context.GetThreadBenchmarkSpec(), 'registry', None)
if (not FLAGS.static_container_image and
registry is not None):
image = registry.GetFullRegistryTag(self.image)
else:
image = self.image
container = {
'image': image,
'name': self.name,
'workingDir': self.HOME_DIR,
'securityContext': {
'privileged': FLAGS.docker_in_privileged_mode
},
'volumeMounts': [
]
}
for scratch_disk in self.scratch_disks:
scratch_disk.AttachVolumeMountInfo(container['volumeMounts'])
resource_body = self._BuildResourceBody()
if resource_body:
container['resources'] = resource_body
if self.CONTAINER_COMMAND:
container['command'] = self.CONTAINER_COMMAND
return container
def _BuildResourceBody(self):
"""Constructs a dictionary that specifies resource limits and requests.
The syntax for including GPUs is specific to GKE and is likely to
change in the future.
See https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus
Returns:
kubernetes pod resource body containing pod limits and requests.
"""
resources = {
'limits': {},
'requests': {},
}
if self.resource_requests:
resources['requests'].update({
'cpu': str(self.resource_requests.cpus),
'memory': '{0}Mi'.format(self.resource_requests.memory),
})
if self.resource_limits:
resources['limits'].update({
'cpu': str(self.resource_limits.cpus),
'memory': '{0}Mi'.format(self.resource_limits.memory),
})
if self.gpu_count:
gpu_dict = {
'nvidia.com/gpu': str(self.gpu_count)
}
resources['limits'].update(gpu_dict)
resources['requests'].update(gpu_dict)
result_with_empty_values_removed = ({
k: v for k, v in six.iteritems(resources) if v
})
return result_with_empty_values_removed
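  # Illustrative output of _BuildResourceBody (a sketch; the real values come
  # from the KubernetesPodSpec): with cpus=2 and memory=2048 for both requests
  # and limits, and gpu_count=1, the returned body is
  #   {'limits': {'cpu': '2', 'memory': '2048Mi', 'nvidia.com/gpu': '1'},
  #    'requests': {'cpu': '2', 'memory': '2048Mi', 'nvidia.com/gpu': '1'}}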
class DebianBasedKubernetesVirtualMachine(
KubernetesVirtualMachine, linux_virtual_machine.BaseDebianMixin):
"""Base class for Debian based containers running inside k8s."""
def RemoteHostCommandWithReturnCode(self, command,
should_log=False, retries=None,
ignore_failure=False, login_shell=False,
suppress_warning=False, timeout=None):
"""Runs a command in the Kubernetes container."""
if retries is None:
retries = FLAGS.ssh_retries
cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig, 'exec', '-i',
self.name, '--', '/bin/bash', '-c', command]
for _ in range(retries):
stdout, stderr, retcode = vm_util.IssueCommand(
cmd, force_info_log=should_log,
suppress_warning=suppress_warning, timeout=timeout,
raise_on_failure=False)
# Check for ephemeral connection issues.
if not (retcode == 1 and 'error dialing backend: ssh' in stderr):
break
      logging.info('Retrying ephemeral connection issue:\n%s', stderr)
if not ignore_failure and retcode:
error_text = ('Got non-zero return code (%s) executing %s\n'
'Full command: %s\nSTDOUT: %sSTDERR: %s' %
(retcode, command, ' '.join(cmd),
stdout, stderr))
raise errors.VirtualMachine.RemoteCommandError(error_text)
return stdout, stderr, retcode
def MoveHostFile(self, target, source_path, remote_path=''):
"""Copies a file from one VM to a target VM.
Args:
target: The target BaseVirtualMachine object.
source_path: The location of the file on the REMOTE machine.
remote_path: The destination of the file on the TARGET machine, default
is the home directory.
"""
file_name = vm_util.PrependTempDir(posixpath.basename(source_path))
self.RemoteHostCopy(file_name, source_path, copy_to=False)
target.RemoteHostCopy(file_name, remote_path)
def RemoteHostCopy(self, file_path, remote_path='', copy_to=True):
"""Copies a file to or from the VM.
Args:
file_path: Local path to file.
remote_path: Optional path of where to copy file on remote host.
copy_to: True to copy to vm, False to copy from vm.
Raises:
RemoteCommandError: If there was a problem copying the file.
"""
if copy_to:
file_name = posixpath.basename(file_path)
src_spec, dest_spec = file_path, '%s:%s' % (self.name, file_name)
else:
remote_path, _ = self.RemoteCommand('readlink -f %s' % remote_path)
remote_path = remote_path.strip()
src_spec, dest_spec = '%s:%s' % (self.name, remote_path), file_path
cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig,
'cp', src_spec, dest_spec]
stdout, stderr, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode:
error_text = ('Got non-zero return code (%s) executing %s\n'
'STDOUT: %sSTDERR: %s' %
(retcode, ' '.join(cmd), stdout, stderr))
raise errors.VirtualMachine.RemoteCommandError(error_text)
if copy_to:
file_name = posixpath.basename(file_path)
remote_path = remote_path or file_name
self.RemoteCommand('mv %s %s; chmod 777 %s' %
(file_name, remote_path, remote_path))
@vm_util.Retry(log_errors=False, poll_interval=1)
def PrepareVMEnvironment(self):
super(DebianBasedKubernetesVirtualMachine, self).PrepareVMEnvironment()
# Don't rely on SSH being installed in Kubernetes containers,
# so install it and restart the service so that it is ready to go.
# Although ssh is not required to connect to the container, MPI
# benchmarks require it.
self.InstallPackages('ssh')
self.RemoteCommand('sudo /etc/init.d/ssh restart', ignore_failure=True)
self.RemoteCommand('mkdir -p ~/.ssh')
with open(self.ssh_public_key) as f:
key = f.read()
self.RemoteCommand('echo "%s" >> ~/.ssh/authorized_keys' % key)
self.Install('python')
# cpio is needed for the MKL math library.
# software-properties-common is needed for add-apt-repository
self.InstallPackages('cpio software-properties-common')
# Don't assume the relevant CLI is installed in the Kubernetes environment.
if FLAGS.container_cluster_cloud == 'GCP':
self.InstallGcloudCli()
elif FLAGS.container_cluster_cloud == 'AWS':
self.InstallAwsCli()
elif FLAGS.container_cluster_cloud == 'Azure':
self.InstallAzureCli()
def InstallAwsCli(self):
"""Installs the AWS CLI; used for downloading preprovisioned data."""
self.Install('aws_credentials')
self.Install('awscli')
def InstallAzureCli(self):
"""Installs the Azure CLI; used for downloading preprovisioned data."""
self.Install('azure_cli')
self.Install('azure_credentials')
# TODO(ferneyhough): Consider making this a package.
def InstallGcloudCli(self):
"""Installs the Gcloud CLI; used for downloading preprovisioned data."""
self.InstallPackages('curl')
# The driver /usr/lib/apt/methods/https is sometimes needed for apt-get.
self.InstallPackages('apt-transport-https')
self.RemoteCommand('echo "deb https://packages.cloud.google.com/apt '
'cloud-sdk-$(lsb_release -c -s) main" | sudo tee -a '
'/etc/apt/sources.list.d/google-cloud-sdk.list')
self.RemoteCommand('curl https://packages.cloud.google.com/apt/doc/'
'apt-key.gpg | sudo apt-key add -')
self.RemoteCommand('sudo apt-get update && sudo apt-get install '
'-y google-cloud-sdk')
def DownloadPreprovisionedData(self, install_path, module_name, filename):
"""Downloads a preprovisioned data file.
This function works by looking up the VirtualMachine class which matches
the cloud we are running on (defined by FLAGS.container_cluster_cloud).
Then we look for a module-level function defined in the same module as
the VirtualMachine class which generates a string used to download
preprovisioned data for the given cloud.
Note that this implementation is specific to Debian OS types.
Args:
install_path: The install path on this VM.
module_name: Name of the module associated with this data file.
filename: The name of the file that was downloaded.
Raises:
NotImplementedError: if this method does not support the specified cloud.
AttributeError: if the VirtualMachine class does not implement
GenerateDownloadPreprovisionedDataCommand.
"""
cloud = FLAGS.container_cluster_cloud
if cloud == 'GCP':
download_function = (gce_virtual_machine.
GenerateDownloadPreprovisionedDataCommand)
elif cloud == 'AWS':
download_function = (aws_virtual_machine.
GenerateDownloadPreprovisionedDataCommand)
elif cloud == 'Azure':
download_function = (azure_virtual_machine.
GenerateDownloadPreprovisionedDataCommand)
else:
raise NotImplementedError(
'Cloud {0} does not support downloading preprovisioned '
'data on Kubernetes VMs.'.format(cloud))
self.RemoteCommand(
download_function(install_path, module_name, filename))
def ShouldDownloadPreprovisionedData(self, module_name, filename):
"""Returns whether or not preprovisioned data is available."""
cloud = FLAGS.container_cluster_cloud
if cloud == 'GCP' and FLAGS.gcp_preprovisioned_data_bucket:
stat_function = (gce_virtual_machine.
GenerateStatPreprovisionedDataCommand)
elif cloud == 'AWS' and FLAGS.aws_preprovisioned_data_bucket:
stat_function = (aws_virtual_machine.
GenerateStatPreprovisionedDataCommand)
elif cloud == 'Azure' and FLAGS.azure_preprovisioned_data_bucket:
stat_function = (azure_virtual_machine.
GenerateStatPreprovisionedDataCommand)
else:
return False
return self.TryRemoteCommand(stat_function(module_name, filename))
def _install_sudo_command():
"""Return a bash command that installs sudo and runs tail indefinitely.
This is useful for some docker images that don't have sudo installed.
Returns:
    a sequence of arguments that use bash to install sudo and then run
    tail indefinitely (so the container never exits).
"""
# The canonical ubuntu images as well as the nvidia/cuda
# image do not have sudo installed so install it and configure
# the sudoers file such that the root user's environment is
# preserved when running as sudo. Then run tail indefinitely so that
# the container does not exit.
container_command = ' && '.join([
'apt-get update',
'apt-get install -y sudo',
'sed -i \'/env_reset/d\' /etc/sudoers',
'sed -i \'/secure_path/d\' /etc/sudoers',
'sudo ldconfig',
'tail -f /dev/null',
])
return ['bash', '-c', container_command]
# All Ubuntu images below are from https://hub.docker.com/_/ubuntu/
# Note that they do not include all packages that are typically
# included with Ubuntu. For example, sudo is not installed.
# KubernetesVirtualMachine takes care of this by installing
# sudo in the container startup script.
class Ubuntu1604BasedKubernetesVirtualMachine(
DebianBasedKubernetesVirtualMachine, linux_virtual_machine.Ubuntu1604Mixin):
DEFAULT_IMAGE = 'ubuntu:16.04'
CONTAINER_COMMAND = _install_sudo_command()
class Ubuntu1710BasedKubernetesVirtualMachine(
DebianBasedKubernetesVirtualMachine, linux_virtual_machine.Ubuntu1710Mixin):
DEFAULT_IMAGE = 'ubuntu:17.10'
CONTAINER_COMMAND = _install_sudo_command()
class Ubuntu1604Cuda9BasedKubernetesVirtualMachine(
DebianBasedKubernetesVirtualMachine,
linux_virtual_machine.Ubuntu1604Cuda9Mixin):
# Image is from https://hub.docker.com/r/nvidia/cuda/
DEFAULT_IMAGE = 'nvidia/cuda:9.0-devel-ubuntu16.04'
CONTAINER_COMMAND = _install_sudo_command()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from perfkitbenchmarker.providers.azure import util
from tests import pkb_common_test_case
class AzureUtilTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AzureUtilTest, self).setUp()
self.expected_location = 'eastus2'
self.expected_availability_zone = '1'
def test_get_location_from_zone_valid_location(self):
valid_location = 'eastus2'
self.assertEqual(self.expected_location,
util.GetLocationFromZone(valid_location))
def test_get_location_from_zone_valid_zone(self):
valid_zone = 'eastus2-1'
self.assertEqual(self.expected_location,
util.GetLocationFromZone(valid_zone))
  def test_get_location_from_zone_invalid_location(self):
    invalid_location = 'us-east2'
    with self.assertRaises(ValueError):
      util.GetLocationFromZone(invalid_location)
  def test_get_location_from_zone_invalid_zone(self):
    invalid_zone = 'eastus2-1a'
    with self.assertRaises(ValueError):
      util.GetLocationFromZone(invalid_zone)
def test_get_availability_zone_from_zone_valid_location(self):
valid_location = 'eastus2'
self.assertEqual(None, util.GetAvailabilityZoneFromZone(valid_location))
def test_get_availability_zone_from_zone_valid_zone(self):
valid_zone = 'eastus2-1'
self.assertEqual(self.expected_availability_zone,
util.GetAvailabilityZoneFromZone(valid_zone))
  def test_get_availability_zone_from_zone_invalid_zone(self):
    invalid_zone = 'eastus2-1a'
    with self.assertRaises(ValueError):
      util.GetAvailabilityZoneFromZone(invalid_zone)
if __name__ == '__main__':
unittest.main()
|
import asyncio
from datetime import timedelta
import logging
from math import ceil
from aiohttp.client_exceptions import ClientConnectorError
from airly import Airly
from airly.exceptions import AirlyError
import async_timeout
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_API_ADVICE,
ATTR_API_CAQI,
ATTR_API_CAQI_DESCRIPTION,
ATTR_API_CAQI_LEVEL,
DOMAIN,
MAX_REQUESTS_PER_DAY,
NO_AIRLY_SENSORS,
)
PLATFORMS = ["air_quality", "sensor"]
_LOGGER = logging.getLogger(__name__)
def set_update_interval(hass, instances):
"""Set update_interval to another configured Airly instances."""
# We check how many Airly configured instances are and calculate interval to not
# exceed allowed numbers of requests.
interval = timedelta(minutes=ceil(24 * 60 / MAX_REQUESTS_PER_DAY) * instances)
if hass.data.get(DOMAIN):
for instance in hass.data[DOMAIN].values():
instance.update_interval = interval
return interval
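# Worked example of the calculation above (hedged; assumes MAX_REQUESTS_PER_DAY
# is 100): ceil(24 * 60 / 100) = 15 minutes per instance, so with two configured
# Airly instances each one polls at most every 30 minutes, keeping the combined
# request count within the daily limit.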
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured Airly."""
return True
async def async_setup_entry(hass, config_entry):
"""Set up Airly as config entry."""
api_key = config_entry.data[CONF_API_KEY]
latitude = config_entry.data[CONF_LATITUDE]
longitude = config_entry.data[CONF_LONGITUDE]
# For backwards compat, set unique ID
if config_entry.unique_id is None:
hass.config_entries.async_update_entry(
config_entry, unique_id=f"{latitude}-{longitude}"
)
websession = async_get_clientsession(hass)
# Change update_interval for other Airly instances
update_interval = set_update_interval(
hass, len(hass.config_entries.async_entries(DOMAIN))
)
coordinator = AirlyDataUpdateCoordinator(
hass, websession, api_key, latitude, longitude, update_interval
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
# Change update_interval for other Airly instances
set_update_interval(hass, len(hass.data[DOMAIN]))
return unload_ok
class AirlyDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold Airly data."""
def __init__(self, hass, session, api_key, latitude, longitude, update_interval):
"""Initialize."""
self.latitude = latitude
self.longitude = longitude
self.airly = Airly(api_key, session)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self):
"""Update data via library."""
data = {}
with async_timeout.timeout(20):
measurements = self.airly.create_measurements_session_point(
self.latitude, self.longitude
)
try:
await measurements.update()
except (AirlyError, ClientConnectorError) as error:
raise UpdateFailed(error) from error
values = measurements.current["values"]
index = measurements.current["indexes"][0]
standards = measurements.current["standards"]
if index["description"] == NO_AIRLY_SENSORS:
raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
for value in values:
data[value["name"]] = value["value"]
for standard in standards:
data[f"{standard['pollutant']}_LIMIT"] = standard["limit"]
data[f"{standard['pollutant']}_PERCENT"] = standard["percent"]
data[ATTR_API_CAQI] = index["value"]
data[ATTR_API_CAQI_LEVEL] = index["level"].lower().replace("_", " ")
data[ATTR_API_CAQI_DESCRIPTION] = index["description"]
data[ATTR_API_ADVICE] = index["advice"]
return data
|
import logging
from io import StringIO
import numpy as np
import pandas as pd
import queue as q
from yandextank.aggregator import TimeChopper
from yandextank.aggregator import aggregator as agg
logger = logging.getLogger(__name__)
KNOWN_EXC = {
"java.net.NoRouteToHostException": 113,
"java.net.ConnectException": 110,
"java.net.BindException": 99,
"java.net.PortUnreachableException": 101,
"java.net.ProtocolException": 71,
"java.net.SocketException": 32,
"java.net.SocketTimeoutException": 110,
"java.net.UnknownHostException": 14,
"java.net.URISyntaxException": 22,
"java.io.FileNotFoundException": 2,
"java.io.IOException": 5,
"java.io.EOFException": 104,
"org.apache.http.conn.ConnectTimeoutException": 110,
"org.apache.commons.net.MalformedServerReplyException": 71,
"org.apache.http.NoHttpResponseException": 32,
"java.io.InterruptedIOException": 32,
"javax.net.ssl.SSLHandshakeException": 5,
}
def _exc_to_net(param1, success):
""" translate http code to net code. if accertion failed, set net code to 314 """
if len(param1) <= 3:
# FIXME: we're unable to use better logic here, because we should support non-http codes
# but, we should look for core.util.HTTP or some other common logic
# here
if success:
return 0
else:
return 314
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC:
return KNOWN_EXC[exc]
else:
logger.warning(
"Unknown Java exception, consider adding it to dictionary: %s",
param1)
return 41
def _exc_to_http(param1):
""" translate exception str to http code"""
if len(param1) <= 3:
try:
int(param1)
except BaseException:
logger.error(
"JMeter wrote some strange data into codes column: %s", param1)
else:
return int(param1)
exc = param1.split(' ')[-1]
if exc in KNOWN_EXC:
return 0
else:
logger.warning("Unknown Java exception. %s", param1)
return 0
exc_to_net = np.vectorize(_exc_to_net)
exc_to_http = np.vectorize(_exc_to_http)
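# Illustrative behaviour of the vectorized helpers above (a sketch, not part of
# the reader pipeline):
#   exc_to_net(['200', 'Non HTTP response code: java.net.ConnectException'],
#              [True, False])   -> [0, 110]
#   exc_to_http(['200', 'Non HTTP response code: java.net.ConnectException'])
#                                -> [200, 0]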
# phout_columns = [
# 'send_ts', 'tag', 'interval_real', 'connect_time', 'send_time', 'latency',
# 'receive_time', 'interval_event', 'size_out', 'size_in', 'net_code',
# 'proto_code'
# ]
jtl_columns = [
'send_ts', 'interval_real', 'tag', 'retcode', 'success', 'size_in',
'grpThreads', 'allThreads', 'latency', 'connect_time'
]
jtl_types = {
'send_ts': np.int64,
'interval_real': np.int64,
'tag': np.str,
'retcode': np.str,
'success': np.bool,
'size_in': np.int64,
'grpThreads': np.int64,
'allThreads': np.int64,
'latency': np.int64,
'connect_time': np.float64,
}
def fix_latency(row):
    """Subtract connect_time from latency; fall back to interval_real - connect_time
    (or 0) when the raw JMeter numbers are inconsistent."""
if row['latency'] < row['connect_time']:
if row['interval_real'] < row['connect_time']:
latency = 0
else:
latency = row['interval_real'] - row['connect_time']
else:
latency = row['latency'] - row['connect_time']
return latency
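# Worked example (hedged, values in microseconds): for a row with
# interval_real=20000, connect_time=10000 and a reported latency of 5000 the
# fallback branch yields 20000 - 10000 = 10000; with latency=30000 the normal
# branch yields 30000 - 10000 = 20000.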
# timeStamp,elapsed,label,responseCode,success,bytes,grpThreads,allThreads,Latency
def string_to_df(data):
chunk = pd.read_csv(StringIO(data),
sep='\t',
names=jtl_columns, dtype=jtl_types,
keep_default_na=False)
chunk["receive_ts"] = (chunk["send_ts"] + chunk['interval_real']) / 1000.0
chunk['receive_sec'] = chunk["receive_ts"].astype(np.int64)
chunk['interval_real'] = chunk["interval_real"] * 1000 # convert to µs
chunk.set_index(['receive_sec'], inplace=True)
chunk_length = len(chunk)
chunk['connect_time'] = (chunk['connect_time'].fillna(0) * 1000).astype(np.int64)
chunk['latency'] = chunk['latency'] * 1000
chunk['latency'] = chunk.apply(fix_latency, axis=1)
chunk['send_time'] = np.zeros(chunk_length)
chunk['receive_time'] = chunk['interval_real'] - \
chunk['latency'] - chunk['connect_time']
chunk['interval_event'] = np.zeros(chunk_length)
chunk['size_out'] = np.zeros(chunk_length).astype(int)
chunk['net_code'] = exc_to_net(chunk['retcode'], chunk['success'])
chunk['proto_code'] = exc_to_http(chunk['retcode'])
return chunk
class JMeterStatAggregator(object):
def __init__(self, source):
self.worker = agg.Worker({"allThreads": ["max"]}, False)
self.source = source
def __iter__(self):
for ts, chunk, rps in self.source:
stats = self.worker.aggregate(chunk)
yield [{
'ts': ts,
'metrics': {
'instances': stats['allThreads']['max'],
'reqps': 0
}
}]
def close(self):
pass
class JMeterReader(object):
def __init__(self, filename):
self.buffer = ""
self.stat_buffer = ""
self.jtl_file = filename
self.jmeter_finished = False
self.agg_finished = False
self.closed = False
self.stat_queue = q.Queue()
self.stats_reader = JMeterStatAggregator(
TimeChopper(self._read_stat_queue(), 3))
def _read_stat_queue(self):
while not self.closed:
# for _ in range(self.stat_queue.qsize()):
try:
si = self.stat_queue.get_nowait()
if si is not None:
yield si
except q.Empty:
pass
def _read_jtl_chunk(self, jtl):
data = jtl.read(1024 * 1024 * 10)
if data:
parts = data.rsplit('\n', 1)
if len(parts) > 1:
ready_chunk = self.buffer + parts[0] + '\n'
self.buffer = parts[1]
df = string_to_df(ready_chunk)
self.stat_queue.put(df)
return df
else:
self.buffer += parts[0]
else:
if self.jmeter_finished:
self.agg_finished = True
jtl.readline()
return None
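        # Buffering example (illustrative): if the 10 MB read ends in the middle
        # of a line, e.g. "...\t0\t1500\nsecond-half-of-a-li", only the part up
        # to the last '\n' is parsed into a DataFrame now and the trailing
        # "second-half-of-a-li" is kept in self.buffer for the next chunk.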
def __iter__(self):
with open(self.jtl_file, 'r') as jtl:
while not self.closed:
yield self._read_jtl_chunk(jtl)
yield self._read_jtl_chunk(jtl)
def close(self):
self.closed = True
|
import os
import unittest
from absl import flags
from perfkitbenchmarker import events
from perfkitbenchmarker.sample import Sample
from tests import pkb_common_test_case
from perfkitbenchmarker.traces import dstat
FLAGS = flags.FLAGS
class DstatTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(DstatTestCase, self).setUp()
directory = os.path.join(os.path.dirname(__file__), '..', 'data')
path = os.path.join(directory, 'dstat-result.csv')
self.collector = dstat._DStatCollector(output_directory=directory)
self.collector._role_mapping['test_vm0'] = path
events.TracingEvent.events = []
self.samples = []
def testAnalyzeEmptyEvents(self):
self.collector.Analyze('testSender', None, self.samples)
self.assertEqual(self.samples, [])
def testAnalyzeInvalidEventTimestamps(self):
events.AddEvent('sender', 'event', -1, -2, {})
self.collector.Analyze('testSender', None, self.samples)
self.assertEqual(self.samples, [])
def testAnalyzeValidEventSingleRow(self):
events.AddEvent('sender', 'event', 1475708693, 1475708694,
{'label1': 123})
self.collector.Analyze('testSender', None, self.samples)
# 61 metrics
    self.assertEqual(len(self.samples), 61)
expected = Sample(metric='usr__total cpu usage',
value=6.4000000000000004,
unit='',
metadata={'vm_role': 'test_vm0',
'label1': 123, 'event': 'event',
'sender': 'sender'},
timestamp=0.0)
self.assertEqual(
expected.metric, self.samples[0].metric)
self.assertEqual(
expected.value, self.samples[0].value)
self.assertEqual(
expected.metadata, self.samples[0].metadata)
def testAnalyzeValidEventTwoRows(self):
events.AddEvent('sender', 'event', 1475708693, 1475708695,
{'label1': 123})
self.collector.Analyze('testSender', None, self.samples)
# 61 metrics
    self.assertEqual(len(self.samples), 61)
expected = Sample(metric='usr__total cpu usage',
value=3.200000000000000,
unit='',
metadata={'vm_role': 'test_vm0',
'label1': 123, 'event': 'event',
'sender': 'sender'},
timestamp=0.0)
self.assertEqual(
expected.metric, self.samples[0].metric)
self.assertEqual(
expected.value, self.samples[0].value)
self.assertEqual(
expected.metadata, self.samples[0].metadata)
def testAnalyzeValidEventEntireFile(self):
events.AddEvent('sender', 'event', 1475708693, 1475709076,
{'label1': 123})
self.collector.Analyze('testSender', None, self.samples)
# 61 metrics
    self.assertEqual(len(self.samples), 61)
expected = Sample(metric='usr__total cpu usage',
value=10.063689295039159,
unit='',
metadata={'vm_role': 'test_vm0',
'label1': 123, 'event': 'event',
'sender': 'sender'},
timestamp=0.0)
self.assertEqual(
expected.metric, self.samples[0].metric)
self.assertEqual(
expected.value, self.samples[0].value)
self.assertEqual(
expected.metadata, self.samples[0].metadata)
if __name__ == '__main__':
unittest.main()
|
import glob
import os
import pytest
HOWTO_DIR = os.path.realpath(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'howtos'))
@pytest.mark.parametrize('howto', sorted([x.split('/')[-1]
for x in glob.glob(os.path.join(HOWTO_DIR, 'how_to_*.py'))]))
def test_howto(howto, mongo_host):
exec(open(HOWTO_DIR + "/" + howto).read(), {'mongo_host': mongo_host})
|
import io
import unittest
from absl import flags
import mock
from perfkitbenchmarker import disk
from perfkitbenchmarker import static_virtual_machine as svm
from perfkitbenchmarker import vm_util
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_COMPONENT = 'test_static_vm_spec'
_DISK_SPEC_DICTS = [{
'device_path': '/test_device_path'
}, {
'mount_point': '/test_mount_point'
}]
class TestStaticVirtualMachine(pkb_common_test_case.TestOsMixin,
svm.StaticVirtualMachine):
pass
def CreateTestStaticVm():
vm_spec = svm.StaticVmSpec(_COMPONENT)
return TestStaticVirtualMachine(vm_spec=vm_spec)
class StaticVmSpecTest(pkb_common_test_case.PkbCommonTestCase):
def testDefaults(self):
spec = svm.StaticVmSpec(_COMPONENT)
self.assertIsNone(spec.ip_address)
self.assertIsNone(spec.user_name)
self.assertIsNone(spec.ssh_private_key)
self.assertIsNone(spec.internal_ip)
self.assertEqual(spec.ssh_port, 22)
self.assertIsNone(spec.password)
self.assertIsNone(spec.os_type)
self.assertEqual(spec.disk_specs, [])
def testDiskSpecs(self):
spec = svm.StaticVmSpec(_COMPONENT, disk_specs=_DISK_SPEC_DICTS)
self.assertEqual(len(spec.disk_specs), 2)
for disk_spec in spec.disk_specs:
self.assertIsInstance(disk_spec, disk.BaseDiskSpec)
self.assertEqual(spec.disk_specs[0].device_path, '/test_device_path')
self.assertIsNone(spec.disk_specs[0].mount_point)
self.assertIsNone(spec.disk_specs[1].device_path)
self.assertEqual(spec.disk_specs[1].mount_point, '/test_mount_point')
class StaticVirtualMachineTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(StaticVirtualMachineTest, self).setUp()
self._initial_pool = svm.StaticVirtualMachine.vm_pool
svm.StaticVirtualMachine.vm_pool.clear()
p = mock.patch(vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir')
p.start()
self.addCleanup(p.stop)
FLAGS.image = 'test_image'
def tearDown(self):
super(StaticVirtualMachineTest, self).tearDown()
svm.StaticVirtualMachine.vm_pool = self._initial_pool
def _AssertStaticVMsEqual(self, vm1, vm2):
self.assertEqual(vm1.ip_address, vm2.ip_address)
self.assertEqual(vm1.internal_ip, vm2.internal_ip)
self.assertEqual(vm1.user_name, vm2.user_name)
self.assertEqual(vm1.zone, vm2.zone)
self.assertEqual(vm1.ssh_private_key, vm2.ssh_private_key)
def testReadFromFile_WrongFormat(self):
fp = io.StringIO('{}')
self.assertRaises(ValueError,
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)
def testReadFromFile_MissingKey(self):
fp = io.StringIO('[{"ip_address": "10.10.10.3"}]')
self.assertRaises(ValueError,
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)
def testReadFromFile_Empty(self):
fp = io.StringIO('[]')
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
self.assertEqual([], list(svm.StaticVirtualMachine.vm_pool))
def testReadFromFile_NoErr(self):
s = ('[{'
' "ip_address": "174.12.14.1", '
' "user_name": "perfkitbenchmarker", '
' "keyfile_path": "perfkitbenchmarker.pem" '
'}, '
'{ '
' "ip_address": "174.12.14.121", '
' "user_name": "ubuntu", '
' "keyfile_path": "rackspace.pem", '
' "internal_ip": "10.10.10.2", '
' "zone": "rackspace_dallas" '
'}] ')
fp = io.StringIO(s)
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
vm_pool = svm.StaticVirtualMachine.vm_pool
self.assertEqual(2, len(vm_pool))
self._AssertStaticVMsEqual(
TestStaticVirtualMachine(
svm.StaticVmSpec(
_COMPONENT,
ip_address='174.12.14.1',
user_name='perfkitbenchmarker',
ssh_private_key='perfkitbenchmarker.pem')), vm_pool[0])
self._AssertStaticVMsEqual(
TestStaticVirtualMachine(
svm.StaticVmSpec(
_COMPONENT,
ip_address='174.12.14.121',
user_name='ubuntu',
ssh_private_key='rackspace.pem',
internal_ip='10.10.10.2',
zone='rackspace_dallas')), vm_pool[1])
def testReadFromFile_InvalidScratchDisksType(self):
s = ('[{'
' "ip_address": "174.12.14.1", '
' "user_name": "perfkitbenchmarker", '
' "keyfile_path": "perfkitbenchmarker.pem", '
' "scratch_disk_mountpoints": "/tmp/google-pkb" '
'}]')
fp = io.StringIO(s)
self.assertRaises(ValueError,
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)
def testReadFromFile_UnknownOsTypeDefaultsToLinuxRequiredKeys(self):
FLAGS.os_type = 'unknown_os_type'
s = ('[{'
' "ip_address": "174.12.14.1", '
' "user_name": "perfkitbenchmarker", '
' "keyfile_path": "perfkitbenchmarker.pem"'
'}]')
fp = io.StringIO(s)
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
vm_pool = svm.StaticVirtualMachine.vm_pool
self.assertEqual(1, len(vm_pool))
self._AssertStaticVMsEqual(
TestStaticVirtualMachine(
svm.StaticVmSpec(
_COMPONENT,
ip_address='174.12.14.1',
user_name='perfkitbenchmarker',
ssh_private_key='perfkitbenchmarker.pem')), vm_pool[0])
def testCreateReturn(self):
s = ('[{'
' "ip_address": "174.12.14.1", '
' "user_name": "perfkitbenchmarker", '
' "keyfile_path": "perfkitbenchmarker.pem" '
'}, '
'{ '
' "ip_address": "174.12.14.121", '
' "user_name": "ubuntu", '
' "keyfile_path": "rackspace.pem", '
' "internal_ip": "10.10.10.2", '
' "zone": "rackspace_dallas" '
'}] ')
fp = io.StringIO(s)
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
self.assertEqual(2, len(svm.StaticVirtualMachine.vm_pool))
vm0 = svm.StaticVirtualMachine.GetStaticVirtualMachine()
self.assertTrue(vm0.from_pool)
self.assertEqual(1, len(svm.StaticVirtualMachine.vm_pool))
vm0.Delete()
self.assertEqual(2, len(svm.StaticVirtualMachine.vm_pool))
vm1 = svm.StaticVirtualMachine.GetStaticVirtualMachine()
self.assertIs(vm0, vm1)
def testDiskSpecs(self):
s = """
[{
"ip_address": "174.12.14.1",
"user_name": "ubuntu",
"keyfile_path": "test_keyfile_path",
"local_disks": ["/test_local_disk_0", "/test_local_disk_1"],
"scratch_disk_mountpoints": ["/test_scratch_disk_0",
"/test_scratch_disk_1"]
}]
"""
expected_paths_and_mount_points = ((None, '/test_scratch_disk_0'),
(None, '/test_scratch_disk_1'),
('/test_local_disk_0',
None), ('/test_local_disk_1', None))
fp = io.StringIO(s)
svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
self.assertEqual(1, len(svm.StaticVirtualMachine.vm_pool))
vm = svm.StaticVirtualMachine.GetStaticVirtualMachine()
self.assertTrue(vm.from_pool)
self.assertEqual(len(vm.disk_specs), 4)
for disk_spec, expected_paths in zip(vm.disk_specs,
expected_paths_and_mount_points):
expected_device_path, expected_mount_point = expected_paths
self.assertEqual(disk_spec.device_path, expected_device_path)
self.assertEqual(disk_spec.mount_point, expected_mount_point)
if __name__ == '__main__':
unittest.main()
|
from typing import List, Optional
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
PRESET_AWAY,
PRESET_BOOST,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE
from . import CLIMATE, DOMAIN, AtagEntity
PRESET_SCHEDULE = "Auto"
PRESET_MANUAL = "Manual"
PRESET_EXTEND = "Extend"
SUPPORT_PRESET = [
PRESET_MANUAL,
PRESET_SCHEDULE,
PRESET_EXTEND,
PRESET_AWAY,
PRESET_BOOST,
]
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
HVAC_MODES = [HVAC_MODE_AUTO, HVAC_MODE_HEAT]
async def async_setup_entry(hass, entry, async_add_entities):
"""Load a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
async_add_entities([AtagThermostat(coordinator, CLIMATE)])
class AtagThermostat(AtagEntity, ClimateEntity):
"""Atag climate device."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def hvac_mode(self) -> Optional[str]:
"""Return hvac operation ie. heat, cool mode."""
if self.coordinator.atag.climate.hvac_mode in HVAC_MODES:
return self.coordinator.atag.climate.hvac_mode
return None
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes."""
return HVAC_MODES
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation."""
if self.coordinator.atag.climate.status:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self.coordinator.atag.climate.temp_unit
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return self.coordinator.atag.climate.temperature
@property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
return self.coordinator.atag.climate.target_temperature
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., auto, manual, fireplace, extend, etc."""
return self.coordinator.atag.climate.preset_mode
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes."""
return SUPPORT_PRESET
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
await self.coordinator.atag.climate.set_temp(kwargs.get(ATTR_TEMPERATURE))
self.async_write_ha_state()
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
await self.coordinator.atag.climate.set_hvac_mode(hvac_mode)
self.async_write_ha_state()
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self.coordinator.atag.climate.set_preset_mode(preset_mode)
self.async_write_ha_state()
|
import json
from itertools import chain
from django.contrib.admin import widgets
from django.contrib.staticfiles.storage import staticfiles_storage
from django.forms import Media
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from tagging.models import Tag
from zinnia.models import Entry
class MPTTFilteredSelectMultiple(widgets.FilteredSelectMultiple):
"""
MPTT version of FilteredSelectMultiple.
"""
option_inherits_attrs = True
def __init__(self, verbose_name, is_stacked=False, attrs=None, choices=()):
"""
        Initialize the widget as not stacked by default.
"""
super(MPTTFilteredSelectMultiple, self).__init__(
verbose_name, is_stacked, attrs, choices)
def optgroups(self, name, value, attrs=None):
"""Return a list of optgroups for this widget."""
groups = []
has_selected = False
if attrs is None:
attrs = {}
for index, (option_value, option_label, sort_fields) in enumerate(
chain(self.choices)):
# Set tree attributes
attrs['data-tree-id'] = sort_fields[0]
attrs['data-left-value'] = sort_fields[1]
subgroup = []
subindex = None
choices = [(option_value, option_label)]
groups.append((None, subgroup, index))
for subvalue, sublabel in choices:
selected = (
force_str(subvalue) in value and
(has_selected is False or self.allow_multiple_selected)
)
if selected is True and has_selected is False:
has_selected = True
subgroup.append(self.create_option(
name, subvalue, sublabel, selected, index,
subindex=subindex, attrs=attrs,
))
return groups
@property
def media(self):
"""
MPTTFilteredSelectMultiple's Media.
"""
js = ['admin/js/core.js',
'zinnia/admin/mptt/js/mptt_m2m_selectbox.js',
'admin/js/SelectFilter2.js']
return Media(js=[staticfiles_storage.url(path) for path in js])
class TagAutoComplete(widgets.AdminTextInputWidget):
"""
Tag widget with autocompletion based on select2.
"""
def get_tags(self):
"""
Returns the list of tags to auto-complete.
"""
return [tag.name for tag in
Tag.objects.usage_for_model(Entry)]
def render(self, name, value, attrs=None, renderer=None):
"""
Render the default widget and initialize select2.
"""
output = [super(TagAutoComplete, self).render(name, value, attrs)]
output.append('<script type="text/javascript">')
output.append('(function($) {')
output.append(' $(document).ready(function() {')
output.append(' $("#id_%s").select2({' % name)
output.append(' width: "element",')
output.append(' maximumInputLength: 50,')
output.append(' tokenSeparators: [",", " "],')
output.append(' tags: %s' % json.dumps(self.get_tags()))
output.append(' });')
output.append(' });')
output.append('}(django.jQuery));')
output.append('</script>')
return mark_safe('\n'.join(output))
@property
def media(self):
"""
TagAutoComplete's Media.
"""
def static(path):
return staticfiles_storage.url(
'zinnia/admin/select2/%s' % path)
return Media(
css={'all': (static('css/select2.css'),)},
js=(static('js/select2.js'),)
)
class MiniTextarea(widgets.AdminTextareaWidget):
"""
Vertically shorter version of the admin textarea widget.
"""
rows = 2
def __init__(self, attrs=None):
super(MiniTextarea, self).__init__(
{'rows': self.rows})
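# A minimal usage sketch, not part of the original module: these widgets are
# typically attached to an admin form. The form below is hypothetical and the
# field names ('tags', 'excerpt') are only illustrative.
#
#   from django import forms
#
#   class EntryAdminForm(forms.ModelForm):
#       class Meta:
#           model = Entry
#           fields = '__all__'
#           widgets = {
#               'tags': TagAutoComplete(),
#               'excerpt': MiniTextarea(),
#           }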
|
from sklearn.linear_model import PassiveAggressiveClassifier
from statsmodels.distributions import ECDF
class DeployedClassifier:
def __init__(self,
target_category,
category_idx_store,
term_idx_store,
entity_types_to_censor,
use_lemmas,
clean_function):
'''Not working
Parameters
----------
target_category
category_idx_store
term_idx_store
entity_types_to_censor
use_lemmas
clean_function
'''
self._target_category = target_category
self._category_idx_store = category_idx_store
        self._term_idx_store = term_idx_store
self._entity_types_to_censor = entity_types_to_censor
self._use_lemmas = use_lemmas
self._clean_function = clean_function
def classify(self, text, nlp):
X, y = self._get_features_and_labels_from_documents_and_indexes(self._category_doc_iter,
self._category_idx_store,
self._term_idx_store)
class NeedToTrainExceptionBeforeDeployingException(Exception):
pass
class DeployedClassifierFactory:
def __init__(self, term_doc_matrix, term_doc_matrix_factory, category, nlp=None):
'''This is a class that enables one to train and save a classification model.
Parameters
----------
term_doc_matrix : TermDocMatrix
term_doc_matrix_factory : TermDocMatrixFactory
category : str
Category name
nlp : spacy parser
'''
self._term_doc_matrix = term_doc_matrix
self._term_doc_matrix_factory = term_doc_matrix_factory
assert term_doc_matrix_factory._nlp is None
assert term_doc_matrix_factory.category_text_iter is None
self._category = category
self._clf = None
self._proba = None
def passive_aggressive_train(self):
'''Trains passive aggressive classifier
'''
self._clf = PassiveAggressiveClassifier(n_iter=50, C=0.2, n_jobs=-1, random_state=0)
self._clf.fit(self._term_doc_matrix._X, self._term_doc_matrix._y)
y_dist = self._clf.decision_function(self._term_doc_matrix._X)
pos_ecdf = ECDF(y_dist[y_dist >= 0])
neg_ecdf = ECDF(y_dist[y_dist <= 0])
        def proba_function(distance_from_hyperplane):
            # Map the decision-function distance onto [0, 1]: positive
            # distances use the positive ECDF and land in [0.5, 1], negative
            # distances use the negative ECDF and land in [0, 0.5].
            if distance_from_hyperplane > 0:
                return pos_ecdf(distance_from_hyperplane) / 2. + 0.5
            elif distance_from_hyperplane < 0:
                return neg_ecdf(distance_from_hyperplane) / 2.
            return 0.5
self._proba = proba_function
return self
def build(self):
        '''Builds the DeployedClassifier.
'''
if self._clf is None:
raise NeedToTrainExceptionBeforeDeployingException()
return DeployedClassifier(self._category,
self._term_doc_matrix._category_idx_store,
self._term_doc_matrix._term_idx_store,
self._term_doc_matrix_factory)
|
from typing import Callable, List
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OCCUPANCY,
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .common import BaseWithingsSensor, async_create_entities
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
entities = await async_create_entities(
hass, entry, WithingsHealthBinarySensor, BINARY_SENSOR_DOMAIN
)
async_add_entities(entities, True)
class WithingsHealthBinarySensor(BaseWithingsSensor, BinarySensorEntity):
"""Implementation of a Withings sensor."""
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self._state_data
@property
def device_class(self) -> str:
"""Provide the device class."""
return DEVICE_CLASS_OCCUPANCY
|
import asyncio
import logging
from pypoint import PointSession
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_TOKEN,
CONF_WEBHOOK_ID,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.dt import as_local, parse_datetime, utc_from_timestamp
from . import config_flow
from .const import (
CONF_WEBHOOK_URL,
DOMAIN,
EVENT_RECEIVED,
POINT_DISCOVERY_NEW,
SCAN_INTERVAL,
SIGNAL_UPDATE_ENTITY,
SIGNAL_WEBHOOK,
)
_LOGGER = logging.getLogger(__name__)
DATA_CONFIG_ENTRY_LOCK = "point_config_entry_lock"
CONFIG_ENTRY_IS_SETUP = "point_config_entry_is_setup"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the Minut Point component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
config_flow.register_flow_implementation(
hass, DOMAIN, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET]
)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Set up Point from a config entry."""
async def token_saver(token, **kwargs):
_LOGGER.debug("Saving updated token %s", token)
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_TOKEN: token}
)
session = PointSession(
hass.helpers.aiohttp_client.async_get_clientsession(),
entry.data["refresh_args"][CONF_CLIENT_ID],
entry.data["refresh_args"][CONF_CLIENT_SECRET],
token=entry.data[CONF_TOKEN],
token_saver=token_saver,
)
try:
await session.ensure_active_token()
except Exception: # pylint: disable=broad-except
_LOGGER.error("Authentication Error")
return False
hass.data[DATA_CONFIG_ENTRY_LOCK] = asyncio.Lock()
hass.data[CONFIG_ENTRY_IS_SETUP] = set()
await async_setup_webhook(hass, entry, session)
client = MinutPointClient(hass, entry, session)
hass.data.setdefault(DOMAIN, {}).update({entry.entry_id: client})
hass.async_create_task(client.update())
return True
async def async_setup_webhook(hass: HomeAssistantType, entry: ConfigEntry, session):
"""Set up a webhook to handle binary sensor events."""
if CONF_WEBHOOK_ID not in entry.data:
webhook_id = hass.components.webhook.async_generate_id()
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
_LOGGER.info("Registering new webhook at: %s", webhook_url)
hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_WEBHOOK_ID: webhook_id,
CONF_WEBHOOK_URL: webhook_url,
},
)
await session.update_webhook(
entry.data[CONF_WEBHOOK_URL],
entry.data[CONF_WEBHOOK_ID],
["*"],
)
hass.components.webhook.async_register(
DOMAIN, "Point", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
session = hass.data[DOMAIN].pop(entry.entry_id)
await session.remove_webhook()
for component in ("binary_sensor", "sensor"):
await hass.config_entries.async_forward_entry_unload(entry, component)
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle webhook callback."""
try:
data = await request.json()
_LOGGER.debug("Webhook %s: %s", webhook_id, data)
except ValueError:
return None
if isinstance(data, dict):
data["webhook_id"] = webhook_id
async_dispatcher_send(hass, SIGNAL_WEBHOOK, data, data.get("hook_id"))
hass.bus.async_fire(EVENT_RECEIVED, data)
class MinutPointClient:
"""Get the latest data and update the states."""
def __init__(self, hass: HomeAssistantType, config_entry: ConfigEntry, session):
"""Initialize the Minut data object."""
self._known_devices = set()
self._known_homes = set()
self._hass = hass
self._config_entry = config_entry
self._is_available = True
self._client = session
async_track_time_interval(self._hass, self.update, SCAN_INTERVAL)
async def update(self, *args):
"""Periodically poll the cloud for current state."""
await self._sync()
async def _sync(self):
"""Update local list of devices."""
if not await self._client.update() and self._is_available:
self._is_available = False
_LOGGER.warning("Device is unavailable")
async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY)
return
async def new_device(device_id, component):
"""Load new device."""
config_entries_key = f"{component}.{DOMAIN}"
async with self._hass.data[DATA_CONFIG_ENTRY_LOCK]:
if config_entries_key not in self._hass.data[CONFIG_ENTRY_IS_SETUP]:
await self._hass.config_entries.async_forward_entry_setup(
self._config_entry, component
)
self._hass.data[CONFIG_ENTRY_IS_SETUP].add(config_entries_key)
async_dispatcher_send(
self._hass, POINT_DISCOVERY_NEW.format(component, DOMAIN), device_id
)
self._is_available = True
for home_id in self._client.homes:
if home_id not in self._known_homes:
await new_device(home_id, "alarm_control_panel")
self._known_homes.add(home_id)
for device in self._client.devices:
if device.device_id not in self._known_devices:
for component in ("sensor", "binary_sensor"):
await new_device(device.device_id, component)
self._known_devices.add(device.device_id)
async_dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY)
def device(self, device_id):
"""Return device representation."""
return self._client.device(device_id)
def is_available(self, device_id):
"""Return device availability."""
if not self._is_available:
return False
return device_id in self._client.device_ids
async def remove_webhook(self):
"""Remove the session webhook."""
return await self._client.remove_webhook()
@property
def homes(self):
"""Return known homes."""
return self._client.homes
async def async_alarm_disarm(self, home_id):
"""Send alarm disarm command."""
return await self._client.alarm_disarm(home_id)
async def async_alarm_arm(self, home_id):
"""Send alarm arm command."""
return await self._client.alarm_arm(home_id)
class MinutPointEntity(Entity):
"""Base Entity used by the sensors."""
def __init__(self, point_client, device_id, device_class):
"""Initialize the entity."""
self._async_unsub_dispatcher_connect = None
self._client = point_client
self._id = device_id
self._name = self.device.name
self._device_class = device_class
self._updated = utc_from_timestamp(0)
self._value = None
def __str__(self):
"""Return string representation of device."""
return f"MinutPoint {self.name}"
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
_LOGGER.debug("Created device %s", self)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback
)
await self._update_callback()
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
async def _update_callback(self):
"""Update the value of the sensor."""
@property
def available(self):
"""Return true if device is not offline."""
return self._client.is_available(self.device_id)
@property
def device(self):
"""Return the representation of the device."""
return self._client.device(self.device_id)
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def device_id(self):
"""Return the id of the device."""
return self._id
@property
def device_state_attributes(self):
"""Return status of device."""
attrs = self.device.device_status
attrs["last_heard_from"] = as_local(self.last_update).strftime(
"%Y-%m-%d %H:%M:%S"
)
return attrs
@property
def device_info(self):
"""Return a device description for device registry."""
device = self.device.device
return {
"connections": {("mac", device["device_mac"])},
"identifieres": device["device_id"],
"manufacturer": "Minut",
"model": f"Point v{device['hardware_version']}",
"name": device["description"],
"sw_version": device["firmware"]["installed"],
"via_device": (DOMAIN, device["home"]),
}
@property
def name(self):
"""Return the display name of this device."""
return f"{self._name} {self.device_class.capitalize()}"
@property
def is_updated(self):
"""Return true if sensor have been updated."""
return self.last_update > self._updated
@property
def last_update(self):
"""Return the last_update time for the device."""
last_update = parse_datetime(self.device.last_update)
return last_update
@property
def should_poll(self):
"""No polling needed for point."""
return False
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return f"point.{self._id}-{self.device_class}"
@property
def value(self):
"""Return the sensor value."""
return self._value
|
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import list_clusters
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"list-clusters",
help="Display a list of all PaaSTA clusters",
description=(
"'paasta list' inspects all of the PaaSTA services declared in the soa-configs "
"directory, and prints the set of unique clusters that are used.\n\n"
"The command can only report those clusters that are actually used by some services."
),
)
list_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
list_parser.set_defaults(command=paasta_list_clusters)
def paasta_list_clusters(args, **kwargs):
for cluster in list_clusters(soa_dir=args.soa_dir):
print(cluster)
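# A hedged CLI sketch (the soa-configs path and cluster names are
# illustrative, not taken from a real deployment):
#
#   $ paasta list-clusters --soa-dir /path/to/soa-configs
#   cluster-one
#   cluster-two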
|
import unittest
import dedupe.canonical
class CanonicalizationTest(unittest.TestCase):
def test_get_centroid(self):
from affinegap import normalizedAffineGapDistance as comparator
attributeList = ['mary crane center',
'mary crane center north',
'mary crane league - mary crane - west',
'mary crane league mary crane center (east)',
'mary crane league mary crane center (north)',
'mary crane league mary crane center (west)',
'mary crane league - mary crane - east',
'mary crane family and day care center',
'mary crane west', 'mary crane center east',
'mary crane league mary crane center (east)',
'mary crane league mary crane center (north)',
'mary crane league mary crane center (west)',
'mary crane league',
'mary crane',
'mary crane east 0-3',
'mary crane north',
'mary crane north 0-3',
'mary crane league - mary crane - west',
'mary crane league - mary crane - north',
'mary crane league - mary crane - east',
'mary crane league - mary crane - west',
'mary crane league - mary crane - north',
'mary crane league - mary crane - east']
centroid = dedupe.canonical.getCentroid(attributeList, comparator)
assert centroid == 'mary crane'
def test_get_canonical_rep(self):
record_list = [{"name": "mary crane",
"address": "123 main st", "zip": "12345"},
{"name": "mary crane east",
"address": "123 main street", "zip": ""},
{"name": "mary crane west",
"address": "123 man st", "zip": ""}]
rep = dedupe.canonical.getCanonicalRep(record_list)
assert rep == {'name': 'mary crane',
'address': '123 main street', 'zip': "12345"}
rep = dedupe.canonical.getCanonicalRep(record_list[0:2])
assert rep == {"name": "mary crane",
"address": "123 main st", "zip": "12345"}
rep = dedupe.canonical.getCanonicalRep(record_list[0:1])
assert rep == {"name": "mary crane",
"address": "123 main st", "zip": "12345"}
|
from datetime import datetime
from homeassistant.components.ipp.const import DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.components.ipp import init_integration, mock_connection
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_sensors(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation and values of the IPP sensors."""
mock_connection(aioclient_mock)
entry = await init_integration(hass, aioclient_mock, skip_setup=True)
registry = await hass.helpers.entity_registry.async_get_registry()
# Pre-create registry entries for disabled by default sensors
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"cfe92100-67c4-11d4-a45f-f8d027761251_uptime",
suggested_object_id="epson_xp_6000_series_uptime",
disabled_by=None,
)
test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=dt_util.UTC)
with patch("homeassistant.components.ipp.sensor.utcnow", return_value=test_time):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("sensor.epson_xp_6000_series")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:printer"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None
state = hass.states.get("sensor.epson_xp_6000_series_black_ink")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
assert state.state == "58"
state = hass.states.get("sensor.epson_xp_6000_series_photo_black_ink")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
assert state.state == "98"
state = hass.states.get("sensor.epson_xp_6000_series_cyan_ink")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
assert state.state == "91"
state = hass.states.get("sensor.epson_xp_6000_series_yellow_ink")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
assert state.state == "95"
state = hass.states.get("sensor.epson_xp_6000_series_magenta_ink")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:water"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
assert state.state == "73"
state = hass.states.get("sensor.epson_xp_6000_series_uptime")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:clock-outline"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None
assert state.state == "2019-10-26T15:37:00+00:00"
entry = registry.async_get("sensor.epson_xp_6000_series_uptime")
assert entry
assert entry.unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251_uptime"
async def test_disabled_by_default_sensors(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the disabled by default IPP sensors."""
await init_integration(hass, aioclient_mock)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("sensor.epson_xp_6000_series_uptime")
assert state is None
entry = registry.async_get("sensor.epson_xp_6000_series_uptime")
assert entry
assert entry.disabled
assert entry.disabled_by == "integration"
async def test_missing_entry_unique_id(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the unique_id of IPP sensor when printer is missing identifiers."""
entry = await init_integration(hass, aioclient_mock, uuid=None, unique_id=None)
registry = await hass.helpers.entity_registry.async_get_registry()
entity = registry.async_get("sensor.epson_xp_6000_series")
assert entity
assert entity.unique_id == f"{entry.entry_id}_printer"
|
import numpy as np
import pytest
from numpy.testing import assert_array_equal
from xarray.core.nputils import NumpyVIndexAdapter, _is_contiguous, rolling_window
def test_is_contiguous():
assert _is_contiguous([1])
assert _is_contiguous([1, 2, 3])
assert not _is_contiguous([1, 3])
def test_vindex():
x = np.arange(3 * 4 * 5).reshape((3, 4, 5))
vindex = NumpyVIndexAdapter(x)
# getitem
assert_array_equal(vindex[0], x[0])
assert_array_equal(vindex[[1, 2], [1, 2]], x[[1, 2], [1, 2]])
assert vindex[[0, 1], [0, 1], :].shape == (2, 5)
assert vindex[[0, 1], :, [0, 1]].shape == (2, 4)
assert vindex[:, [0, 1], [0, 1]].shape == (2, 3)
# setitem
vindex[:] = 0
assert_array_equal(x, np.zeros_like(x))
# assignment should not raise
vindex[[0, 1], [0, 1], :] = vindex[[0, 1], [0, 1], :]
vindex[[0, 1], :, [0, 1]] = vindex[[0, 1], :, [0, 1]]
vindex[:, [0, 1], [0, 1]] = vindex[:, [0, 1], [0, 1]]
def test_rolling():
x = np.array([1, 2, 3, 4], dtype=float)
actual = rolling_window(x, axis=-1, window=3, center=True, fill_value=np.nan)
expected = np.array(
[[np.nan, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, np.nan]], dtype=float
)
assert_array_equal(actual, expected)
actual = rolling_window(x, axis=-1, window=3, center=False, fill_value=0.0)
expected = np.array([[0, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, 4]], dtype=float)
assert_array_equal(actual, expected)
x = np.stack([x, x * 1.1])
actual = rolling_window(x, axis=-1, window=3, center=False, fill_value=0.0)
expected = np.stack([expected, expected * 1.1], axis=0)
assert_array_equal(actual, expected)
@pytest.mark.parametrize("center", [[True, True], [False, False]])
@pytest.mark.parametrize("axis", [(0, 1), (1, 2), (2, 0)])
def test_nd_rolling(center, axis):
x = np.arange(7 * 6 * 8).reshape(7, 6, 8).astype(float)
window = [3, 3]
actual = rolling_window(
x, axis=axis, window=window, center=center, fill_value=np.nan
)
expected = x
for ax, win, cent in zip(axis, window, center):
expected = rolling_window(
expected, axis=ax, window=win, center=cent, fill_value=np.nan
)
assert_array_equal(actual, expected)
|
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import ATTRIBUTION, SENSOR_DEVICE_CLASS, SENSOR_NAME, SENSOR_UNIT
class AbstractOpenWeatherMapSensor(Entity):
"""Abstract class for an OpenWeatherMap sensor."""
def __init__(
self,
name,
unique_id,
sensor_type,
sensor_configuration,
coordinator: DataUpdateCoordinator,
):
"""Initialize the sensor."""
self._name = name
self._unique_id = unique_id
self._sensor_type = sensor_type
self._sensor_name = sensor_configuration[SENSOR_NAME]
self._unit_of_measurement = sensor_configuration.get(SENSOR_UNIT)
self._device_class = sensor_configuration.get(SENSOR_DEVICE_CLASS)
self._coordinator = coordinator
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._sensor_name}"
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return self._unique_id
@property
def should_poll(self):
"""Return the polling requirement of the entity."""
return False
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def device_class(self):
"""Return the device_class."""
return self._device_class
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
@property
def available(self):
"""Return True if entity is available."""
return self._coordinator.last_update_success
async def async_added_to_hass(self):
"""Connect to dispatcher listening for entity data notifications."""
self.async_on_remove(
self._coordinator.async_add_listener(self.async_write_ha_state)
)
async def async_update(self):
"""Get the latest data from OWM and updates the states."""
await self._coordinator.async_request_refresh()
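# A hedged sketch of the ``sensor_configuration`` mapping consumed above; the
# keys come from .const while the values shown here are placeholders.
#
#   sensor_configuration = {
#       SENSOR_NAME: "Temperature",
#       SENSOR_UNIT: "°C",
#       SENSOR_DEVICE_CLASS: "temperature",
#   }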
|
from homeassistant.components.alexa.auth import Auth
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from . import TEST_TOKEN_URL
async def run_auth_get_access_token(
hass,
aioclient_mock,
expires_in,
client_id,
client_secret,
accept_grant_code,
refresh_token,
):
"""Do auth and request a new token for tests."""
aioclient_mock.post(
TEST_TOKEN_URL,
json={
"access_token": "the_access_token",
"refresh_token": refresh_token,
"expires_in": expires_in,
},
)
auth = Auth(hass, client_id, client_secret)
await auth.async_do_auth(accept_grant_code)
await auth.async_get_access_token()
async def test_auth_get_access_token_expired(hass, aioclient_mock):
"""Test the auth get access token function."""
client_id = "client123"
client_secret = "shhhhh"
accept_grant_code = "abcdefg"
refresh_token = "refresher"
await run_auth_get_access_token(
hass,
aioclient_mock,
-5,
client_id,
client_secret,
accept_grant_code,
refresh_token,
)
assert len(aioclient_mock.mock_calls) == 2
calls = aioclient_mock.mock_calls
auth_call_json = calls[0][2]
token_call_json = calls[1][2]
assert auth_call_json["grant_type"] == "authorization_code"
assert auth_call_json["code"] == accept_grant_code
assert auth_call_json[CONF_CLIENT_ID] == client_id
assert auth_call_json[CONF_CLIENT_SECRET] == client_secret
assert token_call_json["grant_type"] == "refresh_token"
assert token_call_json["refresh_token"] == refresh_token
assert token_call_json[CONF_CLIENT_ID] == client_id
assert token_call_json[CONF_CLIENT_SECRET] == client_secret
async def test_auth_get_access_token_not_expired(hass, aioclient_mock):
"""Test the auth get access token function."""
client_id = "client123"
client_secret = "shhhhh"
accept_grant_code = "abcdefg"
refresh_token = "refresher"
await run_auth_get_access_token(
hass,
aioclient_mock,
555,
client_id,
client_secret,
accept_grant_code,
refresh_token,
)
assert len(aioclient_mock.mock_calls) == 1
call = aioclient_mock.mock_calls
auth_call_json = call[0][2]
assert auth_call_json["grant_type"] == "authorization_code"
assert auth_call_json["code"] == accept_grant_code
assert auth_call_json[CONF_CLIENT_ID] == client_id
assert auth_call_json[CONF_CLIENT_SECRET] == client_secret
|
import logging
import urllib
from twilio.base.exceptions import TwilioRestException
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.components.twilio import DATA_TWILIO
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_FROM_NUMBER = "from_number"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FROM_NUMBER): vol.All(
cv.string, vol.Match(r"^\+?[1-9]\d{1,14}$")
)
}
)
def get_service(hass, config, discovery_info=None):
"""Get the Twilio Call notification service."""
return TwilioCallNotificationService(
hass.data[DATA_TWILIO], config[CONF_FROM_NUMBER]
)
class TwilioCallNotificationService(BaseNotificationService):
"""Implement the notification service for the Twilio Call service."""
def __init__(self, twilio_client, from_number):
"""Initialize the service."""
self.client = twilio_client
self.from_number = from_number
def send_message(self, message="", **kwargs):
"""Call to specified target users."""
targets = kwargs.get(ATTR_TARGET)
if not targets:
_LOGGER.info("At least 1 target is required")
return
if message.startswith(("http://", "https://")):
twimlet_url = message
else:
twimlet_url = "http://twimlets.com/message?Message="
twimlet_url += urllib.parse.quote(message, safe="")
for target in targets:
try:
self.client.calls.create(
to=target, url=twimlet_url, from_=self.from_number
)
except TwilioRestException as exc:
_LOGGER.error(exc)
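# A hedged usage sketch (the client, service instance, and phone numbers are
# placeholders): a plain-text message is URL-encoded into a twimlet URL.
#
#   service = TwilioCallNotificationService(twilio_client, "+15555550199")
#   service.send_message("Door is open", target=["+15555550100"])
#   # -> calls.create(url="http://twimlets.com/message?Message=Door%20is%20open", ...)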
|
import json
import os
from textwrap import TextWrapper
def read_tooltips(gui_name):
"""Read and format tooltips, return a dict."""
dirname = os.path.dirname(__file__)
help_path = os.path.join(dirname, 'help', gui_name + '.json')
with open(help_path) as fid:
raw_tooltips = json.load(fid)
format_ = TextWrapper(width=60, fix_sentence_endings=True).fill
return {key: format_(text) for key, text in raw_tooltips.items()}
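# A minimal usage sketch (the GUI name is hypothetical and assumes a matching
# ``help/example_gui.json`` file next to this module):
#
#   tooltips = read_tooltips('example_gui')
#   print(tooltips['some_key'])  # help text wrapped to 60 columns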
|
from __future__ import unicode_literals
import hmac
import hashlib
from lib.encode.md5_encode import md5_encode
def hmac_encode(item):
"""hmac message digest algorithm"""
key = "random_key_in_html_js_or_other_place_if_it_is_not_changed"
item = md5_encode(item)
return hmac.new(key.encode("utf-8"), item.encode("utf-8"), hashlib.md5).hexdigest()
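# A minimal usage sketch: the input is first MD5-hashed, then signed with the
# hard-coded key, so equal inputs always yield the same 32-character hex
# digest.
#
#   digest = hmac_encode("hello")
#   assert digest == hmac_encode("hello")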
|
from django.test import TestCase
import zinnia.signals
from zinnia import settings
from zinnia.managers import DRAFT
from zinnia.managers import PUBLISHED
from zinnia.models.entry import Entry
from zinnia.signals import disable_for_loaddata
from zinnia.signals import disconnect_discussion_signals
from zinnia.signals import disconnect_entry_signals
from zinnia.signals import ping_directories_handler
from zinnia.signals import ping_external_urls_handler
class SignalsTestCase(TestCase):
"""Test cases for signals"""
def setUp(self):
disconnect_entry_signals()
disconnect_discussion_signals()
def test_disable_for_loaddata(self):
self.top = 0
@disable_for_loaddata
def make_top():
self.top += 1
def call():
return make_top()
call()
self.assertEqual(self.top, 1)
        # The decorated function was executed normally outside loaddata
def test_ping_directories_handler(self):
# Set up a stub around DirectoryPinger
self.top = 0
def fake_pinger(*ka, **kw):
self.top += 1
original_pinger = zinnia.signals.DirectoryPinger
zinnia.signals.DirectoryPinger = fake_pinger
params = {'title': 'My entry',
'content': 'My content',
'status': PUBLISHED,
'slug': 'my-entry'}
entry = Entry.objects.create(**params)
self.assertEqual(entry.is_visible, True)
settings.PING_DIRECTORIES = ()
ping_directories_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 0)
settings.PING_DIRECTORIES = ('toto',)
settings.SAVE_PING_DIRECTORIES = True
ping_directories_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 1)
entry.status = DRAFT
ping_directories_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 1)
# Remove stub
zinnia.signals.DirectoryPinger = original_pinger
def test_ping_external_urls_handler(self):
# Set up a stub around ExternalUrlsPinger
self.top = 0
def fake_pinger(*ka, **kw):
self.top += 1
self.original_pinger = zinnia.signals.ExternalUrlsPinger
zinnia.signals.ExternalUrlsPinger = fake_pinger
params = {'title': 'My entry',
'content': 'My content',
'status': PUBLISHED,
'slug': 'my-entry'}
entry = Entry.objects.create(**params)
self.assertEqual(entry.is_visible, True)
settings.SAVE_PING_EXTERNAL_URLS = False
ping_external_urls_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 0)
settings.SAVE_PING_EXTERNAL_URLS = True
ping_external_urls_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 1)
        entry.status = DRAFT
ping_external_urls_handler('sender', **{'instance': entry})
self.assertEqual(self.top, 1)
# Remove stub
zinnia.signals.ExternalUrlsPinger = self.original_pinger
|
import logging
from hatasmota.discovery import (
TasmotaDiscovery,
get_device_config as tasmota_get_device_config,
get_entities_for_platform as tasmota_get_entities_for_platform,
get_entity as tasmota_get_entity,
get_trigger as tasmota_get_trigger,
get_triggers as tasmota_get_triggers,
unique_id_from_hash,
)
import homeassistant.components.sensor as sensor
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
ALREADY_DISCOVERED = "tasmota_discovered_components"
TASMOTA_DISCOVERY_ENTITY_NEW = "tasmota_discovery_entity_new_{}"
TASMOTA_DISCOVERY_ENTITY_UPDATED = "tasmota_discovery_entity_updated_{}_{}_{}_{}"
TASMOTA_DISCOVERY_INSTANCE = "tasmota_discovery_instance"
def clear_discovery_hash(hass, discovery_hash):
"""Clear entry in ALREADY_DISCOVERED list."""
if ALREADY_DISCOVERED not in hass.data:
# Discovery is shutting down
return
del hass.data[ALREADY_DISCOVERED][discovery_hash]
def set_discovery_hash(hass, discovery_hash):
"""Set entry in ALREADY_DISCOVERED list."""
hass.data[ALREADY_DISCOVERED][discovery_hash] = {}
async def async_start(
hass: HomeAssistantType, discovery_topic, config_entry, tasmota_mqtt, setup_device
) -> bool:
"""Start Tasmota device discovery."""
async def _discover_entity(tasmota_entity_config, discovery_hash, platform):
"""Handle adding or updating a discovered entity."""
if not tasmota_entity_config:
# Entity disabled, clean up entity registry
entity_registry = await hass.helpers.entity_registry.async_get_registry()
unique_id = unique_id_from_hash(discovery_hash)
entity_id = entity_registry.async_get_entity_id(platform, DOMAIN, unique_id)
if entity_id:
_LOGGER.debug("Removing entity: %s %s", platform, discovery_hash)
entity_registry.async_remove(entity_id)
return
if discovery_hash in hass.data[ALREADY_DISCOVERED]:
_LOGGER.debug(
"Entity already added, sending update: %s %s",
platform,
discovery_hash,
)
async_dispatcher_send(
hass,
TASMOTA_DISCOVERY_ENTITY_UPDATED.format(*discovery_hash),
tasmota_entity_config,
)
else:
tasmota_entity = tasmota_get_entity(tasmota_entity_config, tasmota_mqtt)
_LOGGER.debug(
"Adding new entity: %s %s %s",
platform,
discovery_hash,
tasmota_entity.unique_id,
)
hass.data[ALREADY_DISCOVERED][discovery_hash] = None
async_dispatcher_send(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format(platform),
tasmota_entity,
discovery_hash,
)
async def async_device_discovered(payload, mac):
"""Process the received message."""
if ALREADY_DISCOVERED not in hass.data:
# Discovery is shutting down
return
_LOGGER.debug("Received discovery data for tasmota device: %s", mac)
tasmota_device_config = tasmota_get_device_config(payload)
setup_device(tasmota_device_config, mac)
if not payload:
return
tasmota_triggers = tasmota_get_triggers(payload)
for trigger_config in tasmota_triggers:
discovery_hash = (mac, "automation", "trigger", trigger_config.trigger_id)
if discovery_hash in hass.data[ALREADY_DISCOVERED]:
_LOGGER.debug(
"Trigger already added, sending update: %s",
discovery_hash,
)
async_dispatcher_send(
hass,
TASMOTA_DISCOVERY_ENTITY_UPDATED.format(*discovery_hash),
trigger_config,
)
elif trigger_config.is_active:
_LOGGER.debug("Adding new trigger: %s", discovery_hash)
hass.data[ALREADY_DISCOVERED][discovery_hash] = None
tasmota_trigger = tasmota_get_trigger(trigger_config, tasmota_mqtt)
async_dispatcher_send(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format("device_automation"),
tasmota_trigger,
discovery_hash,
)
for platform in PLATFORMS:
tasmota_entities = tasmota_get_entities_for_platform(payload, platform)
for (tasmota_entity_config, discovery_hash) in tasmota_entities:
await _discover_entity(tasmota_entity_config, discovery_hash, platform)
async def async_sensors_discovered(sensors, mac):
"""Handle discovery of (additional) sensors."""
platform = sensor.DOMAIN
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device = device_registry.async_get_device(set(), {("mac", mac)})
if device is None:
_LOGGER.warning("Got sensors for unknown device mac: %s", mac)
return
orphaned_entities = {
entry.unique_id
for entry in async_entries_for_device(entity_registry, device.id)
if entry.domain == sensor.DOMAIN and entry.platform == DOMAIN
}
for (tasmota_sensor_config, discovery_hash) in sensors:
if tasmota_sensor_config:
orphaned_entities.discard(tasmota_sensor_config.unique_id)
await _discover_entity(tasmota_sensor_config, discovery_hash, platform)
for unique_id in orphaned_entities:
entity_id = entity_registry.async_get_entity_id(platform, DOMAIN, unique_id)
if entity_id:
_LOGGER.debug("Removing entity: %s %s", platform, entity_id)
entity_registry.async_remove(entity_id)
hass.data[ALREADY_DISCOVERED] = {}
tasmota_discovery = TasmotaDiscovery(discovery_topic, tasmota_mqtt)
await tasmota_discovery.start_discovery(
async_device_discovered, async_sensors_discovered
)
hass.data[TASMOTA_DISCOVERY_INSTANCE] = tasmota_discovery
async def async_stop(hass: HomeAssistantType) -> bool:
"""Stop Tasmota device discovery."""
hass.data.pop(ALREADY_DISCOVERED)
tasmota_discovery = hass.data.pop(TASMOTA_DISCOVERY_INSTANCE)
await tasmota_discovery.stop_discovery()
|
from datetime import timedelta
from os import path
from unittest.mock import patch
from homeassistant import config
from homeassistant.components.template import DOMAIN, SERVICE_RELOAD
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import async_fire_time_changed
async def test_reloadable(hass):
"""Test that we can reload."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/sensor_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
async def test_reloadable_can_remove(hass):
"""Test that we can reload and remove all template sensors."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/empty_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
async def test_reloadable_stops_on_invalid_config(hass):
"""Test we stop the reload if configuration.yaml is completely broken."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/configuration.yaml.corrupt",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
async def test_reloadable_handles_partial_valid_config(hass):
"""Test we can still setup valid sensors when configuration.yaml has a broken entry."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert len(hass.states.async_all()) == 2
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/broken_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
async def test_reloadable_multiple_platforms(hass):
"""Test that we can reload."""
hass.states.async_set("sensor.test_sensor", "mytest")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": DOMAIN,
"sensors": {
"state": {
"value_template": "{{ states.sensor.test_sensor.state }}"
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("sensor.state").state == "mytest"
assert hass.states.get("binary_sensor.state").state == "off"
assert len(hass.states.async_all()) == 3
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/sensor_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("sensor.state") is None
assert hass.states.get("sensor.watching_tv_in_master_bedroom").state == "off"
assert float(hass.states.get("sensor.combined_sensor_energy_usage").state) == 0
async def test_reload_sensors_that_reference_other_template_sensors(hass):
"""Test that we can reload sensor that reference other template sensors."""
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": DOMAIN,
"sensors": {
"state": {"value_template": "{{ 1 }}"},
},
}
},
)
await hass.async_block_till_done()
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"template/ref_configuration.yaml",
)
with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
await hass.async_block_till_done()
next_time = dt_util.utcnow() + timedelta(seconds=1.2)
with patch(
"homeassistant.helpers.ratelimit.dt_util.utcnow", return_value=next_time
):
async_fire_time_changed(hass, next_time)
await hass.async_block_till_done()
assert hass.states.get("sensor.test1").state == "3"
assert hass.states.get("sensor.test2").state == "1"
assert hass.states.get("sensor.test3").state == "2"
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
from datetime import timedelta
from iota import Iota
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.entity import Entity
CONF_IRI = "iri"
CONF_TESTNET = "testnet"
CONF_WALLET_NAME = "name"
CONF_WALLET_SEED = "seed"
CONF_WALLETS = "wallets"
DOMAIN = "iota"
IOTA_PLATFORMS = ["sensor"]
SCAN_INTERVAL = timedelta(minutes=10)
WALLET_CONFIG = vol.Schema(
{
vol.Required(CONF_WALLET_NAME): cv.string,
vol.Required(CONF_WALLET_SEED): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_IRI): cv.string,
vol.Optional(CONF_TESTNET, default=False): cv.boolean,
vol.Required(CONF_WALLETS): vol.All(cv.ensure_list, [WALLET_CONFIG]),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the IOTA component."""
iota_config = config[DOMAIN]
for platform in IOTA_PLATFORMS:
load_platform(hass, platform, DOMAIN, iota_config, config)
return True
class IotaDevice(Entity):
"""Representation of a IOTA device."""
def __init__(self, name, seed, iri, is_testnet=False):
"""Initialise the IOTA device."""
self._name = name
self._seed = seed
self.iri = iri
self.is_testnet = is_testnet
@property
def name(self):
"""Return the default name of the device."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return {CONF_WALLET_NAME: self._name}
@property
def api(self):
"""Construct API object for interaction with the IRI node."""
return Iota(adapter=self.iri, seed=self._seed)
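# A hedged configuration sketch matching CONFIG_SCHEMA above; all values are
# placeholders (shown as a YAML comment for illustration only):
#
#   iota:
#     iri: "http://localhost:14265"
#     testnet: false
#     wallets:
#       - name: "Main wallet"
#         seed: "YOUR9SEED9GOES9HERE"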
|
from django.contrib import admin
from weblate.lang.models import Language, Plural
from weblate.wladmin.models import WeblateModelAdmin
class PluralAdmin(admin.TabularInline):
model = Plural
extra = 0
ordering = ["source"]
class LanguageAdmin(WeblateModelAdmin):
list_display = ["name", "code", "direction"]
search_fields = ["name", "code"]
list_filter = ("direction",)
inlines = [PluralAdmin]
ordering = ["name"]
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
lang = form.instance
if lang.plural_set.exists():
return
# Automatically create plurals if language does not have one
try:
baselang = Language.objects.get(code=lang.base_code)
baseplural = baselang.plural
lang.plural_set.create(
source=Plural.SOURCE_DEFAULT,
number=baseplural.number,
formula=baseplural.formula,
)
except (Language.DoesNotExist, IndexError):
lang.plural_set.create(
source=Plural.SOURCE_DEFAULT, number=2, formula="n != 1"
)
|
import argparse
import os.path
import sys
from PyQt5.QtWidgets import QMessageBox
from qutebrowser.api import config as configapi
from qutebrowser.config import (config, configdata, configfiles, configtypes,
configexc, configcommands, stylesheet, qtargs)
from qutebrowser.utils import objreg, usertypes, log, standarddir, message
from qutebrowser.config import configcache
from qutebrowser.misc import msgbox, objects, savemanager
# Error which happened during init, so we can show a message box.
_init_errors = None
def early_init(args: argparse.Namespace) -> None:
"""Initialize the part of the config which works without a QApplication."""
configdata.init()
yaml_config = configfiles.YamlConfig()
config.instance = config.Config(yaml_config=yaml_config)
config.val = config.ConfigContainer(config.instance)
configapi.val = config.ConfigContainer(config.instance)
config.key_instance = config.KeyConfig(config.instance)
config.cache = configcache.ConfigCache()
yaml_config.setParent(config.instance)
for cf in config.change_filters:
cf.validate()
config_commands = configcommands.ConfigCommands(
config.instance, config.key_instance)
objreg.register('config-commands', config_commands, command_only=True)
config_file = standarddir.config_py()
global _init_errors
try:
if os.path.exists(config_file):
configfiles.read_config_py(config_file)
else:
configfiles.read_autoconfig()
except configexc.ConfigFileErrors as e:
log.config.error("Error while loading {}".format(e.basename))
_init_errors = e
try:
configfiles.init()
except configexc.ConfigFileErrors as e:
_init_errors = e
for opt, val in args.temp_settings:
try:
config.instance.set_str(opt, val)
except configexc.Error as e:
message.error("set: {} - {}".format(e.__class__.__name__, e))
objects.backend = get_backend(args)
objects.debug_flags = set(args.debug_flags)
stylesheet.init()
qtargs.init_envvars()
def _update_font_defaults(setting: str) -> None:
"""Update all fonts if fonts.default_family/_size was set."""
if setting not in {'fonts.default_family', 'fonts.default_size'}:
return
configtypes.FontBase.set_defaults(config.val.fonts.default_family,
config.val.fonts.default_size)
for name, opt in configdata.DATA.items():
if not isinstance(opt.typ, configtypes.FontBase):
continue
value = config.instance.get_obj(name)
if value is None or not (value.endswith(' default_family') or
'default_size ' in value):
continue
config.instance.changed.emit(name)
def get_backend(args: argparse.Namespace) -> usertypes.Backend:
"""Find out what backend to use based on available libraries."""
str_to_backend = {
'webkit': usertypes.Backend.QtWebKit,
'webengine': usertypes.Backend.QtWebEngine,
}
if args.backend is not None:
return str_to_backend[args.backend]
else:
return str_to_backend[config.val.backend]
def late_init(save_manager: savemanager.SaveManager) -> None:
"""Initialize the rest of the config after the QApplication is created."""
global _init_errors
if _init_errors is not None:
errbox = msgbox.msgbox(parent=None,
title="Error while reading config",
text=_init_errors.to_html(),
icon=QMessageBox.Warning,
plain_text=False)
errbox.exec_()
if _init_errors.fatal:
sys.exit(usertypes.Exit.err_init)
_init_errors = None
configtypes.FontBase.set_defaults(config.val.fonts.default_family,
config.val.fonts.default_size)
config.instance.changed.connect(_update_font_defaults)
config.instance.init_save_manager(save_manager)
configfiles.state.init_save_manager(save_manager)
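# Rough call-order sketch, inferred from the docstrings above (the surrounding
# application wiring is assumed, not shown here):
#
#   early_init(args)         # before the QApplication exists
#   # ... create the QApplication ...
#   late_init(save_manager)  # report deferred config errors, hook up saving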
|
import vcr
import pytest
from urllib.request import urlopen
DEFAULT_URI = "http://httpbin.org/get?p1=q1&p2=q2" # base uri for testing
def _replace_httpbin(uri, httpbin, httpbin_secure):
return uri.replace("http://httpbin.org", httpbin.url).replace("https://httpbin.org", httpbin_secure.url)
@pytest.fixture
def cassette(tmpdir, httpbin, httpbin_secure):
"""
Helper fixture used to prepare the cassette
returns path to the recorded cassette
"""
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
cassette_path = str(tmpdir.join("test.yml"))
with vcr.use_cassette(cassette_path, record_mode=vcr.mode.ALL):
urlopen(default_uri)
return cassette_path
@pytest.mark.parametrize(
"matcher, matching_uri, not_matching_uri",
[
("uri", "http://httpbin.org/get?p1=q1&p2=q2", "http://httpbin.org/get?p2=q2&p1=q1"),
("scheme", "http://google.com/post?a=b", "https://httpbin.org/get?p1=q1&p2=q2"),
("host", "https://httpbin.org/post?a=b", "http://google.com/get?p1=q1&p2=q2"),
("path", "https://google.com/get?a=b", "http://httpbin.org/post?p1=q1&p2=q2"),
("query", "https://google.com/get?p2=q2&p1=q1", "http://httpbin.org/get?p1=q1&a=b"),
],
)
def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_matching_uri):
matching_uri = _replace_httpbin(matching_uri, httpbin, httpbin_secure)
not_matching_uri = _replace_httpbin(not_matching_uri, httpbin, httpbin_secure)
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
# play cassette with default uri
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
urlopen(default_uri)
assert cass.play_count == 1
# play cassette with matching on uri
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
urlopen(matching_uri)
assert cass.play_count == 1
# play cassette with not matching on uri, it should fail
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
urlopen(not_matching_uri)
def test_method_matcher(cassette, httpbin, httpbin_secure):
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
# play cassette with matching on method
with vcr.use_cassette(cassette, match_on=["method"]) as cass:
urlopen("https://google.com/get?a=b")
assert cass.play_count == 1
# should fail if method does not match
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette, match_on=["method"]) as cass:
# is a POST request
urlopen(default_uri, data=b"")
@pytest.mark.parametrize(
"uri", [DEFAULT_URI, "http://httpbin.org/get?p2=q2&p1=q1", "http://httpbin.org/get?p2=q2&p1=q1"]
)
def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
with vcr.use_cassette(cassette) as cass:
urlopen(uri)
assert cass.play_count == 1
@pytest.mark.parametrize(
"uri",
[
"https://httpbin.org/get?p1=q1&p2=q2",
"http://google.com/get?p1=q1&p2=q2",
"http://httpbin.org/post?p1=q1&p2=q2",
"http://httpbin.org/get?p1=q1&a=b",
],
)
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette):
urlopen(uri)
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette):
# is a POST request
urlopen(default_uri, data=b"")
|
from operator import itemgetter
from .base_classes import Command
from .package import Package
from .utils import NoEscape, escape_latex
# Translations for names used in the quantities package to ones used by SIunitx
UNIT_NAME_TRANSLATIONS = {
'Celsius': 'celsius',
'revolutions_per_minute': 'rpm',
'v': 'volt',
}
def _dimensionality_to_siunitx(dim):
import quantities as pq
string = ''
items = dim.items()
for unit, power in sorted(items, key=itemgetter(1), reverse=True):
if power < 0:
substring = r'\per'
power = -power
elif power == 0:
continue
else:
substring = ''
prefixes = [x for x in dir(pq.prefixes) if not x.startswith('_')]
for prefix in prefixes:
# Split unitname into prefix and actual name if possible
if unit.name.startswith(prefix):
substring += '\\' + prefix
                name = unit.name[len(prefix):]
break
else:
# Otherwise simply use the full name
name = unit.name
try:
# Check if the name is different in SIunitx
name = UNIT_NAME_TRANSLATIONS[name]
except KeyError:
pass
substring += '\\' + name
if power > 1:
substring += r'\tothe{' + str(power) + '}'
string += substring
return NoEscape(string)
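# Illustrative sketch of the helper above (assumes the ``quantities`` package
# is installed; the result is shown without Python repr escaping):
#
#   import quantities as pq
#   _dimensionality_to_siunitx((pq.meter / pq.second).dimensionality)
#   # -> '\meter\per\second'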
class Quantity(Command):
"""A class representing quantities."""
packages = [
Package('siunitx', options=[NoEscape('separate-uncertainty=true')]),
NoEscape('\\DeclareSIUnit\\rpm{rpm}')
]
def __init__(self, quantity, *, options=None, format_cb=None):
r"""
Args
----
quantity: `quantities.quantity.Quantity`
The quantity that should be displayed
options: None, str, list or `~.Options`
Options of the command. These are placed in front of the arguments.
format_cb: callable
A function which formats the number in the quantity. By default
this uses `numpy.array_str`.
Examples
--------
>>> import quantities as pq
>>> speed = 3.14159265 * pq.meter / pq.second
>>> Quantity(speed, options={'round-precision': 3,
... 'round-mode': 'figures'}).dumps()
'\\SI[round-mode=figures,round-precision=3]{3.14159265}{\meter\per\second}'
Uncertainties are also handled:
>>> length = pq.UncertainQuantity(16.0, pq.meter, 0.3)
>>> width = pq.UncertainQuantity(16.0, pq.meter, 0.4)
>>> Quantity(length*width).dumps()
        '\\SI{256.0 +- 0.5}{\meter\tothe{2}}'
Ordinary numbers are also supported:
>>> Avogadro_constant = 6.022140857e23
>>> Quantity(Avogadro_constant, options={'round-precision': 3}).dumps()
'\\num[round-precision=3]{6.022e23}'
"""
import numpy as np
import quantities as pq
self.quantity = quantity
self._format_cb = format_cb
def _format(val):
if format_cb is None:
try:
return np.array_str(val)
except AttributeError:
return escape_latex(val) # Python float and int
else:
return format_cb(val)
if isinstance(quantity, pq.UncertainQuantity):
magnitude_str = '{} +- {}'.format(
_format(quantity.magnitude),
_format(quantity.uncertainty.magnitude))
elif isinstance(quantity, pq.Quantity):
magnitude_str = _format(quantity.magnitude)
if isinstance(quantity, (pq.UncertainQuantity, pq.Quantity)):
unit_str = _dimensionality_to_siunitx(quantity.dimensionality)
super().__init__(command='SI', arguments=(magnitude_str, unit_str),
options=options)
else:
super().__init__(command='num', arguments=_format(quantity),
options=options)
self.arguments._escape = False # dash in e.g. \num{3 +- 2}
if self.options is not None:
self.options._escape = False # siunitx uses dashes in kwargs
|
import asyncio
from collections import OrderedDict
from datetime import timedelta
import hmac
from logging import getLogger
from typing import Any, Dict, List, Optional
from homeassistant.auth.const import ACCESS_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import dt as dt_util
from . import models
from .const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY, GROUP_ID_USER
from .permissions import PermissionLookup, system_policies
from .permissions.types import PolicyType
STORAGE_VERSION = 1
STORAGE_KEY = "auth"
GROUP_NAME_ADMIN = "Administrators"
GROUP_NAME_USER = "Users"
GROUP_NAME_READ_ONLY = "Read Only"
class AuthStore:
"""Stores authentication info.
Any mutation to an object should happen inside the auth store.
The auth store is lazy. It won't load the data from disk until a method is
called that needs it.
"""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the auth store."""
self.hass = hass
self._users: Optional[Dict[str, models.User]] = None
self._groups: Optional[Dict[str, models.Group]] = None
self._perm_lookup: Optional[PermissionLookup] = None
self._store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._lock = asyncio.Lock()
async def async_get_groups(self) -> List[models.Group]:
"""Retrieve all users."""
if self._groups is None:
await self._async_load()
assert self._groups is not None
return list(self._groups.values())
async def async_get_group(self, group_id: str) -> Optional[models.Group]:
"""Retrieve all users."""
if self._groups is None:
await self._async_load()
assert self._groups is not None
return self._groups.get(group_id)
async def async_get_users(self) -> List[models.User]:
"""Retrieve all users."""
if self._users is None:
await self._async_load()
assert self._users is not None
return list(self._users.values())
async def async_get_user(self, user_id: str) -> Optional[models.User]:
"""Retrieve a user by id."""
if self._users is None:
await self._async_load()
assert self._users is not None
return self._users.get(user_id)
async def async_create_user(
self,
name: Optional[str],
is_owner: Optional[bool] = None,
is_active: Optional[bool] = None,
system_generated: Optional[bool] = None,
credentials: Optional[models.Credentials] = None,
group_ids: Optional[List[str]] = None,
) -> models.User:
"""Create a new user."""
if self._users is None:
await self._async_load()
assert self._users is not None
assert self._groups is not None
groups = []
for group_id in group_ids or []:
group = self._groups.get(group_id)
if group is None:
raise ValueError(f"Invalid group specified {group_id}")
groups.append(group)
kwargs: Dict[str, Any] = {
"name": name,
# Until we get group management, we just put everyone in the
# same group.
"groups": groups,
"perm_lookup": self._perm_lookup,
}
if is_owner is not None:
kwargs["is_owner"] = is_owner
if is_active is not None:
kwargs["is_active"] = is_active
if system_generated is not None:
kwargs["system_generated"] = system_generated
new_user = models.User(**kwargs)
self._users[new_user.id] = new_user
if credentials is None:
self._async_schedule_save()
return new_user
# Saving is done inside the link.
await self.async_link_user(new_user, credentials)
return new_user
async def async_link_user(
self, user: models.User, credentials: models.Credentials
) -> None:
"""Add credentials to an existing user."""
user.credentials.append(credentials)
self._async_schedule_save()
credentials.is_new = False
async def async_remove_user(self, user: models.User) -> None:
"""Remove a user."""
if self._users is None:
await self._async_load()
assert self._users is not None
self._users.pop(user.id)
self._async_schedule_save()
async def async_update_user(
self,
user: models.User,
name: Optional[str] = None,
is_active: Optional[bool] = None,
group_ids: Optional[List[str]] = None,
) -> None:
"""Update a user."""
assert self._groups is not None
if group_ids is not None:
groups = []
for grid in group_ids:
group = self._groups.get(grid)
if group is None:
raise ValueError("Invalid group specified.")
groups.append(group)
user.groups = groups
user.invalidate_permission_cache()
for attr_name, value in (("name", name), ("is_active", is_active)):
if value is not None:
setattr(user, attr_name, value)
self._async_schedule_save()
async def async_activate_user(self, user: models.User) -> None:
"""Activate a user."""
user.is_active = True
self._async_schedule_save()
async def async_deactivate_user(self, user: models.User) -> None:
"""Activate a user."""
user.is_active = False
self._async_schedule_save()
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
found = None
for index, cred in enumerate(user.credentials):
if cred is credentials:
found = index
break
if found is not None:
user.credentials.pop(found)
break
self._async_schedule_save()
async def async_create_refresh_token(
self,
user: models.User,
client_id: Optional[str] = None,
client_name: Optional[str] = None,
client_icon: Optional[str] = None,
token_type: str = models.TOKEN_TYPE_NORMAL,
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
) -> models.RefreshToken:
"""Create a new token for a user."""
kwargs: Dict[str, Any] = {
"user": user,
"client_id": client_id,
"token_type": token_type,
"access_token_expiration": access_token_expiration,
}
if client_name:
kwargs["client_name"] = client_name
if client_icon:
kwargs["client_icon"] = client_icon
refresh_token = models.RefreshToken(**kwargs)
user.refresh_tokens[refresh_token.id] = refresh_token
self._async_schedule_save()
return refresh_token
async def async_remove_refresh_token(
self, refresh_token: models.RefreshToken
) -> None:
"""Remove a refresh token."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
if user.refresh_tokens.pop(refresh_token.id, None):
self._async_schedule_save()
break
async def async_get_refresh_token(
self, token_id: str
) -> Optional[models.RefreshToken]:
"""Get refresh token by id."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
refresh_token = user.refresh_tokens.get(token_id)
if refresh_token is not None:
return refresh_token
return None
async def async_get_refresh_token_by_token(
self, token: str
) -> Optional[models.RefreshToken]:
"""Get refresh token by token."""
if self._users is None:
await self._async_load()
assert self._users is not None
found = None
for user in self._users.values():
for refresh_token in user.refresh_tokens.values():
if hmac.compare_digest(refresh_token.token, token):
found = refresh_token
return found
@callback
def async_log_refresh_token_usage(
self, refresh_token: models.RefreshToken, remote_ip: Optional[str] = None
) -> None:
"""Update refresh token last used information."""
refresh_token.last_used_at = dt_util.utcnow()
refresh_token.last_used_ip = remote_ip
self._async_schedule_save()
async def _async_load(self) -> None:
"""Load the users."""
async with self._lock:
if self._users is not None:
return
await self._async_load_task()
async def _async_load_task(self) -> None:
"""Load the users."""
[ent_reg, dev_reg, data] = await asyncio.gather(
self.hass.helpers.entity_registry.async_get_registry(),
self.hass.helpers.device_registry.async_get_registry(),
self._store.async_load(),
)
# Make sure that we're not overriding data if 2 loads happened at the
# same time
if self._users is not None:
return
self._perm_lookup = perm_lookup = PermissionLookup(ent_reg, dev_reg)
if data is None:
self._set_defaults()
return
users: Dict[str, models.User] = OrderedDict()
groups: Dict[str, models.Group] = OrderedDict()
# Soft-migrating data as we load. We are going to make sure we have a
# read only group and an admin group. There are two states that we can
# migrate from:
# 1. Data from a recent version which has a single group without policy
# 2. Data from old version which has no groups
has_admin_group = False
has_user_group = False
has_read_only_group = False
group_without_policy = None
# When creating objects we mention each attribute explicitly. This
# prevents crashing if user rolls back HA version after a new property
# was added.
for group_dict in data.get("groups", []):
policy: Optional[PolicyType] = None
if group_dict["id"] == GROUP_ID_ADMIN:
has_admin_group = True
name = GROUP_NAME_ADMIN
policy = system_policies.ADMIN_POLICY
system_generated = True
elif group_dict["id"] == GROUP_ID_USER:
has_user_group = True
name = GROUP_NAME_USER
policy = system_policies.USER_POLICY
system_generated = True
elif group_dict["id"] == GROUP_ID_READ_ONLY:
has_read_only_group = True
name = GROUP_NAME_READ_ONLY
policy = system_policies.READ_ONLY_POLICY
system_generated = True
else:
name = group_dict["name"]
policy = group_dict.get("policy")
system_generated = False
# We don't want groups without a policy that are not system groups
# This is part of migrating from state 1
if policy is None:
group_without_policy = group_dict["id"]
continue
groups[group_dict["id"]] = models.Group(
id=group_dict["id"],
name=name,
policy=policy,
system_generated=system_generated,
)
# If there are no groups, add all existing users to the admin group.
# This is part of migrating from state 2
migrate_users_to_admin_group = not groups and group_without_policy is None
        # If we find a group without a policy, we need to migrate its users
        # to the admin group. We only do this if it is the only group, which
        # is the expected state; if the state is unexpected, we do not mark
        # anyone admin.
        # This is part of migrating from state 1
if groups and group_without_policy is not None:
group_without_policy = None
# This is part of migrating from state 1 and 2
if not has_admin_group:
admin_group = _system_admin_group()
groups[admin_group.id] = admin_group
# This is part of migrating from state 1 and 2
if not has_read_only_group:
read_only_group = _system_read_only_group()
groups[read_only_group.id] = read_only_group
if not has_user_group:
user_group = _system_user_group()
groups[user_group.id] = user_group
for user_dict in data["users"]:
            # Collect the user's groups.
user_groups = []
for group_id in user_dict.get("group_ids", []):
# This is part of migrating from state 1
if group_id == group_without_policy:
group_id = GROUP_ID_ADMIN
user_groups.append(groups[group_id])
# This is part of migrating from state 2
if not user_dict["system_generated"] and migrate_users_to_admin_group:
user_groups.append(groups[GROUP_ID_ADMIN])
users[user_dict["id"]] = models.User(
name=user_dict["name"],
groups=user_groups,
id=user_dict["id"],
is_owner=user_dict["is_owner"],
is_active=user_dict["is_active"],
system_generated=user_dict["system_generated"],
perm_lookup=perm_lookup,
)
for cred_dict in data["credentials"]:
users[cred_dict["user_id"]].credentials.append(
models.Credentials(
id=cred_dict["id"],
is_new=False,
auth_provider_type=cred_dict["auth_provider_type"],
auth_provider_id=cred_dict["auth_provider_id"],
data=cred_dict["data"],
)
)
for rt_dict in data["refresh_tokens"]:
# Filter out the old keys that don't have jwt_key (pre-0.76)
if "jwt_key" not in rt_dict:
continue
created_at = dt_util.parse_datetime(rt_dict["created_at"])
if created_at is None:
getLogger(__name__).error(
"Ignoring refresh token %(id)s with invalid created_at "
"%(created_at)s for user_id %(user_id)s",
rt_dict,
)
continue
token_type = rt_dict.get("token_type")
if token_type is None:
if rt_dict["client_id"] is None:
token_type = models.TOKEN_TYPE_SYSTEM
else:
token_type = models.TOKEN_TYPE_NORMAL
# old refresh_token don't have last_used_at (pre-0.78)
last_used_at_str = rt_dict.get("last_used_at")
if last_used_at_str:
last_used_at = dt_util.parse_datetime(last_used_at_str)
else:
last_used_at = None
token = models.RefreshToken(
id=rt_dict["id"],
user=users[rt_dict["user_id"]],
client_id=rt_dict["client_id"],
# use dict.get to keep backward compatibility
client_name=rt_dict.get("client_name"),
client_icon=rt_dict.get("client_icon"),
token_type=token_type,
created_at=created_at,
access_token_expiration=timedelta(
seconds=rt_dict["access_token_expiration"]
),
token=rt_dict["token"],
jwt_key=rt_dict["jwt_key"],
last_used_at=last_used_at,
last_used_ip=rt_dict.get("last_used_ip"),
)
users[rt_dict["user_id"]].refresh_tokens[token.id] = token
self._groups = groups
self._users = users
@callback
def _async_schedule_save(self) -> None:
"""Save users."""
if self._users is None:
return
self._store.async_delay_save(self._data_to_save, 1)
@callback
def _data_to_save(self) -> Dict:
"""Return the data to store."""
assert self._users is not None
assert self._groups is not None
users = [
{
"id": user.id,
"group_ids": [group.id for group in user.groups],
"is_owner": user.is_owner,
"is_active": user.is_active,
"name": user.name,
"system_generated": user.system_generated,
}
for user in self._users.values()
]
groups = []
for group in self._groups.values():
g_dict: Dict[str, Any] = {
"id": group.id,
# Name not read for sys groups. Kept here for backwards compat
"name": group.name,
}
if not group.system_generated:
g_dict["policy"] = group.policy
groups.append(g_dict)
credentials = [
{
"id": credential.id,
"user_id": user.id,
"auth_provider_type": credential.auth_provider_type,
"auth_provider_id": credential.auth_provider_id,
"data": credential.data,
}
for user in self._users.values()
for credential in user.credentials
]
refresh_tokens = [
{
"id": refresh_token.id,
"user_id": user.id,
"client_id": refresh_token.client_id,
"client_name": refresh_token.client_name,
"client_icon": refresh_token.client_icon,
"token_type": refresh_token.token_type,
"created_at": refresh_token.created_at.isoformat(),
"access_token_expiration": refresh_token.access_token_expiration.total_seconds(),
"token": refresh_token.token,
"jwt_key": refresh_token.jwt_key,
"last_used_at": refresh_token.last_used_at.isoformat()
if refresh_token.last_used_at
else None,
"last_used_ip": refresh_token.last_used_ip,
}
for user in self._users.values()
for refresh_token in user.refresh_tokens.values()
]
return {
"users": users,
"groups": groups,
"credentials": credentials,
"refresh_tokens": refresh_tokens,
}
def _set_defaults(self) -> None:
"""Set default values for auth store."""
self._users = OrderedDict()
groups: Dict[str, models.Group] = OrderedDict()
admin_group = _system_admin_group()
groups[admin_group.id] = admin_group
user_group = _system_user_group()
groups[user_group.id] = user_group
read_only_group = _system_read_only_group()
groups[read_only_group.id] = read_only_group
self._groups = groups
def _system_admin_group() -> models.Group:
"""Create system admin group."""
return models.Group(
name=GROUP_NAME_ADMIN,
id=GROUP_ID_ADMIN,
policy=system_policies.ADMIN_POLICY,
system_generated=True,
)
def _system_user_group() -> models.Group:
"""Create system user group."""
return models.Group(
name=GROUP_NAME_USER,
id=GROUP_ID_USER,
policy=system_policies.USER_POLICY,
system_generated=True,
)
def _system_read_only_group() -> models.Group:
"""Create read only group."""
return models.Group(
name=GROUP_NAME_READ_ONLY,
id=GROUP_ID_READ_ONLY,
policy=system_policies.READ_ONLY_POLICY,
system_generated=True,
)
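# --- Illustration (not part of the auth store) --------------------------------
# async_get_refresh_token_by_token above compares tokens with hmac.compare_digest
# so the lookup takes constant time regardless of where the strings first differ.
# A minimal standalone sketch of that comparison:
def _tokens_match(stored: str, presented: str) -> bool:
    """Return True when both tokens are equal, using a timing-safe compare."""
    return hmac.compare_digest(stored, presented)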
|
import os
from os import path as op
import numpy as np
from numpy.testing import assert_allclose
from mne import read_evokeds
from mne.datasets import testing
from mne.minimum_norm import read_inverse_operator, estimate_snr
from mne.utils import _TempDir, requires_mne, run_subprocess
s_path = op.join(testing.data_path(download=False), 'MEG', 'sample')
fname_inv = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_evoked = op.join(s_path, 'sample_audvis-ave.fif')
@testing.requires_testing_data
@requires_mne
def test_snr():
"""Test SNR calculation."""
tempdir = _TempDir()
inv = read_inverse_operator(fname_inv)
evoked = read_evokeds(fname_evoked, baseline=(None, 0))[0]
snr = estimate_snr(evoked, inv)[0]
orig_dir = os.getcwd()
os.chdir(tempdir)
try:
cmd = ['mne_compute_mne', '--inv', fname_inv, '--meas', fname_evoked,
'--snronly', '--bmin', '-200', '--bmax', '0']
run_subprocess(cmd)
except Exception:
pass # this returns 1 for some reason
finally:
os.chdir(orig_dir)
times, snr_c, _ = np.loadtxt(op.join(tempdir, 'SNR')).T
assert_allclose(times / 1000., evoked.times, atol=1e-2)
assert_allclose(snr, snr_c, atol=1e-2, rtol=1e-2)
|
from __future__ import unicode_literals
import string
import itertools
from lib.data.data import pystrs, pyoptions
from lib.fun.fun import finishprinter, countchecker, range_compatible, finalsavepath, fun_name
# get the dictionary list
def getchars(type):
flag = str(type)
chars = []
if type in pystrs.base_dic_type:
if flag == pystrs.base_dic_type[0]:
chars = string.digits
elif flag == pystrs.base_dic_type[1]:
chars = string.ascii_lowercase
elif flag == pystrs.base_dic_type[2]:
chars = string.ascii_uppercase
elif flag == pystrs.base_dic_type[3]:
            chars = string.printable[:36]  # digits + ascii_lowercase (36 chars)
elif flag == pystrs.base_dic_type[4]:
chars = string.digits + string.ascii_uppercase
elif flag == pystrs.base_dic_type[5]:
chars = string.ascii_letters
elif flag == pystrs.base_dic_type[6]:
            chars = string.printable[:62]  # digits + ascii_lowercase + ascii_uppercase (62 chars)
return chars
def get_base_dic(objflag):
storepath = finalsavepath(fun_name())
objflag = getchars(objflag)
countchecker(len(objflag), pyoptions.minlen, pyoptions.maxlen)
with open(storepath, "a") as f:
for i in range_compatible(pyoptions.minlen, pyoptions.maxlen+1):
for item in itertools.product(objflag, repeat=i):
f.write(pyoptions.operator.get(pyoptions.encode)(pyoptions.head + "".join(item) + pyoptions.tail) +
pyoptions.CRLF)
finishprinter(storepath)
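# --- Illustration (not part of the original module) ---------------------------
# get_base_dic() writes, for every length from minlen to maxlen, the full
# cartesian product of the selected charset. A minimal standalone sketch of
# that enumeration (charset and lengths below are illustrative only):
def _enumerate_words(charset, minlen, maxlen):
    for length in range(minlen, maxlen + 1):
        for item in itertools.product(charset, repeat=length):
            yield "".join(item)
# list(_enumerate_words("ab", 1, 2)) == ['a', 'b', 'aa', 'ab', 'ba', 'bb']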
|
from pytest import mark
from homeassistant.bootstrap import _async_set_up_integrations
import homeassistant.components.ptvsd as ptvsd_component
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock, patch
@mark.skip("causes code cover to fail")
async def test_ptvsd(hass):
"""Test loading ptvsd component."""
with patch("ptvsd.enable_attach") as attach:
with patch("ptvsd.wait_for_attach") as wait:
assert await async_setup_component(
hass, ptvsd_component.DOMAIN, {ptvsd_component.DOMAIN: {}}
)
attach.assert_called_once_with(("0.0.0.0", 5678))
assert wait.call_count == 0
@mark.skip("causes code cover to fail")
async def test_ptvsd_wait(hass):
"""Test loading ptvsd component with wait."""
with patch("ptvsd.enable_attach") as attach:
with patch("ptvsd.wait_for_attach") as wait:
assert await async_setup_component(
hass,
ptvsd_component.DOMAIN,
{ptvsd_component.DOMAIN: {ptvsd_component.CONF_WAIT: True}},
)
attach.assert_called_once_with(("0.0.0.0", 5678))
assert wait.call_count == 1
async def test_ptvsd_bootstrap(hass):
"""Test loading ptvsd component with wait."""
config = {ptvsd_component.DOMAIN: {ptvsd_component.CONF_WAIT: True}}
with patch("homeassistant.components.ptvsd.async_setup", AsyncMock()) as setup_mock:
setup_mock.return_value = True
await _async_set_up_integrations(hass, config)
assert setup_mock.call_count == 1
|
import logging
import pytest
from PyQt5.QtCore import QTimer
from PyQt5.QtWidgets import QMessageBox
from qutebrowser.utils import error, utils
from qutebrowser.misc import ipc
class Error(Exception):
pass
@pytest.mark.parametrize('exc, name, exc_text', [
# "builtins." stripped
(ValueError('exception'), 'ValueError', 'exception'),
(ValueError, 'ValueError', 'none'),
# "qutebrowser." stripped
(ipc.Error, 'misc.ipc.Error', 'none'),
(Error, 'test_error.Error', 'none'),
])
def test_no_err_windows(caplog, exc, name, exc_text):
"""Test handle_fatal_exc with no_err_windows = True."""
try:
raise exc
except Exception as e:
with caplog.at_level(logging.ERROR):
error.handle_fatal_exc(e, 'title', pre_text='pre',
post_text='post', no_err_windows=True)
expected = [
'Handling fatal {} with --no-err-windows!'.format(name),
'',
'title: title',
'pre_text: pre',
'post_text: post',
'exception text: {}'.format(exc_text),
]
assert caplog.messages == ['\n'.join(expected)]
# This happens on Xvfb for some reason
# See https://github.com/qutebrowser/qutebrowser/issues/984
@pytest.mark.qt_log_ignore(r'^QXcbConnection: XCB error: 8 \(BadMatch\), '
r'sequence: \d+, resource id: \d+, major code: 42 '
r'\(SetInputFocus\), minor code: 0$',
r'^QIODevice::write: device not open')
@pytest.mark.parametrize('pre_text, post_text, expected', [
('', '', 'exception'),
('foo', '', 'foo: exception'),
('foo', 'bar', 'foo: exception\n\nbar'),
('', 'bar', 'exception\n\nbar'),
], ids=repr)
def test_err_windows(qtbot, qapp, pre_text, post_text, expected, caplog):
def err_window_check():
w = qapp.activeModalWidget()
assert w is not None
try:
qtbot.add_widget(w)
if not utils.is_mac:
assert w.windowTitle() == 'title'
assert w.icon() == QMessageBox.Critical
assert w.standardButtons() == QMessageBox.Ok
assert w.text() == expected
finally:
w.close()
QTimer.singleShot(10, err_window_check)
with caplog.at_level(logging.ERROR):
error.handle_fatal_exc(ValueError("exception"), 'title',
pre_text=pre_text, post_text=post_text,
no_err_windows=False)
|
import pytest
import socket
from amqp import RecoverableConnectionError
from unittest.mock import Mock, patch
from case import ContextMock
from kombu import common
from kombu.common import (
Broadcast, maybe_declare,
send_reply, collect_replies,
declaration_cached, ignore_errors,
QoS, PREFETCH_COUNT_MAX, generate_oid
)
from t.mocks import MockPool
def test_generate_oid():
from uuid import NAMESPACE_OID
instance = Mock()
args = (1, 1001, 2001, id(instance))
ent = '%x-%x-%x-%x' % args
with patch('kombu.common.uuid3') as mock_uuid3, \
patch('kombu.common.uuid5') as mock_uuid5:
mock_uuid3.side_effect = ValueError
mock_uuid3.return_value = 'uuid3-6ba7b812-9dad-11d1-80b4'
mock_uuid5.return_value = 'uuid5-6ba7b812-9dad-11d1-80b4'
oid = generate_oid(1, 1001, 2001, instance)
mock_uuid5.assert_called_once_with(NAMESPACE_OID, ent)
assert oid == 'uuid5-6ba7b812-9dad-11d1-80b4'
def test_ignore_errors():
connection = Mock()
connection.channel_errors = (KeyError,)
connection.connection_errors = (KeyError,)
with ignore_errors(connection):
raise KeyError()
def raising():
raise KeyError()
ignore_errors(connection, raising)
connection.channel_errors = connection.connection_errors = ()
with pytest.raises(KeyError):
with ignore_errors(connection):
raise KeyError()
class test_declaration_cached:
def test_when_cached(self):
chan = Mock()
chan.connection.client.declared_entities = ['foo']
assert declaration_cached('foo', chan)
def test_when_not_cached(self):
chan = Mock()
chan.connection.client.declared_entities = ['bar']
assert not declaration_cached('foo', chan)
class test_Broadcast:
def test_arguments(self):
with patch('kombu.common.uuid',
return_value='test') as uuid_mock:
q = Broadcast(name='test_Broadcast')
uuid_mock.assert_called_with()
assert q.name == 'bcast.test'
assert q.alias == 'test_Broadcast'
assert q.auto_delete
assert q.exchange.name == 'test_Broadcast'
assert q.exchange.type == 'fanout'
q = Broadcast('test_Broadcast', 'explicit_queue_name')
assert q.name == 'explicit_queue_name'
assert q.exchange.name == 'test_Broadcast'
q2 = q(Mock())
assert q2.name == q.name
with patch('kombu.common.uuid',
return_value='test') as uuid_mock:
q = Broadcast('test_Broadcast',
'explicit_queue_name',
unique=True)
uuid_mock.assert_called_with()
assert q.name == 'explicit_queue_name.test'
q2 = q(Mock())
assert q2.name.split('.')[0] == q.name.split('.')[0]
class test_maybe_declare:
def _get_mock_channel(self):
# Given: A mock Channel with mock'd connection/client/entities
channel = Mock()
channel.connection.client.declared_entities = set()
return channel
def _get_mock_entity(self, is_bound=False, can_cache_declaration=True):
        # Given: Unbound mock Entity (will bind to the channel when bind() is called)
entity = Mock()
entity.can_cache_declaration = can_cache_declaration
entity.is_bound = is_bound
def _bind_entity(channel):
entity.channel = channel
entity.is_bound = True
return entity
entity.bind = _bind_entity
return entity
def test_cacheable(self):
# Given: A mock Channel and mock entity
channel = self._get_mock_channel()
# Given: A mock Entity that is already bound
entity = self._get_mock_entity(
is_bound=True, can_cache_declaration=True)
entity.channel = channel
entity.auto_delete = False
assert entity.is_bound, "Expected entity is bound to begin this test."
# When: Calling maybe_declare default
maybe_declare(entity, channel)
# Then: It called declare on the entity queue and added it to list
assert entity.declare.call_count == 1
assert hash(entity) in channel.connection.client.declared_entities
# When: Calling maybe_declare default (again)
maybe_declare(entity, channel)
# Then: we did not call declare again because its already in our list
assert entity.declare.call_count == 1
# When: Entity channel connection has gone away
entity.channel.connection = None
# Then: maybe_declare must raise a RecoverableConnectionError
with pytest.raises(RecoverableConnectionError):
maybe_declare(entity)
def test_binds_entities(self):
# Given: A mock Channel and mock entity
channel = self._get_mock_channel()
# Given: A mock Entity that is not bound
entity = self._get_mock_entity()
assert not entity.is_bound, "Expected entity unbound to begin test."
# When: calling maybe_declare with default of no retry policy
maybe_declare(entity, channel)
# Then: the entity is now bound because it called to bind it
assert entity.is_bound is True, "Expected entity is now marked bound."
def test_binds_entities_when_retry_policy(self):
# Given: A mock Channel and mock entity
channel = self._get_mock_channel()
# Given: A mock Entity that is not bound
entity = self._get_mock_entity()
assert not entity.is_bound, "Expected entity unbound to begin test."
# Given: A retry policy
sample_retry_policy = {
'interval_start': 0,
'interval_max': 1,
'max_retries': 3,
'interval_step': 0.2,
'errback': lambda x: "Called test errback retry policy",
}
# When: calling maybe_declare with retry enabled
maybe_declare(entity, channel, retry=True, **sample_retry_policy)
# Then: the entity is now bound because it called to bind it
assert entity.is_bound is True, "Expected entity is now marked bound."
def test_with_retry(self):
# Given: A mock Channel and mock entity
channel = self._get_mock_channel()
# Given: A mock Entity that is already bound
entity = self._get_mock_entity(
is_bound=True, can_cache_declaration=True)
entity.channel = channel
assert entity.is_bound, "Expected entity is bound to begin this test."
# When calling maybe_declare with retry enabled (default policy)
maybe_declare(entity, channel, retry=True)
# Then: the connection client used ensure to ensure the retry policy
assert channel.connection.client.ensure.call_count
def test_with_retry_dropped_connection(self):
# Given: A mock Channel and mock entity
channel = self._get_mock_channel()
# Given: A mock Entity that is already bound
entity = self._get_mock_entity(
is_bound=True, can_cache_declaration=True)
entity.channel = channel
assert entity.is_bound, "Expected entity is bound to begin this test."
# When: Entity channel connection has gone away
entity.channel.connection = None
# When: calling maybe_declare with retry
# Then: the RecoverableConnectionError should be raised
with pytest.raises(RecoverableConnectionError):
maybe_declare(entity, channel, retry=True)
class test_replies:
def test_send_reply(self):
req = Mock()
req.content_type = 'application/json'
req.content_encoding = 'binary'
req.properties = {'reply_to': 'hello',
'correlation_id': 'world'}
channel = Mock()
exchange = Mock()
exchange.is_bound = True
exchange.channel = channel
producer = Mock()
producer.channel = channel
producer.channel.connection.client.declared_entities = set()
send_reply(exchange, req, {'hello': 'world'}, producer)
assert producer.publish.call_count
args = producer.publish.call_args
assert args[0][0] == {'hello': 'world'}
assert args[1] == {
'exchange': exchange,
'routing_key': 'hello',
'correlation_id': 'world',
'serializer': 'json',
'retry': False,
'retry_policy': None,
'content_encoding': 'binary',
}
@patch('kombu.common.itermessages')
def test_collect_replies_with_ack(self, itermessages):
conn, channel, queue = Mock(), Mock(), Mock()
body, message = Mock(), Mock()
itermessages.return_value = [(body, message)]
it = collect_replies(conn, channel, queue, no_ack=False)
m = next(it)
assert m is body
itermessages.assert_called_with(conn, channel, queue, no_ack=False)
message.ack.assert_called_with()
with pytest.raises(StopIteration):
next(it)
channel.after_reply_message_received.assert_called_with(queue.name)
@patch('kombu.common.itermessages')
def test_collect_replies_no_ack(self, itermessages):
conn, channel, queue = Mock(), Mock(), Mock()
body, message = Mock(), Mock()
itermessages.return_value = [(body, message)]
it = collect_replies(conn, channel, queue)
m = next(it)
assert m is body
itermessages.assert_called_with(conn, channel, queue, no_ack=True)
message.ack.assert_not_called()
@patch('kombu.common.itermessages')
def test_collect_replies_no_replies(self, itermessages):
conn, channel, queue = Mock(), Mock(), Mock()
itermessages.return_value = []
it = collect_replies(conn, channel, queue)
with pytest.raises(StopIteration):
next(it)
channel.after_reply_message_received.assert_not_called()
class test_insured:
@patch('kombu.common.logger')
def test_ensure_errback(self, logger):
common._ensure_errback('foo', 30)
logger.error.assert_called()
def test_revive_connection(self):
on_revive = Mock()
channel = Mock()
common.revive_connection(Mock(), channel, on_revive)
on_revive.assert_called_with(channel)
common.revive_connection(Mock(), channel, None)
def get_insured_mocks(self, insured_returns=('works', 'ignored')):
conn = ContextMock()
pool = MockPool(conn)
fun = Mock()
insured = conn.autoretry.return_value = Mock()
insured.return_value = insured_returns
return conn, pool, fun, insured
def test_insured(self):
conn, pool, fun, insured = self.get_insured_mocks()
ret = common.insured(pool, fun, (2, 2), {'foo': 'bar'})
assert ret == 'works'
conn.ensure_connection.assert_called_with(
errback=common._ensure_errback,
)
insured.assert_called()
i_args, i_kwargs = insured.call_args
assert i_args == (2, 2)
assert i_kwargs == {'foo': 'bar', 'connection': conn}
conn.autoretry.assert_called()
ar_args, ar_kwargs = conn.autoretry.call_args
assert ar_args == (fun, conn.default_channel)
assert ar_kwargs.get('on_revive')
assert ar_kwargs.get('errback')
def test_insured_custom_errback(self):
conn, pool, fun, insured = self.get_insured_mocks()
custom_errback = Mock()
common.insured(pool, fun, (2, 2), {'foo': 'bar'},
errback=custom_errback)
conn.ensure_connection.assert_called_with(errback=custom_errback)
class MockConsumer:
consumers = set()
def __init__(self, channel, queues=None, callbacks=None, **kwargs):
self.channel = channel
self.queues = queues
self.callbacks = callbacks
def __enter__(self):
self.consumers.add(self)
return self
def __exit__(self, *exc_info):
self.consumers.discard(self)
class test_itermessages:
class MockConnection:
should_raise_timeout = False
def drain_events(self, **kwargs):
if self.should_raise_timeout:
raise socket.timeout()
for consumer in MockConsumer.consumers:
for callback in consumer.callbacks:
callback('body', 'message')
def test_default(self):
conn = self.MockConnection()
channel = Mock()
channel.connection.client = conn
conn.Consumer = MockConsumer
it = common.itermessages(conn, channel, 'q', limit=1)
ret = next(it)
assert ret == ('body', 'message')
with pytest.raises(StopIteration):
next(it)
def test_when_raises_socket_timeout(self):
conn = self.MockConnection()
conn.should_raise_timeout = True
channel = Mock()
channel.connection.client = conn
conn.Consumer = MockConsumer
it = common.itermessages(conn, channel, 'q', limit=1)
with pytest.raises(StopIteration):
next(it)
@patch('kombu.common.deque')
def test_when_raises_IndexError(self, deque):
deque_instance = deque.return_value = Mock()
deque_instance.popleft.side_effect = IndexError()
conn = self.MockConnection()
channel = Mock()
conn.Consumer = MockConsumer
it = common.itermessages(conn, channel, 'q', limit=1)
with pytest.raises(StopIteration):
next(it)
class test_QoS:
class _QoS(QoS):
def __init__(self, value):
self.value = value
QoS.__init__(self, None, value)
def set(self, value):
return value
def test_qos_exceeds_16bit(self):
with patch('kombu.common.logger') as logger:
callback = Mock()
qos = QoS(callback, 10)
qos.prev = 100
# cannot use 2 ** 32 because of a bug on macOS Py2.5:
# https://jira.mongodb.org/browse/PYTHON-389
qos.set(4294967296)
logger.warning.assert_called()
callback.assert_called_with(prefetch_count=0)
def test_qos_increment_decrement(self):
qos = self._QoS(10)
assert qos.increment_eventually() == 11
assert qos.increment_eventually(3) == 14
assert qos.increment_eventually(-30) == 14
assert qos.decrement_eventually(7) == 7
assert qos.decrement_eventually() == 6
def test_qos_disabled_increment_decrement(self):
qos = self._QoS(0)
assert qos.increment_eventually() == 0
assert qos.increment_eventually(3) == 0
assert qos.increment_eventually(-30) == 0
assert qos.decrement_eventually(7) == 0
assert qos.decrement_eventually() == 0
assert qos.decrement_eventually(10) == 0
def test_qos_thread_safe(self):
qos = self._QoS(10)
def add():
for i in range(1000):
qos.increment_eventually()
def sub():
for i in range(1000):
qos.decrement_eventually()
def threaded(funs):
from threading import Thread
threads = [Thread(target=fun) for fun in funs]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
threaded([add, add])
assert qos.value == 2010
qos.value = 1000
threaded([add, sub]) # n = 2
assert qos.value == 1000
def test_exceeds_short(self):
qos = QoS(Mock(), PREFETCH_COUNT_MAX - 1)
qos.update()
assert qos.value == PREFETCH_COUNT_MAX - 1
qos.increment_eventually()
assert qos.value == PREFETCH_COUNT_MAX
qos.increment_eventually()
assert qos.value == PREFETCH_COUNT_MAX + 1
qos.decrement_eventually()
assert qos.value == PREFETCH_COUNT_MAX
qos.decrement_eventually()
assert qos.value == PREFETCH_COUNT_MAX - 1
def test_consumer_increment_decrement(self):
mconsumer = Mock()
qos = QoS(mconsumer.qos, 10)
qos.update()
assert qos.value == 10
mconsumer.qos.assert_called_with(prefetch_count=10)
qos.decrement_eventually()
qos.update()
assert qos.value == 9
mconsumer.qos.assert_called_with(prefetch_count=9)
qos.decrement_eventually()
assert qos.value == 8
mconsumer.qos.assert_called_with(prefetch_count=9)
assert {'prefetch_count': 9} in mconsumer.qos.call_args
# Does not decrement 0 value
qos.value = 0
qos.decrement_eventually()
assert qos.value == 0
qos.increment_eventually()
assert qos.value == 0
def test_consumer_decrement_eventually(self):
mconsumer = Mock()
qos = QoS(mconsumer.qos, 10)
qos.decrement_eventually()
assert qos.value == 9
qos.value = 0
qos.decrement_eventually()
assert qos.value == 0
def test_set(self):
mconsumer = Mock()
qos = QoS(mconsumer.qos, 10)
qos.set(12)
assert qos.prev == 12
qos.set(qos.prev)
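# --- Usage sketch (not part of the original test module) ----------------------
# QoS only records increments/decrements until update() pushes the value
# through the callback, as the tests above exercise. A minimal, hedged example
# of that flow:
def _qos_usage_sketch():
    set_prefetch = Mock()
    qos = QoS(set_prefetch, 10)
    qos.increment_eventually(5)   # desired prefetch becomes 15, nothing sent yet
    qos.update()                  # now the callback fires with the new value
    set_prefetch.assert_called_with(prefetch_count=15)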
|
from flexx.util.testing import raises, run_tests_if_main
import os
import sys
import tempfile
from flexx.util.config import Config
SAMPLE1 = """
foo = yes
bar = 3
spam = 2.3
eggs = bla bla
[other]
bar = 9
"""
SAMPLE2 = """
[testconfig]
foo = yes
bar = 4
spam = 3.3
eggs = bla bla bla
[other]
bar = 9
"""
SAMPLE3 = """
<bullocks
:: -=
"""
def test_config_name():
# Empty config
c = Config('aa')
assert len(c) == 0
# ok
c = Config('AA')
with raises(TypeError):
Config()
with raises(ValueError):
Config(3)
with raises(ValueError):
Config('0aa')
with raises(ValueError):
Config('_aa')
def test_defaults():
c = Config('testconfig',
x01=(3, int, 'an int'),
x02=(3, float, 'a float'),
x03=('yes', bool, 'a bool'),
x04=((1,2,3), str, 'A list of ints, as a string'),
x05=((1,2,3), (int, ), 'A list of ints, as a tuple'),
x06=((1,2,3), (str, ), 'A list of strings, as a tuple'),
)
# Test iteration
assert len(c) == 6
for name in c:
assert name in ('x01', 'x02', 'x03', 'x04', 'x05', 'x06')
assert set(dir(c)) == set([name for name in c])
# Test values
assert c.x01 == 3
assert c.x02 == 3.0
assert c.x03 == True
assert c.x04 == '(1, 2, 3)'
assert c.x05 == (1, 2, 3)
assert c.x06 == ('1', '2', '3')
# Test docstring (e.g. alphabetic order)
i1 = c.__doc__.find('x01')
i2 = c.__doc__.find('x02')
i3 = c.__doc__.find('x03')
i4 = c.__doc__.find('x04')
assert i1 > 0
assert i2 > i1
assert i3 > i2
assert i4 > i3
assert 'x01 (int): ' in c.__doc__
assert 'x04 (str): ' in c.__doc__
assert 'x05 (int-tuple): ' in c.__doc__
assert 'x06 (str-tuple): ' in c.__doc__
def test_option_spec_fail():
# ok
Config('aa', foo=(3, int, ''))
with raises(ValueError):
Config('aa', _foo=(3, int, ''))
for spec in [(), # too short
(3, int), # still too short
(3, int, 'docs', None), # too long
(3, None, 'docs'), # type is not a type
('', set, 'docs'), # type is not supported
('3,3', [], 'docs'), # tuple type needs one element
('3,3', [int, int], 'docs'), # not two
('3,3', [set], 'docs'), # and must be supported
]:
with raises(ValueError):
Config('aa', foo=spec)
def test_read_file():
# Prepare config files
filename1 = os.path.join(tempfile.gettempdir(), 'flexx_config_test1.cfg')
with open(filename1, 'wb') as f:
f.write(SAMPLE1.encode())
filename2 = os.path.join(tempfile.gettempdir(), 'flexx_config_test2.cfg')
with open(filename2, 'wb') as f:
f.write(SAMPLE2.encode())
filename3 = os.path.join(tempfile.gettempdir(), 'flexx_config_test3.cfg')
with open(filename3, 'wb') as f:
f.write(SAMPLE3.encode())
filename4 = os.path.join(tempfile.gettempdir(), 'flexx_config_test4.cfg')
with open(filename4, 'wb') as f:
f.write(b'\x00\xff')
# Config without sources
c = Config('testconfig',
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == False
assert c.bar == 1
# Config with filename, implicit section
c = Config('testconfig', filename1,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == True
assert c.bar == 3
assert c.eggs == 'bla bla'
# Config with filename, explicit section
c = Config('testconfig', filename2,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == True
assert c.bar == 4
assert c.eggs == 'bla bla bla'
# Config with string, implicit section
c = Config('testconfig', SAMPLE1,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == True
assert c.bar == 3
assert c.eggs == 'bla bla'
# Config with string, explicit section
c = Config('testconfig', SAMPLE2,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == True
assert c.bar == 4
assert c.eggs == 'bla bla bla'
# Config with string, implicit section, different name
c = Config('aaaa', SAMPLE1,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == True
assert c.bar == 3
# Config with string, explicit section, different name (no section match)
c = Config('aaaa', SAMPLE2,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.foo == False
assert c.bar == 1
# Config with both, and filenames can be nonexistent
c = Config('testconfig', SAMPLE1, filename2, filename1+'.cfg',
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.bar == 4
#
c = Config('testconfig', filename2, filename1+'.cfg', SAMPLE1,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
assert c.bar == 3
# Config from invalid string is ignored (logged)
c = Config('testconfig', SAMPLE3, bar=(1, int, ''))
assert c.bar == 1
# Config from invalid file is ignored (logged)
c = Config('testconfig', filename3, bar=(1, int, ''))
assert c.bar == 1
    # Config from invalid unicode file is ignored (logged)
c = Config('testconfig', filename4, bar=(1, int, ''))
assert c.bar == 1
# Fails
with raises(ValueError):
c = Config('testconfig', [])
with raises(ValueError):
c = Config('testconfig', 3)
def test_read_file_later():
filename1 = os.path.join(tempfile.gettempdir(), 'flexx_config_test1.cfg')
with open(filename1, 'wb') as f:
f.write(SAMPLE1.encode())
filename2 = os.path.join(tempfile.gettempdir(), 'flexx_config_test2.cfg')
with open(filename2, 'wb') as f:
f.write(SAMPLE2.encode())
os.environ['TESTCONFIG_SPAM'] = '100'
c = Config('testconfig', filename1,
foo=(False, bool, ''), bar=(1, int, ''),
spam=(0.0, float, ''), eggs=('', str, ''))
del os.environ['TESTCONFIG_SPAM']
assert c.bar == 3 # from filename1
assert c.spam == 100
c.eggs = 'haha'
c.spam = 10
c.load_from_file(filename2)
assert c.bar == 4 # from filename2
    assert c.eggs == 'haha'  # from what we set - takes precedence
    assert c.spam == 10  # from what we set - takes precedence over env var
def test_access():
c = Config('testconfig', foo=(1, int, ''), BAR=(1, int, ''))
assert len(c) == 2
c.foo = 3
c.BAR = 4
assert c['foo'] == 3
assert c['BAR'] == 4
c['foO'] = 30
c['BAr'] = 40
assert c['FOO'] == 30
assert c['bar'] == 40
with raises(AttributeError):
c.FOO
with raises(AttributeError):
c.bar
with raises(TypeError):
c[3]
with raises(IndexError):
c['optiondoesnotexist']
with raises(TypeError):
c[3] = ''
with raises(IndexError):
c['optiondoesnotexist'] = ''
def test_repr_and_str():
# Prepare file
filename1 = os.path.join(tempfile.gettempdir(), 'flexx_config_test1.cfg')
with open(filename1, 'wb') as f:
f.write(SAMPLE1.encode())
c = Config('aaa', foo=(False, bool, ''), bar=(1, int, ''))
r = repr(c)
summary = str(c)
summary1 = summary.splitlines()[0]
# Test repr
assert 'aaa' in r
assert r.startswith('<') and r.endswith('>')
assert '2' in r # shows how many options
# Test first line of summary
assert 'aaa' in summary1
assert '2' in summary1
assert 'default' in summary
assert not 'set' in summary
assert not 'string' in summary
# set some
c.bar = 2
summary = str(c)
summary1 = summary.splitlines()[0]
# Continue
assert 'default' in summary
assert 'set' in summary
assert summary.count('default') == 2 # once for each opt
assert summary.count('set') == 1 # once for one opt
# Again, now with a file
c = Config('aaa', filename1, foo=(False, bool, ''), bar=(1, int, ''))
summary = str(c)
summary1 = summary.splitlines()[0]
# Test first line of summary
assert 'aaa' in summary1
assert '2' in summary1
assert 'default' in summary
assert filename1 in summary
assert not 'set' in summary
assert not 'string' in summary
# Again, now with a string
c = Config('aaa', SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
summary = str(c)
summary1 = summary.splitlines()[0]
# Test first line of summary
assert 'aaa' in summary1
assert '2' in summary1
assert 'default' in summary
assert filename1 not in summary
assert not 'set' in summary
assert 'string' in summary
def test_set_from_cmdline():
old_argv = sys.argv
try:
sys.argv = '', '--aaa-bar=9'
c = Config('aaa', SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
assert c.bar == 9
sys.argv = '', '--aAa-bAr=9'
c = Config('aaa', SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
assert c.bar == 9 # case insensitive
sys.argv = '', '--aaa-bar', '9'
c = Config('aaa', SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
assert c.bar == 3 # need syntax using equals sign
sys.argv = '', '--bar', '9'
c = Config('aaa', SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
        assert c.bar == 3  # needs the name prefix
sys.argv = '', '--aaa-foo=1,2,3'
c = Config('aaa', foo=([], [int], ''))
assert c.foo == (1, 2, 3)
finally:
sys.argv = old_argv
def test_set_from_env():
name = 'config_env_test'
os.environ[name.upper() + '_' + 'BAR'] = '8'
c = Config(name, SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
del os.environ[name.upper() + '_' + 'BAR']
assert c.bar == 8
os.environ[name + '-' + 'bar'] = '8'
c = Config(name, SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
del os.environ[name + '-' + 'bar']
assert c.bar == 3 # must be uppercase
os.environ[name.upper() + '-' + 'bar'] = '8'
c = Config(name, SAMPLE1, foo=(False, bool, ''), bar=(1, int, ''))
del os.environ[name.upper() + '-' + 'bar']
assert c.bar == 3 # should use underscore
def test_order():
filename1 = os.path.join(tempfile.gettempdir(), 'flexx_config_test1.cfg')
with open(filename1, 'wb') as f:
f.write(SAMPLE1.encode())
filename2 = os.path.join(tempfile.gettempdir(), 'flexx_config_test2.cfg')
with open(filename2, 'wb') as f:
f.write(SAMPLE2.encode())
old_argv = sys.argv
os.environ['TESTCONFIG_BAR'] = '5'
sys.argv = '', '--testconfig-bar=6'
try:
c = Config('testconfig', filename1, filename2,
bar=(2, int, ''))
finally:
del os.environ['TESTCONFIG_BAR']
sys.argv = old_argv
c.bar = 7
s = str(c)
indices1 = [s.index(' %i '%i) for i in [2, 3, 4, 5, 6, 7]]
indices2 = [s.rindex(' %i '%i) for i in [2, 3, 4, 5, 6, 7]]
indices3 = list(sorted(indices1))
assert indices1 == indices3
assert indices2 == indices3
def test_docstring():
c = Config('aaa', foo=(False, bool, ''), bar=(1, int, ''))
assert 'aaa' in c.__doc__
assert 'foo (bool)' in c.__doc__
assert 'bar (int)' in c.__doc__
def test_bool():
c = Config('testconfig', foo=(True, bool, ''), bar=(False, bool, ''))
assert c.foo == True
c.foo = True
assert c.foo == True
c.foo = False
assert c.foo == False
for name in 'yes on true Yes On TRUE 1'.split(' '):
c.foo = name
assert c.foo == True
for name in 'no off fAlse No Off FALSE 0'.split(' '):
c.foo = name
assert c.foo == False
for name in 'none ok bla asdasdasd cancel'.split(' '):
with raises(ValueError):
c.foo = name
for val in (1, 2, [2], None, 0, 0.0, 1.0, []):
with raises(ValueError):
c.foo = val
def test_int():
c = Config('testconfig', foo=(1, int, ''), bar=('1', int, ''))
assert c.foo == 1
assert c.bar == 1
c.foo = 12.1
assert c.foo == 12
c.foo = '7'
assert c.foo == 7
c.foo = '-23'
assert c.foo == -23
for val in ([], None, '1e2', '12.1', 'a'):
with raises(ValueError):
c.foo = val
def test_float():
c = Config('testconfig', foo=(1, float, ''), bar=('1', float, ''))
assert c.foo == 1.0
assert c.bar == 1.0
c.foo = 3
assert c.foo == 3.0
c.foo = -3.1
assert c.foo == -3.1
c.foo = '2e3'
assert c.foo == 2000.0
c.foo = '12.12'
assert c.foo == 12.12
for val in ([], None, 'a', '0a'):
with raises(ValueError):
c.foo = val
def test_str():
c = Config('testconfig', foo=(1, str, ''), bar=((1,2,3), str, ''))
assert c.foo == '1'
assert c.bar == '(1, 2, 3)'
c.foo = 3
assert c.foo == '3'
c.foo = 3.1
assert c.foo == '3.1'
c.foo = 'hello there, you!'
assert c.foo == 'hello there, you!'
c.foo = None
assert c.foo == 'None'
c.foo = False
assert c.foo == 'False'
c.foo = []
assert c.foo == '[]'
def test_tuple():
c = Config('testconfig', foo=('1,2', [int], ''), bar=((1,2,3), [str], ''))
assert c.foo == (1, 2)
assert c.bar == ('1', '2', '3')
c.foo = 1.2, 3.3, 5
assert c.foo == (1, 3, 5)
c.foo = '(7, 8, 9)'
assert c.foo == (7, 8, 9)
c.foo = '1, 2,-3,4'
assert c.foo == (1, 2, -3, 4)
c.foo = [1, '2']
assert c.foo == (1, 2)
for val in ([[]], [None], ['a'], ['0a'], ['1.2'], 3):
with raises(ValueError):
c.foo = val
c.bar = 'hello, there, you '
assert c.bar == ('hello', 'there', 'you')
c.bar = [1, '2']
assert c.bar == ('1', '2')
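# --- Illustration (not part of the original test module) ----------------------
# Every option is declared as a (default, type, docstring) tuple; tuple-typed
# options use a one-element sequence holding the item type. A hedged, minimal
# declaration mirroring what the tests above exercise:
def _config_spec_sketch():
    return Config('sketchconfig',
                  retries=(3, int, 'number of retries'),
                  hosts=(('a', 'b'), (str,), 'hosts to try, as a str-tuple'))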
run_tests_if_main()
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from zookeeper import ZookeeperCollector
###############################################################################
class TestZookeeperCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ZookeeperCollector', {
})
self.collector = ZookeeperCollector(config, None)
def test_import(self):
self.assertTrue(ZookeeperCollector)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from homeassistant.components.google_assistant import error, report_state
from homeassistant.util.dt import utcnow
from . import BASIC_CONFIG
from tests.async_mock import AsyncMock, patch
from tests.common import async_fire_time_changed
async def test_report_state(hass, caplog, legacy_patchable_time):
"""Test report state works."""
hass.states.async_set("light.ceiling", "off")
hass.states.async_set("switch.ac", "on")
with patch.object(
BASIC_CONFIG, "async_report_state_all", AsyncMock()
) as mock_report, patch.object(report_state, "INITIAL_REPORT_DELAY", 0):
unsub = report_state.async_enable_report_state(hass, BASIC_CONFIG)
async_fire_time_changed(hass, utcnow())
await hass.async_block_till_done()
# Test that enabling report state does a report on all entities
assert len(mock_report.mock_calls) == 1
assert mock_report.mock_calls[0][1][0] == {
"devices": {
"states": {
"light.ceiling": {"on": False, "online": True},
"switch.ac": {"on": True, "online": True},
}
}
}
with patch.object(
BASIC_CONFIG, "async_report_state_all", AsyncMock()
) as mock_report:
hass.states.async_set("light.kitchen", "on")
await hass.async_block_till_done()
assert len(mock_report.mock_calls) == 1
assert mock_report.mock_calls[0][1][0] == {
"devices": {"states": {"light.kitchen": {"on": True, "online": True}}}
}
# Test that state changes that change something that Google doesn't care about
# do not trigger a state report.
with patch.object(
BASIC_CONFIG, "async_report_state_all", AsyncMock()
) as mock_report:
hass.states.async_set(
"light.kitchen", "on", {"irrelevant": "should_be_ignored"}
)
await hass.async_block_till_done()
assert len(mock_report.mock_calls) == 0
# Test that entities that we can't query don't report a state
with patch.object(
BASIC_CONFIG, "async_report_state_all", AsyncMock()
) as mock_report, patch(
"homeassistant.components.google_assistant.report_state.GoogleEntity.query_serialize",
side_effect=error.SmartHomeError("mock-error", "mock-msg"),
):
hass.states.async_set("light.kitchen", "off")
await hass.async_block_till_done()
assert "Not reporting state for light.kitchen: mock-error"
assert len(mock_report.mock_calls) == 0
unsub()
with patch.object(
BASIC_CONFIG, "async_report_state_all", AsyncMock()
) as mock_report:
hass.states.async_set("light.kitchen", "on")
await hass.async_block_till_done()
assert len(mock_report.mock_calls) == 0
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch, call
from diamond.collector import Collector
from redisstat import RedisCollector
##########################################################################
def run_only_if_redis_is_available(func):
"""Decorator for checking if python-redis is available.
Note: this test will be silently skipped if python-redis is missing.
"""
try:
import redis
except ImportError:
redis = None
pred = lambda: redis is not None
return run_only(func, pred)
class TestRedisCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('RedisCollector', {
'interval': '1',
'databases': 1,
})
self.collector = RedisCollector(config, None)
def test_import(self):
self.assertTrue(RedisCollector)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_real_data(self, publish_mock):
data_1 = {'pubsub_channels': 0,
'used_memory_peak_human': '700.71K',
'bgrewriteaof_in_progress': 0,
'connected_slaves': 0,
'uptime_in_days': 0,
'multiplexing_api': 'epoll',
'lru_clock': 954113,
'last_save_time': 1351718385,
'redis_version': '2.4.10',
'redis_git_sha1': 0,
'gcc_version': '4.4.6',
'connected_clients': 1,
'keyspace_misses': 0,
'used_memory': 726144,
'vm_enabled': 0,
'used_cpu_user_children': '0.00',
'used_memory_peak': 717528,
'role': 'master',
'total_commands_processed': 1,
'latest_fork_usec': 0,
'loading': 0,
'used_memory_rss': 7254016,
'total_connections_received': 1,
'pubsub_patterns': 0,
'aof_enabled': 0,
'used_cpu_sys': '0.02',
'used_memory_human': '709.12K',
'used_cpu_sys_children': '0.00',
'blocked_clients': 0,
'used_cpu_user': '0.00',
'client_biggest_input_buf': 0,
'arch_bits': 64,
'mem_fragmentation_ratio': '9.99',
'expired_keys': 0,
'evicted_keys': 0,
'bgsave_in_progress': 0,
'client_longest_output_list': 0,
'mem_allocator': 'jemalloc-2.2.5',
'process_id': 3020,
'uptime_in_seconds': 32,
'changes_since_last_save': 0,
'redis_git_dirty': 0,
'keyspace_hits': 0
}
data_2 = {'pubsub_channels': 1,
'used_memory_peak_human': '1700.71K',
'bgrewriteaof_in_progress': 4,
'connected_slaves': 2,
'master_last_io_seconds_ago': 7,
'uptime_in_days': 1,
'multiplexing_api': 'epoll',
'lru_clock': 5954113,
'last_save_time': 51351718385,
'redis_version': '2.4.10',
'redis_git_sha1': 0,
'gcc_version': '4.4.6',
'connected_clients': 100,
'keyspace_misses': 670,
'used_memory': 1726144,
'vm_enabled': 0,
'used_cpu_user_children': '2.00',
'used_memory_peak': 1717528,
'role': 'master',
'total_commands_processed': 19764,
'latest_fork_usec': 8,
'loading': 0,
'used_memory_rss': 17254016,
'total_connections_received': 18764,
'pubsub_patterns': 0,
'aof_enabled': 0,
'used_cpu_sys': '0.05',
'used_memory_human': '1709.12K',
'used_cpu_sys_children': '0.09',
'blocked_clients': 8,
'used_cpu_user': '0.09',
'client_biggest_input_buf': 40,
'arch_bits': 64,
'mem_fragmentation_ratio': '0.99',
'expired_keys': 0,
'evicted_keys': 0,
'bgsave_in_progress': 0,
'client_longest_output_list': 0,
'mem_allocator': 'jemalloc-2.2.5',
'process_id': 3020,
'uptime_in_seconds': 95732,
'changes_since_last_save': 759,
'redis_git_dirty': 0,
'keyspace_hits': 5700
}
patch_collector = patch.object(RedisCollector, '_get_info',
Mock(return_value=data_1))
patch_config = patch.object(RedisCollector, '_get_config',
Mock(return_value={'maxmemory': '2097152'}))
patch_time = patch('time.time', Mock(return_value=10))
patch_collector.start()
patch_config.start()
patch_time.start()
self.collector.collect()
patch_collector.stop()
patch_config.stop()
patch_time.stop()
self.assertPublishedMany(publish_mock, {})
patch_collector = patch.object(RedisCollector, '_get_info',
Mock(return_value=data_2))
patch_config = patch.object(RedisCollector, '_get_config',
Mock(return_value={'maxmemory': '2097152'}))
patch_time = patch('time.time', Mock(return_value=20))
patch_collector.start()
patch_config.start()
patch_time.start()
self.collector.collect()
patch_collector.stop()
patch_config.stop()
patch_time.stop()
metrics = {'6379.process.uptime': 95732,
'6379.replication.master': 1,
'6379.replication.master_sync_in_progress': 0,
'6379.pubsub.channels': 1,
'6379.slaves.connected': 2,
'6379.slaves.last_io': 7,
'6379.process.connections_received': 18764,
'6379.clients.longest_output_list': 0,
'6379.process.commands_processed': 19764,
'6379.last_save.changes_since': 759,
'6379.memory.external_view': 17254016,
'6379.memory.fragmentation_ratio': 0.99,
'6379.last_save.time': 51351718385,
'6379.clients.connected': 100,
'6379.clients.blocked': 8,
'6379.pubsub.patterns': 0,
'6379.cpu.parent.user': 0.09,
'6379.last_save.time_since': -51351718365,
'6379.memory.internal_view': 1726144,
'6379.cpu.parent.sys': 0.05,
'6379.keyspace.misses': 670,
'6379.keys.expired': 0,
'6379.keys.evicted': 0,
'6379.keyspace.hits': 5700,
'6379.memory.used_percent': 82.31,
}
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_hostport_or_instance_config(self, publish_mock):
testcases = {
'default': {
'config': {}, # test default settings
'calls': [call('6379', 'localhost', 6379, None, None)],
},
'host_set': {
'config': {'host': 'myhost'},
'calls': [call('6379', 'myhost', 6379, None, None)],
},
'port_set': {
'config': {'port': 5005},
'calls': [call('5005', 'localhost', 5005, None, None)],
},
'hostport_set': {
'config': {'host': 'megahost', 'port': 5005},
'calls': [call('5005', 'megahost', 5005, None, None)],
},
'portauth_set': {
'config': {'port': 5005, 'auth': 'pass'},
'calls': [call('5005', 'localhost', 5005, None, 'pass')],
},
'unix_socket_host_set': {
'config': {'host': 'unix:/var/run/redis/myhost.sock'},
'calls': [call('myhost', 'localhost', 6379,
'/var/run/redis/myhost.sock', None)],
},
'instance_1_host': {
'config': {'instances': ['nick@myhost']},
'calls': [call('nick', 'myhost', 6379, None, None)],
},
'unix_socket_instance_1_host': {
'config': {'instances': [
'nick@unix:/var/run/redis/myhost.sock'
]},
'calls': [call('nick', 'localhost', 6379,
'/var/run/redis/myhost.sock', None)],
},
'unix_socket_instance_1_hostauth': {
'config': {'instances': [
'nick@unix:/var/run/redis/myhost.sock:/pass'
]},
'calls': [call('nick', 'localhost', 6379,
'/var/run/redis/myhost.sock', 'pass')],
},
'instance_1_port': {
'config': {'instances': ['nick@:9191']},
'calls': [call('nick', 'localhost', 9191, None, None)],
},
'instance_1_hostport': {
'config': {'instances': ['nick@host1:8765']},
'calls': [call('nick', 'host1', 8765, None, None)],
},
'instance_2': {
'config': {'instances': [
'foo@hostX',
'bar@:1000/pass',
'unix:/var/run/redis/myhost.sock:1/pass'
]},
'calls': [
call('foo', 'hostX', 6379, None, None),
call('bar', 'localhost', 1000, None, 'pass'),
call('myhost', 'localhost', 6379,
'/var/run/redis/myhost.sock', 'pass'),
],
},
'old_and_new': {
'config': {
'host': 'myhost',
'port': 1234,
'instances': [
'foo@hostX',
'bar@:1000',
'hostonly',
':1234'
]
},
'calls': [
call('foo', 'hostX', 6379, None, None),
call('bar', 'localhost', 1000, None, None),
call('6379', 'hostonly', 6379, None, None),
call('1234', 'localhost', 1234, None, None),
],
},
}
for testname, data in testcases.items():
config = get_collector_config('RedisCollector', data['config'])
collector = RedisCollector(config, None)
mock = Mock(return_value={}, name=testname)
patch_c = patch.object(RedisCollector, 'collect_instance', mock)
patch_c.start()
collector.collect()
patch_c.stop()
expected_call_count = len(data['calls'])
self.assertEqual(mock.call_count, expected_call_count,
msg='[%s] mock.calls=%d != expected_calls=%d' %
(testname, mock.call_count, expected_call_count))
mock.assert_has_calls(data['calls'], any_order=True)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_process_config_with_instances(self, publish_mock):
config_data = {
'instances': [
'nick1@host1:1111',
'nick2@:2222',
'nick3@host3',
'nick4@host4:3333/@pass/word',
'bla'
]
}
expected_processed_config = {
'nick2': ('localhost', 2222, None, None),
'nick3': ('host3', 6379, None, None),
'nick1': ('host1', 1111, None, None),
'nick4': ('host4', 3333, None, '@pass/word'),
'6379': ('bla', 6379, None, None)
}
config = get_collector_config('RedisCollector', config_data)
collector = RedisCollector(config, None)
self.assertEqual(collector.instances, expected_processed_config)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_key_naming_when_using_instances(self, publish_mock):
config_data = {
'instances': [
'nick1@host1:1111',
'nick2@:2222',
'nick3@host3',
'nick4@host4:3333/@password',
'bla'
]
}
get_info_data = {
'role': 'slave',
'total_connections_received': 200,
'total_commands_processed': 100,
}
expected_calls = [
call('nick1.process.connections_received', 200, precision=0,
metric_type='GAUGE'),
call('nick1.process.commands_processed', 100, precision=0,
metric_type='GAUGE'),
call('nick1.replication.master', 0, precision=0,
metric_type='GAUGE'),
call('nick2.process.connections_received', 200, precision=0,
metric_type='GAUGE'),
call('nick2.process.commands_processed', 100, precision=0,
metric_type='GAUGE'),
call('nick2.replication.master', 0, precision=0,
metric_type='GAUGE'),
call('nick3.process.connections_received', 200, precision=0,
metric_type='GAUGE'),
call('nick3.process.commands_processed', 100, precision=0,
metric_type='GAUGE'),
call('nick3.replication.master', 0, precision=0,
metric_type='GAUGE'),
call('nick4.process.connections_received', 200, precision=0,
metric_type='GAUGE'),
call('nick4.process.commands_processed', 100, precision=0,
metric_type='GAUGE'),
call('nick4.replication.master', 0, precision=0,
metric_type='GAUGE'),
call('6379.process.connections_received', 200, precision=0,
metric_type='GAUGE'),
call('6379.process.commands_processed', 100, precision=0,
metric_type='GAUGE'),
call('6379.replication.master', 0, precision=0,
metric_type='GAUGE'),
]
config = get_collector_config('RedisCollector', config_data)
collector = RedisCollector(config, None)
patch_c = patch.object(RedisCollector, '_get_info',
Mock(return_value=get_info_data))
patch_c.start()
collector.collect()
patch_c.stop()
self.assertEqual(publish_mock.call_count, len(expected_calls))
publish_mock.assert_has_calls(expected_calls, any_order=True)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import copy
import voluptuous as vol
from homeassistant.helpers import config_validation as cv, intent, script, template
DOMAIN = "intent_script"
CONF_INTENTS = "intents"
CONF_SPEECH = "speech"
CONF_ACTION = "action"
CONF_CARD = "card"
CONF_TYPE = "type"
CONF_TITLE = "title"
CONF_CONTENT = "content"
CONF_TEXT = "text"
CONF_ASYNC_ACTION = "async_action"
DEFAULT_CONF_ASYNC_ACTION = False
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
cv.string: {
vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(
CONF_ASYNC_ACTION, default=DEFAULT_CONF_ASYNC_ACTION
): cv.boolean,
vol.Optional(CONF_CARD): {
vol.Optional(CONF_TYPE, default="simple"): cv.string,
vol.Required(CONF_TITLE): cv.template,
vol.Required(CONF_CONTENT): cv.template,
},
vol.Optional(CONF_SPEECH): {
vol.Optional(CONF_TYPE, default="plain"): cv.string,
vol.Required(CONF_TEXT): cv.template,
},
}
}
},
extra=vol.ALLOW_EXTRA,
)
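# Illustrative configuration.yaml sketch for the schema above (the intent name,
# entity id and templates are made-up examples, not defaults shipped with the
# component):
#
# intent_script:
#   GetTemperature:
#     speech:
#       text: "It is {{ states('sensor.temperature') }} degrees"
#     card:
#       title: "Temperature"
#       content: "Temperature was requested via an intent"
#     action:
#       service: notify.notify
#       data:
#         message: "Temperature was requested"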
async def async_setup(hass, config):
"""Activate Alexa component."""
intents = copy.deepcopy(config[DOMAIN])
template.attach(hass, intents)
for intent_type, conf in intents.items():
if CONF_ACTION in conf:
conf[CONF_ACTION] = script.Script(
hass, conf[CONF_ACTION], f"Intent Script {intent_type}", DOMAIN
)
intent.async_register(hass, ScriptIntentHandler(intent_type, conf))
return True
class ScriptIntentHandler(intent.IntentHandler):
"""Respond to an intent with a script."""
def __init__(self, intent_type, config):
"""Initialize the script intent handler."""
self.intent_type = intent_type
self.config = config
async def async_handle(self, intent_obj):
"""Handle the intent."""
speech = self.config.get(CONF_SPEECH)
card = self.config.get(CONF_CARD)
action = self.config.get(CONF_ACTION)
is_async_action = self.config.get(CONF_ASYNC_ACTION)
slots = {key: value["value"] for key, value in intent_obj.slots.items()}
if action is not None:
if is_async_action:
intent_obj.hass.async_create_task(
action.async_run(slots, intent_obj.context)
)
else:
await action.async_run(slots, intent_obj.context)
response = intent_obj.create_response()
if speech is not None:
response.async_set_speech(
speech[CONF_TEXT].async_render(slots, parse_result=False),
speech[CONF_TYPE],
)
if card is not None:
response.async_set_card(
card[CONF_TITLE].async_render(slots, parse_result=False),
card[CONF_CONTENT].async_render(slots, parse_result=False),
card[CONF_TYPE],
)
return response
|
import ssl
from urllib.request import urlopen
from urllib.parse import urlencode
import pytest_httpbin.certs
# Internal imports
import vcr
from assertions import assert_cassette_has_one_response
def urlopen_with_cafile(*args, **kwargs):
context = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
context.check_hostname = False
kwargs["context"] = context
try:
return urlopen(*args, **kwargs)
except TypeError:
        # Python 2 / PyPy don't let us override the SSL context
        del kwargs["context"]
return urlopen(*args, **kwargs)
def test_response_code(httpbin_both, tmpdir):
"""Ensure we can read a response code from a fetch"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
code = urlopen_with_cafile(url).getcode()
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
assert code == urlopen_with_cafile(url).getcode()
def test_random_body(httpbin_both, tmpdir):
"""Ensure we can read the content, and that it's served from cache"""
url = httpbin_both.url + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
body = urlopen_with_cafile(url).read()
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
assert body == urlopen_with_cafile(url).read()
def test_response_headers(httpbin_both, tmpdir):
"""Ensure we can get information from the response"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
open1 = urlopen_with_cafile(url).info().items()
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
open2 = urlopen_with_cafile(url).info().items()
assert sorted(open1) == sorted(open2)
def test_effective_url(httpbin_both, tmpdir):
"""Ensure that the effective_url is captured"""
url = httpbin_both.url + "/redirect-to?url=/html"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
effective_url = urlopen_with_cafile(url).geturl()
assert effective_url == httpbin_both.url + "/html"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
assert effective_url == urlopen_with_cafile(url).geturl()
def test_multiple_requests(httpbin_both, tmpdir):
"""Ensure that we can cache multiple requests"""
urls = [httpbin_both.url, httpbin_both.url, httpbin_both.url + "/get", httpbin_both.url + "/bytes/1024"]
with vcr.use_cassette(str(tmpdir.join("multiple.yaml"))) as cass:
[urlopen_with_cafile(url) for url in urls]
assert len(cass) == len(urls)
def test_get_data(httpbin_both, tmpdir):
"""Ensure that it works with query data"""
data = urlencode({"some": 1, "data": "here"})
url = httpbin_both.url + "/get?" + data
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
res1 = urlopen_with_cafile(url).read()
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
res2 = urlopen_with_cafile(url).read()
assert res1 == res2
def test_post_data(httpbin_both, tmpdir):
"""Ensure that it works when posting data"""
data = urlencode({"some": 1, "data": "here"}).encode("utf-8")
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
res1 = urlopen_with_cafile(url, data).read()
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
res2 = urlopen_with_cafile(url, data).read()
assert len(cass) == 1
assert res1 == res2
assert_cassette_has_one_response(cass)
def test_post_unicode_data(httpbin_both, tmpdir):
"""Ensure that it works when posting unicode data"""
data = urlencode({"snowman": "☃".encode()}).encode("utf-8")
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
res1 = urlopen_with_cafile(url, data).read()
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
res2 = urlopen_with_cafile(url, data).read()
assert len(cass) == 1
assert res1 == res2
assert_cassette_has_one_response(cass)
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
"""Ensure that requests between schemes are treated separately"""
    # First fetch a url under https, and then again under http, and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
urlopen_with_cafile(httpbin_secure.url)
urlopen_with_cafile(httpbin.url)
assert len(cass) == 2
assert cass.play_count == 0
def test_decorator(httpbin_both, tmpdir):
"""Test the decorator version of VCR.py"""
url = httpbin_both.url
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
def inner1():
return urlopen_with_cafile(url).getcode()
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
def inner2():
return urlopen_with_cafile(url).getcode()
assert inner1() == inner2()
|
import collections
import contextlib
import logging
import iptc
log = logging.getLogger(__name__)
RULE_TARGET_SORT_ORDER = {
# all else defaults to '0'
"LOG": 1,
"REJECT": 2.0,
}
_RuleBase = collections.namedtuple(
"Rule", ("protocol", "src", "dst", "target", "matches", "target_parameters")
)
class Rule(_RuleBase):
"""Rule representation.
Working with iptc's rule classes directly doesn't work well, since rules
represent actual existing iptables rules, and changes are applied
immediately. They're also difficult to compare.
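    A minimal illustrative construction (the field values are made up for this
    example and are not taken from any real chain)::
        rule = Rule(
            protocol='tcp',
            src='0.0.0.0/0.0.0.0',
            dst='0.0.0.0/0.0.0.0',
            target='ACCEPT',
            matches=(('tcp', (('dport', ('22',)),)),),
            target_parameters=(),
        )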
"""
def __new__(cls, *args, **kwargs):
result = _RuleBase.__new__(cls, *args, **kwargs)
result.validate()
return result
def _replace(self, **kwargs):
result = super()._replace(**kwargs)
result.validate()
return result
def validate(self):
if self.target == "REJECT":
assert any(
name == "reject-with" for name, _ in self.target_parameters
), "REJECT rules must specify reject-with"
assert tuple(sorted(self.matches)) == self.matches, "matches should be sorted"
for match_name, params in self.matches:
for param_name, param_value in params:
assert (
"_" not in param_name
), f"use dashes instead of underscores in {param_name}"
assert isinstance(
param_value, tuple
), f"value of {param_name} should be tuple"
assert (
tuple(sorted(self.target_parameters)) == self.target_parameters
), "target_parameters should be sorted"
for param_name, param_value in self.target_parameters:
assert (
"_" not in param_name
), f"use dashes instead of underscores in {param_name}"
assert isinstance(
param_value, tuple
), f"value of {param_name} should be tuple"
@classmethod
def from_iptc(cls, rule):
fields = {
"protocol": rule.protocol,
"src": rule.src,
"dst": rule.dst,
"target": rule.target.name,
"matches": (),
"target_parameters": (),
}
for param_name, param_value in sorted(rule.target.get_all_parameters().items()):
fields["target_parameters"] += ((param_name, tuple(param_value)),)
matches = []
for match in rule.matches:
matches.append(
(
match.name,
tuple(
(param, tuple(value))
for param, value in sorted(match.get_all_parameters().items())
),
)
)
# ensure that matches are sorted for consistency with matching
fields["matches"] = tuple(sorted(matches))
return cls(**fields)
def to_iptc(self):
rule = iptc.Rule()
rule.protocol = self.protocol
rule.src = self.src
rule.dst = self.dst
target = rule.create_target(self.target)
for param_name, param_value in self.target_parameters:
target.set_parameter(param_name, param_value)
for name, params in self.matches:
match = rule.create_match(name)
for param_name, param_value in params:
match.set_parameter(param_name, param_value)
return rule
@contextlib.contextmanager
def iptables_txn(table):
"""Temporarily disable autocommit and commit at the end.
If an exception occurs, changes are rolled back.
By default, changes to iptables rules are applied immediately. In some
cases, we want to avoid that.
https://github.com/ldx/python-iptables#autocommit
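    A sketch of typical usage (the chain name and my_rule are placeholders)::
        table = iptc.Table(iptc.Table.FILTER)
        with iptables_txn(table):
            chain = iptc.Chain(table, "INPUT")
            chain.insert_rule(my_rule.to_iptc())
        # every insert above is committed together here, or rolled back on error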
"""
assert table.autocommit is True, table.autocommit
try:
table.autocommit = False
yield
table.commit()
finally:
table.refresh()
table.autocommit = True
class ChainDoesNotExist(Exception):
pass
def all_chains():
return {chain.name for chain in iptc.Table(iptc.Table.FILTER).chains}
def ensure_chain(chain, rules):
"""Idempotently ensure a chain exists and has an exact set of rules.
This function creates or updates an existing chain to match the rules
passed in.
This function will not reorder existing rules, but any new rules are always
inserted at the front of the chain.
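    Illustrative call (chain name and rules are placeholders)::
        ensure_chain("my-service.filter", {accept_rule, log_rule})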
"""
try:
current_rules = set(list_chain(chain))
except ChainDoesNotExist:
create_chain(chain)
current_rules = set()
for rule in rules:
if rule not in current_rules:
insert_rule(chain, rule)
extra_rules = current_rules - set(rules)
if extra_rules:
delete_rules(chain, extra_rules)
def _rule_sort_key(rule_tuple):
old_index, rule = rule_tuple
target_name = rule.target
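    # sorted() is stable, so rules with the same target class keep their original
    # relative order (old_index); LOG rules sort after everything else and REJECT
    # rules sort last.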
return (RULE_TARGET_SORT_ORDER.get(target_name, 0), old_index)
def reorder_chain(chain_name):
"""Ensure that any REJECT rules are last, and any LOG rules are second-to-last
"""
table = iptc.Table(iptc.Table.FILTER)
with iptables_txn(table):
rules = list_chain(chain_name)
chain = iptc.Chain(table, chain_name)
# sort the rules by rule_key, which uses (RULE_TARGET_SORT_ORDER, idx)
sorted_rules_with_indices = sorted(enumerate(rules), key=_rule_sort_key)
for new_index, (old_index, rule) in enumerate(sorted_rules_with_indices):
if new_index == old_index:
continue
log.debug(f"reordering chain {chain_name} rule {rule} to #{new_index}")
chain.replace_rule(rule.to_iptc(), new_index)
def ensure_rule(chain, rule):
rules = list_chain(chain)
if rule not in rules:
insert_rule(chain, rule)
def insert_rule(chain_name, rule):
log.debug(f"adding rule to {chain_name}: {rule}")
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), chain_name)
chain.insert_rule(rule.to_iptc())
def delete_rules(chain_name, rules):
log.debug(f"deleting rules from {chain_name}: {rules}")
table = iptc.Table(iptc.Table.FILTER)
with iptables_txn(table):
chain = iptc.Chain(table, chain_name)
for potential_rule in chain.rules:
if Rule.from_iptc(potential_rule) in rules:
chain.delete_rule(potential_rule)
def create_chain(chain_name):
log.debug(f"creating chain: {chain_name}")
iptc.Table(iptc.Table.FILTER).create_chain(chain_name)
def delete_chain(chain_name):
log.debug(f"deleting chain: {chain_name}")
chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), chain_name)
chain.flush()
chain.delete()
def list_chain(chain_name):
"""List rules in a chain.
Returns a list of iptables rules, or raises ChainDoesNotExist.
"""
table = iptc.Table(iptc.Table.FILTER)
chain = iptc.Chain(table, chain_name)
# TODO: is there any way to do this without listing all chains? (probably slow)
# If the chain doesn't exist, chain.rules will be an empty list, so we need
# to make sure the chain actually _does_ exist.
if chain in table.chains:
return tuple(Rule.from_iptc(rule) for rule in chain.rules)
else:
raise ChainDoesNotExist(chain_name)
|
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity import Entity
from .const import (
DOORBIRD_INFO_KEY_BUILD_NUMBER,
DOORBIRD_INFO_KEY_DEVICE_TYPE,
DOORBIRD_INFO_KEY_FIRMWARE,
MANUFACTURER,
)
from .util import get_mac_address_from_doorstation_info
class DoorBirdEntity(Entity):
"""Base class for doorbird entities."""
def __init__(self, doorstation, doorstation_info):
"""Initialize the entity."""
super().__init__()
self._doorstation_info = doorstation_info
self._doorstation = doorstation
self._mac_addr = get_mac_address_from_doorstation_info(doorstation_info)
@property
def device_info(self):
"""Doorbird device info."""
firmware = self._doorstation_info[DOORBIRD_INFO_KEY_FIRMWARE]
firmware_build = self._doorstation_info[DOORBIRD_INFO_KEY_BUILD_NUMBER]
return {
"connections": {(dr.CONNECTION_NETWORK_MAC, self._mac_addr)},
"name": self._doorstation.name,
"manufacturer": MANUFACTURER,
"sw_version": f"{firmware} {firmware_build}",
"model": self._doorstation_info[DOORBIRD_INFO_KEY_DEVICE_TYPE],
}
|
from collections import deque
import json
import operator
import os
import re
import subprocess
import sys
from typing import Dict, Set
from stdlib_list import stdlib_list
from tqdm import tqdm
from homeassistant.const import REQUIRED_PYTHON_VER
import homeassistant.util.package as pkg_util
from script.gen_requirements_all import COMMENT_REQUIREMENTS
from .model import Config, Integration
IGNORE_PACKAGES = {
commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS
}
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)")
SUPPORTED_PYTHON_TUPLES = [
REQUIRED_PYTHON_VER[:2],
tuple(map(operator.add, REQUIRED_PYTHON_VER, (0, 1, 0)))[:2],
]
SUPPORTED_PYTHON_VERSIONS = [
".".join(map(str, version_tuple)) for version_tuple in SUPPORTED_PYTHON_TUPLES
]
STD_LIBS = {version: set(stdlib_list(version)) for version in SUPPORTED_PYTHON_VERSIONS}
PIPDEPTREE_CACHE = None
IGNORE_VIOLATIONS = {
# Still has standard library requirements.
"acmeda",
"blink",
"ezviz",
"hdmi_cec",
"juicenet",
"lupusec",
"rainbird",
"slide",
"suez_water",
}
def normalize_package_name(requirement: str) -> str:
"""Return a normalized package name from a requirement string."""
match = PACKAGE_REGEX.search(requirement)
if not match:
return ""
# pipdeptree needs lowercase and dash instead of underscore as separator
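    # e.g. "Flake8_Docstrings==1.5.0" -> "flake8-docstrings"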
package = match.group(1).lower().replace("_", "-")
return package
def validate(integrations: Dict[str, Integration], config: Config):
"""Handle requirements for integrations."""
ensure_cache()
# check for incompatible requirements
disable_tqdm = config.specific_integrations or os.environ.get("CI", False)
for integration in tqdm(integrations.values(), disable=disable_tqdm):
if not integration.manifest:
continue
validate_requirements(integration)
def validate_requirements(integration: Integration):
"""Validate requirements."""
# Some integrations have not been fixed yet so are allowed to have violations.
if integration.domain in IGNORE_VIOLATIONS:
return
integration_requirements = set()
integration_packages = set()
for req in integration.requirements:
package = normalize_package_name(req)
if not package:
integration.add_error(
"requirements",
f"Failed to normalize package name from requirement {req}",
)
return
if package in IGNORE_PACKAGES:
continue
integration_requirements.add(req)
integration_packages.add(package)
install_ok = install_requirements(integration, integration_requirements)
if not install_ok:
return
all_integration_requirements = get_requirements(integration, integration_packages)
if integration_requirements and not all_integration_requirements:
integration.add_error(
"requirements",
f"Failed to resolve requirements {integration_requirements}",
)
return
# Check for requirements incompatible with standard library.
for version, std_libs in STD_LIBS.items():
for req in all_integration_requirements:
if req in std_libs:
integration.add_error(
"requirements",
f"Package {req} is not compatible with Python {version} standard library",
)
def ensure_cache():
"""Ensure we have a cache of pipdeptree.
{
"flake8-docstring": {
"key": "flake8-docstrings",
"package_name": "flake8-docstrings",
"installed_version": "1.5.0"
"dependencies": {"flake8"}
}
}
"""
global PIPDEPTREE_CACHE
if PIPDEPTREE_CACHE is not None:
return
cache = {}
for item in json.loads(
subprocess.run(
["pipdeptree", "-w", "silence", "--json"],
check=True,
capture_output=True,
text=True,
).stdout
):
cache[item["package"]["key"]] = {
**item["package"],
"dependencies": {dep["key"] for dep in item["dependencies"]},
}
PIPDEPTREE_CACHE = cache
def get_requirements(integration: Integration, packages: Set[str]) -> Set[str]:
"""Return all (recursively) requirements for an integration."""
ensure_cache()
all_requirements = set()
to_check = deque(packages)
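    # Breadth-first walk over the dependency graph recorded in the pipdeptree cache.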
while to_check:
package = to_check.popleft()
if package in all_requirements:
continue
all_requirements.add(package)
item = PIPDEPTREE_CACHE.get(package)
if item is None:
            # Only report an error if a direct dependency could not be resolved
if package in packages:
integration.add_error(
"requirements", f"Failed to resolve requirements for {package}"
)
continue
to_check.extend(item["dependencies"])
return all_requirements
def install_requirements(integration: Integration, requirements: Set[str]) -> bool:
"""Install integration requirements.
Return True if successful.
"""
global PIPDEPTREE_CACHE
ensure_cache()
for req in requirements:
match = PIP_REGEX.search(req)
if not match:
integration.add_error(
"requirements",
f"Failed to parse requirement {req} before installation",
)
continue
install_args = match.group(1)
requirement_arg = match.group(2)
is_installed = False
normalized = normalize_package_name(requirement_arg)
if normalized and "==" in requirement_arg:
ver = requirement_arg.split("==")[-1]
item = PIPDEPTREE_CACHE.get(normalized)
is_installed = item and item["installed_version"] == ver
if not is_installed:
try:
is_installed = pkg_util.is_installed(req)
except ValueError:
is_installed = False
if is_installed:
continue
args = [sys.executable, "-m", "pip", "install", "--quiet"]
if install_args:
args.append(install_args)
args.append(requirement_arg)
try:
result = subprocess.run(args, check=True, capture_output=True, text=True)
except subprocess.SubprocessError:
integration.add_error(
"requirements",
f"Requirement {req} failed to install",
)
else:
# Clear the pipdeptree cache if something got installed
if "Successfully installed" in result.stdout:
PIPDEPTREE_CACHE = None
if integration.errors:
return False
return True
|
from homeassistant.const import ATTR_NAME
from homeassistant.core import callback
from . import (
SENSOR_TYPES,
TYPE_SOLARRADIATION,
TYPE_SOLARRADIATION_LX,
AmbientWeatherEntity,
)
from .const import (
ATTR_LAST_DATA,
ATTR_MONITORED_CONDITIONS,
DATA_CLIENT,
DOMAIN,
TYPE_SENSOR,
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Ambient PWS sensors based on a config entry."""
ambient = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
sensor_list = []
for mac_address, station in ambient.stations.items():
for condition in station[ATTR_MONITORED_CONDITIONS]:
name, unit, kind, device_class = SENSOR_TYPES[condition]
if kind == TYPE_SENSOR:
sensor_list.append(
AmbientWeatherSensor(
ambient,
mac_address,
station[ATTR_NAME],
condition,
name,
device_class,
unit,
)
)
async_add_entities(sensor_list, True)
class AmbientWeatherSensor(AmbientWeatherEntity):
"""Define an Ambient sensor."""
def __init__(
self,
ambient,
mac_address,
station_name,
sensor_type,
sensor_name,
device_class,
unit,
):
"""Initialize the sensor."""
super().__init__(
ambient, mac_address, station_name, sensor_type, sensor_name, device_class
)
self._unit = unit
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@callback
def update_from_latest_data(self):
"""Fetch new state data for the sensor."""
if self._sensor_type == TYPE_SOLARRADIATION_LX:
# If the user requests the solarradiation_lx sensor, use the
# value of the solarradiation sensor and apply a very accurate
# approximation of converting sunlight W/m^2 to lx:
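            # (value / 0.0079, i.e. roughly 126.6 lx per W/m^2 of sunlight)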
w_m2_brightness_val = self._ambient.stations[self._mac_address][
ATTR_LAST_DATA
].get(TYPE_SOLARRADIATION)
if w_m2_brightness_val is None:
self._state = None
else:
self._state = round(float(w_m2_brightness_val) / 0.0079)
else:
self._state = self._ambient.stations[self._mac_address][ATTR_LAST_DATA].get(
self._sensor_type
)
|
from datetime import timedelta
import logging
from homeassistant.components.rpi_power.binary_sensor import (
DESCRIPTION_NORMALIZED,
DESCRIPTION_UNDER_VOLTAGE,
)
from homeassistant.components.rpi_power.const import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry, async_fire_time_changed
ENTITY_ID = "binary_sensor.rpi_power_status"
MODULE = "homeassistant.components.rpi_power.binary_sensor.new_under_voltage"
async def _async_setup_component(hass, detected):
mocked_under_voltage = MagicMock()
type(mocked_under_voltage).get = MagicMock(return_value=detected)
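    # new_under_voltage() (MODULE above) is patched to return this mock, so the
    # sensor's periodic .get() poll reports the requested under-voltage state.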
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
with patch(MODULE, return_value=mocked_under_voltage):
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
return mocked_under_voltage
async def test_new(hass, caplog):
"""Test new entry."""
await _async_setup_component(hass, False)
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_OFF
assert not any(x.levelno == logging.WARNING for x in caplog.records)
async def test_new_detected(hass, caplog):
"""Test new entry with under voltage detected."""
mocked_under_voltage = await _async_setup_component(hass, True)
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_ON
assert (
len(
[
x
for x in caplog.records
if x.levelno == logging.WARNING
and x.message == DESCRIPTION_UNDER_VOLTAGE
]
)
== 1
)
# back to normal
type(mocked_under_voltage).get = MagicMock(return_value=False)
future = dt_util.utcnow() + timedelta(minutes=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert (
len(
[
x
for x in caplog.records
if x.levelno == logging.INFO and x.message == DESCRIPTION_NORMALIZED
]
)
== 1
)
|
from marshmallow import fields, post_dump
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
from lemur.schemas import (
PluginInputSchema,
PluginOutputSchema,
AssociatedCertificateSchema,
)
class NotificationInputSchema(LemurInputSchema):
id = fields.Integer()
label = fields.String(required=True)
description = fields.String()
active = fields.Boolean()
plugin = fields.Nested(PluginInputSchema, required=True)
certificates = fields.Nested(AssociatedCertificateSchema, many=True, missing=[])
class NotificationOutputSchema(LemurOutputSchema):
id = fields.Integer()
label = fields.String()
description = fields.String()
active = fields.Boolean()
options = fields.List(fields.Dict())
plugin = fields.Nested(PluginOutputSchema)
@post_dump
def fill_object(self, data):
if data:
data["plugin"]["pluginOptions"] = data["options"]
return data
class NotificationNestedOutputSchema(LemurOutputSchema):
__envelope__ = False
id = fields.Integer()
label = fields.String()
description = fields.String()
active = fields.Boolean()
options = fields.List(fields.Dict())
plugin = fields.Nested(PluginOutputSchema)
notification_input_schema = NotificationInputSchema()
notification_output_schema = NotificationOutputSchema()
notifications_output_schema = NotificationOutputSchema(many=True)
|
import logging
import time
from collections import namedtuple
from sys import stdout
log = logging.getLogger(__name__)
StepperInfo = namedtuple(
'StepperInfo', 'loop_count,steps,loadscheme,duration,ammo_count,instances')
class StepperStatus(object):
'''
Raises StopIteration when limits are reached.
'''
def __init__(self):
self.core = None # dirty hack. StepperWrapper should pass core here.
self.info = {
'loop_count': 0,
'steps': None,
'loadscheme': None,
'duration': None,
'ammo_count': 0,
'instances': None,
}
self._ammo_count = 0
self._old_ammo_count = 0
self._loop_count = 0
self._af_position = None
self.af_size = None
self.loop_limit = None
self.ammo_limit = None
self.lp_len = None
self.lp_progress = 0
self.af_progress = 0
self._timer = time.time()
def publish(self, key, value):
if key not in self.info:
raise RuntimeError(
"Tried to publish to a non-existent key: %s" % key)
log.debug('Published %s to %s', value, key)
self.info[key] = value
@property
def af_position(self):
return self._af_position
@af_position.setter
def af_position(self, value):
self._af_position = value
self.update_af_progress()
@property
def ammo_count(self):
return self._ammo_count
@ammo_count.setter
def ammo_count(self, value):
self._ammo_count = value
self.update_lp_progress()
if self.ammo_limit and value > self.ammo_limit:
print()
log.info("Ammo limit reached: %s", self.ammo_limit)
raise StopIteration
def inc_ammo_count(self):
self.ammo_count += 1
@property
def loop_count(self):
return self._loop_count
@loop_count.setter
def loop_count(self, value):
self._loop_count = value
if self.loop_limit and value >= self.loop_limit:
print() # do not overwrite status (go to new line)
log.info("Loop limit reached: %s", self.loop_limit)
raise StopIteration
def inc_loop_count(self):
self.loop_count += 1
def get_info(self):
self.info['ammo_count'] = self._ammo_count
self.info['loop_count'] = self._loop_count
for key in self.info:
if self.info[key] is None:
raise RuntimeError(
"Information for %s is not published yet." % key)
return StepperInfo(**self.info)
def update_view(self):
ammo_generated = self._ammo_count - self._old_ammo_count
self._old_ammo_count = self._ammo_count
cur_time = time.time()
time_delta = cur_time - self._timer
self._timer = cur_time
if time_delta > 0:
stdout.write(
"AF: %3s%%, LP: %3s%%, loops: %10s, speed: %5s Krps\r" % (
self.af_progress, self.lp_progress, self.loop_count,
int(ammo_generated / time_delta / 1000.0)))
stdout.flush()
if self.core:
self.core.publish("stepper", "progress", self.lp_progress)
self.core.publish("stepper", "loop_count", self.loop_count)
self.core.publish(
"stepper", "speed",
"%s Krps" % int(ammo_generated / time_delta / 1000.0))
def update_af_progress(self):
if self.af_size and self.loop_limit and self.af_position is not None:
bytes_read = self.af_size * self.loop_count + self.af_position
total_bytes = self.af_size * self.loop_limit
progress = int(float(bytes_read) / float(total_bytes) * 100.0)
else:
progress = 100
if self.af_progress != progress:
self.af_progress = progress
self.update_view()
def update_lp_progress(self):
if self.ammo_limit or self.lp_len:
if self.ammo_limit:
if self.lp_len:
max_ammo = min(self.ammo_limit, self.lp_len)
else:
max_ammo = self.ammo_limit
else:
max_ammo = self.lp_len
progress = int(float(self.ammo_count) / float(max_ammo) * 100.0)
else:
progress = 100
if self.lp_progress != progress:
self.lp_progress = progress
self.update_view()
status = StepperStatus()
|
import os
import shutil
import tempfile
import requests
import six
def get_stash_dir():
return os.path.join(os.path.expanduser("~"), "Documents", "site-packages", "stash")
def remove_stash():
shutil.rmtree(get_stash_dir())
def install_stash(repo="ywangd", branch="master"):
if not "TMPDIR" in os.environ:
os.environ["TMPDIR"] = tempfile.gettempdir()
ns = {"_owner": repo, "_br": branch}
exec(requests.get("https://bit.ly/get-stash").content, ns, ns)
def parse_gh_target(s):
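    # e.g. "" -> ("ywangd", "master"), "dev" -> ("ywangd", "dev"),
    # "user/branch" or "user:branch" -> ("user", "branch")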
if s == "":
return "ywangd", "master"
s = s.replace("/", ":")
if ":" not in s:
s = "ywangd:" + s
repo, branch = s.split(":")
return repo, branch
def main():
ts = six.moves.input("New target (repo:branch, empty for default): ")
t = parse_gh_target(ts)
if os.path.exists(get_stash_dir()):
remove_stash()
install_stash(*t)
if __name__ == "__main__":
main()
|