from django import template
from django.conf import settings
from django.contrib.staticfiles.storage import staticfiles_storage
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy
register = template.Library()
SOCIALS = {
"auth0": {
"name": settings.SOCIAL_AUTH_AUTH0_TITLE,
"image": settings.SOCIAL_AUTH_AUTH0_IMAGE,
},
"saml": {
"name": settings.SOCIAL_AUTH_SAML_TITLE,
"image": settings.SOCIAL_AUTH_SAML_IMAGE,
},
"google": {"name": "Google", "image": "google.svg"},
"google-oauth2": {"name": "Google", "image": "google.svg"},
"google-plus": {"name": "Google+", "image": "google.svg"},
"password": {"name": gettext_lazy("Password"), "image": "password.svg"},
"email": {"name": gettext_lazy("E-mail"), "image": "email.svg"},
"ubuntu": {"name": "Ubuntu", "image": "ubuntu.svg"},
"opensuse": {"name": "openSUSE", "image": "opensuse.svg"},
"fedora": {"name": "Fedora", "image": "fedora.svg"},
"facebook": {"name": "Facebook", "image": "facebook.svg"},
"github": {"name": "GitHub", "image": "github.svg"},
"github-enterprise": {"name": "GitHub Enterprise", "image": "github.svg"},
"bitbucket": {"name": "Bitbucket", "image": "bitbucket.svg"},
"bitbucket-oauth2": {"name": "Bitbucket", "image": "bitbucket.svg"},
"azuread-oauth2": {"name": "Azure", "image": "azure.svg"},
"azuread-tenant-oauth2": {"name": "Azure", "image": "azure.svg"},
"gitlab": {"name": "GitLab", "image": "gitlab.svg"},
"amazon": {"name": "Amazon", "image": "amazon.svg"},
"twitter": {"name": "Twitter", "image": "twitter.svg"},
"stackoverflow": {"name": "Stack Overflow", "image": "stackoverflow.svg"},
}
IMAGE_SOCIAL_TEMPLATE = """
<img class="auth-image" src="{image}" />
"""
SOCIAL_TEMPLATE = """
{icon}
{separator}
{name}
"""
@register.simple_tag
def auth_name(auth, separator="<br />"):
"""Create HTML markup for social authentication method."""
params = {"name": auth, "separator": separator, "image": "password.svg"}
if auth in SOCIALS:
params.update(SOCIALS[auth])
if not params["image"].startswith("http"):
params["image"] = staticfiles_storage.url("auth/" + params["image"])
params["icon"] = IMAGE_SOCIAL_TEMPLATE.format(**params)
return mark_safe(SOCIAL_TEMPLATE.format(**params))
def get_auth_name(auth):
"""Get nice name for authentication backend."""
if auth in SOCIALS:
return SOCIALS[auth]["name"]
return auth
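# A template usage sketch (hypothetical tag-library name), assuming this
# module is registered as a Django template tag library:
#
#   {% load authnames %}
#   {% auth_name "github" %}
#   {% auth_name "google-oauth2" separator="" %}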
|
from gzip import GzipFile
import os.path as op
import re
import time
import uuid
import numpy as np
from scipy import linalg, sparse
from .constants import FIFF
from ..utils import logger, _file_like
from ..utils.numerics import _cal_to_julian
# We choose a "magic" date to store (because meas_date is obligatory)
# to treat as meas_date=None. This one should be impossible for systems
# to write -- the second field is microseconds, so anything >= 1e6
# should be moved into the first field (seconds).
DATE_NONE = (0, 2 ** 31 - 1)
def _write(fid, data, kind, data_size, FIFFT_TYPE, dtype):
"""Write data."""
if isinstance(data, np.ndarray):
data_size *= data.size
# XXX for string types the data size is used as
# computed in ``write_string``.
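    # Tag wire layout used throughout this module: four big-endian int32
    # header fields (kind, type, size, next) followed by the raw data bytes.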
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_TYPE, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(data, dtype=dtype).tobytes())
def _get_split_size(split_size):
"""Convert human-readable bytes to machine-readable bytes."""
if isinstance(split_size, str):
exp = dict(MB=20, GB=30).get(split_size[-2:], None)
if exp is None:
            raise ValueError('split_size has to end with either '
                             '"MB" or "GB"')
split_size = int(float(split_size[:-2]) * 2 ** exp)
if split_size > 2147483648:
raise ValueError('split_size cannot be larger than 2GB')
return split_size
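# A quick usage sketch (values follow directly from the conversion above):
#
#   _get_split_size('2GB')    # -> 2147483648 (2 * 2 ** 30)
#   _get_split_size('500MB')  # -> 524288000 (500 * 2 ** 20)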
_NEXT_FILE_BUFFER = 1048576 # 2 ** 20 extra cushion for last post-data tags
def write_nop(fid, last=False):
"""Write a FIFF_NOP."""
fid.write(np.array(FIFF.FIFF_NOP, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFT_VOID, dtype='>i4').tobytes())
fid.write(np.array(0, dtype='>i4').tobytes())
next_ = FIFF.FIFFV_NEXT_NONE if last else FIFF.FIFFV_NEXT_SEQ
fid.write(np.array(next_, dtype='>i4').tobytes())
INT32_MAX = 2147483647
def write_int(fid, kind, data):
"""Write a 32-bit integer tag to a fif file."""
data_size = 4
data = np.asarray(data)
if data.dtype.kind not in 'uib' and data.size > 0:
raise TypeError(
f'Cannot safely write data with dtype {data.dtype} as int')
max_val = data.max() if data.size > 0 else 0
if max_val > INT32_MAX:
raise TypeError(
f'Value {max_val} exceeds maximum allowed ({INT32_MAX}) for '
f'tag {kind}')
data = data.astype('>i4').T
_write(fid, data, kind, data_size, FIFF.FIFFT_INT, '>i4')
def write_double(fid, kind, data):
"""Write a double-precision floating point tag to a fif file."""
data_size = 8
data = np.array(data, dtype='>f8').T
_write(fid, data, kind, data_size, FIFF.FIFFT_DOUBLE, '>f8')
def write_float(fid, kind, data):
"""Write a single-precision floating point tag to a fif file."""
data_size = 4
data = np.array(data, dtype='>f4').T
_write(fid, data, kind, data_size, FIFF.FIFFT_FLOAT, '>f4')
def write_dau_pack16(fid, kind, data):
"""Write a dau_pack16 tag to a fif file."""
data_size = 2
data = np.array(data, dtype='>i2').T
_write(fid, data, kind, data_size, FIFF.FIFFT_DAU_PACK16, '>i2')
def write_complex64(fid, kind, data):
"""Write a 64 bit complex floating point tag to a fif file."""
data_size = 8
data = np.array(data, dtype='>c8').T
_write(fid, data, kind, data_size, FIFF.FIFFT_COMPLEX_FLOAT, '>c8')
def write_complex128(fid, kind, data):
"""Write a 128 bit complex floating point tag to a fif file."""
data_size = 16
data = np.array(data, dtype='>c16').T
    _write(fid, data, kind, data_size, FIFF.FIFFT_COMPLEX_DOUBLE, '>c16')
def write_julian(fid, kind, data):
"""Write a Julian-formatted date to a FIF file."""
assert len(data) == 3
data_size = 4
jd = np.sum(_cal_to_julian(*data))
data = np.array(jd, dtype='>i4')
_write(fid, data, kind, data_size, FIFF.FIFFT_JULIAN, '>i4')
def write_string(fid, kind, data):
"""Write a string tag."""
str_data = data.encode('latin1')
    data_size = len(str_data)  # size in bytes after latin1 encoding
my_dtype = '>a' # py2/3 compatible on writing -- don't ask me why
if data_size > 0:
_write(fid, str_data, kind, data_size, FIFF.FIFFT_STRING, my_dtype)
def write_name_list(fid, kind, data):
"""Write a colon-separated list of names.
Parameters
----------
data : list of strings
"""
write_string(fid, kind, ':'.join(data))
def write_float_matrix(fid, kind, mat):
"""Write a single-precision floating-point matrix tag."""
FIFFT_MATRIX = 1 << 30
FIFFT_MATRIX_FLOAT = FIFF.FIFFT_FLOAT | FIFFT_MATRIX
data_size = 4 * mat.size + 4 * (mat.ndim + 1)
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_FLOAT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat, dtype='>f4').tobytes())
dims = np.empty(mat.ndim + 1, dtype=np.int32)
dims[:mat.ndim] = mat.shape[::-1]
dims[-1] = mat.ndim
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def write_double_matrix(fid, kind, mat):
"""Write a double-precision floating-point matrix tag."""
FIFFT_MATRIX = 1 << 30
FIFFT_MATRIX_DOUBLE = FIFF.FIFFT_DOUBLE | FIFFT_MATRIX
data_size = 8 * mat.size + 4 * (mat.ndim + 1)
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_DOUBLE, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat, dtype='>f8').tobytes())
dims = np.empty(mat.ndim + 1, dtype=np.int32)
dims[:mat.ndim] = mat.shape[::-1]
dims[-1] = mat.ndim
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def write_int_matrix(fid, kind, mat):
"""Write integer 32 matrix tag."""
FIFFT_MATRIX = 1 << 30
FIFFT_MATRIX_INT = FIFF.FIFFT_INT | FIFFT_MATRIX
data_size = 4 * mat.size + 4 * 3
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_INT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat, dtype='>i4').tobytes())
dims = np.empty(3, dtype=np.int32)
dims[0] = mat.shape[1]
dims[1] = mat.shape[0]
dims[2] = 2
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def write_complex_float_matrix(fid, kind, mat):
"""Write complex 64 matrix tag."""
FIFFT_MATRIX = 1 << 30
FIFFT_MATRIX_COMPLEX_FLOAT = FIFF.FIFFT_COMPLEX_FLOAT | FIFFT_MATRIX
data_size = 4 * 2 * mat.size + 4 * (mat.ndim + 1)
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_COMPLEX_FLOAT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat, dtype='>c8').tobytes())
dims = np.empty(mat.ndim + 1, dtype=np.int32)
dims[:mat.ndim] = mat.shape[::-1]
dims[-1] = mat.ndim
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def write_complex_double_matrix(fid, kind, mat):
"""Write complex 128 matrix tag."""
FIFFT_MATRIX = 1 << 30
FIFFT_MATRIX_COMPLEX_DOUBLE = FIFF.FIFFT_COMPLEX_DOUBLE | FIFFT_MATRIX
data_size = 8 * 2 * mat.size + 4 * (mat.ndim + 1)
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_COMPLEX_DOUBLE, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat, dtype='>c16').tobytes())
dims = np.empty(mat.ndim + 1, dtype=np.int32)
dims[:mat.ndim] = mat.shape[::-1]
dims[-1] = mat.ndim
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def get_machid():
"""Get (mostly) unique machine ID.
Returns
-------
ids : array (length 2, int32)
The machine identifier used in MNE.
"""
mac = b'%012x' % uuid.getnode() # byte conversion for Py3
mac = re.findall(b'..', mac) # split string
mac += [b'00', b'00'] # add two more fields
# Convert to integer in reverse-order (for some reason)
from codecs import encode
mac = b''.join([encode(h, 'hex_codec') for h in mac[::-1]])
ids = np.flipud(np.frombuffer(mac, np.int32, count=2))
return ids
def get_new_file_id():
"""Create a new file ID tag."""
secs, usecs = divmod(time.time(), 1.)
secs, usecs = int(secs), int(usecs * 1e6)
return {'machid': get_machid(), 'version': FIFF.FIFFC_VERSION,
'secs': secs, 'usecs': usecs}
def write_id(fid, kind, id_=None):
"""Write fiff id."""
id_ = _generate_meas_id() if id_ is None else id_
data_size = 5 * 4 # The id comprises five integers
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFT_ID_STRUCT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
# Collect the bits together for one write
arr = np.array([id_['version'],
id_['machid'][0], id_['machid'][1],
id_['secs'], id_['usecs']], dtype='>i4')
fid.write(arr.tobytes())
def start_block(fid, kind):
"""Write a FIFF_BLOCK_START tag."""
write_int(fid, FIFF.FIFF_BLOCK_START, kind)
def end_block(fid, kind):
"""Write a FIFF_BLOCK_END tag."""
write_int(fid, FIFF.FIFF_BLOCK_END, kind)
def start_file(fname, id_=None):
"""Open a fif file for writing and writes the compulsory header tags.
Parameters
----------
fname : string | fid
The name of the file to open. It is recommended
that the name ends with .fif or .fif.gz. Can also be an
already opened file.
id_ : dict | None
ID to use for the FIFF_FILE_ID.
"""
if _file_like(fname):
logger.debug('Writing using %s I/O' % type(fname))
fid = fname
fid.seek(0)
else:
fname = str(fname)
if op.splitext(fname)[1].lower() == '.gz':
logger.debug('Writing using gzip')
# defaults to compression level 9, which is barely smaller but much
# slower. 2 offers a good compromise.
fid = GzipFile(fname, "wb", compresslevel=2)
else:
logger.debug('Writing using normal I/O')
fid = open(fname, "wb")
# Write the compulsory items
write_id(fid, FIFF.FIFF_FILE_ID, id_)
write_int(fid, FIFF.FIFF_DIR_POINTER, -1)
write_int(fid, FIFF.FIFF_FREE_LIST, -1)
return fid
def check_fiff_length(fid, close=True):
"""Ensure our file hasn't grown too large to work properly."""
if fid.tell() > 2147483648: # 2 ** 31, FIFF uses signed 32-bit locations
if close:
fid.close()
raise IOError('FIFF file exceeded 2GB limit, please split file, reduce'
' split_size (if possible), or save to a different '
'format')
def end_file(fid):
"""Write the closing tags to a fif file and closes the file."""
write_nop(fid, last=True)
check_fiff_length(fid)
fid.close()
def write_coord_trans(fid, trans):
"""Write a coordinate transformation structure."""
data_size = 4 * 2 * 12 + 4 * 2
fid.write(np.array(FIFF.FIFF_COORD_TRANS, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFT_COORD_TRANS_STRUCT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(trans['from'], dtype='>i4').tobytes())
fid.write(np.array(trans['to'], dtype='>i4').tobytes())
# The transform...
rot = trans['trans'][:3, :3]
move = trans['trans'][:3, 3]
fid.write(np.array(rot, dtype='>f4').tobytes())
fid.write(np.array(move, dtype='>f4').tobytes())
# ...and its inverse
trans_inv = linalg.inv(trans['trans'])
rot = trans_inv[:3, :3]
move = trans_inv[:3, 3]
fid.write(np.array(rot, dtype='>f4').tobytes())
fid.write(np.array(move, dtype='>f4').tobytes())
def write_ch_info(fid, ch):
"""Write a channel information record to a fif file."""
data_size = 4 * 13 + 4 * 7 + 16
fid.write(np.array(FIFF.FIFF_CH_INFO, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFT_CH_INFO_STRUCT, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
# Start writing fiffChInfoRec
fid.write(np.array(ch['scanno'], dtype='>i4').tobytes())
fid.write(np.array(ch['logno'], dtype='>i4').tobytes())
fid.write(np.array(ch['kind'], dtype='>i4').tobytes())
fid.write(np.array(ch['range'], dtype='>f4').tobytes())
fid.write(np.array(ch['cal'], dtype='>f4').tobytes())
fid.write(np.array(ch['coil_type'], dtype='>i4').tobytes())
fid.write(np.array(ch['loc'], dtype='>f4').tobytes()) # writing 12 values
# unit and unit multiplier
fid.write(np.array(ch['unit'], dtype='>i4').tobytes())
fid.write(np.array(ch['unit_mul'], dtype='>i4').tobytes())
# Finally channel name
ch_name = ch['ch_name'][:15]
fid.write(np.array(ch_name, dtype='>c').tobytes())
fid.write(b'\0' * (16 - len(ch_name)))
def write_dig_points(fid, dig, block=False, coord_frame=None):
"""Write a set of digitizer data points into a fif file."""
if dig is not None:
data_size = 5 * 4
if block:
start_block(fid, FIFF.FIFFB_ISOTRAK)
if coord_frame is not None:
write_int(fid, FIFF.FIFF_MNE_COORD_FRAME, coord_frame)
for d in dig:
fid.write(np.array(FIFF.FIFF_DIG_POINT, '>i4').tobytes())
fid.write(np.array(FIFF.FIFFT_DIG_POINT_STRUCT, '>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, '>i4').tobytes())
# Start writing fiffDigPointRec
fid.write(np.array(d['kind'], '>i4').tobytes())
fid.write(np.array(d['ident'], '>i4').tobytes())
fid.write(np.array(d['r'][:3], '>f4').tobytes())
if block:
end_block(fid, FIFF.FIFFB_ISOTRAK)
def write_float_sparse_rcs(fid, kind, mat):
"""Write a single-precision sparse compressed row matrix tag."""
return write_float_sparse(fid, kind, mat, fmt='csr')
def write_float_sparse_ccs(fid, kind, mat):
"""Write a single-precision sparse compressed column matrix tag."""
return write_float_sparse(fid, kind, mat, fmt='csc')
def write_float_sparse(fid, kind, mat, fmt='auto'):
"""Write a single-precision floating-point sparse matrix tag."""
from .tag import _matrix_coding_CCS, _matrix_coding_RCS
if fmt == 'auto':
fmt = 'csr' if isinstance(mat, sparse.csr_matrix) else 'csc'
if fmt == 'csr':
need = sparse.csr_matrix
bits = _matrix_coding_RCS
else:
need = sparse.csc_matrix
bits = _matrix_coding_CCS
if not isinstance(mat, need):
raise TypeError('Must write %s, got %s' % (fmt.upper(), type(mat),))
FIFFT_MATRIX = bits << 16
FIFFT_MATRIX_FLOAT_RCS = FIFF.FIFFT_FLOAT | FIFFT_MATRIX
nnzm = mat.nnz
nrow = mat.shape[0]
data_size = 4 * nnzm + 4 * nnzm + 4 * (nrow + 1) + 4 * 4
fid.write(np.array(kind, dtype='>i4').tobytes())
fid.write(np.array(FIFFT_MATRIX_FLOAT_RCS, dtype='>i4').tobytes())
fid.write(np.array(data_size, dtype='>i4').tobytes())
fid.write(np.array(FIFF.FIFFV_NEXT_SEQ, dtype='>i4').tobytes())
fid.write(np.array(mat.data, dtype='>f4').tobytes())
fid.write(np.array(mat.indices, dtype='>i4').tobytes())
fid.write(np.array(mat.indptr, dtype='>i4').tobytes())
dims = [nnzm, mat.shape[0], mat.shape[1], 2]
fid.write(np.array(dims, dtype='>i4').tobytes())
check_fiff_length(fid)
def _generate_meas_id():
"""Generate a new meas_id dict."""
id_ = dict()
id_['version'] = FIFF.FIFFC_VERSION
id_['machid'] = get_machid()
id_['secs'], id_['usecs'] = DATE_NONE
return id_
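# A minimal end-to-end sketch (hypothetical file name), assuming the block and
# tag constants referenced here exist in the imported ``FIFF`` namespace:
#
#   fid = start_file('sample_raw.fif')
#   start_block(fid, FIFF.FIFFB_MEAS)
#   write_string(fid, FIFF.FIFF_COMMENT, 'written by this module')
#   end_block(fid, FIFF.FIFFB_MEAS)
#   end_file(fid)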
|
import re # noqa: F401
import sys # noqa: F401
from paasta_tools.paastaapi.api_client import ApiClient, Endpoint
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from paasta_tools.paastaapi.model.marathon_dashboard import MarathonDashboard
class MarathonDashboardApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __marathon_dashboard(
self,
**kwargs
):
"""Get marathon service instances and their shards # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.marathon_dashboard(async_req=True)
>>> result = thread.get()
Keyword Args:
                _return_http_data_only (bool): response data without HTTP status
                    code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
                _check_input_type (bool): specifies if type checking
                    should be done on the data sent to the server.
Default is True.
                _check_return_type (bool): specifies if type checking
                    should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
MarathonDashboard
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.marathon_dashboard = Endpoint(
settings={
'response_type': (MarathonDashboard,),
'auth': [],
'endpoint_path': '/marathon_dashboard',
'operation_id': 'marathon_dashboard',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__marathon_dashboard
)
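# A usage sketch (hypothetical host and Configuration module path), assuming
# the usual conventions of OpenAPI-generator Python clients:
#
#   from paasta_tools.paastaapi.api_client import ApiClient
#   from paasta_tools.paastaapi.configuration import Configuration
#
#   client = ApiClient(Configuration(host="http://localhost:5054"))
#   dashboard = MarathonDashboardApi(client).marathon_dashboard()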
|
import re
from typing import Optional
from homeassistant import core, setup
from homeassistant.components.cover.intent import INTENT_CLOSE_COVER, INTENT_OPEN_COVER
from homeassistant.components.shopping_list.intent import (
INTENT_ADD_ITEM,
INTENT_LAST_ITEMS,
)
from homeassistant.const import EVENT_COMPONENT_LOADED
from homeassistant.core import callback
from homeassistant.helpers import intent
from homeassistant.setup import ATTR_COMPONENT
from .agent import AbstractConversationAgent
from .const import DOMAIN
from .util import create_matcher
REGEX_TURN_COMMAND = re.compile(r"turn (?P<name>(?: |\w)+) (?P<command>\w+)")
REGEX_TYPE = type(re.compile(""))
UTTERANCES = {
"cover": {
INTENT_OPEN_COVER: ["Open [the] [a] [an] {name}[s]"],
INTENT_CLOSE_COVER: ["Close [the] [a] [an] {name}[s]"],
},
"shopping_list": {
INTENT_ADD_ITEM: ["Add [the] [a] [an] {item} to my shopping list"],
INTENT_LAST_ITEMS: ["What is on my shopping list"],
},
}
@core.callback
def async_register(hass, intent_type, utterances):
"""Register utterances and any custom intents for the default agent.
Registrations don't require conversations to be loaded. They will become
active once the conversation component is loaded.
"""
intents = hass.data.setdefault(DOMAIN, {})
conf = intents.setdefault(intent_type, [])
for utterance in utterances:
if isinstance(utterance, REGEX_TYPE):
conf.append(utterance)
else:
conf.append(create_matcher(utterance))
class DefaultAgent(AbstractConversationAgent):
"""Default agent for conversation agent."""
def __init__(self, hass: core.HomeAssistant):
"""Initialize the default agent."""
self.hass = hass
async def async_initialize(self, config):
"""Initialize the default agent."""
if "intent" not in self.hass.config.components:
await setup.async_setup_component(self.hass, "intent", {})
config = config.get(DOMAIN, {})
intents = self.hass.data.setdefault(DOMAIN, {})
for intent_type, utterances in config.get("intents", {}).items():
conf = intents.get(intent_type)
if conf is None:
conf = intents[intent_type] = []
conf.extend(create_matcher(utterance) for utterance in utterances)
# We strip trailing 's' from name because our state matcher will fail
# if a letter is not there. By removing 's' we can match singular and
# plural names.
async_register(
self.hass,
intent.INTENT_TURN_ON,
["Turn [the] [a] {name}[s] on", "Turn on [the] [a] [an] {name}[s]"],
)
async_register(
self.hass,
intent.INTENT_TURN_OFF,
["Turn [the] [a] [an] {name}[s] off", "Turn off [the] [a] [an] {name}[s]"],
)
async_register(
self.hass,
intent.INTENT_TOGGLE,
["Toggle [the] [a] [an] {name}[s]", "[the] [a] [an] {name}[s] toggle"],
)
@callback
def component_loaded(event):
"""Handle a new component loaded."""
self.register_utterances(event.data[ATTR_COMPONENT])
self.hass.bus.async_listen(EVENT_COMPONENT_LOADED, component_loaded)
# Check already loaded components.
for component in self.hass.config.components:
self.register_utterances(component)
@callback
def register_utterances(self, component):
"""Register utterances for a component."""
if component not in UTTERANCES:
return
for intent_type, sentences in UTTERANCES[component].items():
async_register(self.hass, intent_type, sentences)
async def async_process(
self, text: str, context: core.Context, conversation_id: Optional[str] = None
) -> intent.IntentResponse:
"""Process a sentence."""
intents = self.hass.data[DOMAIN]
for intent_type, matchers in intents.items():
for matcher in matchers:
match = matcher.match(text)
if not match:
continue
return await intent.async_handle(
self.hass,
DOMAIN,
intent_type,
{key: {"value": value} for key, value in match.groupdict().items()},
text,
context,
)
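# A registration sketch (hypothetical intent name), mirroring the
# async_register calls made in async_initialize above:
#
#   async_register(hass, "MyRoutineIntent", ["Run [the] {name} routine[s]"])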
|
import ipaddress
import logging
from pyaehw4a1.aehw4a1 import AehW4a1
import pyaehw4a1.exceptions
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.const import CONF_IP_ADDRESS
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
def coerce_ip(value):
"""Validate that provided value is a valid IP address."""
if not value:
raise vol.Invalid("Must define an IP address")
try:
ipaddress.IPv4Network(value)
except ValueError as err:
raise vol.Invalid("Not a valid IP address") from err
return value
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_IP_ADDRESS, default=[]): vol.All(
                    cv.ensure_list, [vol.All(cv.string, coerce_ip)]
                )
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
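# A configuration.yaml sketch matching the schema above (hypothetical
# addresses; the top-level key is the integration's DOMAIN from .const):
#
#   hisense_aehw4a1:
#     ip_address:
#       - 192.168.0.25
#       - 192.168.0.26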
async def async_setup(hass, config):
"""Set up the Hisense AEH-W4A1 integration."""
conf = config.get(DOMAIN)
hass.data[DOMAIN] = {}
if conf is not None:
devices = conf[CONF_IP_ADDRESS][:]
for device in devices:
try:
await AehW4a1(device).check()
except pyaehw4a1.exceptions.ConnectionError:
conf[CONF_IP_ADDRESS].remove(device)
_LOGGER.warning("Hisense AEH-W4A1 at %s not found", device)
if conf[CONF_IP_ADDRESS]:
hass.data[DOMAIN] = conf
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry for Hisense AEH-W4A1."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, CLIMATE_DOMAIN)
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.config_entries.async_forward_entry_unload(entry, CLIMATE_DOMAIN)
|
import os
import sys
import os.path as op
import shutil
from invoke import task
from ._config import DOC_DIR, DOC_BUILD_DIR
@task(help=dict(clean='clear the doc output; start fresh',
build='build html docs',
show='show the docs in the browser.'))
def docs(ctx, clean=False, build=False, show=False, **kwargs):
""" make API documentation
"""
# Prepare
if not (clean or build or show):
sys.exit('Task "docs" must be called with --clean, --build or --show')
if clean:
sphinx_clean(DOC_BUILD_DIR)
if build:
sphinx_build(DOC_DIR, DOC_BUILD_DIR)
if show:
sphinx_show(os.path.join(DOC_BUILD_DIR, 'html'))
def sphinx_clean(build_dir):
if op.isdir(build_dir):
shutil.rmtree(build_dir)
os.mkdir(build_dir)
os.mkdir(os.path.join(build_dir, 'html'))
print('Cleared build directory.')
def sphinx_build(src_dir, build_dir):
import sphinx
cmd = [ '-b', 'html',
'-d', op.join(build_dir, 'doctrees'),
src_dir, # Source
op.join(build_dir, 'html'), # Dest
]
if sphinx.version_info > (1, 7):
import sphinx.cmd.build
ret = sphinx.cmd.build.build_main(cmd)
else:
ret = sphinx.build_main(['sphinx-build'] + cmd)
if ret != 0:
raise RuntimeError('Sphinx error: %s' % ret)
print("Build finished. The HTML pages are in %s/html." % build_dir)
def sphinx_show(html_dir):
index_html = op.join(html_dir, 'index.html')
if not op.isfile(index_html):
sys.exit('Cannot show pages, build the html first.')
import webbrowser
webbrowser.open_new_tab(index_html)
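# Typical invocations (a sketch, assuming this task module is collected by
# invoke):
#
#   invoke docs --clean --build   # rebuild the HTML docs from scratch
#   invoke docs --show            # open the rendered pages in a browser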
|
import unittest
from unittest import mock
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from homeassistant.components import dyson as dyson_parent
from homeassistant.components.dyson import sensor as dyson
from homeassistant.const import (
PERCENTAGE,
STATE_OFF,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TIME_HOURS,
)
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component
from .common import load_mock_device
from tests.async_mock import patch
from tests.common import get_test_home_assistant
def _get_dyson_purecool_device():
"""Return a valid device provide by Dyson web services."""
device = mock.Mock(spec=DysonPureCool)
load_mock_device(device)
return device
def _get_config():
"""Return a config dictionary."""
return {
dyson_parent.DOMAIN: {
dyson_parent.CONF_USERNAME: "email",
dyson_parent.CONF_PASSWORD: "password",
dyson_parent.CONF_LANGUAGE: "GB",
dyson_parent.CONF_DEVICES: [
{"device_id": "XX-XXXXX-XX", "device_ip": "192.168.0.1"}
],
}
}
def _get_device_without_state():
"""Return a valid device provide by Dyson web services."""
device = mock.Mock(spec=DysonPureCoolLink)
device.name = "Device_name"
device.state = None
device.environmental_state = None
return device
def _get_with_state():
"""Return a valid device with state values."""
device = mock.Mock()
load_mock_device(device)
device.name = "Device_name"
device.state.filter_life = 100
device.environmental_state.dust = 5
device.environmental_state.humidity = 45
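    # 295 is in kelvin; the temperature tests below expect this to read back
    # as 21.9 °C (295 - 273.15, rounded) and 71.3 °F.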
device.environmental_state.temperature = 295
device.environmental_state.volatil_organic_compounds = 2
return device
def _get_with_standby_monitoring():
"""Return a valid device with state but with standby monitoring disable."""
device = mock.Mock()
load_mock_device(device)
device.name = "Device_name"
device.environmental_state.humidity = 0
device.environmental_state.temperature = 0
return device
class DysonTest(unittest.TestCase):
"""Dyson Sensor component test class."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component_with_no_devices(self):
"""Test setup component with no devices."""
self.hass.data[dyson.DYSON_DEVICES] = []
add_entities = mock.MagicMock()
dyson.setup_platform(self.hass, None, add_entities)
add_entities.assert_not_called()
def test_setup_component(self):
"""Test setup component with devices."""
def _add_device(devices):
assert len(devices) == 5
assert devices[0].name == "Device_name Filter Life"
assert devices[1].name == "Device_name Dust"
assert devices[2].name == "Device_name Humidity"
assert devices[3].name == "Device_name Temperature"
assert devices[4].name == "Device_name AQI"
device_fan = _get_device_without_state()
device_non_fan = _get_with_state()
self.hass.data[dyson.DYSON_DEVICES] = [device_fan, device_non_fan]
dyson.setup_platform(self.hass, None, _add_device, mock.MagicMock())
def test_dyson_filter_life_sensor(self):
"""Test filter life sensor with no value."""
sensor = dyson.DysonFilterLifeSensor(_get_device_without_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state is None
assert sensor.unit_of_measurement == TIME_HOURS
assert sensor.name == "Device_name Filter Life"
assert sensor.entity_id == "sensor.dyson_1"
sensor.on_message("message")
def test_dyson_filter_life_sensor_with_values(self):
"""Test filter sensor with values."""
sensor = dyson.DysonFilterLifeSensor(_get_with_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 100
assert sensor.unit_of_measurement == TIME_HOURS
assert sensor.name == "Device_name Filter Life"
assert sensor.entity_id == "sensor.dyson_1"
sensor.on_message("message")
def test_dyson_dust_sensor(self):
"""Test dust sensor with no value."""
sensor = dyson.DysonDustSensor(_get_device_without_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state is None
assert sensor.unit_of_measurement is None
assert sensor.name == "Device_name Dust"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_dust_sensor_with_values(self):
"""Test dust sensor with values."""
sensor = dyson.DysonDustSensor(_get_with_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 5
assert sensor.unit_of_measurement is None
assert sensor.name == "Device_name Dust"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_humidity_sensor(self):
"""Test humidity sensor with no value."""
sensor = dyson.DysonHumiditySensor(_get_device_without_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state is None
assert sensor.unit_of_measurement == PERCENTAGE
assert sensor.name == "Device_name Humidity"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_humidity_sensor_with_values(self):
"""Test humidity sensor with values."""
sensor = dyson.DysonHumiditySensor(_get_with_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 45
assert sensor.unit_of_measurement == PERCENTAGE
assert sensor.name == "Device_name Humidity"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_humidity_standby_monitoring(self):
"""Test humidity sensor while device is in standby monitoring."""
sensor = dyson.DysonHumiditySensor(_get_with_standby_monitoring())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == STATE_OFF
assert sensor.unit_of_measurement == PERCENTAGE
assert sensor.name == "Device_name Humidity"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_temperature_sensor(self):
"""Test temperature sensor with no value."""
sensor = dyson.DysonTemperatureSensor(_get_device_without_state(), TEMP_CELSIUS)
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state is None
assert sensor.unit_of_measurement == TEMP_CELSIUS
assert sensor.name == "Device_name Temperature"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_temperature_sensor_with_values(self):
"""Test temperature sensor with values."""
sensor = dyson.DysonTemperatureSensor(_get_with_state(), TEMP_CELSIUS)
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 21.9
assert sensor.unit_of_measurement == TEMP_CELSIUS
assert sensor.name == "Device_name Temperature"
assert sensor.entity_id == "sensor.dyson_1"
sensor = dyson.DysonTemperatureSensor(_get_with_state(), TEMP_FAHRENHEIT)
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 71.3
assert sensor.unit_of_measurement == TEMP_FAHRENHEIT
assert sensor.name == "Device_name Temperature"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_temperature_standby_monitoring(self):
"""Test temperature sensor while device is in standby monitoring."""
sensor = dyson.DysonTemperatureSensor(
_get_with_standby_monitoring(), TEMP_CELSIUS
)
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == STATE_OFF
assert sensor.unit_of_measurement == TEMP_CELSIUS
assert sensor.name == "Device_name Temperature"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_air_quality_sensor(self):
"""Test air quality sensor with no value."""
sensor = dyson.DysonAirQualitySensor(_get_device_without_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state is None
assert sensor.unit_of_measurement is None
assert sensor.name == "Device_name AQI"
assert sensor.entity_id == "sensor.dyson_1"
def test_dyson_air_quality_sensor_with_values(self):
"""Test air quality sensor with values."""
sensor = dyson.DysonAirQualitySensor(_get_with_state())
sensor.hass = self.hass
sensor.entity_id = "sensor.dyson_1"
assert not sensor.should_poll
assert sensor.state == 2
assert sensor.unit_of_measurement is None
assert sensor.name == "Device_name AQI"
assert sensor.entity_id == "sensor.dyson_1"
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_component_setup_only_once(devices, login, hass):
"""Test if entities are created only once."""
config = _get_config()
await async_setup_component(hass, dyson_parent.DOMAIN, config)
await hass.async_block_till_done()
discovery.load_platform(hass, "sensor", dyson_parent.DOMAIN, {}, config)
await hass.async_block_till_done()
assert len(hass.data[dyson.DYSON_SENSOR_DEVICES]) == 2
|
import os
import pytest
from jinja2 import Environment
from jinja2 import loaders
from jinja2.utils import have_async_gen
def pytest_ignore_collect(path):
if "async" in path.basename and not have_async_gen:
return True
return False
@pytest.fixture
def env():
"""returns a new environment."""
return Environment()
@pytest.fixture
def dict_loader():
"""returns DictLoader"""
return loaders.DictLoader({"justdict.html": "FOO"})
@pytest.fixture
def package_loader():
"""returns PackageLoader initialized from templates"""
return loaders.PackageLoader("res", "templates")
@pytest.fixture
def filesystem_loader():
"""returns FileSystemLoader initialized to res/templates directory"""
here = os.path.dirname(os.path.abspath(__file__))
return loaders.FileSystemLoader(here + "/res/templates")
@pytest.fixture
def function_loader():
"""returns a FunctionLoader"""
return loaders.FunctionLoader({"justfunction.html": "FOO"}.get)
@pytest.fixture
def choice_loader(dict_loader, package_loader):
"""returns a ChoiceLoader"""
return loaders.ChoiceLoader([dict_loader, package_loader])
@pytest.fixture
def prefix_loader(filesystem_loader, dict_loader):
"""returns a PrefixLoader"""
return loaders.PrefixLoader({"a": filesystem_loader, "b": dict_loader})
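# A minimal sketch of how these fixtures combine in a test (hypothetical test
# function):
#
#   def test_dict_loader(env, dict_loader):
#       env.loader = dict_loader
#       assert env.get_template("justdict.html").render() == "FOO"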
|
import copy
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.db import models
from django.utils.functional import LazyObject, empty
from polymorphic.models import PolymorphicModelBase
from shop.conf import app_settings
class DeferredRelatedField:
def __init__(self, to, **kwargs):
try:
self.abstract_model = to._meta.object_name
except AttributeError:
assert isinstance(to, str), "%s(%r) is invalid. First parameter must be either a model or a model name" % (self.__class__.__name__, to)
self.abstract_model = to
self.options = dict(**kwargs)
class OneToOneField(DeferredRelatedField):
"""
Use this class to specify a one-to-one key in abstract classes. It will be converted into a real
``OneToOneField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.OneToOneField
def __init__(self, to, on_delete, **kwargs):
super().__init__(to, on_delete=on_delete, **kwargs)
class ForeignKey(DeferredRelatedField):
"""
Use this class to specify foreign keys in abstract classes. It will be converted into a real
``ForeignKey`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ForeignKey
def __init__(self, to, on_delete, **kwargs):
super().__init__(to, on_delete=on_delete, **kwargs)
class ManyToManyField(DeferredRelatedField):
"""
Use this class to specify many-to-many keys in abstract classes. They will be converted into a
real ``ManyToManyField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ManyToManyField
def __init__(self, to, **kwargs):
super().__init__(to, **kwargs)
through = kwargs.get('through')
if through is None:
self.abstract_through_model = None
else:
try:
self.abstract_through_model = through._meta.object_name
except AttributeError:
assert isinstance(through, str), ('%s(%r) is invalid. '
'Through parameter must be either a model or a model name'
% (self.__class__.__name__, through))
self.abstract_through_model = through
class ForeignKeyBuilder(ModelBase):
"""
In Django we can not point a ``OneToOneField``, ``ForeignKey`` or ``ManyToManyField`` onto
an abstract Model class. In Django-SHOP this limitation is circumvented by creating deferred
foreign keys, which are mapped to their correct model's counterpart during the model
materialization step.
    If the main application stores its models in its own directory, add
    ``SHOP_APP_LABEL = 'myshop'`` to settings.py, so that the models are
    created inside your own shop instance.
"""
_model_allocation = {}
_pending_mappings = []
_materialized_models = {}
def __new__(cls, name, bases, attrs):
class Meta:
app_label = app_settings.APP_LABEL
attrs.setdefault('Meta', Meta)
attrs.setdefault('__module__', getattr(bases[-1], '__module__'))
if not hasattr(attrs['Meta'], 'app_label') and not getattr(attrs['Meta'], 'abstract', False):
attrs['Meta'].app_label = Meta.app_label
Model = super().__new__(cls, name, bases, attrs)
if Model._meta.abstract:
return Model
if any(isinstance(base, cls) for base in bases):
for baseclass in bases:
if not isinstance(baseclass, cls):
continue
assert issubclass(baseclass, models.Model)
basename = baseclass.__name__
if baseclass._meta.abstract:
if basename in cls._model_allocation:
raise ImproperlyConfigured(
"Both Model classes '%s' and '%s' inherited from abstract "
"base class %s, which is disallowed in this configuration."
% (Model.__name__, cls._model_allocation[basename], basename)
)
cls._model_allocation[basename] = Model.__name__
# remember the materialized model mapping in the base class for further usage
baseclass._materialized_model = Model
cls.process_pending_mappings(Model, basename)
else:
# Non abstract model that uses this Metaclass
basename = Model.__name__
cls._model_allocation[basename] = basename
Model._materialized_model = Model
cls.process_pending_mappings(Model, basename)
cls.handle_deferred_foreign_fields(Model)
cls.perform_meta_model_check(Model)
cls._materialized_models[name] = Model
return Model
@classmethod
def handle_deferred_foreign_fields(cls, Model):
"""
Search for deferred foreign fields in our Model and contribute them to the class or
append them to our list of pending mappings
"""
for attrname in dir(Model):
try:
member = getattr(Model, attrname)
except AttributeError:
continue
if not isinstance(member, DeferredRelatedField):
continue
if member.abstract_model == 'self':
mapmodel = Model
else:
mapmodel = cls._model_allocation.get(member.abstract_model)
abstract_through_model = getattr(member, 'abstract_through_model', None)
mapmodel_through = cls._model_allocation.get(abstract_through_model)
if mapmodel and (not abstract_through_model or mapmodel_through):
if mapmodel_through:
member.options['through'] = mapmodel_through
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(Model, attrname)
else:
ForeignKeyBuilder._pending_mappings.append((Model, attrname, member,))
@staticmethod
    def process_pending_mappings(Model, basename):
        """
        Check for pending mappings; if any are found, process and remove them
        from the list.
        """
        assert basename in ForeignKeyBuilder._model_allocation
        assert Model._materialized_model
for mapping in ForeignKeyBuilder._pending_mappings[:]:
member = mapping[2]
mapmodel = ForeignKeyBuilder._model_allocation.get(member.abstract_model)
abstract_through_model = getattr(member, 'abstract_through_model', None)
mapmodel_through = ForeignKeyBuilder._model_allocation.get(abstract_through_model)
if member.abstract_model == basename or abstract_through_model == basename:
if member.abstract_model == basename and abstract_through_model and not mapmodel_through:
continue
elif abstract_through_model == basename and not mapmodel:
continue
if mapmodel_through:
member.options['through'] = mapmodel_through
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(mapping[0], mapping[1])
ForeignKeyBuilder._pending_mappings.remove(mapping)
def __getattr__(self, key):
if key == '_materialized_model':
msg = "No class implements abstract base model: `{}`."
raise ImproperlyConfigured(msg.format(self.__name__))
return object.__getattribute__(self, key)
@classmethod
def perform_meta_model_check(cls, Model):
"""
Hook for each meta class inheriting from ForeignKeyBuilder, to perform checks on the
implementation of the just created type.
"""
@classmethod
def check_for_pending_mappings(cls):
if cls._pending_mappings:
msg = "Deferred foreign key '{0}.{1}' has not been mapped"
pm = cls._pending_mappings
raise ImproperlyConfigured(msg.format(pm[0][0].__name__, pm[0][1]))
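# A declaration sketch (hypothetical models), assuming ForeignKeyBuilder is
# used as the metaclass of the abstract bases:
#
#   class BaseCart(models.Model, metaclass=ForeignKeyBuilder):
#       class Meta:
#           abstract = True
#
#   class BaseCartItem(models.Model, metaclass=ForeignKeyBuilder):
#       cart = ForeignKey('BaseCart', on_delete=models.CASCADE)
#       class Meta:
#           abstract = True
#
# Deriving concrete models from both bases materializes the deferred ``cart``
# field into a real ``models.ForeignKey`` pointing at the concrete cart model.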
class PolymorphicForeignKeyBuilder(ForeignKeyBuilder, PolymorphicModelBase):
"""
Base class for PolymorphicProductMetaclass
"""
class MaterializedModel(LazyObject):
"""
Wrap the base model into a lazy object, so that we can refer to members of its
materialized model using lazy evaluation.
"""
def __init__(self, base_model):
self.__dict__['_base_model'] = base_model
super().__init__()
def _setup(self):
self._wrapped = getattr(self._base_model, '_materialized_model')
def __call__(self, *args, **kwargs):
# calls the constructor of the materialized model
if self._wrapped is empty:
self._setup()
return self._wrapped(*args, **kwargs)
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use type(self),
# not self.__class__, because the latter is proxied.
return type(self)(self._base_model)
else:
# In Python 2.7 we can't return `copy.copy(self._wrapped)`,
# it fails with `TypeError: can't pickle int objects`.
# In Python 3 it works, because it checks if the copied value
# is a subclass of `type`.
# In this case it just returns the value.
# As we know that self._wrapped is a subclass of `type`,
# we can just return it here.
return self._wrapped
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__,
# because the latter is proxied.
result = type(self)(self._base_model)
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._base_model
else:
repr_attr = self._wrapped
return '<MaterializedModel: {}>'.format(repr_attr)
|
from pytile import async_login
from pytile.errors import TileError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import aiohttp_client
from .const import DOMAIN # pylint: disable=unused-import
class TileFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Tile config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize the config flow."""
self.data_schema = vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
)
async def _show_form(self, errors=None):
"""Show the form to the user."""
return self.async_show_form(
step_id="user", data_schema=self.data_schema, errors=errors or {}
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
if not user_input:
return await self._show_form()
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
session = aiohttp_client.async_get_clientsession(self.hass)
try:
await async_login(
user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session=session
)
except TileError:
return await self._show_form({"base": "invalid_auth"})
return self.async_create_entry(title=user_input[CONF_USERNAME], data=user_input)
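# A test-style sketch (hypothetical credentials), assuming Home Assistant's
# flow manager drives async_step_user:
#
#   result = await hass.config_entries.flow.async_init(
#       DOMAIN,
#       context={"source": "user"},
#       data={CONF_USERNAME: "user@host.com", CONF_PASSWORD: "123abc"},
#   )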
|
import sys
import mne
from mne.io import read_raw_kit
from mne.utils import ETSContext
def run():
"""Run command."""
from mne.commands.utils import get_optparser
parser = get_optparser(__file__)
parser.add_option('--input', dest='input_fname',
help='Input data file name', metavar='filename')
parser.add_option('--mrk', dest='mrk_fname',
help='MEG Marker file name', metavar='filename')
parser.add_option('--elp', dest='elp_fname',
help='Headshape points file name', metavar='filename')
parser.add_option('--hsp', dest='hsp_fname',
help='Headshape file name', metavar='filename')
parser.add_option('--stim', dest='stim',
help='Colon Separated Stimulus Trigger Channels',
metavar='chs')
parser.add_option('--slope', dest='slope', help='Slope direction',
metavar='slope')
parser.add_option('--stimthresh', dest='stimthresh', default=1,
help='Threshold value for trigger channels',
metavar='value')
parser.add_option('--output', dest='out_fname',
help='Name of the resulting fiff file',
metavar='filename')
parser.add_option('--debug', dest='debug', action='store_true',
default=False,
help='Set logging level for terminal output to debug')
options, args = parser.parse_args()
if options.debug:
mne.set_log_level('debug')
input_fname = options.input_fname
if input_fname is None:
with ETSContext():
mne.gui.kit2fiff()
sys.exit(0)
hsp_fname = options.hsp_fname
elp_fname = options.elp_fname
mrk_fname = options.mrk_fname
stim = options.stim
slope = options.slope
stimthresh = options.stimthresh
out_fname = options.out_fname
if isinstance(stim, str):
stim = map(int, stim.split(':'))
raw = read_raw_kit(input_fname=input_fname, mrk=mrk_fname, elp=elp_fname,
hsp=hsp_fname, stim=stim, slope=slope,
stimthresh=stimthresh)
raw.save(out_fname)
raw.close()
mne.utils.run_command_if_main()
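# Typical command-line invocation (hypothetical file names), matching the
# options defined above:
#
#   $ mne kit2fiff --input data.sqd --mrk marker.sqd --elp points.txt \
#         --hsp headshape.txt --output data_raw.fif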
|
import asyncio
import logging
from copy import copy
from re import search
from string import Formatter
from typing import Dict, List, Literal
import discord
from redbot.core import Config, commands, checks
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import box, pagify
from redbot.core.utils.menus import menu, DEFAULT_CONTROLS
from redbot.core.bot import Red
from .alias_entry import AliasEntry, AliasCache, ArgParseError
_ = Translator("Alias", __file__)
log = logging.getLogger("red.cogs.alias")
class _TrackingFormatter(Formatter):
def __init__(self):
super().__init__()
self.max = -1
def get_value(self, key, args, kwargs):
if isinstance(key, int):
self.max = max((key, self.max))
return super().get_value(key, args, kwargs)
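# A behavior sketch (hypothetical strings): the formatter records the highest
# positional index it consumed, so call_alias can append any leftover
# arguments verbatim:
#
#   trackform = _TrackingFormatter()
#   trackform.format("ban {0}", "spammer", "now")  # -> "ban spammer"
#   trackform.max  # -> 0, so args[1:] ("now",) is appended by call_alias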
@cog_i18n(_)
class Alias(commands.Cog):
"""Create aliases for commands.
    Aliases are alternative names/shortcuts for commands. They
    can act either as a lambda (storing arguments for repeated use)
    or simply as a shortcut to saying "x y z".
When run, aliases will accept any additional arguments
and append them to the stored alias.
"""
def __init__(self, bot: Red):
super().__init__()
self.bot = bot
self.config = Config.get_conf(self, 8927348724)
self.config.register_global(entries=[], handled_string_creator=False)
self.config.register_guild(entries=[])
self._aliases: AliasCache = AliasCache(config=self.config, cache_enabled=True)
self._ready_event = asyncio.Event()
async def red_delete_data_for_user(
self,
*,
requester: Literal["discord_deleted_user", "owner", "user", "user_strict"],
user_id: int,
):
if requester != "discord_deleted_user":
return
await self._ready_event.wait()
await self._aliases.anonymize_aliases(user_id)
async def cog_before_invoke(self, ctx):
await self._ready_event.wait()
async def _maybe_handle_string_keys(self):
# This isn't a normal schema migration because it's being added
# after the fact for GH-3788
if await self.config.handled_string_creator():
return
async with self.config.entries() as alias_list:
bad_aliases = []
for a in alias_list:
for keyname in ("creator", "guild"):
if isinstance((val := a.get(keyname)), str):
try:
a[keyname] = int(val)
except ValueError:
# Because migrations weren't created as changes were made,
# and the prior form was a string of an ID,
# if this fails, there's nothing to go back to
bad_aliases.append(a)
break
for a in bad_aliases:
alias_list.remove(a)
# if this was using a custom group of (guild_id, aliasname) it would be better but...
all_guild_aliases = await self.config.all_guilds()
for guild_id, guild_data in all_guild_aliases.items():
to_set = []
modified = False
for a in guild_data.get("entries", []):
for keyname in ("creator", "guild"):
if isinstance((val := a.get(keyname)), str):
try:
a[keyname] = int(val)
except ValueError:
break
finally:
modified = True
else:
to_set.append(a)
if modified:
await self.config.guild_from_id(guild_id).entries.set(to_set)
await asyncio.sleep(0)
# control yielded per loop since this is most likely to happen
# at bot startup, where this is most likely to have a performance
# hit.
await self.config.handled_string_creator.set(True)
def sync_init(self):
t = asyncio.create_task(self._initialize())
def done_callback(fut: asyncio.Future):
try:
t.result()
except Exception as exc:
log.exception("Failed to load alias cog", exc_info=exc)
# Maybe schedule extension unloading with message to owner in future
t.add_done_callback(done_callback)
async def _initialize(self):
""" Should only ever be a task """
await self._maybe_handle_string_keys()
if not self._aliases._loaded:
await self._aliases.load_aliases()
self._ready_event.set()
def is_command(self, alias_name: str) -> bool:
"""
The logic here is that if this returns true, the name should not be used for an alias
The function name can be changed when alias is reworked
"""
command = self.bot.get_command(alias_name)
return command is not None or alias_name in commands.RESERVED_COMMAND_NAMES
@staticmethod
def is_valid_alias_name(alias_name: str) -> bool:
return not bool(search(r"\s", alias_name)) and alias_name.isprintable()
async def get_prefix(self, message: discord.Message) -> str:
"""
Tries to determine what prefix is used in a message object.
Looks to identify from longest prefix to smallest.
Will raise ValueError if no prefix is found.
:param message: Message object
:return:
"""
content = message.content
prefix_list = await self.bot.command_prefix(self.bot, message)
prefixes = sorted(prefix_list, key=lambda pfx: len(pfx), reverse=True)
for p in prefixes:
if content.startswith(p):
return p
raise ValueError("No prefix found.")
async def call_alias(self, message: discord.Message, prefix: str, alias: AliasEntry):
new_message = copy(message)
try:
args = alias.get_extra_args_from_alias(message, prefix)
except commands.BadArgument:
return
trackform = _TrackingFormatter()
command = trackform.format(alias.command, *args)
# noinspection PyDunderSlots
new_message.content = "{}{} {}".format(
prefix, command, " ".join(args[trackform.max + 1 :])
)
await self.bot.process_commands(new_message)
async def paginate_alias_list(
self, ctx: commands.Context, alias_list: List[AliasEntry]
) -> None:
names = sorted(["+ " + a.name for a in alias_list])
message = "\n".join(names)
temp = list(pagify(message, delims=["\n"], page_length=1850))
alias_list = []
count = 0
for page in temp:
count += 1
page = page.lstrip("\n")
page = (
_("Aliases:\n")
+ page
+ _("\n\nPage {page}/{total}").format(page=count, total=len(temp))
)
alias_list.append(box("".join(page), "diff"))
if len(alias_list) == 1:
await ctx.send(alias_list[0])
return
await menu(ctx, alias_list, DEFAULT_CONTROLS)
@commands.group()
async def alias(self, ctx: commands.Context):
"""Manage command aliases."""
pass
@alias.group(name="global")
async def global_(self, ctx: commands.Context):
"""Manage global aliases."""
pass
@checks.mod_or_permissions(manage_guild=True)
@alias.command(name="add")
@commands.guild_only()
async def _add_alias(self, ctx: commands.Context, alias_name: str, *, command):
"""Add an alias for a command."""
# region Alias Add Validity Checking
is_command = self.is_command(alias_name)
if is_command:
await ctx.send(
_(
"You attempted to create a new alias"
" with the name {name} but that"
" name is already a command on this bot."
).format(name=alias_name)
)
return
alias = await self._aliases.get_alias(ctx.guild, alias_name)
if alias:
await ctx.send(
_(
"You attempted to create a new alias"
" with the name {name} but that"
" alias already exists."
).format(name=alias_name)
)
return
is_valid_name = self.is_valid_alias_name(alias_name)
if not is_valid_name:
await ctx.send(
_(
"You attempted to create a new alias"
" with the name {name} but that"
" name is an invalid alias name. Alias"
" names may not contain spaces."
).format(name=alias_name)
)
return
given_command_exists = self.bot.get_command(command.split(maxsplit=1)[0]) is not None
if not given_command_exists:
await ctx.send(
_("You attempted to create a new alias for a command that doesn't exist.")
)
return
# endregion
# At this point we know we need to make a new alias
# and that the alias name is valid.
try:
await self._aliases.add_alias(ctx, alias_name, command)
except ArgParseError as e:
return await ctx.send(" ".join(e.args))
await ctx.send(
_("A new alias with the trigger `{name}` has been created.").format(name=alias_name)
)
@checks.is_owner()
@global_.command(name="add")
async def _add_global_alias(self, ctx: commands.Context, alias_name: str, *, command):
"""Add a global alias for a command."""
# region Alias Add Validity Checking
is_command = self.is_command(alias_name)
if is_command:
await ctx.send(
_(
"You attempted to create a new global alias"
" with the name {name} but that"
" name is already a command on this bot."
).format(name=alias_name)
)
return
alias = await self._aliases.get_alias(None, alias_name)
if alias:
await ctx.send(
_(
"You attempted to create a new global alias"
" with the name {name} but that"
" alias already exists."
).format(name=alias_name)
)
return
is_valid_name = self.is_valid_alias_name(alias_name)
if not is_valid_name:
await ctx.send(
_(
"You attempted to create a new global alias"
" with the name {name} but that"
" name is an invalid alias name. Alias"
" names may not contain spaces."
).format(name=alias_name)
)
return
given_command_exists = self.bot.get_command(command.split(maxsplit=1)[0]) is not None
if not given_command_exists:
await ctx.send(
_("You attempted to create a new alias for a command that doesn't exist.")
)
return
# endregion
try:
await self._aliases.add_alias(ctx, alias_name, command, global_=True)
except ArgParseError as e:
return await ctx.send(" ".join(e.args))
await ctx.send(
_("A new global alias with the trigger `{name}` has been created.").format(
name=alias_name
)
)
@alias.command(name="help")
async def _help_alias(self, ctx: commands.Context, alias_name: str):
"""Try to execute help for the base command of the alias."""
alias = await self._aliases.get_alias(ctx.guild, alias_name=alias_name)
if alias:
await self.bot.send_help_for(ctx, alias.command)
else:
await ctx.send(_("No such alias exists."))
@alias.command(name="show")
async def _show_alias(self, ctx: commands.Context, alias_name: str):
"""Show what command the alias executes."""
alias = await self._aliases.get_alias(ctx.guild, alias_name)
if alias:
await ctx.send(
_("The `{alias_name}` alias will execute the command `{command}`").format(
alias_name=alias_name, command=alias.command
)
)
else:
await ctx.send(_("There is no alias with the name `{name}`").format(name=alias_name))
@checks.mod_or_permissions(manage_guild=True)
@alias.command(name="delete", aliases=["del", "remove"])
@commands.guild_only()
async def _del_alias(self, ctx: commands.Context, alias_name: str):
"""Delete an existing alias on this server."""
if not await self._aliases.get_guild_aliases(ctx.guild):
await ctx.send(_("There are no aliases on this server."))
return
if await self._aliases.delete_alias(ctx, alias_name):
await ctx.send(
_("Alias with the name `{name}` was successfully deleted.").format(name=alias_name)
)
else:
await ctx.send(_("Alias with name `{name}` was not found.").format(name=alias_name))
@checks.is_owner()
@global_.command(name="delete", aliases=["del", "remove"])
async def _del_global_alias(self, ctx: commands.Context, alias_name: str):
"""Delete an existing global alias."""
if not await self._aliases.get_global_aliases():
await ctx.send(_("There are no global aliases on this bot."))
return
if await self._aliases.delete_alias(ctx, alias_name, global_=True):
await ctx.send(
_("Alias with the name `{name}` was successfully deleted.").format(name=alias_name)
)
else:
await ctx.send(_("Alias with name `{name}` was not found.").format(name=alias_name))
@alias.command(name="list")
@commands.guild_only()
@checks.bot_has_permissions(add_reactions=True)
async def _list_alias(self, ctx: commands.Context):
"""List the available aliases on this server."""
guild_aliases = await self._aliases.get_guild_aliases(ctx.guild)
if not guild_aliases:
return await ctx.send(_("There are no aliases on this server."))
await self.paginate_alias_list(ctx, guild_aliases)
@global_.command(name="list")
@checks.bot_has_permissions(add_reactions=True)
async def _list_global_alias(self, ctx: commands.Context):
"""List the available global aliases on this bot."""
global_aliases = await self._aliases.get_global_aliases()
if not global_aliases:
return await ctx.send(_("There are no global aliases."))
await self.paginate_alias_list(ctx, global_aliases)
@commands.Cog.listener()
async def on_message_without_command(self, message: discord.Message):
await self._ready_event.wait()
if message.guild is not None:
if await self.bot.cog_disabled_in_guild(self, message.guild):
return
try:
prefix = await self.get_prefix(message)
except ValueError:
return
try:
potential_alias = message.content[len(prefix) :].split(" ")[0]
except IndexError:
return
alias = await self._aliases.get_alias(message.guild, potential_alias)
if alias:
await self.call_alias(message, prefix, alias)
|
from jinja2 import nodes
from jinja2.idtracking import symbols_for_node
def test_basics():
for_loop = nodes.For(
nodes.Name("foo", "store"),
nodes.Name("seq", "load"),
[nodes.Output([nodes.Name("foo", "load")])],
[],
None,
False,
)
tmpl = nodes.Template(
[nodes.Assign(nodes.Name("foo", "store"), nodes.Name("bar", "load")), for_loop]
)
sym = symbols_for_node(tmpl)
assert sym.refs == {
"foo": "l_0_foo",
"bar": "l_0_bar",
"seq": "l_0_seq",
}
assert sym.loads == {
"l_0_foo": ("undefined", None),
"l_0_bar": ("resolve", "bar"),
"l_0_seq": ("resolve", "seq"),
}
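    # Each load entry is (action, target): roughly, "resolve" pulls the name
    # from the render context, while "undefined" marks a name that is only
    # assigned inside this scope and therefore starts out undefined.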
sym = symbols_for_node(for_loop, sym)
assert sym.refs == {
"foo": "l_1_foo",
}
assert sym.loads == {
"l_1_foo": ("param", None),
}
def test_complex():
title_block = nodes.Block(
"title", [nodes.Output([nodes.TemplateData("Page Title")])], False
)
render_title_macro = nodes.Macro(
"render_title",
[nodes.Name("title", "param")],
[],
[
nodes.Output(
[
nodes.TemplateData('\n <div class="title">\n <h1>'),
nodes.Name("title", "load"),
nodes.TemplateData("</h1>\n <p>"),
nodes.Name("subtitle", "load"),
nodes.TemplateData("</p>\n "),
]
),
nodes.Assign(
nodes.Name("subtitle", "store"), nodes.Const("something else")
),
nodes.Output(
[
nodes.TemplateData("\n <p>"),
nodes.Name("subtitle", "load"),
nodes.TemplateData("</p>\n </div>\n"),
nodes.If(
nodes.Name("something", "load"),
[
nodes.Assign(
nodes.Name("title_upper", "store"),
nodes.Filter(
nodes.Name("title", "load"),
"upper",
[],
[],
None,
None,
),
),
nodes.Output(
[
nodes.Name("title_upper", "load"),
nodes.Call(
nodes.Name("render_title", "load"),
[nodes.Const("Aha")],
[],
None,
None,
),
]
),
],
[],
[],
),
]
),
],
)
for_loop = nodes.For(
nodes.Name("item", "store"),
nodes.Name("seq", "load"),
[
nodes.Output(
[
nodes.TemplateData("\n <li>"),
nodes.Name("item", "load"),
nodes.TemplateData("</li>\n <span>"),
]
),
nodes.Include(nodes.Const("helper.html"), True, False),
nodes.Output([nodes.TemplateData("</span>\n ")]),
],
[],
None,
False,
)
body_block = nodes.Block(
"body",
[
nodes.Output(
[
nodes.TemplateData("\n "),
nodes.Call(
nodes.Name("render_title", "load"),
[nodes.Name("item", "load")],
[],
None,
None,
),
nodes.TemplateData("\n <ul>\n "),
]
),
for_loop,
nodes.Output([nodes.TemplateData("\n </ul>\n")]),
],
False,
)
tmpl = nodes.Template(
[
nodes.Extends(nodes.Const("layout.html")),
title_block,
render_title_macro,
body_block,
]
)
tmpl_sym = symbols_for_node(tmpl)
assert tmpl_sym.refs == {
"render_title": "l_0_render_title",
}
assert tmpl_sym.loads == {
"l_0_render_title": ("undefined", None),
}
assert tmpl_sym.stores == {"render_title"}
assert tmpl_sym.dump_stores() == {
"render_title": "l_0_render_title",
}
macro_sym = symbols_for_node(render_title_macro, tmpl_sym)
assert macro_sym.refs == {
"subtitle": "l_1_subtitle",
"something": "l_1_something",
"title": "l_1_title",
"title_upper": "l_1_title_upper",
}
assert macro_sym.loads == {
"l_1_subtitle": ("resolve", "subtitle"),
"l_1_something": ("resolve", "something"),
"l_1_title": ("param", None),
"l_1_title_upper": ("resolve", "title_upper"),
}
assert macro_sym.stores == {"title", "title_upper", "subtitle"}
assert macro_sym.find_ref("render_title") == "l_0_render_title"
assert macro_sym.dump_stores() == {
"title": "l_1_title",
"title_upper": "l_1_title_upper",
"subtitle": "l_1_subtitle",
"render_title": "l_0_render_title",
}
body_sym = symbols_for_node(body_block)
assert body_sym.refs == {
"item": "l_0_item",
"seq": "l_0_seq",
"render_title": "l_0_render_title",
}
assert body_sym.loads == {
"l_0_item": ("resolve", "item"),
"l_0_seq": ("resolve", "seq"),
"l_0_render_title": ("resolve", "render_title"),
}
assert body_sym.stores == set()
for_sym = symbols_for_node(for_loop, body_sym)
assert for_sym.refs == {
"item": "l_1_item",
}
assert for_sym.loads == {
"l_1_item": ("param", None),
}
assert for_sym.stores == {"item"}
assert for_sym.dump_stores() == {
"item": "l_1_item",
}
def test_if_branching_stores():
tmpl = nodes.Template(
[
nodes.If(
nodes.Name("expression", "load"),
[nodes.Assign(nodes.Name("variable", "store"), nodes.Const(42))],
[],
[],
)
]
)
sym = symbols_for_node(tmpl)
assert sym.refs == {"variable": "l_0_variable", "expression": "l_0_expression"}
assert sym.stores == {"variable"}
assert sym.loads == {
"l_0_variable": ("resolve", "variable"),
"l_0_expression": ("resolve", "expression"),
}
assert sym.dump_stores() == {
"variable": "l_0_variable",
}
def test_if_branching_stores_undefined():
tmpl = nodes.Template(
[
nodes.Assign(nodes.Name("variable", "store"), nodes.Const(23)),
nodes.If(
nodes.Name("expression", "load"),
[nodes.Assign(nodes.Name("variable", "store"), nodes.Const(42))],
[],
[],
),
]
)
sym = symbols_for_node(tmpl)
assert sym.refs == {"variable": "l_0_variable", "expression": "l_0_expression"}
assert sym.stores == {"variable"}
assert sym.loads == {
"l_0_variable": ("undefined", None),
"l_0_expression": ("resolve", "expression"),
}
assert sym.dump_stores() == {
"variable": "l_0_variable",
}
def test_if_branching_multi_scope():
for_loop = nodes.For(
nodes.Name("item", "store"),
nodes.Name("seq", "load"),
[
nodes.If(
nodes.Name("expression", "load"),
[nodes.Assign(nodes.Name("x", "store"), nodes.Const(42))],
[],
[],
),
nodes.Include(nodes.Const("helper.html"), True, False),
],
[],
None,
False,
)
tmpl = nodes.Template(
[nodes.Assign(nodes.Name("x", "store"), nodes.Const(23)), for_loop]
)
tmpl_sym = symbols_for_node(tmpl)
for_sym = symbols_for_node(for_loop, tmpl_sym)
assert for_sym.stores == {"item", "x"}
assert for_sym.loads == {
"l_1_x": ("alias", "l_0_x"),
"l_1_item": ("param", None),
"l_1_expression": ("resolve", "expression"),
}
|
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
try:
import pygal
except ImportError:
pygal = None
_site = None
class Plugin(RestExtension):
"""Plugin for chart role."""
name = "rest_chart"
def set_site(self, site):
"""Set Nikola site."""
global _site
_site = self.site = site
directives.register_directive('chart', Chart)
return super().set_site(site)
class Chart(Directive):
"""reStructuredText extension for inserting charts as SVG.
Usage:
.. chart:: Bar
:title: 'Browser usage evolution (in %)'
:x_labels: ["2002", "2003", "2004", "2005", "2006", "2007"]
'Firefox', [None, None, 0, 16.6, 25, 31]
'Chrome', [None, None, None, None, None, None]
'IE', [85.8, 84.6, 84.7, 74.5, 66, 58.6]
'Others', [14.2, 15.4, 15.3, 8.9, 9, 10.4]
"""
has_content = True
required_arguments = 1
option_spec = {
"box_mode": directives.unchanged,
"classes": directives.unchanged,
"css": directives.unchanged,
"defs": directives.unchanged,
"data_file": directives.unchanged,
"disable_xml_declaration": directives.unchanged,
"dots_size": directives.unchanged,
"dynamic_print_values": directives.unchanged,
"explicit_size": directives.unchanged,
"fill": directives.unchanged,
"force_uri_protocol": directives.unchanged,
"half_pie": directives.unchanged,
"height": directives.unchanged,
"human_readable": directives.unchanged,
"include_x_axis": directives.unchanged,
"inner_radius": directives.unchanged,
"interpolate": directives.unchanged,
"interpolation_parameters": directives.unchanged,
"interpolation_precision": directives.unchanged,
"inverse_y_axis": directives.unchanged,
"js": directives.unchanged,
"legend_at_bottom": directives.unchanged,
"legend_at_bottom_columns": directives.unchanged,
"legend_box_size": directives.unchanged,
"logarithmic": directives.unchanged,
"margin": directives.unchanged,
"margin_bottom": directives.unchanged,
"margin_left": directives.unchanged,
"margin_right": directives.unchanged,
"margin_top": directives.unchanged,
"max_scale": directives.unchanged,
"min_scale": directives.unchanged,
"missing_value_fill_truncation": directives.unchanged,
"no_data_text": directives.unchanged,
"no_prefix": directives.unchanged,
"order_min": directives.unchanged,
"pretty_print": directives.unchanged,
"print_labels": directives.unchanged,
"print_values": directives.unchanged,
"print_values_position": directives.unchanged,
"print_zeroes": directives.unchanged,
"range": directives.unchanged,
"rounded_bars": directives.unchanged,
"secondary_range": directives.unchanged,
"show_dots": directives.unchanged,
"show_legend": directives.unchanged,
"show_minor_x_labels": directives.unchanged,
"show_minor_y_labels": directives.unchanged,
"show_only_major_dots": directives.unchanged,
"show_x_guides": directives.unchanged,
"show_x_labels": directives.unchanged,
"show_y_guides": directives.unchanged,
"show_y_labels": directives.unchanged,
"spacing": directives.unchanged,
"stack_from_top": directives.unchanged,
"strict": directives.unchanged,
"stroke": directives.unchanged,
"stroke_style": directives.unchanged,
"style": directives.unchanged,
"title": directives.unchanged,
"tooltip_border_radius": directives.unchanged,
"truncate_label": directives.unchanged,
"truncate_legend": directives.unchanged,
"value_formatter": directives.unchanged,
"width": directives.unchanged,
"x_label_rotation": directives.unchanged,
"x_labels": directives.unchanged,
"x_labels_major": directives.unchanged,
"x_labels_major_count": directives.unchanged,
"x_labels_major_every": directives.unchanged,
"x_title": directives.unchanged,
"x_value_formatter": directives.unchanged,
"xrange": directives.unchanged,
"y_label_rotation": directives.unchanged,
"y_labels": directives.unchanged,
"y_labels_major": directives.unchanged,
"y_labels_major_count": directives.unchanged,
"y_labels_major_every": directives.unchanged,
"y_title": directives.unchanged,
"zero": directives.unchanged,
}
def run(self):
"""Run the directive."""
self.options['site'] = None
        html = _site.plugin_manager.getPluginByName(
            'chart', 'ShortcodePlugin'
        ).plugin_object.handler(
            self.arguments[0],
            data='\n'.join(self.content),
            **self.options,
        )
return [nodes.raw('', html, format='html')]
|
import os
import shutil
import tempfile
from configparser import RawConfigParser
import pytest
from radicale import config
from radicale.tests.helpers import configuration_to_dict
class TestConfig:
"""Test the configuration."""
def setup(self):
self.colpath = tempfile.mkdtemp()
def teardown(self):
shutil.rmtree(self.colpath)
def _write_config(self, config_dict, name):
parser = RawConfigParser()
parser.read_dict(config_dict)
config_path = os.path.join(self.colpath, name)
with open(config_path, "w") as f:
parser.write(f)
return config_path
def test_parse_compound_paths(self):
assert len(config.parse_compound_paths()) == 0
assert len(config.parse_compound_paths("")) == 0
assert len(config.parse_compound_paths(None, "")) == 0
assert len(config.parse_compound_paths("config", "")) == 0
assert len(config.parse_compound_paths("config", None)) == 1
assert len(config.parse_compound_paths(os.pathsep.join(["", ""]))) == 0
assert len(config.parse_compound_paths(os.pathsep.join([
"", "config", ""]))) == 1
paths = config.parse_compound_paths(os.pathsep.join([
"config1", "?config2", "config3"]))
assert len(paths) == 3
for i, (name, ignore_if_missing) in enumerate([
("config1", False), ("config2", True), ("config3", False)]):
assert os.path.isabs(paths[i][0])
assert os.path.basename(paths[i][0]) == name
assert paths[i][1] is ignore_if_missing
def test_load_empty(self):
config_path = self._write_config({}, "config")
config.load([(config_path, False)])
def test_load_full(self):
config_path = self._write_config(
configuration_to_dict(config.load()), "config")
config.load([(config_path, False)])
def test_load_missing(self):
config_path = os.path.join(self.colpath, "does_not_exist")
config.load([(config_path, True)])
with pytest.raises(Exception) as exc_info:
config.load([(config_path, False)])
e = exc_info.value
assert "Failed to load config file %r" % config_path in str(e)
def test_load_multiple(self):
config_path1 = self._write_config({
"server": {"hosts": "192.0.2.1:1111"}}, "config1")
config_path2 = self._write_config({
"server": {"max_connections": 1111}}, "config2")
configuration = config.load([(config_path1, False),
(config_path2, False)])
assert len(configuration.get("server", "hosts")) == 1
assert configuration.get("server", "hosts")[0] == ("192.0.2.1", 1111)
assert configuration.get("server", "max_connections") == 1111
def test_copy(self):
configuration1 = config.load()
configuration1.update({"server": {"max_connections": "1111"}}, "test")
configuration2 = configuration1.copy()
configuration2.update({"server": {"max_connections": "1112"}}, "test")
assert configuration1.get("server", "max_connections") == 1111
assert configuration2.get("server", "max_connections") == 1112
def test_invalid_section(self):
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.update({"does_not_exist": {"x": "x"}}, "test")
e = exc_info.value
assert "Invalid section 'does_not_exist'" in str(e)
def test_invalid_option(self):
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.update({"server": {"x": "x"}}, "test")
e = exc_info.value
assert "Invalid option 'x'" in str(e)
assert "section 'server'" in str(e)
def test_invalid_option_plugin(self):
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.update({"auth": {"x": "x"}}, "test")
e = exc_info.value
assert "Invalid option 'x'" in str(e)
assert "section 'auth'" in str(e)
def test_invalid_value(self):
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.update({"server": {"max_connections": "x"}}, "test")
e = exc_info.value
assert "Invalid positive_int" in str(e)
assert "option 'max_connections" in str(e)
assert "section 'server" in str(e)
assert "'x'" in str(e)
def test_privileged(self):
configuration = config.load()
configuration.update({"server": {"_internal_server": "True"}},
"test", privileged=True)
with pytest.raises(Exception) as exc_info:
configuration.update(
{"server": {"_internal_server": "True"}}, "test")
e = exc_info.value
assert "Invalid option '_internal_server'" in str(e)
def test_plugin_schema(self):
plugin_schema = {"auth": {"new_option": {"value": "False",
"type": bool}}}
configuration = config.load()
configuration.update({"auth": {"type": "new_plugin"}}, "test")
plugin_configuration = configuration.copy(plugin_schema)
assert plugin_configuration.get("auth", "new_option") is False
configuration.update({"auth": {"new_option": "True"}}, "test")
plugin_configuration = configuration.copy(plugin_schema)
assert plugin_configuration.get("auth", "new_option") is True
def test_plugin_schema_duplicate_option(self):
plugin_schema = {"auth": {"type": {"value": "False",
"type": bool}}}
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.copy(plugin_schema)
e = exc_info.value
assert "option already exists in 'auth': 'type'" in str(e)
def test_plugin_schema_invalid(self):
plugin_schema = {"server": {"new_option": {"value": "False",
"type": bool}}}
configuration = config.load()
with pytest.raises(Exception) as exc_info:
configuration.copy(plugin_schema)
e = exc_info.value
assert "not a plugin section: 'server" in str(e)
def test_plugin_schema_option_invalid(self):
plugin_schema = {"auth": {}}
configuration = config.load()
configuration.update({"auth": {"type": "new_plugin",
"new_option": False}}, "test")
with pytest.raises(Exception) as exc_info:
configuration.copy(plugin_schema)
e = exc_info.value
assert "Invalid option 'new_option'" in str(e)
assert "section 'auth'" in str(e)
|
import logging
import voluptuous as vol
from homeassistant.components.lock import DOMAIN, LockEntity
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, const
_LOGGER = logging.getLogger(__name__)
ATTR_NOTIFICATION = "notification"
ATTR_LOCK_STATUS = "lock_status"
ATTR_CODE_SLOT = "code_slot"
ATTR_USERCODE = "usercode"
CONFIG_ADVANCED = "Advanced"
SERVICE_SET_USERCODE = "set_usercode"
SERVICE_GET_USERCODE = "get_usercode"
SERVICE_CLEAR_USERCODE = "clear_usercode"
POLYCONTROL = 0x10E
DANALOCK_V2_BTZE = 0x2
POLYCONTROL_DANALOCK_V2_BTZE_LOCK = (POLYCONTROL, DANALOCK_V2_BTZE)
WORKAROUND_V2BTZE = 1
WORKAROUND_DEVICE_STATE = 2
WORKAROUND_TRACK_MESSAGE = 4
WORKAROUND_ALARM_TYPE = 8
DEVICE_MAPPINGS = {
POLYCONTROL_DANALOCK_V2_BTZE_LOCK: WORKAROUND_V2BTZE,
# Kwikset 914TRL ZW500 99100-078
(0x0090, 0x440): WORKAROUND_DEVICE_STATE,
(0x0090, 0x446): WORKAROUND_DEVICE_STATE,
(0x0090, 0x238): WORKAROUND_DEVICE_STATE,
# Kwikset 888ZW500-15S Smartcode 888
(0x0090, 0x541): WORKAROUND_DEVICE_STATE,
# Kwikset 916
(0x0090, 0x0001): WORKAROUND_DEVICE_STATE,
# Kwikset Obsidian
(0x0090, 0x0742): WORKAROUND_DEVICE_STATE,
# Yale Locks
# Yale YRD210, YRD220, YRL220
(0x0129, 0x0000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD210, YRD220
(0x0129, 0x0209): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL210, YRL220
(0x0129, 0x0409): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD256
(0x0129, 0x0600): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD110, YRD120
(0x0129, 0x0800): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD446
(0x0129, 0x1000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL220
(0x0129, 0x2132): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
(0x0129, 0x3CAC): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD210, YRD220
(0x0129, 0xAA00): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD220
(0x0129, 0xFFFF): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL256
(0x0129, 0x0F00): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD220 (Older Yale products with incorrect vendor ID)
(0x0109, 0x0000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Schlage BE469
(0x003B, 0x5044): WORKAROUND_DEVICE_STATE | WORKAROUND_TRACK_MESSAGE,
# Schlage FE599NX
(0x003B, 0x504C): WORKAROUND_DEVICE_STATE,
}
LOCK_NOTIFICATION = {
"1": "Manual Lock",
"2": "Manual Unlock",
"5": "Keypad Lock",
"6": "Keypad Unlock",
"11": "Lock Jammed",
"254": "Unknown Event",
}
NOTIFICATION_RF_LOCK = "3"
NOTIFICATION_RF_UNLOCK = "4"
LOCK_NOTIFICATION[NOTIFICATION_RF_LOCK] = "RF Lock"
LOCK_NOTIFICATION[NOTIFICATION_RF_UNLOCK] = "RF Unlock"
LOCK_ALARM_TYPE = {
"9": "Deadbolt Jammed",
"16": "Unlocked by Bluetooth ",
"18": "Locked with Keypad by user ",
"19": "Unlocked with Keypad by user ",
"21": "Manually Locked ",
"22": "Manually Unlocked ",
"27": "Auto re-lock",
"33": "User deleted: ",
"112": "Master code changed or User added: ",
"113": "Duplicate Pin-code: ",
"130": "RF module, power restored",
"144": "Unlocked by NFC Tag or Card by user ",
"161": "Tamper Alarm: ",
"167": "Low Battery",
"168": "Critical Battery Level",
"169": "Battery too low to operate",
}
ALARM_RF_LOCK = "24"
ALARM_RF_UNLOCK = "25"
LOCK_ALARM_TYPE[ALARM_RF_LOCK] = "Locked by RF"
LOCK_ALARM_TYPE[ALARM_RF_UNLOCK] = "Unlocked by RF"
MANUAL_LOCK_ALARM_LEVEL = {
"1": "by Key Cylinder or Inside thumb turn",
"2": "by Touch function (lock and leave)",
}
TAMPER_ALARM_LEVEL = {"1": "Too many keypresses", "2": "Cover removed"}
LOCK_STATUS = {
"1": True,
"2": False,
"3": True,
"4": False,
"5": True,
"6": False,
"9": False,
"18": True,
"19": False,
"21": True,
"22": False,
"24": True,
"25": False,
"27": True,
}
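# LOCK_STATUS maps the notification/alarm codes above to a locked state,
# e.g. "18" ("Locked with Keypad by user ") -> True and
# "19" ("Unlocked with Keypad by user ") -> False.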
ALARM_TYPE_STD = ["18", "19", "33", "112", "113", "144"]
SET_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
vol.Required(ATTR_USERCODE): cv.string,
}
)
GET_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
}
)
CLEAR_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
}
)
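# Note: vol.Coerce(int) also accepts string service data, so (assuming
# const.ATTR_NODE_ID == "node_id") a call like
# SET_USERCODE_SCHEMA({"node_id": "3", "code_slot": "1", "usercode": "1234"})
# validates to {"node_id": 3, "code_slot": 1, "usercode": "1234"}.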
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Lock from Config Entry."""
@callback
def async_add_lock(lock):
"""Add Z-Wave Lock."""
async_add_entities([lock])
async_dispatcher_connect(hass, "zwave_new_lock", async_add_lock)
network = hass.data[const.DATA_NETWORK]
def set_usercode(service):
"""Set the usercode to index X on the lock."""
node_id = service.data.get(const.ATTR_NODE_ID)
lock_node = network.nodes[node_id]
code_slot = service.data.get(ATTR_CODE_SLOT)
usercode = service.data.get(ATTR_USERCODE)
for value in lock_node.get_values(
class_id=const.COMMAND_CLASS_USER_CODE
).values():
if value.index != code_slot:
continue
if len(str(usercode)) < 4:
                _LOGGER.error(
                    "Invalid code provided (%s): usercode must be at least 4"
                    " and at most %s digits long",
                    usercode,
                    len(value.data),
                )
break
value.data = str(usercode)
break
def get_usercode(service):
"""Get a usercode at index X on the lock."""
node_id = service.data.get(const.ATTR_NODE_ID)
lock_node = network.nodes[node_id]
code_slot = service.data.get(ATTR_CODE_SLOT)
for value in lock_node.get_values(
class_id=const.COMMAND_CLASS_USER_CODE
).values():
if value.index != code_slot:
continue
_LOGGER.info("Usercode at slot %s is: %s", value.index, value.data)
break
def clear_usercode(service):
"""Set usercode to slot X on the lock."""
node_id = service.data.get(const.ATTR_NODE_ID)
lock_node = network.nodes[node_id]
code_slot = service.data.get(ATTR_CODE_SLOT)
data = ""
for value in lock_node.get_values(
class_id=const.COMMAND_CLASS_USER_CODE
).values():
if value.index != code_slot:
continue
            data = "\0" * len(value.data)
_LOGGER.debug("Data to clear lock: %s", data)
value.data = data
_LOGGER.info("Usercode at slot %s is cleared", value.index)
break
hass.services.async_register(
DOMAIN, SERVICE_SET_USERCODE, set_usercode, schema=SET_USERCODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_GET_USERCODE, get_usercode, schema=GET_USERCODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_CLEAR_USERCODE, clear_usercode, schema=CLEAR_USERCODE_SCHEMA
)
def get_device(node, values, **kwargs):
"""Create Z-Wave entity device."""
return ZwaveLock(values)
class ZwaveLock(ZWaveDeviceEntity, LockEntity):
"""Representation of a Z-Wave Lock."""
def __init__(self, values):
"""Initialize the Z-Wave lock device."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._state = None
self._notification = None
self._lock_status = None
self._v2btze = None
self._state_workaround = False
self._track_message_workaround = False
self._previous_message = None
self._alarm_type_workaround = False
# Enable appropriate workaround flags for our device
# Make sure that we have values for the key before converting to int
if self.node.manufacturer_id.strip() and self.node.product_id.strip():
specific_sensor_key = (
int(self.node.manufacturer_id, 16),
int(self.node.product_id, 16),
)
if specific_sensor_key in DEVICE_MAPPINGS:
workaround = DEVICE_MAPPINGS[specific_sensor_key]
if workaround & WORKAROUND_V2BTZE:
self._v2btze = 1
_LOGGER.debug("Polycontrol Danalock v2 BTZE workaround enabled")
if workaround & WORKAROUND_DEVICE_STATE:
self._state_workaround = True
_LOGGER.debug("Notification device state workaround enabled")
if workaround & WORKAROUND_TRACK_MESSAGE:
self._track_message_workaround = True
_LOGGER.debug("Message tracking workaround enabled")
if workaround & WORKAROUND_ALARM_TYPE:
self._alarm_type_workaround = True
_LOGGER.debug("Alarm Type device state workaround enabled")
self.update_properties()
def update_properties(self):
"""Handle data changes for node values."""
self._state = self.values.primary.data
_LOGGER.debug("lock state set to %s", self._state)
if self.values.access_control:
notification_data = self.values.access_control.data
self._notification = LOCK_NOTIFICATION.get(str(notification_data))
if self._state_workaround:
self._state = LOCK_STATUS.get(str(notification_data))
_LOGGER.debug("workaround: lock state set to %s", self._state)
if self._v2btze:
if (
self.values.v2btze_advanced
and self.values.v2btze_advanced.data == CONFIG_ADVANCED
):
self._state = LOCK_STATUS.get(str(notification_data))
_LOGGER.debug(
"Lock state set from Access Control value and is %s, get=%s",
str(notification_data),
self.state,
)
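        # Track-message workaround (enabled above for the Schlage BE469): infer
        # RF lock/unlock from consecutive door-lock command class messages.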
if self._track_message_workaround:
this_message = self.node.stats["lastReceivedMessage"][5]
if this_message == const.COMMAND_CLASS_DOOR_LOCK:
self._state = self.values.primary.data
_LOGGER.debug("set state to %s based on message tracking", self._state)
if self._previous_message == const.COMMAND_CLASS_DOOR_LOCK:
if self._state:
self._notification = LOCK_NOTIFICATION[NOTIFICATION_RF_LOCK]
self._lock_status = LOCK_ALARM_TYPE[ALARM_RF_LOCK]
else:
self._notification = LOCK_NOTIFICATION[NOTIFICATION_RF_UNLOCK]
self._lock_status = LOCK_ALARM_TYPE[ALARM_RF_UNLOCK]
return
self._previous_message = this_message
if not self.values.alarm_type:
return
alarm_type = self.values.alarm_type.data
if self.values.alarm_level:
alarm_level = self.values.alarm_level.data
else:
alarm_level = None
if not alarm_type:
return
if self._alarm_type_workaround:
self._state = LOCK_STATUS.get(str(alarm_type))
_LOGGER.debug(
"workaround: lock state set to %s -- alarm type: %s",
self._state,
str(alarm_type),
)
if alarm_type == 21:
self._lock_status = (
f"{LOCK_ALARM_TYPE.get(str(alarm_type))}"
f"{MANUAL_LOCK_ALARM_LEVEL.get(str(alarm_level))}"
)
return
if str(alarm_type) in ALARM_TYPE_STD:
self._lock_status = f"{LOCK_ALARM_TYPE.get(str(alarm_type))}{alarm_level}"
return
if alarm_type == 161:
self._lock_status = (
f"{LOCK_ALARM_TYPE.get(str(alarm_type))}"
f"{TAMPER_ALARM_LEVEL.get(str(alarm_level))}"
)
return
if alarm_type != 0:
self._lock_status = LOCK_ALARM_TYPE.get(str(alarm_type))
return
@property
def is_locked(self):
"""Return true if device is locked."""
return self._state
def lock(self, **kwargs):
"""Lock the device."""
self.values.primary.data = True
def unlock(self, **kwargs):
"""Unlock the device."""
self.values.primary.data = False
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
data = super().device_state_attributes
if self._notification:
data[ATTR_NOTIFICATION] = self._notification
if self._lock_status:
data[ATTR_LOCK_STATUS] = self._lock_status
return data
|
from datetime import timedelta
from functools import partial
import logging
import random
import aiohue
import async_timeout
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
EFFECT_COLORLOOP,
EFFECT_RANDOM,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.core import callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.util import color
from .const import DOMAIN as HUE_DOMAIN, REQUEST_REFRESH_DELAY
from .helpers import remove_devices
SCAN_INTERVAL = timedelta(seconds=5)
_LOGGER = logging.getLogger(__name__)
SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION
SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS
SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP
SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR
SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR
SUPPORT_HUE = {
"Extended color light": SUPPORT_HUE_EXTENDED,
"Color light": SUPPORT_HUE_COLOR,
"Dimmable light": SUPPORT_HUE_DIMMABLE,
"On/Off plug-in unit": SUPPORT_HUE_ON_OFF,
"Color temperature light": SUPPORT_HUE_COLOR_TEMP,
}
ATTR_IS_HUE_GROUP = "is_hue_group"
GAMUT_TYPE_UNAVAILABLE = "None"
# Minimum Hue Bridge API version to support groups
# 1.4.0 introduced extended group info
# 1.12 introduced the state object for groups
# 1.13 introduced "any_on" to group state objects
GROUP_MIN_API_VERSION = (1, 13, 0)
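# Version strings compare naturally as int tuples, e.g.
# tuple(int(v) for v in "1.12.3".split(".")) == (1, 12, 3) < GROUP_MIN_API_VERSION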
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up Hue lights.
    Can only be called when a user accidentally mentions the hue platform in
    their config; even in that case the platform setup is simply ignored.
"""
def create_light(item_class, coordinator, bridge, is_group, api, item_id):
"""Create the light."""
if is_group:
supported_features = 0
for light_id in api[item_id].lights:
if light_id not in bridge.api.lights:
continue
light = bridge.api.lights[light_id]
supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED)
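        # If none of the group's member lights are known, assume the full
        # feature set rather than advertising no features at all.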
supported_features = supported_features or SUPPORT_HUE_EXTENDED
else:
supported_features = SUPPORT_HUE.get(api[item_id].type, SUPPORT_HUE_EXTENDED)
return item_class(coordinator, bridge, is_group, api[item_id], supported_features)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Hue lights from a config entry."""
bridge = hass.data[HUE_DOMAIN][config_entry.entry_id]
light_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="light",
update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update),
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=Debouncer(
bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
)
# First do a refresh to see if we can reach the hub.
# Otherwise we will declare not ready.
await light_coordinator.async_refresh()
if not light_coordinator.last_update_success:
raise PlatformNotReady
update_lights = partial(
async_update_items,
bridge,
bridge.api.lights,
{},
async_add_entities,
partial(create_light, HueLight, light_coordinator, bridge, False),
)
# We add a listener after fetching the data, so manually trigger listener
bridge.reset_jobs.append(light_coordinator.async_add_listener(update_lights))
update_lights()
api_version = tuple(int(v) for v in bridge.api.config.apiversion.split("."))
allow_groups = bridge.allow_groups
if allow_groups and api_version < GROUP_MIN_API_VERSION:
_LOGGER.warning("Please update your Hue bridge to support groups")
allow_groups = False
if not allow_groups:
return
group_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="group",
update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update),
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=Debouncer(
bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
),
)
update_groups = partial(
async_update_items,
bridge,
bridge.api.groups,
{},
async_add_entities,
partial(create_light, HueLight, group_coordinator, bridge, True),
)
bridge.reset_jobs.append(group_coordinator.async_add_listener(update_groups))
await group_coordinator.async_refresh()
async def async_safe_fetch(bridge, fetch_method):
"""Safely fetch data."""
try:
with async_timeout.timeout(4):
return await bridge.async_request_call(fetch_method)
except aiohue.Unauthorized as err:
await bridge.handle_unauthorized_error()
raise UpdateFailed("Unauthorized") from err
except (aiohue.AiohueException,) as err:
raise UpdateFailed(f"Hue error: {err}") from err
@callback
def async_update_items(bridge, api, current, async_add_entities, create_item):
"""Update items."""
new_items = []
for item_id in api:
if item_id in current:
continue
current[item_id] = create_item(api, item_id)
new_items.append(current[item_id])
bridge.hass.async_create_task(remove_devices(bridge, api, current))
if new_items:
async_add_entities(new_items)
def hue_brightness_to_hass(value):
"""Convert hue brightness 1..254 to hass format 0..255."""
return min(255, round((value / 254) * 255))
def hass_to_hue_brightness(value):
"""Convert hass brightness 0..255 to hue 1..254 scale."""
return max(1, round((value / 255) * 254))
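# Worked examples for the conversions above:
#   hue_brightness_to_hass(1) == 1, hue_brightness_to_hass(254) == 255
#   hass_to_hue_brightness(255) == 254, hass_to_hue_brightness(0) == 1
# (the last value is clamped, since Hue only accepts bri values 1..254)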
class HueLight(CoordinatorEntity, LightEntity):
"""Representation of a Hue light."""
def __init__(self, coordinator, bridge, is_group, light, supported_features):
"""Initialize the light."""
super().__init__(coordinator)
self.light = light
self.bridge = bridge
self.is_group = is_group
self._supported_features = supported_features
if is_group:
self.is_osram = False
self.is_philips = False
self.is_innr = False
            self.gamut_type = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
else:
self.is_osram = light.manufacturername == "OSRAM"
self.is_philips = light.manufacturername == "Philips"
self.is_innr = light.manufacturername == "innr"
            self.gamut_type = self.light.colorgamuttype
self.gamut = self.light.colorgamut
_LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
if self.light.swupdatestate == "readytoinstall":
err = (
"Please check for software updates of the %s "
"bulb in the Philips Hue App."
)
_LOGGER.warning(err, self.name)
if self.gamut:
if not color.check_valid_gamut(self.gamut):
err = "Color gamut of %s: %s, not valid, setting gamut to None."
_LOGGER.warning(err, self.name, str(self.gamut))
                    self.gamut_type = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
@property
def unique_id(self):
"""Return the unique ID of this Hue light."""
return self.light.uniqueid
@property
def device_id(self):
"""Return the ID of this Hue light."""
return self.unique_id
@property
def name(self):
"""Return the name of the Hue light."""
return self.light.name
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if self.is_group:
bri = self.light.action.get("bri")
else:
bri = self.light.state.get("bri")
if bri is None:
return bri
return hue_brightness_to_hass(bri)
@property
def _color_mode(self):
"""Return the hue color mode."""
if self.is_group:
return self.light.action.get("colormode")
return self.light.state.get("colormode")
@property
def hs_color(self):
"""Return the hs color value."""
mode = self._color_mode
source = self.light.action if self.is_group else self.light.state
if mode in ("xy", "hs") and "xy" in source:
return color.color_xy_to_hs(*source["xy"], self.gamut)
return None
@property
def color_temp(self):
"""Return the CT color value."""
# Don't return color temperature unless in color temperature mode
if self._color_mode != "ct":
return None
if self.is_group:
return self.light.action.get("ct")
return self.light.state.get("ct")
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
if self.is_group:
return super().min_mireds
min_mireds = self.light.controlcapabilities.get("ct", {}).get("min")
        # We filter out '0' too, which can be incorrectly reported by 3rd party bulbs
if not min_mireds:
return super().min_mireds
return min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
if self.is_group:
return super().max_mireds
max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")
if not max_mireds:
return super().max_mireds
return max_mireds
@property
def is_on(self):
"""Return true if device is on."""
if self.is_group:
return self.light.state["any_on"]
return self.light.state["on"]
@property
def available(self):
"""Return if light is available."""
return self.coordinator.last_update_success and (
self.is_group
or self.bridge.allow_unreachable
or self.light.state["reachable"]
)
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
@property
def effect(self):
"""Return the current effect."""
return self.light.state.get("effect", None)
@property
def effect_list(self):
"""Return the list of supported effects."""
if self.is_osram:
return [EFFECT_RANDOM]
return [EFFECT_COLORLOOP, EFFECT_RANDOM]
@property
def device_info(self):
"""Return the device info."""
if self.light.type in ("LightGroup", "Room", "Luminaire", "LightSource"):
return None
return {
"identifiers": {(HUE_DOMAIN, self.device_id)},
"name": self.name,
"manufacturer": self.light.manufacturername,
# productname added in Hue Bridge API 1.24
# (published 03/05/2018)
"model": self.light.productname or self.light.modelid,
# Not yet exposed as properties in aiohue
"sw_version": self.light.raw["swversion"],
"via_device": (HUE_DOMAIN, self.bridge.api.config.bridgeid),
}
async def async_turn_on(self, **kwargs):
"""Turn the specified or all lights on."""
command = {"on": True}
if ATTR_TRANSITION in kwargs:
command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
if ATTR_HS_COLOR in kwargs:
if self.is_osram:
command["hue"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
command["sat"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
else:
# Philips hue bulb models respond differently to hue/sat
# requests, so we convert to XY first to ensure a consistent
# color.
xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut)
command["xy"] = xy_color
elif ATTR_COLOR_TEMP in kwargs:
temp = kwargs[ATTR_COLOR_TEMP]
command["ct"] = max(self.min_mireds, min(temp, self.max_mireds))
if ATTR_BRIGHTNESS in kwargs:
command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS])
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command["alert"] = "lselect"
del command["on"]
elif flash == FLASH_SHORT:
command["alert"] = "select"
del command["on"]
elif not self.is_innr:
command["alert"] = "none"
if ATTR_EFFECT in kwargs:
effect = kwargs[ATTR_EFFECT]
if effect == EFFECT_COLORLOOP:
command["effect"] = "colorloop"
elif effect == EFFECT_RANDOM:
command["hue"] = random.randrange(0, 65535)
command["sat"] = random.randrange(150, 254)
else:
command["effect"] = "none"
if self.is_group:
await self.bridge.async_request_call(
partial(self.light.set_action, **command)
)
else:
await self.bridge.async_request_call(
partial(self.light.set_state, **command)
)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs):
"""Turn the specified or all lights off."""
command = {"on": False}
if ATTR_TRANSITION in kwargs:
command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command["alert"] = "lselect"
del command["on"]
elif flash == FLASH_SHORT:
command["alert"] = "select"
del command["on"]
elif not self.is_innr:
command["alert"] = "none"
if self.is_group:
await self.bridge.async_request_call(
partial(self.light.set_action, **command)
)
else:
await self.bridge.async_request_call(
partial(self.light.set_state, **command)
)
await self.coordinator.async_request_refresh()
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if not self.is_group:
return {}
return {ATTR_IS_HUE_GROUP: self.is_group}
|
import logging
from api.soma_api import SomaApi
from requests import RequestException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
DEFAULT_PORT = 3000
class SomaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Instantiate config flow."""
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
if user_input is None:
data = {
vol.Required(CONF_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
}
return self.async_show_form(step_id="user", data_schema=vol.Schema(data))
return await self.async_step_creation(user_input)
async def async_step_creation(self, user_input=None):
"""Finish config flow."""
api = SomaApi(user_input["host"], user_input["port"])
try:
result = await self.hass.async_add_executor_job(api.list_devices)
_LOGGER.info("Successfully set up Soma Connect")
if result["result"] == "success":
return self.async_create_entry(
title="Soma Connect",
data={"host": user_input["host"], "port": user_input["port"]},
)
_LOGGER.error(
"Connection to SOMA Connect failed (result:%s)", result["result"]
)
return self.async_abort(reason="result_error")
except RequestException:
_LOGGER.error("Connection to SOMA Connect failed with RequestException")
return self.async_abort(reason="connection_error")
except KeyError:
_LOGGER.error("Connection to SOMA Connect failed with KeyError")
return self.async_abort(reason="connection_error")
async def async_step_import(self, user_input=None):
"""Handle flow start from existing config section."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
return await self.async_step_creation(user_input)
|
import asyncio
import logging
from sisyphus_control import Table
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
_LOGGER = logging.getLogger(__name__)
DATA_SISYPHUS = "sisyphus"
DOMAIN = "sisyphus"
AUTODETECT_SCHEMA = vol.Schema({})
TABLE_SCHEMA = vol.Schema(
{vol.Required(CONF_NAME): cv.string, vol.Required(CONF_HOST): cv.string}
)
TABLES_SCHEMA = vol.Schema([TABLE_SCHEMA])
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Any(AUTODETECT_SCHEMA, TABLES_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass, config):
"""Set up the sisyphus component."""
class SocketIONoiseFilter(logging.Filter):
"""Filters out excessively verbose logs from SocketIO."""
def filter(self, record):
if "waiting for connection" in record.msg:
return False
return True
logging.getLogger("socketIO-client").addFilter(SocketIONoiseFilter())
tables = hass.data.setdefault(DATA_SISYPHUS, {})
table_configs = config.get(DOMAIN)
session = async_get_clientsession(hass)
async def add_table(host, name=None):
"""Add platforms for a single table with the given hostname."""
tables[host] = TableHolder(hass, session, host, name)
hass.async_create_task(
async_load_platform(hass, "light", DOMAIN, {CONF_HOST: host}, config)
)
hass.async_create_task(
async_load_platform(hass, "media_player", DOMAIN, {CONF_HOST: host}, config)
)
if isinstance(table_configs, dict): # AUTODETECT_SCHEMA
for ip_address in await Table.find_table_ips(session):
await add_table(ip_address)
else: # TABLES_SCHEMA
for conf in table_configs:
await add_table(conf[CONF_HOST], conf[CONF_NAME])
async def close_tables(*args):
"""Close all table objects."""
tasks = [table.close() for table in tables.values()]
if tasks:
await asyncio.wait(tasks)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_tables)
return True
class TableHolder:
"""Holds table objects and makes them available to platforms."""
def __init__(self, hass, session, host, name):
"""Initialize the table holder."""
self._hass = hass
self._session = session
self._host = host
self._name = name
self._table = None
self._table_task = None
@property
def available(self):
"""Return true if the table is responding to heartbeats."""
        if self._table:
            return self._table.is_connected
        return False
@property
def name(self):
"""Return the name of the table."""
return self._name
async def get_table(self):
"""Return the Table held by this holder, connecting to it if needed."""
if self._table:
return self._table
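        # Reuse any in-flight connection attempt so that concurrent callers
        # share a single Table.connect() per host.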
if not self._table_task:
self._table_task = self._hass.async_create_task(self._connect_table())
return await self._table_task
async def _connect_table(self):
try:
self._table = await Table.connect(self._host, self._session)
if self._name is None:
self._name = self._table.name
_LOGGER.debug("Connected to %s at %s", self._name, self._host)
return self._table
finally:
self._table_task = None
async def close(self):
"""Close the table held by this holder, if any."""
if self._table:
await self._table.close()
self._table = None
self._table_task = None
|
import asyncio
import time
from unittest.mock import patch
import pytest
import zigpy.profiles.zha as zha
import zigpy.zcl.clusters.general as general
import zigpy.zcl.clusters.lighting as lighting
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.components.zha.core.group import GroupMember
from homeassistant.components.zha.core.store import TOMBSTONE_LIFETIME
from .common import async_enable_traffic, async_find_group_entity_id, get_zha_gateway
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"
IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8"
@pytest.fixture
def zigpy_dev_basic(zigpy_device_mock):
"""Zigpy device with just a basic cluster."""
return zigpy_device_mock(
{
1: {
"in_clusters": [general.Basic.cluster_id],
"out_clusters": [],
"device_type": zha.DeviceType.ON_OFF_SWITCH,
}
}
)
@pytest.fixture
async def zha_dev_basic(hass, zha_device_restored, zigpy_dev_basic):
"""ZHA device with just a basic cluster."""
zha_device = await zha_device_restored(zigpy_dev_basic)
return zha_device
@pytest.fixture
async def coordinator(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee="00:15:8d:00:02:32:4f:32",
nwk=0x0000,
node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_light_1(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
general.LevelControl.cluster_id,
lighting.Color.cluster_id,
general.Groups.cluster_id,
],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee=IEEE_GROUPABLE_DEVICE,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_light_2(hass, zigpy_device_mock, zha_device_joined):
"""Test zha light platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
general.LevelControl.cluster_id,
lighting.Color.cluster_id,
general.Groups.cluster_id,
],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee=IEEE_GROUPABLE_DEVICE2,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
async def test_device_left(hass, zigpy_dev_basic, zha_dev_basic):
"""Device leaving the network should become unavailable."""
assert zha_dev_basic.available is True
get_zha_gateway(hass).device_left(zigpy_dev_basic)
await hass.async_block_till_done()
assert zha_dev_basic.available is False
async def test_gateway_group_methods(hass, device_light_1, device_light_2, coordinator):
"""Test creating a group with 2 members."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
zha_gateway.coordinator_zha_device = coordinator
coordinator._zha_gateway = zha_gateway
device_light_1._zha_gateway = zha_gateway
device_light_2._zha_gateway = zha_gateway
member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee]
members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)]
# test creating a group with 2 members
zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
await hass.async_block_till_done()
assert zha_group is not None
assert len(zha_group.members) == 2
for member in zha_group.members:
assert member.device.ieee in member_ieee_addresses
entity_id = async_find_group_entity_id(hass, LIGHT_DOMAIN, zha_group)
assert hass.states.get(entity_id) is not None
# test get group by name
assert zha_group == zha_gateway.async_get_group_by_name(zha_group.name)
# test removing a group
await zha_gateway.async_remove_zigpy_group(zha_group.group_id)
await hass.async_block_till_done()
# we shouldn't have the group anymore
assert zha_gateway.async_get_group_by_name(zha_group.name) is None
# the group entity should be cleaned up
assert entity_id not in hass.states.async_entity_ids(LIGHT_DOMAIN)
# test creating a group with 1 member
zha_group = await zha_gateway.async_create_zigpy_group(
"Test Group", [GroupMember(device_light_1.ieee, 1)]
)
await hass.async_block_till_done()
assert zha_group is not None
assert len(zha_group.members) == 1
for member in zha_group.members:
assert member.device.ieee in [device_light_1.ieee]
# the group entity should not have been cleaned up
assert entity_id not in hass.states.async_entity_ids(LIGHT_DOMAIN)
with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
await zha_group.members[0].async_remove_from_group()
assert len(zha_group.members) == 1
for member in zha_group.members:
assert member.device.ieee in [device_light_1.ieee]
async def test_updating_device_store(hass, zigpy_dev_basic, zha_dev_basic):
"""Test saving data after a delay."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
await async_enable_traffic(hass, [zha_dev_basic])
assert zha_dev_basic.last_seen is not None
entry = zha_gateway.zha_storage.async_get_or_create_device(zha_dev_basic)
assert entry.last_seen == zha_dev_basic.last_seen
assert zha_dev_basic.last_seen is not None
last_seen = zha_dev_basic.last_seen
# test that we can't set None as last seen any more
zha_dev_basic.async_update_last_seen(None)
assert last_seen == zha_dev_basic.last_seen
# test that we won't put None in storage
zigpy_dev_basic.last_seen = None
assert zha_dev_basic.last_seen is None
await zha_gateway.async_update_device_storage()
await hass.async_block_till_done()
entry = zha_gateway.zha_storage.async_get_or_create_device(zha_dev_basic)
assert entry.last_seen == last_seen
# test that we can still set a good last_seen
last_seen = time.time()
zha_dev_basic.async_update_last_seen(last_seen)
assert last_seen == zha_dev_basic.last_seen
# test that we still put good values in storage
await zha_gateway.async_update_device_storage()
await hass.async_block_till_done()
entry = zha_gateway.zha_storage.async_get_or_create_device(zha_dev_basic)
assert entry.last_seen == last_seen
async def test_cleaning_up_storage(hass, zigpy_dev_basic, zha_dev_basic, hass_storage):
"""Test cleaning up zha storage and remove stale devices."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
await async_enable_traffic(hass, [zha_dev_basic])
assert zha_dev_basic.last_seen is not None
await zha_gateway.zha_storage.async_save()
await hass.async_block_till_done()
assert hass_storage["zha.storage"]["data"]["devices"]
device = hass_storage["zha.storage"]["data"]["devices"][0]
assert device["ieee"] == str(zha_dev_basic.ieee)
zha_dev_basic.device.last_seen = time.time() - TOMBSTONE_LIFETIME - 1
await zha_gateway.async_update_device_storage()
await hass.async_block_till_done()
await zha_gateway.zha_storage.async_save()
await hass.async_block_till_done()
assert not hass_storage["zha.storage"]["data"]["devices"]
|
from django.test import TestCase
from django.test.utils import override_settings
from zinnia.admin.widgets import MPTTFilteredSelectMultiple
from zinnia.admin.widgets import MiniTextarea
from zinnia.admin.widgets import TagAutoComplete
from zinnia.models.entry import Entry
from zinnia.signals import disconnect_entry_signals
class MPTTFilteredSelectMultipleTestCase(TestCase):
maxDiff = None
def test_optgroups(self):
choices = [
(1, 'Category 1', (1, 1)),
(2, '|-- Category 2', (1, 2))
]
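        # Each choice is (value, label, (tree_id, left)); the tree metadata
        # surfaces as the data-tree-id / data-left-value attrs asserted below.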
widget = MPTTFilteredSelectMultiple(
'test', False, choices=choices)
optgroups = widget.optgroups('toto', '1')
self.assertEqual(
optgroups,
[
(
None, [
{
'index': '0',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': True,
'attrs': {
'selected': True,
'data-tree-id': 1,
'data-left-value': 1
},
'value': 1,
'label': 'Category 1',
'wrap_label': True
}
], 0
), (
None, [
{
'index': '1',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': False,
'attrs': {
'data-tree-id': 1,
'data-left-value': 2
},
'value': 2,
'label': '|-- Category 2',
'wrap_label': True
}
], 1
)
]
)
optgroups = widget.optgroups('toto', ['2'])
self.assertEqual(
optgroups,
[
(
None, [
{
'index': '0',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': False,
'attrs': {
'data-tree-id': 1,
'data-left-value': 1
},
'value': 1,
'label': 'Category 1',
'wrap_label': True
}
], 0
), (
None, [
{
'index': '1',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': True,
'attrs': {
'selected': True,
'data-tree-id': 1,
'data-left-value': 2
},
'value': 2,
'label': '|-- Category 2',
'wrap_label': True
}
], 1
)
]
)
optgroups = widget.optgroups('toto', '1', {'attribute': 'value'})
self.assertEqual(
optgroups,
[
(
None, [
{
'index': '0',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': True,
'attrs': {
'selected': True,
'attribute': 'value',
'data-tree-id': 1,
'data-left-value': 1
},
'value': 1,
'label': 'Category 1',
'wrap_label': True
}
], 0
), (
None, [
{
'index': '1',
'name': 'toto',
'template_name':
'django/forms/widgets/select_option.html',
'type': 'select',
'selected': False,
'attrs': {
'attribute': 'value',
'data-tree-id': 1,
'data-left-value': 2
},
'value': 2,
'label': '|-- Category 2',
'wrap_label': True
}
], 1
)
]
)
@override_settings(STATIC_URL='/s/')
def test_media(self):
medias = MPTTFilteredSelectMultiple('test', False).media
self.assertEqual(medias._css, {})
self.assertEqual(medias._js, [
'/s/admin/js/core.js',
'/s/zinnia/admin/mptt/js/mptt_m2m_selectbox.js',
'/s/admin/js/SelectFilter2.js'])
class TagAutoCompleteTestCase(TestCase):
def setUp(self):
disconnect_entry_signals()
def test_get_tags(self):
widget = TagAutoComplete()
self.assertEqual(
widget.get_tags(),
[])
params = {'title': 'My entry',
'tags': 'zinnia, test',
'slug': 'my-entry'}
Entry.objects.create(**params)
self.assertEqual(
widget.get_tags(),
['test', 'zinnia'])
def test_render(self):
widget = TagAutoComplete()
params = {'title': 'My entry',
'tags': 'zinnia, test',
'slug': 'my-entry'}
Entry.objects.create(**params)
self.assertHTMLEqual(
widget.render('tag', 'test,'),
'<input class="vTextField" name="tag" type="text" value="test," />'
'\n<script type="text/javascript">\n(function($) {'
'\n $(document).ready(function() {'
'\n $("#id_tag").select2({'
'\n width: "element",'
'\n maximumInputLength: 50,'
'\n tokenSeparators: [",", " "],'
'\n tags: ["test", "zinnia"]'
'\n });\n });'
'\n}(django.jQuery));\n</script>')
def test_render_tag_with_apostrophe(self):
widget = TagAutoComplete()
params = {'title': 'My entry',
'tags': "zinnia, test, apos'trophe",
'slug': 'my-entry'}
Entry.objects.create(**params)
self.maxDiff = None
self.assertHTMLEqual(
widget.render('tag', 'test,'),
'<input class="vTextField" name="tag" type="text" value="test," />'
'\n<script type="text/javascript">\n(function($) {'
'\n $(document).ready(function() {'
'\n $("#id_tag").select2({'
'\n width: "element",'
'\n maximumInputLength: 50,'
'\n tokenSeparators: [",", " "],'
'\n tags: ["apos\'trophe", "test", "zinnia"]'
'\n });\n });'
'\n}(django.jQuery));\n</script>')
@override_settings(STATIC_URL='/s/')
def test_media(self):
medias = TagAutoComplete().media
self.assertEqual(
medias._css,
{'all': ['/s/zinnia/admin/select2/css/select2.css']}
)
self.assertEqual(
medias._js,
['/s/zinnia/admin/select2/js/select2.js']
)
class MiniTextareaTestCase(TestCase):
def test_render(self):
widget = MiniTextarea()
self.assertHTMLEqual(
widget.render('field', 'value'),
'<textarea class="vLargeTextField" '
'cols="40" name="field" rows="2">'
'\r\nvalue</textarea>')
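# Hedged sketch, not part of zinnia: illustrate how the 3-tuple choices used
# in test_optgroups map onto the data-* attributes asserted above. Each choice
# is (value, label, (tree_id, left)); the widget surfaces the trailing pair as
# data-tree-id / data-left-value so the client-side JS can rebuild the tree.
def _mptt_choice_attrs_sketch():
    """Return the per-option attrs derived from illustrative 3-tuple choices."""
    choices = [(1, 'Category 1', (1, 1)), (2, '|-- Category 2', (1, 2))]
    return [
        {'value': value, 'label': label,
         'attrs': {'data-tree-id': tree_id, 'data-left-value': left}}
        for value, label, (tree_id, left) in choices
    ]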
|
import logging
from homeassistant.components.mqtt import (
async_subscribe_connection_status,
is_connected as mqtt_connected,
)
from homeassistant.core import callback
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .discovery import (
TASMOTA_DISCOVERY_ENTITY_UPDATED,
clear_discovery_hash,
set_discovery_hash,
)
_LOGGER = logging.getLogger(__name__)
class TasmotaEntity(Entity):
"""Base class for Tasmota entities."""
def __init__(self, tasmota_entity) -> None:
"""Initialize."""
self._state = None
self._tasmota_entity = tasmota_entity
self._unique_id = tasmota_entity.unique_id
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
self._tasmota_entity.set_on_state_callback(self.state_updated)
await self._subscribe_topics()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
await self._tasmota_entity.unsubscribe_topics()
await super().async_will_remove_from_hass()
async def discovery_update(self, update, write_state=True):
"""Handle updated discovery message."""
self._tasmota_entity.config_update(update)
await self._subscribe_topics()
if write_state:
self.async_write_ha_state()
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
await self._tasmota_entity.subscribe_topics()
@callback
def state_updated(self, state, **kwargs):
"""Handle state updates."""
self._state = state
self.async_write_ha_state()
@property
def device_info(self):
"""Return a device description for device registry."""
return {"connections": {(CONNECTION_NETWORK_MAC, self._tasmota_entity.mac)}}
@property
def name(self):
"""Return the name of the binary sensor."""
return self._tasmota_entity.name
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
class TasmotaAvailability(TasmotaEntity):
"""Mixin used for platforms that report availability."""
def __init__(self, **kwds) -> None:
"""Initialize the availability mixin."""
self._available = False
super().__init__(**kwds)
async def async_added_to_hass(self) -> None:
"""Subscribe to MQTT events."""
self._tasmota_entity.set_on_availability_callback(self.availability_updated)
self.async_on_remove(
async_subscribe_connection_status(self.hass, self.async_mqtt_connected)
)
await super().async_added_to_hass()
@callback
def availability_updated(self, available: bool) -> None:
"""Handle updated availability."""
if available and not self._available:
self._tasmota_entity.poll_status()
self._available = available
self.async_write_ha_state()
@callback
def async_mqtt_connected(self, _):
"""Update state on connection/disconnection to MQTT broker."""
if not self.hass.is_stopping:
if not mqtt_connected(self.hass):
self._available = False
self.async_write_ha_state()
@property
def available(self) -> bool:
"""Return if the device is available."""
return self._available
class TasmotaDiscoveryUpdate(TasmotaEntity):
"""Mixin used to handle updated discovery message."""
def __init__(self, discovery_hash, discovery_update, **kwds) -> None:
"""Initialize the discovery update mixin."""
self._discovery_hash = discovery_hash
self._discovery_update = discovery_update
self._removed_from_hass = False
super().__init__(**kwds)
async def async_added_to_hass(self) -> None:
"""Subscribe to discovery updates."""
self._removed_from_hass = False
await super().async_added_to_hass()
async def discovery_callback(config):
"""Handle discovery update."""
_LOGGER.debug(
"Got update for entity with hash: %s '%s'",
self._discovery_hash,
config,
)
if not self._tasmota_entity.config_same(config):
# Changed payload: Notify component
_LOGGER.debug("Updating component: %s", self.entity_id)
await self._discovery_update(config)
else:
# Unchanged payload: Ignore to avoid changing states
_LOGGER.debug("Ignoring unchanged update for: %s", self.entity_id)
# Set in case the entity has been removed and is re-added, for example when changing entity_id
set_discovery_hash(self.hass, self._discovery_hash)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
TASMOTA_DISCOVERY_ENTITY_UPDATED.format(*self._discovery_hash),
discovery_callback,
)
)
@callback
def add_to_platform_abort(self) -> None:
"""Abort adding an entity to a platform."""
clear_discovery_hash(self.hass, self._discovery_hash)
super().add_to_platform_abort()
async def async_will_remove_from_hass(self) -> None:
"""Stop listening to signal and cleanup discovery data.."""
if not self._removed_from_hass:
clear_discovery_hash(self.hass, self._discovery_hash)
self._removed_from_hass = True
await super().async_will_remove_from_hass()
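# Hedged composition sketch, not shipped code: platform entities are expected
# to combine the mixins above. TasmotaDemoSwitch and its is_on property are
# illustrative assumptions only; state still arrives via state_updated().
class TasmotaDemoSwitch(TasmotaAvailability, TasmotaDiscoveryUpdate):
    """Hypothetical switch composed from the availability and discovery mixins."""

    @property
    def is_on(self):
        """Return True if the last MQTT state update reported 'on'."""
        return self._state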
|
import argparse
import logging
import sys
from typing import Any
from typing import Callable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import Union
import a_sync
from marathon import MarathonClient
from marathon.models.task import MarathonTask
from mypy_extensions import Arg
from paasta_tools.kubernetes_tools import get_all_nodes
from paasta_tools.kubernetes_tools import get_all_pods
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import V1Node
from paasta_tools.kubernetes_tools import V1Pod
from paasta_tools.marathon_tools import get_marathon_clients
from paasta_tools.marathon_tools import get_marathon_servers
from paasta_tools.mesos_tools import get_slaves
from paasta_tools.monitoring_tools import ReplicationChecker
from paasta_tools.paasta_service_config_loader import PaastaServiceConfigLoader
from paasta_tools.smartstack_tools import KubeSmartstackEnvoyReplicationChecker
from paasta_tools.smartstack_tools import MesosSmartstackEnvoyReplicationChecker
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import InstanceConfig_T
from paasta_tools.utils import list_services
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import SPACER
from paasta_tools.utils import SystemPaastaConfig
try:
import yelp_meteorite
except ImportError:
yelp_meteorite = None
log = logging.getLogger(__name__)
CheckServiceReplication = Callable[
[
Arg(InstanceConfig_T, "instance_config"),
Arg(Sequence[Union[MarathonTask, V1Pod]], "all_tasks_or_pods"),
Arg(Any, "replication_checker"),
],
Optional[bool],
]
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"--crit",
dest="under_replicated_crit_pct",
type=float,
default=10,
help="The percentage of under replicated service instances past which "
"the script will return a critical status",
)
parser.add_argument(
"--min-count-critical",
dest="min_count_critical",
type=int,
default=5,
help="The script will not return a critical status if the number of "
"under replicated service instances is below this number, even if the "
"percentage is above the critical percentage.",
)
parser.add_argument(
"service_instance_list",
nargs="*",
help="The list of service instances to check",
metavar="SERVICE%sINSTANCE" % SPACER,
)
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False
)
options = parser.parse_args()
return options
def check_services_replication(
soa_dir: str,
cluster: str,
service_instances: Sequence[str],
instance_type_class: Type[InstanceConfig_T],
check_service_replication: CheckServiceReplication,
replication_checker: ReplicationChecker,
all_tasks_or_pods: Sequence[Union[MarathonTask, V1Pod]],
) -> Tuple[int, int]:
service_instances_set = set(service_instances)
replication_statuses: List[bool] = []
for service in list_services(soa_dir=soa_dir):
service_config = PaastaServiceConfigLoader(service=service, soa_dir=soa_dir)
for instance_config in service_config.instance_configs(
cluster=cluster, instance_type_class=instance_type_class
):
if (
service_instances_set
and f"{service}{SPACER}{instance_config.instance}"
not in service_instances_set
):
continue
if instance_config.get_docker_image():
is_well_replicated = check_service_replication(
instance_config=instance_config,
all_tasks_or_pods=all_tasks_or_pods,
replication_checker=replication_checker,
)
if is_well_replicated is not None:
replication_statuses.append(is_well_replicated)
else:
log.debug(
    "%s is not deployed. Skipping replication monitoring.",
    instance_config.job_id,
)
num_under_replicated = len(
[status for status in replication_statuses if status is False]
)
return num_under_replicated, len(replication_statuses)
def emit_cluster_replication_metrics(
pct_under_replicated: float, cluster: str, scheduler: str,
) -> None:
meteorite_dims = {"paasta_cluster": cluster, "scheduler": scheduler}
gauge = yelp_meteorite.create_gauge(
"paasta.pct_services_under_replicated", meteorite_dims
)
gauge.set(pct_under_replicated)
def main(
instance_type_class: Type[InstanceConfig_T],
check_service_replication: CheckServiceReplication,
namespace: str,
mesos: bool = False,
) -> None:
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
system_paasta_config = load_system_paasta_config()
cluster = system_paasta_config.get_cluster()
replication_checker: ReplicationChecker
if mesos:
tasks_or_pods, slaves = get_mesos_tasks_and_slaves(system_paasta_config)
replication_checker = MesosSmartstackEnvoyReplicationChecker(
mesos_slaves=slaves, system_paasta_config=system_paasta_config,
)
else:
tasks_or_pods, nodes = get_kubernetes_pods_and_nodes(namespace)
replication_checker = KubeSmartstackEnvoyReplicationChecker(
nodes=nodes, system_paasta_config=system_paasta_config,
)
count_under_replicated, total = check_services_replication(
soa_dir=args.soa_dir,
cluster=cluster,
service_instances=args.service_instance_list,
instance_type_class=instance_type_class,
check_service_replication=check_service_replication,
replication_checker=replication_checker,
all_tasks_or_pods=tasks_or_pods,
)
pct_under_replicated = 0 if total == 0 else 100 * count_under_replicated / total
if yelp_meteorite is not None:
emit_cluster_replication_metrics(
pct_under_replicated, cluster, scheduler="mesos" if mesos else "kubernetes"
)
if (
pct_under_replicated >= args.under_replicated_crit_pct
and count_under_replicated >= args.min_count_critical
):
log.critical(
f"{pct_under_replicated}% of instances ({count_under_replicated}/{total}) "
f"are under replicated (past {args.under_replicated_crit_pct} is critical)!"
)
sys.exit(2)
else:
sys.exit(0)
def get_mesos_tasks_and_slaves(
system_paasta_config: SystemPaastaConfig,
) -> Tuple[Sequence[MarathonTask], List[Any]]:
clients = get_marathon_clients(get_marathon_servers(system_paasta_config))
all_clients: Sequence[MarathonClient] = clients.get_all_clients()
all_tasks: List[MarathonTask] = []
for client in all_clients:
all_tasks.extend(client.list_tasks())
mesos_slaves = a_sync.block(get_slaves)
return all_tasks, mesos_slaves
def get_kubernetes_pods_and_nodes(
namespace: str,
) -> Tuple[Sequence[V1Pod], Sequence[V1Node]]:
kube_client = KubeClient()
all_pods = get_all_pods(kube_client=kube_client, namespace=namespace)
all_nodes = get_all_nodes(kube_client)
return all_pods, all_nodes
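# Hedged entry-point sketch: a concrete checker script would call main() with
# its instance-type class and per-instance check function. The names
# KubernetesDeploymentConfig and check_kubernetes_service_replication below
# are illustrative assumptions, not imports made by this module.
#
# if __name__ == "__main__":
#     main(
#         instance_type_class=KubernetesDeploymentConfig,
#         check_service_replication=check_kubernetes_service_replication,
#         namespace="paasta",
#         mesos=False,
#     )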
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
# pylint: disable=unused-import
from absl import app
from absl import flags
from absl import logging
from compare_gan import datasets
from compare_gan import runner_lib
# Import GAN types so that they can be used in Gin configs without module names.
from compare_gan.gans.modular_gan import ModularGAN
from compare_gan.gans.s3gan import S3GAN
from compare_gan.gans.ssgan import SSGAN
# Required import to configure core TF classes and functions.
import gin
import gin.tf.external_configurables
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string("model_dir", None, "Where to store files.")
flags.DEFINE_string(
"schedule", "train",
"Schedule to run. Options: train, continuous_eval.")
flags.DEFINE_multi_string(
"gin_config", [],
"List of paths to the config files.")
flags.DEFINE_multi_string(
"gin_bindings", [],
"Newline separated list of Gin parameter bindings.")
flags.DEFINE_string(
"score_filename", "scores.csv",
"Name of the CSV file with evaluation results model_dir.")
flags.DEFINE_integer(
"num_eval_averaging_runs", 3,
"How many times to average FID and IS")
flags.DEFINE_integer(
"eval_every_steps", 5000,
"Evaluate only checkpoints whose step is divisible by this integer")
flags.DEFINE_bool("use_tpu", None, "Whether running on TPU or not.")
def _get_cluster():
if not FLAGS.use_tpu: # pylint: disable=unreachable
return None
if "TPU_NAME" not in os.environ:
raise ValueError("Could not find a TPU. Set TPU_NAME.")
return tf.contrib.cluster_resolver.TPUClusterResolver(
tpu=os.environ["TPU_NAME"],
zone=os.environ.get("TPU_ZONE", None))
@gin.configurable("run_config")
def _get_run_config(tf_random_seed=None,
single_core=False,
iterations_per_loop=1000,
save_checkpoints_steps=5000,
keep_checkpoint_max=1000):
"""Return `RunConfig` for TPUs."""
tpu_config = tf.contrib.tpu.TPUConfig(
num_shards=1 if single_core else None, # None = all cores.
iterations_per_loop=iterations_per_loop)
return tf.contrib.tpu.RunConfig(
model_dir=FLAGS.model_dir,
tf_random_seed=tf_random_seed,
save_checkpoints_steps=save_checkpoints_steps,
keep_checkpoint_max=keep_checkpoint_max,
cluster=_get_cluster(),
tpu_config=tpu_config)
def _get_task_manager():
"""Returns a TaskManager for this experiment."""
score_file = os.path.join(FLAGS.model_dir, FLAGS.score_filename)
return runner_lib.TaskManagerWithCsvResults(
model_dir=FLAGS.model_dir, score_file=score_file)
def main(unused_argv):
logging.info("Gin config: %s\nGin bindings: %s",
FLAGS.gin_config, FLAGS.gin_bindings)
gin.parse_config_files_and_bindings(FLAGS.gin_config, FLAGS.gin_bindings)
if FLAGS.use_tpu is None:
FLAGS.use_tpu = bool(os.environ.get("TPU_NAME", ""))
if FLAGS.use_tpu:
logging.info("Found TPU %s.", os.environ["TPU_NAME"])
run_config = _get_run_config()
task_manager = _get_task_manager()
options = runner_lib.get_options_dict()
runner_lib.run_with_schedule(
schedule=FLAGS.schedule,
run_config=run_config,
task_manager=task_manager,
options=options,
use_tpu=FLAGS.use_tpu,
num_eval_averaging_runs=FLAGS.num_eval_averaging_runs,
eval_every_steps=FLAGS.eval_every_steps)
logging.info("I\"m done with my work, ciao!")
if __name__ == "__main__":
flags.mark_flag_as_required("model_dir")
app.run(main)
|
import os
import unittest
from perfkitbenchmarker.linux_packages import wrk
import six
class WrkParseOutputTestCase(unittest.TestCase):
def setUp(self):
data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
result_path = os.path.join(data_dir, 'wrk_result.txt')
with open(result_path) as result_file:
self.wrk_results = result_file.read()
def testParsesSample(self):
expected = [('p5 latency', 0.162, 'ms'),
('p50 latency', 0.187, 'ms'),
('p90 latency', 0.256, 'ms'),
('p99 latency', 0.519, 'ms'),
('p99.9 latency', 5.196, 'ms'),
('bytes transferred', 150068000.0, 'bytes'),
('errors', 0.0, 'n'),
('requests', 577297.0, 'n'),
('throughput', 9605.69, 'requests/sec')]
actual = list(wrk._ParseOutput(self.wrk_results))
six.assertCountEqual(self, expected, actual)
def testFailsForInvalidInput(self):
with self.assertRaisesRegexp(ValueError, 'bar'):
list(wrk._ParseOutput('bar'))
if __name__ == '__main__':
unittest.main()
|
import logging
from kalliope.core import Utils
logging.basicConfig()
logger = logging.getLogger("kalliope")
class TriggerLauncher(object):
def __init__(self):
pass
@staticmethod
def get_trigger(settings, callback):
"""
Start a trigger module
:param settings: Loaded settings describing the available triggers
:param callback: Callback function to call when the trigger catches the magic word
:return: The instance of Trigger
:rtype: Trigger
"""
trigger_folder = None
if settings.resources:
trigger_folder = settings.resources.trigger_folder
trigger_instance = None
for trigger in settings.triggers:
if trigger.name == settings.default_trigger_name:
# add the callback method to parameters
trigger.parameters["callback"] = callback
logger.debug(
    "TriggerLauncher: Start trigger %s with parameters: %s",
    trigger.name, trigger.parameters)
trigger_instance = Utils.get_dynamic_class_instantiation(package_name="trigger",
module_name=trigger.name,
parameters=trigger.parameters,
resources_dir=trigger_folder)
break
return trigger_instance
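# Hedged usage sketch (names illustrative): the caller supplies loaded
# settings plus a callback that fires when the hotword is detected, e.g.:
#
#     def on_hotword():
#         pass  # start capturing the user's order
#
#     trigger = TriggerLauncher.get_trigger(settings, callback=on_hotword)
#     trigger.start()  # assumes the trigger instance is a thread exposing start()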
|
import pytest
from homeassistant.components.debugpy import (
CONF_HOST,
CONF_PORT,
CONF_START,
CONF_WAIT,
DOMAIN,
SERVICE_START,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
@pytest.fixture
def mock_debugpy():
"""Mock debugpy lib."""
with patch("homeassistant.components.debugpy.debugpy") as mocked_debugpy:
yield mocked_debugpy
async def test_default(hass: HomeAssistant, mock_debugpy) -> None:
"""Test if the default settings work."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
mock_debugpy.listen.assert_called_once_with(("0.0.0.0", 5678))
mock_debugpy.wait_for_client.assert_not_called()
assert len(mock_debugpy.method_calls) == 1
async def test_wait_on_startup(hass: HomeAssistant, mock_debugpy) -> None:
"""Test if the waiting for client is called."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_WAIT: True}})
mock_debugpy.listen.assert_called_once_with(("0.0.0.0", 5678))
mock_debugpy.wait_for_client.assert_called_once()
assert len(mock_debugpy.method_calls) == 2
async def test_on_demand(hass: HomeAssistant, mock_debugpy) -> None:
"""Test on-demand debugging using a service call."""
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_START: False, CONF_HOST: "127.0.0.1", CONF_PORT: 80}},
)
mock_debugpy.listen.assert_not_called()
mock_debugpy.wait_for_client.assert_not_called()
assert len(mock_debugpy.method_calls) == 0
await hass.services.async_call(
DOMAIN,
SERVICE_START,
blocking=True,
)
mock_debugpy.listen.assert_called_once_with(("127.0.0.1", 80))
mock_debugpy.wait_for_client.assert_not_called()
assert len(mock_debugpy.method_calls) == 1
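# For reference, a hedged sketch of the configuration.yaml entry exercised by
# test_on_demand (keys mirror CONF_START / CONF_HOST / CONF_PORT above):
#
# debugpy:
#   start: false
#   host: 127.0.0.1
#   port: 80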
|
from homeassistant.components import mysensors
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import (
CONDUCTIVITY,
DEGREE,
ELECTRICAL_CURRENT_AMPERE,
ELECTRICAL_VOLT_AMPERE,
ENERGY_KILO_WATT_HOUR,
FREQUENCY_HERTZ,
LENGTH_METERS,
LIGHT_LUX,
MASS_KILOGRAMS,
PERCENTAGE,
POWER_WATT,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
VOLT,
VOLUME_CUBIC_METERS,
)
SENSORS = {
"V_TEMP": [None, "mdi:thermometer"],
"V_HUM": [PERCENTAGE, "mdi:water-percent"],
"V_DIMMER": [PERCENTAGE, "mdi:percent"],
"V_PERCENTAGE": [PERCENTAGE, "mdi:percent"],
"V_PRESSURE": [None, "mdi:gauge"],
"V_FORECAST": [None, "mdi:weather-partly-cloudy"],
"V_RAIN": [None, "mdi:weather-rainy"],
"V_RAINRATE": [None, "mdi:weather-rainy"],
"V_WIND": [None, "mdi:weather-windy"],
"V_GUST": [None, "mdi:weather-windy"],
"V_DIRECTION": [DEGREE, "mdi:compass"],
"V_WEIGHT": [MASS_KILOGRAMS, "mdi:weight-kilogram"],
"V_DISTANCE": [LENGTH_METERS, "mdi:ruler"],
"V_IMPEDANCE": ["ohm", None],
"V_WATT": [POWER_WATT, None],
"V_KWH": [ENERGY_KILO_WATT_HOUR, None],
"V_LIGHT_LEVEL": [PERCENTAGE, "mdi:white-balance-sunny"],
"V_FLOW": [LENGTH_METERS, "mdi:gauge"],
"V_VOLUME": [f"{VOLUME_CUBIC_METERS}", None],
"V_LEVEL": {
"S_SOUND": ["dB", "mdi:volume-high"],
"S_VIBRATION": [FREQUENCY_HERTZ, None],
"S_LIGHT_LEVEL": [LIGHT_LUX, "mdi:white-balance-sunny"],
},
"V_VOLTAGE": [VOLT, "mdi:flash"],
"V_CURRENT": [ELECTRICAL_CURRENT_AMPERE, "mdi:flash-auto"],
"V_PH": ["pH", None],
"V_ORP": ["mV", None],
"V_EC": [CONDUCTIVITY, None],
"V_VAR": ["var", None],
"V_VA": [ELECTRICAL_VOLT_AMPERE, None],
}
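# Lookup sketch (values taken from the table above): most entries map a
# set-request type straight to [unit, icon]; V_LEVEL is keyed a second time
# by the child's presentation type, which _get_sensor_type() below handles.
#   SENSORS["V_HUM"]              -> [PERCENTAGE, "mdi:water-percent"]
#   SENSORS["V_LEVEL"]["S_SOUND"] -> ["dB", "mdi:volume-high"]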
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the MySensors platform for sensors."""
mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
MySensorsSensor,
async_add_entities=async_add_entities,
)
class MySensorsSensor(mysensors.device.MySensorsEntity):
"""Representation of a MySensors Sensor child node."""
@property
def force_update(self):
"""Return True if state updates should be forced.
If True, a state change will be triggered anytime the state property is
updated, not just when the value changes.
"""
return True
@property
def state(self):
"""Return the state of the device."""
return self._values.get(self.value_type)
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
_, icon = self._get_sensor_type()
return icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
set_req = self.gateway.const.SetReq
if (
float(self.gateway.protocol_version) >= 1.5
and set_req.V_UNIT_PREFIX in self._values
):
return self._values[set_req.V_UNIT_PREFIX]
unit, _ = self._get_sensor_type()
return unit
def _get_sensor_type(self):
"""Return list with unit and icon of sensor type."""
pres = self.gateway.const.Presentation
set_req = self.gateway.const.SetReq
SENSORS[set_req.V_TEMP.name][0] = (
TEMP_CELSIUS if self.gateway.metric else TEMP_FAHRENHEIT
)
sensor_type = SENSORS.get(set_req(self.value_type).name, [None, None])
if isinstance(sensor_type, dict):
sensor_type = sensor_type.get(pres(self.child_type).name, [None, None])
return sensor_type
|
revision = '8323a5ea723a'
down_revision = 'b33c838cb669'
from alembic import op
from sqlalchemy import text
import sqlalchemy as sa
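# Note: the gin_trgm_ops operator class used below requires the pg_trgm
# extension to be installed. Roughly equivalent SQL for the first index:
#   CREATE INDEX ix_certificates_cn_lower
#   ON certificates USING gin (lower(cn) gin_trgm_ops);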
def upgrade():
op.create_index(
"ix_certificates_cn_lower",
"certificates",
[text("lower(cn)")],
unique=False,
postgresql_ops={"lower(cn)": "gin_trgm_ops"},
postgresql_using="gin",
)
op.create_index(
"ix_certificates_name_lower",
"certificates",
[text("lower(name)")],
unique=False,
postgresql_ops={"lower(name)": "gin_trgm_ops"},
postgresql_using="gin",
)
op.create_index(
"ix_domains_name_lower",
"domains",
[text("lower(name)")],
unique=False,
postgresql_ops={"lower(name)": "gin_trgm_ops"},
postgresql_using="gin",
)
def downgrade():
op.drop_index("ix_certificates_cn_lower", table_name="certificates")
op.drop_index("ix_certificates_name_lower", table_name="certificates")
op.drop_index("ix_domains_name_lower", table_name="domains")
|
from homeassistant.const import ATTR_ATTRIBUTION, ATTR_STATE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import (
CATEGORY_CDC_REPORT,
CATEGORY_USER_REPORT,
DATA_CLIENT,
DOMAIN,
SENSORS,
TOPIC_UPDATE,
TYPE_USER_CHICK,
TYPE_USER_DENGUE,
TYPE_USER_FLU,
TYPE_USER_LEPTO,
TYPE_USER_NO_SYMPTOMS,
TYPE_USER_SYMPTOMS,
TYPE_USER_TOTAL,
)
ATTR_CITY = "city"
ATTR_REPORTED_DATE = "reported_date"
ATTR_REPORTED_LATITUDE = "reported_latitude"
ATTR_REPORTED_LONGITUDE = "reported_longitude"
ATTR_STATE_REPORTS_LAST_WEEK = "state_reports_last_week"
ATTR_STATE_REPORTS_THIS_WEEK = "state_reports_this_week"
ATTR_ZIP_CODE = "zip_code"
DEFAULT_ATTRIBUTION = "Data provided by Flu Near You"
EXTENDED_TYPE_MAPPING = {
TYPE_USER_FLU: "ili",
TYPE_USER_NO_SYMPTOMS: "no_symptoms",
TYPE_USER_TOTAL: "total_surveys",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Flu Near You sensors based on a config entry."""
fny = hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id]
async_add_entities(
[
FluNearYouSensor(fny, sensor_type, name, category, icon, unit)
for category, sensors in SENSORS.items()
for sensor_type, name, icon, unit in sensors
],
True,
)
class FluNearYouSensor(Entity):
"""Define a base Flu Near You sensor."""
def __init__(self, fny, sensor_type, name, category, icon, unit):
"""Initialize the sensor."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._category = category
self._fny = fny
self._icon = icon
self._name = name
self._sensor_type = sensor_type
self._state = None
self._unit = unit
@property
def available(self):
"""Return True if entity is available."""
return bool(self._fny.data[self._category])
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def name(self):
"""Return the name."""
return self._name
@property
def state(self):
"""Return the state."""
return self._state
@property
def unique_id(self):
"""Return a unique, Home Assistant friendly identifier for this entity."""
return f"{self._fny.latitude},{self._fny.longitude}_{self._sensor_type}"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
self.async_on_remove(async_dispatcher_connect(self.hass, TOPIC_UPDATE, update))
await self._fny.async_register_api_interest(self._sensor_type)
self.update_from_latest_data()
async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher listener when removed."""
self._fny.async_deregister_api_interest(self._sensor_type)
@callback
def update_from_latest_data(self):
"""Update the sensor."""
cdc_data = self._fny.data.get(CATEGORY_CDC_REPORT)
user_data = self._fny.data.get(CATEGORY_USER_REPORT)
if self._category == CATEGORY_CDC_REPORT and cdc_data:
self._attrs.update(
{
ATTR_REPORTED_DATE: cdc_data["week_date"],
ATTR_STATE: cdc_data["name"],
}
)
self._state = cdc_data[self._sensor_type]
elif self._category == CATEGORY_USER_REPORT and user_data:
self._attrs.update(
{
ATTR_CITY: user_data["local"]["city"].split("(")[0],
ATTR_REPORTED_LATITUDE: user_data["local"]["latitude"],
ATTR_REPORTED_LONGITUDE: user_data["local"]["longitude"],
ATTR_STATE: user_data["state"]["name"],
ATTR_ZIP_CODE: user_data["local"]["zip"],
}
)
if self._sensor_type in user_data["state"]["data"]:
states_key = self._sensor_type
elif self._sensor_type in EXTENDED_TYPE_MAPPING:
states_key = EXTENDED_TYPE_MAPPING[self._sensor_type]
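# Every user-report sensor type is expected to match one of the two
# branches above, so states_key is assumed to be bound at this point.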
self._attrs[ATTR_STATE_REPORTS_THIS_WEEK] = user_data["state"]["data"][
states_key
]
self._attrs[ATTR_STATE_REPORTS_LAST_WEEK] = user_data["state"][
"last_week_data"
][states_key]
if self._sensor_type == TYPE_USER_TOTAL:
self._state = sum(
v
for k, v in user_data["local"].items()
if k
in (
TYPE_USER_CHICK,
TYPE_USER_DENGUE,
TYPE_USER_FLU,
TYPE_USER_LEPTO,
TYPE_USER_SYMPTOMS,
)
)
else:
self._state = user_data["local"][self._sensor_type]
|
import os
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.lighting.brightness', 'getBrightness', out_sig='d')
def get_brightness(self):
"""
Get the device's brightness
:return: Brightness
:rtype: float
"""
self.logger.debug("DBus call get_brightness")
return self.zone["backlight"]["brightness"]
@endpoint('razer.device.lighting.brightness', 'setBrightness', in_sig='d')
def set_brightness(self, brightness):
"""
Set the device's brightness
:param brightness: Brightness
:type brightness: int
"""
self.logger.debug("DBus call set_brightness")
driver_path = self.get_driver_path('matrix_brightness')
self.method_args['brightness'] = brightness
if brightness > 100:
brightness = 100
elif brightness < 0:
brightness = 0
self.set_persistence("backlight", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.led.gamemode', 'getGameMode', out_sig='b')
def get_game_mode(self):
"""
Get game mode LED state
:return: Game mode LED state
:rtype: bool
"""
self.logger.debug("DBus call get_game_mode")
driver_path = self.get_driver_path('game_led_state')
with open(driver_path, 'r') as driver_file:
return driver_file.read().strip() == '1'
@endpoint('razer.device.led.gamemode', 'setGameMode', in_sig='b')
def set_game_mode(self, enable):
"""
Set game mode LED state
:param enable: Status of game mode
:type enable: bool
"""
self.logger.debug("DBus call set_game_mode")
driver_path = self.get_driver_path('game_led_state')
for kb_int in self.additional_interfaces:
super_file = os.path.join(kb_int, 'key_super')
alt_tab = os.path.join(kb_int, 'key_alt_tab')
alt_f4 = os.path.join(kb_int, 'key_alt_f4')
key_byte = b'\x01' if enable else b'\x00'
for key_path in (super_file, alt_tab, alt_f4):
    # Context managers ensure each sysfs handle is closed promptly.
    with open(key_path, 'wb') as key_file:
        key_file.write(key_byte)
with open(driver_path, 'w') as driver_file:
if enable:
driver_file.write('1')
else:
driver_file.write('0')
@endpoint('razer.device.led.macromode', 'getMacroMode', out_sig='b')
def get_macro_mode(self):
"""
Get macro mode LED state
:return: Status of macro mode
:rtype: bool
"""
self.logger.debug("DBus call get_macro_mode")
driver_path = self.get_driver_path('macro_led_state')
with open(driver_path, 'r') as driver_file:
return driver_file.read().strip() == '1'
@endpoint('razer.device.led.macromode', 'setMacroMode', in_sig='b')
def set_macro_mode(self, enable):
"""
Set macro mode LED state
:param enable: Status of macro mode
:type enable: bool
"""
self.logger.debug("DBus call set_macro_mode")
driver_path = self.get_driver_path('macro_led_state')
with open(driver_path, 'w') as driver_file:
if enable:
driver_file.write('1')
else:
driver_file.write('0')
@endpoint('razer.device.led.macromode', 'getMacroEffect', out_sig='i')
def get_macro_effect(self):
"""
Get the effect on the macro LED
:return: Macro LED effect ID
:rtype: int
"""
self.logger.debug("DBus call get_macro_effect")
driver_path = self.get_driver_path('macro_led_effect')
with open(driver_path, 'r') as driver_file:
return int(driver_file.read().strip())
@endpoint('razer.device.led.macromode', 'setMacroEffect', in_sig='y')
def set_macro_effect(self, effect):
"""
Set the effect on the macro LED
:param effect: Macro LED effect ID
:type effect: int
"""
self.logger.debug("DBus call set_macro_effect")
driver_path = self.get_driver_path('macro_led_effect')
with open(driver_path, 'w') as driver_file:
driver_file.write(str(int(effect)))
@endpoint('razer.device.lighting.chroma', 'setWave', in_sig='i')
def set_wave_effect(self, direction):
"""
Set the wave effect on the device
:param direction: 1 - left to right, 2 - right to left
:type direction: int
"""
self.logger.debug("DBus call set_wave_effect")
# Notify others
self.send_effect_event('setWave', direction)
# remember effect
self.set_persistence("backlight", "effect", 'wave')
self.set_persistence("backlight", "wave_dir", int(direction))
driver_path = self.get_driver_path('matrix_effect_wave')
if direction not in self.WAVE_DIRS:
direction = self.WAVE_DIRS[0]
with open(driver_path, 'w') as driver_file:
driver_file.write(str(direction))
@endpoint('razer.device.lighting.chroma', 'setStatic', in_sig='yyy')
def set_static_effect(self, red, green, blue):
"""
Set the device to static colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_static_effect")
# Notify others
self.send_effect_event('setStatic', red, green, blue)
# remember effect
self.set_persistence("backlight", "effect", 'static')
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
driver_path = self.get_driver_path('matrix_effect_static')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setBlinking', in_sig='yyy')
def set_blinking_effect(self, red, green, blue):
"""
Set the device to blinking colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_blinking_effect")
# Notify others
self.send_effect_event('setBlinking', red, green, blue)
# remember effect
self.set_persistence("backlight", "effect", 'blinking')
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
driver_path = self.get_driver_path('matrix_effect_blinking')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setSpectrum')
def set_spectrum_effect(self):
"""
Set the device to spectrum mode
"""
self.logger.debug("DBus call set_spectrum_effect")
# Notify others
self.send_effect_event('setSpectrum')
# remember effect
self.set_persistence("backlight", "effect", 'spectrum')
driver_path = self.get_driver_path('matrix_effect_spectrum')
with open(driver_path, 'w') as driver_file:
driver_file.write('1')
@endpoint('razer.device.lighting.chroma', 'setNone')
def set_none_effect(self):
"""
Set the device to no effect ("none") mode
"""
self.logger.debug("DBus call set_none_effect")
# Notify others
self.send_effect_event('setNone')
# remember effect
self.set_persistence("backlight", "effect", 'none')
driver_path = self.get_driver_path('matrix_effect_none')
with open(driver_path, 'w') as driver_file:
driver_file.write('1')
@endpoint('razer.device.misc', 'triggerReactive')
def trigger_reactive_effect(self):
"""
Trigger reactive on Firefly
"""
self.logger.debug("DBus call trigger_reactive_effect")
# Notify others
self.send_effect_event('triggerReactive')
driver_path = self.get_driver_path('matrix_reactive_trigger')
with open(driver_path, 'w') as driver_file:
driver_file.write('1')
@endpoint('razer.device.lighting.chroma', 'setReactive', in_sig='yyyy')
def set_reactive_effect(self, red, green, blue, speed):
"""
Set the device to reactive effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
:param speed: Speed
:type speed: int
"""
self.logger.debug("DBus call set_reactive_effect")
driver_path = self.get_driver_path('matrix_effect_reactive')
# Notify others
self.send_effect_event('setReactive', red, green, blue, speed)
# remember effect
self.set_persistence("backlight", "effect", 'reactive')
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
if speed not in (1, 2, 3, 4):
speed = 4
self.set_persistence("backlight", "speed", int(speed))
payload = bytes([speed, red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setBreathRandom')
def set_breath_random_effect(self):
"""
Set the device to random colour breathing effect
"""
self.logger.debug("DBus call set_breath_random_effect")
# Notify others
self.send_effect_event('setBreathRandom')
# remember effect
self.set_persistence("backlight", "effect", 'breathRandom')
driver_path = self.get_driver_path('matrix_effect_breath')
payload = b'1'
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setBreathSingle', in_sig='yyy')
def set_breath_single_effect(self, red, green, blue):
"""
Set the device to single colour breathing effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_breath_single_effect")
# Notify others
self.send_effect_event('setBreathSingle', red, green, blue)
# remember effect
self.set_persistence("backlight", "effect", 'breathSingle')
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
driver_path = self.get_driver_path('matrix_effect_breath')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setBreathDual', in_sig='yyyyyy')
def set_breath_dual_effect(self, red1, green1, blue1, red2, green2, blue2):
"""
Set the device to dual colour breathing effect
:param red1: Red component
:type red1: int
:param green1: Green component
:type green1: int
:param blue1: Blue component
:type blue1: int
:param red2: Red component
:type red2: int
:param green2: Green component
:type green2: int
:param blue2: Blue component
:type blue2: int
"""
self.logger.debug("DBus call set_breath_dual_effect")
# Notify others
self.send_effect_event('setBreathDual', red1, green1, blue1, red2, green2, blue2)
# remember effect
self.set_persistence("backlight", "effect", 'breathDual')
self.zone["backlight"]["colors"][0:6] = int(red1), int(green1), int(blue1), int(red2), int(green2), int(blue2)
driver_path = self.get_driver_path('matrix_effect_breath')
payload = bytes([red1, green1, blue1, red2, green2, blue2])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setBreathTriple', in_sig='yyyyyyyyy')
def set_breath_triple_effect(self, red1, green1, blue1, red2, green2, blue2, red3, green3, blue3):
"""
Set the device to triple colour breathing effect
:param red1: Red component
:type red1: int
:param green1: Green component
:type green1: int
:param blue1: Blue component
:type blue1: int
:param red2: Red component
:type red2: int
:param green2: Green component
:type green2: int
:param blue2: Blue component
:type blue2: int
:param red3: Red component
:type red3: int
:param green3: Green component
:type green3: int
:param blue3: Blue component
:type blue3: int
"""
self.logger.debug("DBus call set_breath_triple_effect")
# Notify others
self.send_effect_event('setBreathTriple', red1, green1, blue1, red2, green2, blue2, red3, green3, blue3)
# remember effect
self.set_persistence("backlight", "effect", 'breathTriple')
self.zone["backlight"]["colors"][0:9] = int(red1), int(green1), int(blue1), int(red2), int(green2), int(blue2), int(red3), int(green3), int(blue3)
driver_path = self.get_driver_path('matrix_effect_breath')
payload = bytes([red1, green1, blue1, red2, green2, blue2, red3, green3, blue3])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setCustom')
def set_custom_effect(self):
"""
Set the device to use custom LED matrix
"""
# TODO uncomment
# self.logger.debug("DBus call set_custom_effect")
driver_path = self.get_driver_path('matrix_effect_custom')
payload = b'1'
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.chroma', 'setKeyRow', in_sig='ay', byte_arrays=True)
def set_key_row(self, payload):
"""
Set the RGB matrix on the device
Byte array like
[1, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00,
255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 255, 00, 255, 00, 00]
First byte is row, on firefly its always 1, on keyboard its 0-5
Then its 3byte groups of RGB
:param payload: Binary payload
:type payload: bytes
"""
# TODO uncomment
# self.logger.debug("DBus call set_key_row")
driver_path = self.get_driver_path('matrix_custom_frame')
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
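# Payload sketch for set_key_row (values illustrative): the row index is
# followed by one RGB triplet per column. For example, row 0 with two keys,
# the first red and the second green:
#   payload = bytes([0, 255, 0, 0, 0, 255, 0])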
@endpoint('razer.device.lighting.custom', 'setRipple', in_sig='yyyd')
def set_ripple_effect(self, red, green, blue, refresh_rate):
"""
Set the daemon to serve a ripple effect of the specified colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
:param refresh_rate: Refresh rate
:type refresh_rate: int
"""
self.logger.debug("DBus call set_ripple_effect")
# Notify others
self.send_effect_event('setRipple', red, green, blue, refresh_rate)
# remember effect
self.set_persistence("backlight", "effect", 'ripple')
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
@endpoint('razer.device.lighting.custom', 'setRippleRandomColour', in_sig='d')
def set_ripple_effect_random_colour(self, refresh_rate):
"""
Set the daemon to serve a ripple effect of random colours
:param refresh_rate: Refresh rate
:type refresh_rate: int
"""
self.logger.debug("DBus call set_ripple_effect")
# Notify others
self.send_effect_event('setRipple', None, None, None, refresh_rate)
# remember effect
self.set_persistence("backlight", "effect", 'rippleRandomColour')
@endpoint('razer.device.lighting.chroma', 'setStarlightRandom', in_sig='y')
def set_starlight_random_effect(self, speed):
"""
Set starlight random mode
"""
self.logger.debug("DBus call set_starlight_random")
driver_path = self.get_driver_path('matrix_effect_starlight')
with open(driver_path, 'wb') as driver_file:
driver_file.write(bytes([speed]))
# Notify others
self.send_effect_event('setStarlightRandom')
# remember effect
self.set_persistence("backlight", "effect", 'starlightRandom')
self.set_persistence("backlight", "speed", int(speed))
@endpoint('razer.device.lighting.chroma', 'setStarlightSingle', in_sig='yyyy')
def set_starlight_single_effect(self, red, green, blue, speed):
"""
Set starlight mode
"""
self.logger.debug("DBus call set_starlight_single")
driver_path = self.get_driver_path('matrix_effect_starlight')
with open(driver_path, 'wb') as driver_file:
driver_file.write(bytes([speed, red, green, blue]))
# Notify others
self.send_effect_event('setStarlightSingle', red, green, blue, speed)
# remember effect
self.set_persistence("backlight", "effect", 'starlightSingle')
self.set_persistence("backlight", "speed", int(speed))
self.zone["backlight"]["colors"][0:3] = int(red), int(green), int(blue)
@endpoint('razer.device.lighting.chroma', 'setStarlightDual', in_sig='yyyyyyy')
def set_starlight_dual_effect(self, red1, green1, blue1, red2, green2, blue2, speed):
"""
Set starlight dual mode
"""
self.logger.debug("DBus call set_starlight_dual")
driver_path = self.get_driver_path('matrix_effect_starlight')
with open(driver_path, 'wb') as driver_file:
driver_file.write(bytes([speed, red1, green1, blue1, red2, green2, blue2]))
# Notify others
self.send_effect_event('setStarlightDual', red1, green1, blue1, red2, green2, blue2, speed)
# remember effect
self.set_persistence("backlight", "effect", 'starlightDual')
self.set_persistence("backlight", "speed", int(speed))
self.zone["backlight"]["colors"][0:6] = int(red1), int(green1), int(blue1), int(red2), int(green2), int(blue2)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from memory import MemoryCollector
##########################################################################
class TestMemoryCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MemoryCollector', {
'interval': 10,
'byte_unit': 'kilobyte'
})
self.collector = MemoryCollector(config, None)
def test_import(self):
self.assertTrue(MemoryCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_meminfo(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/meminfo')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
MemoryCollector.PROC = self.getFixturePath('proc_meminfo')
self.collector.collect()
metrics = {
'MemTotal': 49554212,
'MemFree': 35194496,
'MemAvailable': 35194496,
'MemUsedPercentage': 28.98,
'Buffers': 1526304,
'Cached': 10726736,
'Active': 10022168,
'Dirty': 24748,
'Inactive': 2524928,
'Shmem': 276,
'SwapTotal': 262143996,
'SwapFree': 262143996,
'SwapCached': 0,
'VmallocTotal': 34359738367,
'VmallocUsed': 445452,
'VmallocChunk': 34311049240
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from . import async_setup_auth
from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI
async def async_get_code(hass, aiohttp_client):
"""Return authorization code for link user tests."""
config = [
{
"name": "Example",
"type": "insecure_example",
"users": [
{"username": "test-user", "password": "test-pass", "name": "Test Name"}
],
},
{
"name": "Example",
"id": "2nd auth",
"type": "insecure_example",
"users": [
{"username": "2nd-user", "password": "2nd-pass", "name": "2nd Name"}
],
},
]
client = await async_setup_auth(hass, aiohttp_client, config)
user = await hass.auth.async_create_user(name="Hello")
refresh_token = await hass.auth.async_create_refresh_token(user, CLIENT_ID)
access_token = hass.auth.async_create_access_token(refresh_token)
# Now authenticate with the 2nd flow
resp = await client.post(
"/auth/login_flow",
json={
"client_id": CLIENT_ID,
"handler": ["insecure_example", "2nd auth"],
"redirect_uri": CLIENT_REDIRECT_URI,
"type": "link_user",
},
)
assert resp.status == 200
step = await resp.json()
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={"client_id": CLIENT_ID, "username": "2nd-user", "password": "2nd-pass"},
)
assert resp.status == 200
step = await resp.json()
return {
"user": user,
"code": step["result"],
"client": client,
"access_token": access_token,
}
async def test_link_user(hass, aiohttp_client):
"""Test linking a user to new credentials."""
info = await async_get_code(hass, aiohttp_client)
client = info["client"]
code = info["code"]
# Link user
resp = await client.post(
"/auth/link_user",
json={"client_id": CLIENT_ID, "code": code},
headers={"authorization": f"Bearer {info['access_token']}"},
)
assert resp.status == 200
assert len(info["user"].credentials) == 1
async def test_link_user_invalid_client_id(hass, aiohttp_client):
"""Test linking a user to new credentials."""
info = await async_get_code(hass, aiohttp_client)
client = info["client"]
code = info["code"]
# Link user
resp = await client.post(
"/auth/link_user",
json={"client_id": "invalid", "code": code},
headers={"authorization": f"Bearer {info['access_token']}"},
)
assert resp.status == 400
assert len(info["user"].credentials) == 0
async def test_link_user_invalid_code(hass, aiohttp_client):
"""Test linking a user to new credentials."""
info = await async_get_code(hass, aiohttp_client)
client = info["client"]
# Link user
resp = await client.post(
"/auth/link_user",
json={"client_id": CLIENT_ID, "code": "invalid"},
headers={"authorization": f"Bearer {info['access_token']}"},
)
assert resp.status == 400
assert len(info["user"].credentials) == 0
async def test_link_user_invalid_auth(hass, aiohttp_client):
"""Test linking a user to new credentials."""
info = await async_get_code(hass, aiohttp_client)
client = info["client"]
code = info["code"]
# Link user
resp = await client.post(
"/auth/link_user",
json={"client_id": CLIENT_ID, "code": code},
headers={"authorization": "Bearer invalid"},
)
assert resp.status == 401
assert len(info["user"].credentials) == 0
|
import logging
from typing import Any, Dict, Optional
from aiohttp import ContentTypeError
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN # pylint: disable=unused-import
from .utils import load_plum
_LOGGER = logging.getLogger(__name__)
class PlumLightpadConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for Plum Lightpad integration."""
VERSION = 1
def _show_form(self, errors=None):
schema = {
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(schema),
errors=errors or {},
)
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initialized by the user or redirected to by import."""
if not user_input:
return self._show_form()
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
# load Plum just so we know username/password work
try:
await load_plum(username, password, self.hass)
except (ContentTypeError, ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect/authenticate to Plum cloud: %s", str(ex))
return self._show_form({"base": "cannot_connect"})
await self.async_set_unique_id(username)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=username, data={CONF_USERNAME: username, CONF_PASSWORD: password}
)
async def async_step_import(
self, import_config: Optional[ConfigType]
) -> Dict[str, Any]:
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
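# Hedged sketch of the configuration.yaml entry that async_step_import
# handles (key names follow CONF_USERNAME / CONF_PASSWORD above):
#
# plum_lightpad:
#   username: you@example.com
#   password: !secret plum_password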
|
import logging
import unittest
import numpy as np
from gensim import utils
from gensim.test.utils import datapath, get_tmpfile
class TestIsCorpus(unittest.TestCase):
def test_None(self):
# test None
result = utils.is_corpus(None)
expected = (False, None)
self.assertEqual(expected, result)
def test_simple_lists_of_tuples(self):
# test list words
# one document, one word
potentialCorpus = [[(0, 4.)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
# one document, several words
potentialCorpus = [[(0, 4.), (1, 2.)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
potentialCorpus = [[(0, 4.), (1, 2.), (2, 5.), (3, 8.)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
# several documents, one word
potentialCorpus = [[(0, 4.)], [(1, 2.)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
potentialCorpus = [[(0, 4.)], [(1, 2.)], [(2, 5.)], [(3, 8.)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
def test_int_tuples(self):
potentialCorpus = [[(0, 4)]]
result = utils.is_corpus(potentialCorpus)
expected = (True, potentialCorpus)
self.assertEqual(expected, result)
def test_invalid_formats(self):
# test invalid formats
# these are not corpora, because they do not consist of 2-tuples
# of the form (int, float).
potentials = list()
potentials.append(["human"])
potentials.append("human")
potentials.append(["human", "star"])
potentials.append([1, 2, 3, 4, 5, 5])
potentials.append([[(0, 'string')]])
for noCorpus in potentials:
result = utils.is_corpus(noCorpus)
expected = (False, noCorpus)
self.assertEqual(expected, result)
class TestUtils(unittest.TestCase):
def test_decode_entities(self):
# create a string that fails to decode with unichr on narrow python builds
body = u'It’s the Year of the Horse. YES VIN DIESEL 🙌 💯'
expected = u'It\x92s the Year of the Horse. YES VIN DIESEL \U0001f64c \U0001f4af'
self.assertEqual(utils.decode_htmlentities(body), expected)
def test_open_file_existent_file(self):
number_of_lines_in_file = 30
with utils.open_file(datapath('testcorpus.mm')) as infile:
self.assertEqual(sum(1 for _ in infile), number_of_lines_in_file)
def test_open_file_non_existent_file(self):
with self.assertRaises(Exception):
with utils.open_file('non_existent_file.txt'):
pass
def test_open_file_existent_file_object(self):
number_of_lines_in_file = 30
file_obj = open(datapath('testcorpus.mm'))
with utils.open_file(file_obj) as infile:
self.assertEqual(sum(1 for _ in infile), number_of_lines_in_file)
def test_open_file_non_existent_file_object(self):
file_obj = None
with self.assertRaises(Exception):
with utils.open_file(file_obj):
pass
class TestSampleDict(unittest.TestCase):
def test_sample_dict(self):
d = {1: 2, 2: 3, 3: 4, 4: 5}
expected_dict = [(1, 2), (2, 3)]
expected_dict_random = [(k, v) for k, v in d.items()]
sampled_dict = utils.sample_dict(d, 2, False)
self.assertEqual(sampled_dict, expected_dict)
sampled_dict_random = utils.sample_dict(d, 2)
# The random sample has no fixed order, so check size and membership
# instead of asserting an exact result.
self.assertEqual(len(sampled_dict_random), 2)
for sampled_item in sampled_dict_random:
    self.assertIn(sampled_item, expected_dict_random)
class TestTrimVocabByFreq(unittest.TestCase):
def test_trim_vocab(self):
d = {"word1": 5, "word2": 1, "word3": 2}
expected_dict = {"word1": 5, "word3": 2}
utils.trim_vocab_by_freq(d, topk=2)
self.assertEqual(d, expected_dict)
d = {"word1": 5, "word2": 2, "word3": 2, "word4": 1}
expected_dict = {"word1": 5, "word2": 2, "word3": 2}
utils.trim_vocab_by_freq(d, topk=2)
self.assertEqual(d, expected_dict)
class TestMergeDicts(unittest.TestCase):
def test_merge_dicts(self):
d1 = {"word1": 5, "word2": 1, "word3": 2}
d2 = {"word1": 2, "word3": 3, "word4": 10}
res_dict = utils.merge_counts(d1, d2)
expected_dict = {"word1": 7, "word2": 1, "word3": 5, "word4": 10}
self.assertEqual(res_dict, expected_dict)
class TestWindowing(unittest.TestCase):
arr10_5 = np.array([
[0, 1, 2, 3, 4],
[1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7],
[4, 5, 6, 7, 8],
[5, 6, 7, 8, 9]
])
def _assert_arrays_equal(self, expected, actual):
self.assertEqual(expected.shape, actual.shape)
self.assertTrue((actual == expected).all())
def test_strided_windows1(self):
out = utils.strided_windows(range(5), 2)
expected = np.array([
[0, 1],
[1, 2],
[2, 3],
[3, 4]
])
self._assert_arrays_equal(expected, out)
def test_strided_windows2(self):
input_arr = np.arange(10)
out = utils.strided_windows(input_arr, 5)
expected = self.arr10_5.copy()
self._assert_arrays_equal(expected, out)
out[0, 0] = 10
self.assertEqual(10, input_arr[0], "should make view rather than copy")
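# Note on the assertion above: strided_windows builds its output with numpy
# stride tricks, so each window row aliases the input buffer; mutating the
# output therefore mutates input_arr, which is exactly what the test checks.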
def test_strided_windows_window_size_exceeds_size(self):
input_arr = np.array(['this', 'is', 'test'], dtype='object')
out = utils.strided_windows(input_arr, 4)
expected = np.ndarray((0, 0))
self._assert_arrays_equal(expected, out)
def test_strided_windows_window_size_equals_size(self):
input_arr = np.array(['this', 'is', 'test'], dtype='object')
out = utils.strided_windows(input_arr, 3)
expected = np.array([input_arr.copy()])
self._assert_arrays_equal(expected, out)
def test_iter_windows_include_below_window_size(self):
texts = [['this', 'is', 'a'], ['test', 'document']]
out = utils.iter_windows(texts, 3, ignore_below_size=False)
windows = [list(w) for w in out]
self.assertEqual(texts, windows)
out = utils.iter_windows(texts, 3)
windows = [list(w) for w in out]
self.assertEqual([texts[0]], windows)
def test_iter_windows_list_texts(self):
texts = [['this', 'is', 'a'], ['test', 'document']]
windows = list(utils.iter_windows(texts, 2))
list_windows = [list(iterable) for iterable in windows]
expected = [['this', 'is'], ['is', 'a'], ['test', 'document']]
self.assertListEqual(list_windows, expected)
def test_iter_windows_uses_views(self):
texts = [np.array(['this', 'is', 'a'], dtype='object'), ['test', 'document']]
windows = list(utils.iter_windows(texts, 2))
list_windows = [list(iterable) for iterable in windows]
expected = [['this', 'is'], ['is', 'a'], ['test', 'document']]
self.assertListEqual(list_windows, expected)
windows[0][0] = 'modified'
self.assertEqual('modified', texts[0][0])
def test_iter_windows_with_copy(self):
texts = [
np.array(['this', 'is', 'a'], dtype='object'),
np.array(['test', 'document'], dtype='object')
]
windows = list(utils.iter_windows(texts, 2, copy=True))
windows[0][0] = 'modified'
self.assertEqual('this', texts[0][0])
windows[2][0] = 'modified'
self.assertEqual('test', texts[1][0])
def test_flatten_nested(self):
nested_list = [[[1, 2, 3], [4, 5]], 6]
expected = [1, 2, 3, 4, 5, 6]
self.assertEqual(utils.flatten(nested_list), expected)
def test_flatten_not_nested(self):
not_nested = [1, 2, 3, 4, 5, 6]
expected = [1, 2, 3, 4, 5, 6]
self.assertEqual(utils.flatten(not_nested), expected)
class TestSaveAsLineSentence(unittest.TestCase):
def test_save_as_line_sentence_en(self):
corpus_file = get_tmpfile('gensim_utils.tst')
ref_sentences = [
line.split()
for line in utils.any2unicode('hello world\nhow are you').split('\n')
]
utils.save_as_line_sentence(ref_sentences, corpus_file)
with utils.open(corpus_file, 'rb', encoding='utf8') as fin:
sentences = [line.strip().split() for line in fin.read().strip().split('\n')]
self.assertEqual(sentences, ref_sentences)
def test_save_as_line_sentence_ru(self):
corpus_file = get_tmpfile('gensim_utils.tst')
ref_sentences = [
line.split()
for line in utils.any2unicode('привет мир\nкак ты поживаешь').split('\n')
]
utils.save_as_line_sentence(ref_sentences, corpus_file)
with utils.open(corpus_file, 'rb', encoding='utf8') as fin:
sentences = [line.strip().split() for line in fin.read().strip().split('\n')]
self.assertEqual(sentences, ref_sentences)
if __name__ == '__main__':
logging.root.setLevel(logging.WARNING)
unittest.main()
|
from gogogate2_api import GogoGate2Api, ISmartGateApi
import pytest
from homeassistant.components.gogogate2 import DEVICE_TYPE_GOGOGATE2, async_setup_entry
from homeassistant.components.gogogate2.common import DeviceDataUpdateCoordinator
from homeassistant.components.gogogate2.const import DEVICE_TYPE_ISMARTGATE, DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_config_update(gogogate2api_mock, hass: HomeAssistant) -> None:
"""Test config setup where the config is updated."""
api = MagicMock(GogoGate2Api)
api.info.side_effect = Exception("Error")
gogogate2api_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data={
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert not await hass.config_entries.async_setup(entry_id=config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.data == {
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
}
@patch("homeassistant.components.gogogate2.common.ISmartGateApi")
async def test_config_no_update(ismartgateapi_mock, hass: HomeAssistant) -> None:
"""Test config setup where the data is not updated."""
    api = MagicMock(ISmartGateApi)
api.info.side_effect = Exception("Error")
ismartgateapi_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data={
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert not await hass.config_entries.async_setup(entry_id=config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.data == {
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
}
async def test_auth_fail(hass: HomeAssistant) -> None:
"""Test authorization failures."""
coordinator_mock: DeviceDataUpdateCoordinator = MagicMock(
spec=DeviceDataUpdateCoordinator
)
coordinator_mock.last_update_success = False
config_entry = MockConfigEntry()
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.gogogate2.get_data_update_coordinator",
return_value=coordinator_mock,
), pytest.raises(ConfigEntryNotReady):
await async_setup_entry(hass, config_entry)
|
from io import BytesIO
import responses
from django.urls import reverse
from PIL import Image
from weblate.accounts import avatar
from weblate.auth.models import User
from weblate.trans.tests.test_views import FixtureTestCase
TEST_URL = (
"https://www.gravatar.com/avatar/"
"55502f40dc8b7c769880b10874abc9d0?d=identicon&s=32"
)
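# The hash in TEST_URL is the MD5 digest of "[email protected]", which
# Gravatar derives from the lowercased e-mail address.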
class AvatarTest(FixtureTestCase):
def setUp(self):
super().setUp()
self.user.email = "[email protected]"
self.user.save()
def test_avatar_for_email(self):
url = avatar.avatar_for_email(self.user.email, size=32)
self.assertEqual(TEST_URL, url)
@responses.activate
def test_avatar(self):
image = Image.new("RGB", (32, 32))
storage = BytesIO()
image.save(storage, "PNG")
imagedata = storage.getvalue()
responses.add(responses.GET, TEST_URL, body=imagedata)
# Real user
response = self.client.get(
reverse("user_avatar", kwargs={"user": self.user.username, "size": 32})
)
self.assert_png(response)
self.assertEqual(response.content, imagedata)
# Test caching
response = self.client.get(
reverse("user_avatar", kwargs={"user": self.user.username, "size": 32})
)
self.assert_png(response)
self.assertEqual(response.content, imagedata)
@responses.activate
def test_avatar_error(self):
responses.add(responses.GET, TEST_URL, status=503)
# Choose different username to avoid using cache
self.user.username = "test2"
self.user.save()
response = self.client.get(
reverse("user_avatar", kwargs={"user": self.user.username, "size": 32})
)
self.assert_png(response)
def test_anonymous_avatar(self):
anonymous = User.objects.get(username="anonymous")
# Anonymous user
response = self.client.get(
reverse("user_avatar", kwargs={"user": anonymous.username, "size": 32})
)
self.assertRedirects(
response, "/static/weblate-32.png", fetch_redirect_response=False
)
def test_fallback_avatar(self):
self.assert_png_data(avatar.get_fallback_avatar(32))
|
import os
import pytest
from molecule import config
from molecule.driver import digitalocean
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return digitalocean.DigitalOcean(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'digitalocean' == _instance.name
def test_options_property(_instance):
expected_options = {'managed': True}
assert expected_options == _instance.options
def test_login_cmd_template_property(_instance):
expected_ssh_command = ('ssh {address} '
'-l {user} -p {port} -i {identity_file} '
'-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no')
assert expected_ssh_command == _instance.login_cmd_template
def test_safe_files_property(_instance):
expected_safe_files = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert expected_safe_files == _instance.safe_files
def test_default_safe_files_property(_instance):
expected_default_safe_files = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert expected_default_safe_files == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
expected_ssh_defaults = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert expected_ssh_defaults == _instance.default_ssh_connection_options
def test_login_options(mocker, _instance):
m = mocker.patch(
'molecule.driver.digitalocean.DigitalOcean._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
expected_login_data = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
assert expected_login_data == _instance.login_options('foo')
def test_ansible_connection_options(mocker, _instance):
m = mocker.patch(
'molecule.driver.digitalocean.DigitalOcean._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
    expected_cnx_data = {
        'ansible_host': '172.16.0.2',
        'ansible_port': 22,
        'ansible_user': 'cloud-user',
        'ansible_private_key_file': '/foo/bar',
        'connection': 'ssh',
        'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
                                    '-o ControlMaster=auto '
                                    '-o ControlPersist=60s '
                                    '-o IdentitiesOnly=yes '
                                    '-o StrictHostKeyChecking=no'),
    }
assert expected_cnx_data == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
expected_config_file = os.path.join(
_instance._config.scenario.ephemeral_directory, 'instance_config.yml')
assert expected_config_file == _instance.instance_config
def test_ssh_connection_options_property(_instance):
expected_ssh_options = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert expected_ssh_options == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'digitalocean'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'digitalocean'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
expected_instance = {
'instance': 'foo',
}
assert expected_instance == _instance._get_instance_config('foo')
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
|
import functools
import operator
from paasta_tools.tron import tron_timeutils
def build_context(object, parent):
"""Construct a CommandContext for object. object must have a property
'context_class'.
"""
return CommandContext(object.context_class(object), parent)
def build_filled_context(*context_objects):
"""Create a CommandContext chain from context_objects, using a Filler
object to pass to each CommandContext. Can be used to validate a format
string.
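    A minimal sketch of validating a format string (JobContext is defined
    later in this module; Filler's placeholder stands in for every value):
    >>> context = build_filled_context(JobContext)
    >>> '%(name)s' % context
    '%(...)s'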
"""
if not context_objects:
return CommandContext()
filler = Filler()
def build(current, next):
return CommandContext(next(filler), current)
return functools.reduce(build, context_objects, None)
class CommandContext:
"""A CommandContext object is a wrapper around any object which has values
to be used to render a command for execution. It looks up values by name.
    Its lookup order is:
base[name],
base.__getattr__(name),
next[name],
next.__getattr__(name)
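    A small illustration of the fallback (values here are arbitrary):
    >>> context = CommandContext({'a': 1}, CommandContext({'a': 2, 'b': 3}))
    >>> context['a']
    1
    >>> context['b']
    3
    >>> context.get('missing', 'default')
    'default'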
"""
def __init__(self, base=None, next=None):
"""
base - Object to look for attributes in
next - Next place to look for more pieces of context
Generally this will be another instance of CommandContext
"""
self.base = base or {}
self.next = next or {}
def get(self, name, default=None):
try:
return self.__getitem__(name)
except KeyError:
return default
def __getitem__(self, name):
getters = [operator.itemgetter(name), operator.attrgetter(name)]
for target in [self.base, self.next]:
for getter in getters:
try:
return getter(target)
except (KeyError, TypeError, AttributeError):
pass
raise KeyError(name)
def __eq__(self, other):
return self.base == other.base and self.next == other.next
def __ne__(self, other):
return not self == other
class JobContext:
"""A class which exposes properties for rendering commands."""
def __init__(self, job):
self.job = job
@property
def name(self):
return self.job.name
def __getitem__(self, item):
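        """Resolve keys of the form "last_success:<date spec>" via DateArithmetic."""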
date_name, date_spec = self._get_date_spec_parts(item)
if not date_spec:
raise KeyError(item)
if date_name == "last_success":
last_success = self.job.runs.last_success
last_success = last_success.run_time if last_success else None
time_value = tron_timeutils.DateArithmetic.parse(date_spec, last_success)
if time_value:
return time_value
raise KeyError(item)
def _get_date_spec_parts(self, name):
parts = name.rsplit(":", 1)
if len(parts) != 2:
return name, None
return parts
class JobRunContext:
def __init__(self, job_run):
self.job_run = job_run
@property
def runid(self):
return self.job_run.id
@property
def cleanup_job_status(self):
"""Provide 'SUCCESS' or 'FAILURE' to a cleanup action context based on
the status of the other steps
"""
if self.job_run.action_runs.is_failed:
return "FAILURE"
elif self.job_run.action_runs.is_complete_without_cleanup:
return "SUCCESS"
return "UNKNOWN"
def __getitem__(self, name):
"""Attempt to parse date arithmetic syntax and apply to run_time."""
run_time = self.job_run.run_time
time_value = tron_timeutils.DateArithmetic.parse(name, run_time)
if time_value:
return time_value
raise KeyError(name)
class ActionRunContext:
"""Context object that gives us access to data about the action run."""
def __init__(self, action_run):
self.action_run = action_run
@property
def actionname(self):
return self.action_run.action_name
@property
def node(self):
return self.action_run.node.hostname
class ServiceInstancePidContext:
def __init__(self, service_instance):
self.service_instance = service_instance
@property
def instance_number(self):
return self.service_instance.instance_number
@property
def node(self):
return self.service_instance.node.hostname
@property
def name(self):
return self.service_instance.config.name
class ServiceInstanceContext(ServiceInstancePidContext):
@property
def pid_file(self):
context = CommandContext(self, self.service_instance.parent_context)
return self.service_instance.config.pid_file % context
class Filler:
"""Filler object for using CommandContext during config parsing. This class
is used as a substitute for objects that would be passed to Context objects.
This allows the Context objects to be used directly for config validation.
"""
def __getattr__(self, _):
return self
def __str__(self):
return "%(...)s"
def __mod__(self, _):
return self
    def __bool__(self):
        # Python 2 spelled this __nonzero__; __bool__ is what Python 3 calls.
        return False
|
__docformat__ = "restructuredtext en"
import sys
from logilab.common import flatten
from logilab.common.visitor import VisitedMixIn, FilteredIterator, no_filter
## Exceptions #################################################################
class NodeNotFound(Exception):
"""raised when a node has not been found"""
EX_SIBLING_NOT_FOUND = "No such sibling as '%s'"
EX_CHILD_NOT_FOUND = "No such child as '%s'"
EX_NODE_NOT_FOUND = "No such node as '%s'"
# Base node ###################################################################
class Node(object):
"""a basic tree node, characterized by an id"""
def __init__(self, nid=None) :
self.id = nid
# navigation
self.parent = None
self.children = []
def __iter__(self):
return iter(self.children)
def __str__(self, indent=0):
s = ['%s%s %s' % (' '*indent, self.__class__.__name__, self.id)]
indent += 2
for child in self.children:
try:
s.append(child.__str__(indent))
except TypeError:
s.append(child.__str__())
return '\n'.join(s)
def is_leaf(self):
return not self.children
def append(self, child):
"""add a node to children"""
self.children.append(child)
child.parent = self
def remove(self, child):
"""remove a child node"""
self.children.remove(child)
child.parent = None
def insert(self, index, child):
"""insert a child node"""
self.children.insert(index, child)
child.parent = self
def replace(self, old_child, new_child):
"""replace a child node with another"""
i = self.children.index(old_child)
self.children.pop(i)
self.children.insert(i, new_child)
new_child.parent = self
def get_sibling(self, nid):
"""return the sibling node that has given id"""
try:
return self.parent.get_child_by_id(nid)
except NodeNotFound :
raise NodeNotFound(EX_SIBLING_NOT_FOUND % nid)
def next_sibling(self):
"""
return the next sibling for this node if any
"""
parent = self.parent
if parent is None:
# root node has no sibling
return None
index = parent.children.index(self)
try:
return parent.children[index+1]
except IndexError:
return None
def previous_sibling(self):
"""
return the previous sibling for this node if any
"""
parent = self.parent
if parent is None:
# root node has no sibling
return None
index = parent.children.index(self)
if index > 0:
return parent.children[index-1]
return None
def get_node_by_id(self, nid):
"""
return node in whole hierarchy that has given id
"""
root = self.root()
try:
return root.get_child_by_id(nid, 1)
except NodeNotFound :
raise NodeNotFound(EX_NODE_NOT_FOUND % nid)
def get_child_by_id(self, nid, recurse=None):
"""
return child of given id
"""
if self.id == nid:
return self
for c in self.children :
if recurse:
try:
return c.get_child_by_id(nid, 1)
except NodeNotFound :
continue
if c.id == nid :
return c
raise NodeNotFound(EX_CHILD_NOT_FOUND % nid)
def get_child_by_path(self, path):
"""
return child of given path (path is a list of ids)
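        A short example:
        >>> root = Node('root')
        >>> root.append(Node('child'))
        >>> root.get_child_by_path(['root', 'child']).id
        'child'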
"""
if len(path) > 0 and path[0] == self.id:
if len(path) == 1 :
return self
else :
for c in self.children :
try:
return c.get_child_by_path(path[1:])
except NodeNotFound :
pass
raise NodeNotFound(EX_CHILD_NOT_FOUND % path)
def depth(self):
"""
return depth of this node in the tree
"""
if self.parent is not None:
return 1 + self.parent.depth()
else :
return 0
def depth_down(self):
"""
return depth of the tree from this node
"""
if self.children:
return 1 + max([c.depth_down() for c in self.children])
return 1
def width(self):
"""
return the width of the tree from this node
"""
return len(self.leaves())
def root(self):
"""
return the root node of the tree
"""
if self.parent is not None:
return self.parent.root()
return self
def leaves(self):
"""
return a list with all the leaves nodes descendant from this node
"""
leaves = []
if self.children:
for child in self.children:
leaves += child.leaves()
return leaves
else:
return [self]
def flatten(self, _list=None):
"""
return a list with all the nodes descendant from this node
"""
if _list is None:
_list = []
_list.append(self)
for c in self.children:
c.flatten(_list)
return _list
def lineage(self):
"""
return list of parents up to root node
"""
lst = [self]
if self.parent is not None:
lst.extend(self.parent.lineage())
return lst
class VNode(Node, VisitedMixIn):
"""a visitable node
"""
pass
class BinaryNode(VNode):
"""a binary node (i.e. only two children
"""
def __init__(self, lhs=None, rhs=None) :
VNode.__init__(self)
if lhs is not None or rhs is not None:
assert lhs and rhs
self.append(lhs)
self.append(rhs)
def remove(self, child):
"""remove the child and replace this node with the other child
"""
self.children.remove(child)
self.parent.replace(self, self.children[0])
def get_parts(self):
"""
return the left hand side and the right hand side of this node
"""
return self.children[0], self.children[1]
# Python < 2.2 needed a UserList fallback here; list is always subclassable now.
list_class = list
class ListNode(VNode, list_class):
"""Used to manipulate Nodes as Lists
"""
def __init__(self):
list_class.__init__(self)
VNode.__init__(self)
self.children = self
def __str__(self, indent=0):
return '%s%s %s' % (indent*' ', self.__class__.__name__,
', '.join([str(v) for v in self]))
def append(self, child):
"""add a node to children"""
list_class.append(self, child)
child.parent = self
    def insert(self, index, child):
        """insert a node in children"""
        list_class.insert(self, index, child)
        child.parent = self
    def remove(self, child):
        """remove a node from children"""
        list_class.remove(self, child)
        child.parent = None
    def pop(self, index):
        """remove and return the node at index from children"""
        child = list_class.pop(self, index)
        child.parent = None
        return child
def __iter__(self):
return list_class.__iter__(self)
# construct list from tree ####################################################
def post_order_list(node, filter_func=no_filter):
"""
create a list with tree nodes for which the <filter> function returned true
in a post order fashion
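    For example, with a root node and two leaf children:
    >>> root = Node('r')
    >>> root.append(Node('a'))
    >>> root.append(Node('b'))
    >>> [n.id for n in post_order_list(root)]
    ['a', 'b', 'r']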
"""
l, stack = [], []
    popped, index = 0, 0
    while node:
        if filter_func(node):
            if node.children and not popped:
                stack.append((node, index))
                index = 0
                node = node.children[0]
            else:
                l.append(node)
                index += 1
                try:
                    node = stack[-1][0].children[index]
                except IndexError:
                    node = None
        else:
            node = None
        popped = 0
        if node is None and stack:
            node, index = stack.pop()
            popped = 1
return l
def pre_order_list(node, filter_func=no_filter):
"""
create a list with tree nodes for which the <filter> function returned true
in a pre order fashion
"""
l, stack = [], []
    popped, index = 0, 0
    while node:
        if filter_func(node):
            if not popped:
                l.append(node)
            if node.children and not popped:
                stack.append((node, index))
                index = 0
                node = node.children[0]
            else:
                index += 1
                try:
                    node = stack[-1][0].children[index]
                except IndexError:
                    node = None
        else:
            node = None
        popped = 0
        if node is None and len(stack) > 1:
            node, index = stack.pop()
            popped = 1
return l
class PostfixedDepthFirstIterator(FilteredIterator):
"""a postfixed depth first iterator, designed to be used with visitors
"""
def __init__(self, node, filter_func=None):
FilteredIterator.__init__(self, node, post_order_list, filter_func)
class PrefixedDepthFirstIterator(FilteredIterator):
"""a prefixed depth first iterator, designed to be used with visitors
"""
def __init__(self, node, filter_func=None):
FilteredIterator.__init__(self, node, pre_order_list, filter_func)
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.linux_benchmarks import fio_benchmark as linux_fio
from perfkitbenchmarker.windows_packages import fio
FLAGS = flags.FLAGS
_FIO_MEDIUM_SIZE = 2
_FIO_LARGE_SIZE = 2 * _FIO_MEDIUM_SIZE
_MAX_SIZE = 100000
_SIZE_EXPLANATION = ('This is the size of I/O for this job. fio will run until'
' this many bytes have been transferred. The default is '
'{size} * (System Memory) or {max_size}GB, whichever is'
' smaller.')
_DEFAULT_SIZE_LARGE_EXPLANATION = _SIZE_EXPLANATION.format(
size=_FIO_LARGE_SIZE, max_size=(_MAX_SIZE / 1000))
_DEFAULT_SIZE_MEDIUM_EXPLANATION = _SIZE_EXPLANATION.format(
size=_FIO_MEDIUM_SIZE, max_size=(_MAX_SIZE / 1000))
flags.DEFINE_integer('fio_file_size', None,
('"filesize" field of the global section of the '
'fio config. This is the size of the individual files. '
'Default is {large} * (System Memory) or {max_size}GB, '
'whichever is smaller.').format(
large=_FIO_LARGE_SIZE, max_size=_MAX_SIZE / 1000))
flags.DEFINE_integer('fio_sequential_write_size', None,
('"size" field of the sequential_write section of the '
'fio config. {explanation}').format(
explanation=_DEFAULT_SIZE_LARGE_EXPLANATION))
flags.DEFINE_integer('fio_sequential_read_size', None,
('"size" field of the sequential_read section of the '
'fio config. {explanation}').format(
explanation=_DEFAULT_SIZE_LARGE_EXPLANATION))
flags.DEFINE_integer('fio_random_write_size', None,
('"size" field of the random_write section of the '
'fio config. {explanation}').format(
explanation=_DEFAULT_SIZE_MEDIUM_EXPLANATION))
flags.DEFINE_integer('fio_random_read_size', None,
('"size" field of the random_read section of the '
'fio config. {explanation}').format(
explanation=_DEFAULT_SIZE_MEDIUM_EXPLANATION))
flags.DEFINE_integer('fio_random_read_parallel_size', None,
('"size" field of the random_read_parallel section of the '
'fio config. {explanation}').format(
explanation=_DEFAULT_SIZE_MEDIUM_EXPLANATION))
BENCHMARK_NAME = 'fio'
BENCHMARK_CONFIG = """
fio:
description: Runs fio in sequential, random, read and write modes.
vm_groups:
default:
vm_spec: *default_single_core
disk_spec: *default_500_gb
"""
DEFAULT_JOB = """
[global]
filesize={filesize}m
filename=fio_test_file
ioengine=windowsaio
runtime={runtime}s
time_based
[sequential_write]
overwrite=0
rw=write
blocksize=512k
size={seq_write_size}m
iodepth=64
direct=1
end_fsync=1
[sequential_read]
stonewall
invalidate=1
overwrite=0
rw=read
blocksize=512k
size={seq_read_size}m
iodepth=64
direct=1
[random_write_test]
stonewall
overwrite=1
rw=randwrite
blocksize=4k
iodepth=1
size={rand_write_size}m
direct=1
[random_read_test]
invalidate=1
stonewall
rw=randread
blocksize=4k
iodepth=1
size={rand_read_size}m
direct=1
[random_read_test_parallel]
invalidate=1
stonewall
rw=randread
blocksize=4k
iodepth=64
size={rand_read_parallel_size}m
direct=1
"""
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS.fio_target_mode != linux_fio.AGAINST_FILE_WITHOUT_FILL_MODE:
disk_spec = config['vm_groups']['default']['disk_spec']
for cloud in disk_spec:
disk_spec[cloud]['mount_point'] = None
return config
def Prepare(benchmark_spec):
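  """Prepares the VM for fio by delegating to the Linux fio benchmark."""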
vm = benchmark_spec.vms[0]
exec_path = fio.GetFioExec(vm)
linux_fio.PrepareWithExec(benchmark_spec, exec_path)
def Run(benchmark_spec):
"""Runs the fio benchmark test.
Args:
benchmark_spec: specification of the benchmark.
Returns:
List of samples produced by the test.
"""
vm = benchmark_spec.vms[0]
fio_exec = fio.GetFioExec(vm)
remote_job_file_path = fio.GetRemoteJobFilePath(vm)
total_memory_mb = vm.GetTotalMemoryMb()
size_medium = min(_MAX_SIZE, total_memory_mb * _FIO_MEDIUM_SIZE)
size_large = min(_MAX_SIZE, total_memory_mb * _FIO_LARGE_SIZE)
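  # For example, on a VM with 7680 MB of memory this gives
  # size_medium = min(100000, 7680 * 2) = 15360 MB and
  # size_large = min(100000, 7680 * 4) = 30720 MB.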
filesize = FLAGS.fio_file_size or size_large
seq_write_size = FLAGS.fio_sequential_write_size or size_large
seq_read_size = FLAGS.fio_sequential_read_size or size_large
rand_write_size = FLAGS.fio_random_write_size or size_medium
rand_read_size = FLAGS.fio_random_read_size or size_medium
rand_read_parallel_size = FLAGS.fio_random_read_parallel_size or size_medium
job_file_contents = DEFAULT_JOB.format(
filesize=filesize,
runtime=FLAGS.fio_runtime,
seq_write_size=seq_write_size,
seq_read_size=seq_read_size,
rand_write_size=rand_write_size,
rand_read_size=rand_read_size,
rand_read_parallel_size=rand_read_parallel_size)
return linux_fio.RunWithExec(benchmark_spec, fio_exec, remote_job_file_path,
job_file_contents)
def Cleanup(unused_benchmark_spec):
pass
|
from homeassistant.exceptions import HomeAssistantError
class UnifiException(HomeAssistantError):
"""Base class for UniFi exceptions."""
class AlreadyConfigured(UnifiException):
"""Controller is already configured."""
class AuthenticationRequired(UnifiException):
"""Unknown error occurred."""
class CannotConnect(UnifiException):
"""Unable to connect to the controller."""
class LoginRequired(UnifiException):
"""Component got logged out."""
class UserLevel(UnifiException):
"""User level too low."""
|
from homeassistant.components.ipp.const import CONF_BASE_PATH, CONF_UUID, DOMAIN
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SSL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from . import (
MOCK_USER_INPUT,
MOCK_ZEROCONF_IPP_SERVICE_INFO,
MOCK_ZEROCONF_IPPS_SERVICE_INFO,
init_integration,
mock_connection,
)
from tests.async_mock import patch
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_show_user_form(hass: HomeAssistant) -> None:
"""Test that the user set up form is served."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "user"
assert result["type"] == RESULT_TYPE_FORM
async def test_show_zeroconf_form(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that the zeroconf confirmation form is served."""
mock_connection(aioclient_mock)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == RESULT_TYPE_FORM
assert result["description_placeholders"] == {CONF_NAME: "EPSON XP-6000 Series"}
async def test_connection_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we show user form on IPP connection error."""
mock_connection(aioclient_mock, conn_error=True)
user_input = MOCK_USER_INPUT.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["step_id"] == "user"
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_zeroconf_connection_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP connection error."""
mock_connection(aioclient_mock, conn_error=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
async def test_zeroconf_confirm_connection_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP connection error."""
mock_connection(aioclient_mock, conn_error=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
async def test_user_connection_upgrade_required(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we show the user form if connection upgrade required by server."""
mock_connection(aioclient_mock, conn_upgrade_error=True)
user_input = MOCK_USER_INPUT.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["step_id"] == "user"
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "connection_upgrade"}
async def test_zeroconf_connection_upgrade_required(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP connection error."""
mock_connection(aioclient_mock, conn_upgrade_error=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "connection_upgrade"
async def test_user_parse_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort user flow on IPP parse error."""
mock_connection(aioclient_mock, parse_error=True)
user_input = MOCK_USER_INPUT.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "parse_error"
async def test_zeroconf_parse_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP parse error."""
mock_connection(aioclient_mock, parse_error=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "parse_error"
async def test_user_ipp_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort the user flow on IPP error."""
mock_connection(aioclient_mock, ipp_error=True)
user_input = MOCK_USER_INPUT.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "ipp_error"
async def test_zeroconf_ipp_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP error."""
mock_connection(aioclient_mock, ipp_error=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "ipp_error"
async def test_user_ipp_version_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort user flow on IPP version not supported error."""
mock_connection(aioclient_mock, version_not_supported=True)
user_input = {**MOCK_USER_INPUT}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "ipp_version_error"
async def test_zeroconf_ipp_version_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on IPP version not supported error."""
mock_connection(aioclient_mock, version_not_supported=True)
discovery_info = {**MOCK_ZEROCONF_IPP_SERVICE_INFO}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "ipp_version_error"
async def test_user_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort user flow if printer already configured."""
await init_integration(hass, aioclient_mock, skip_setup=True)
user_input = MOCK_USER_INPUT.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=user_input,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_zeroconf_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if printer already configured."""
await init_integration(hass, aioclient_mock, skip_setup=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_zeroconf_with_uuid_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if printer already configured."""
await init_integration(hass, aioclient_mock, skip_setup=True)
discovery_info = {
**MOCK_ZEROCONF_IPP_SERVICE_INFO,
"properties": {
**MOCK_ZEROCONF_IPP_SERVICE_INFO["properties"],
"UUID": "cfe92100-67c4-11d4-a45f-f8d027761251",
},
}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_zeroconf_empty_unique_id(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test zeroconf flow if printer lacks (empty) unique identification."""
mock_connection(aioclient_mock, no_unique_id=True)
discovery_info = {
**MOCK_ZEROCONF_IPP_SERVICE_INFO,
"properties": {**MOCK_ZEROCONF_IPP_SERVICE_INFO["properties"], "UUID": ""},
}
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_FORM
async def test_zeroconf_no_unique_id(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test zeroconf flow if printer lacks unique identification."""
mock_connection(aioclient_mock, no_unique_id=True)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_FORM
async def test_full_user_flow_implementation(
hass: HomeAssistant, aioclient_mock
) -> None:
"""Test the full manual user flow from start to finish."""
mock_connection(aioclient_mock)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "user"
assert result["type"] == RESULT_TYPE_FORM
with patch(
"homeassistant.components.ipp.async_setup_entry", return_value=True
), patch("homeassistant.components.ipp.async_setup", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_HOST: "192.168.1.31", CONF_BASE_PATH: "/ipp/print"},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "192.168.1.31"
assert result["data"]
assert result["data"][CONF_HOST] == "192.168.1.31"
assert result["data"][CONF_UUID] == "cfe92100-67c4-11d4-a45f-f8d027761251"
assert result["result"]
assert result["result"].unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251"
async def test_full_zeroconf_flow_implementation(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the full manual user flow from start to finish."""
mock_connection(aioclient_mock)
discovery_info = MOCK_ZEROCONF_IPP_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == RESULT_TYPE_FORM
with patch(
"homeassistant.components.ipp.async_setup_entry", return_value=True
), patch("homeassistant.components.ipp.async_setup", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "EPSON XP-6000 Series"
assert result["data"]
assert result["data"][CONF_HOST] == "192.168.1.31"
assert result["data"][CONF_NAME] == "EPSON XP-6000 Series"
assert result["data"][CONF_UUID] == "cfe92100-67c4-11d4-a45f-f8d027761251"
assert not result["data"][CONF_SSL]
assert result["result"]
assert result["result"].unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251"
async def test_full_zeroconf_tls_flow_implementation(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the full manual user flow from start to finish."""
mock_connection(aioclient_mock, ssl=True)
discovery_info = MOCK_ZEROCONF_IPPS_SERVICE_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=discovery_info,
)
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == RESULT_TYPE_FORM
assert result["description_placeholders"] == {CONF_NAME: "EPSON XP-6000 Series"}
with patch(
"homeassistant.components.ipp.async_setup_entry", return_value=True
), patch("homeassistant.components.ipp.async_setup", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "EPSON XP-6000 Series"
assert result["data"]
assert result["data"][CONF_HOST] == "192.168.1.31"
assert result["data"][CONF_NAME] == "EPSON XP-6000 Series"
assert result["data"][CONF_UUID] == "cfe92100-67c4-11d4-a45f-f8d027761251"
assert result["data"][CONF_SSL]
assert result["result"]
assert result["result"].unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251"
|
from django.test import SimpleTestCase
from weblate.utils.html import extract_bleach
class HtmlTestCase(SimpleTestCase):
def test_noattr(self):
self.assertEqual(
extract_bleach("<b>text</b>"), {"tags": {"b"}, "attributes": {"b": set()}}
)
def test_attrs(self):
self.assertEqual(
extract_bleach('<a href="#">t</a>'),
{"tags": {"a"}, "attributes": {"a": {"href"}}},
)
def test_noclose(self):
self.assertEqual(
extract_bleach("<br>"), {"tags": {"br"}, "attributes": {"br": set()}}
)
|
from pyruckus.exceptions import AuthenticationError
from homeassistant.components.ruckus_unleashed import (
API_AP,
API_DEVICE_NAME,
API_ID,
API_MAC,
API_MODEL,
API_SYSTEM_OVERVIEW,
API_VERSION,
DOMAIN,
MANUFACTURER,
)
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from tests.async_mock import patch
from tests.components.ruckus_unleashed import (
DEFAULT_AP_INFO,
DEFAULT_SYSTEM_INFO,
DEFAULT_TITLE,
init_integration,
mock_config_entry,
)
async def test_setup_entry_login_error(hass):
"""Test entry setup failed due to login error."""
entry = mock_config_entry()
with patch(
"homeassistant.components.ruckus_unleashed.Ruckus",
side_effect=AuthenticationError,
):
entry.add_to_hass(hass)
result = await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert result is False
async def test_setup_entry_connection_error(hass):
"""Test entry setup failed due to connection error."""
entry = mock_config_entry()
with patch(
"homeassistant.components.ruckus_unleashed.Ruckus",
side_effect=ConnectionError,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_router_device_setup(hass):
"""Test a router device is created."""
await init_integration(hass)
device_info = DEFAULT_AP_INFO[API_AP][API_ID]["1"]
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get_device(
identifiers={(CONNECTION_NETWORK_MAC, device_info[API_MAC])},
connections={(CONNECTION_NETWORK_MAC, device_info[API_MAC])},
)
assert device
assert device.manufacturer == MANUFACTURER
assert device.model == device_info[API_MODEL]
assert device.name == device_info[API_DEVICE_NAME]
assert device.sw_version == DEFAULT_SYSTEM_INFO[API_SYSTEM_OVERVIEW][API_VERSION]
assert device.via_device_id is None
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
async def test_config_not_ready_during_setup(hass):
"""Test we throw a ConfigNotReady if Coordinator update fails."""
entry = mock_config_entry()
with patch(
"homeassistant.components.ruckus_unleashed.Ruckus.connect",
return_value=None,
), patch(
"homeassistant.components.ruckus_unleashed.Ruckus.mesh_name",
return_value=DEFAULT_TITLE,
), patch(
"homeassistant.components.ruckus_unleashed.Ruckus.system_info",
return_value=DEFAULT_SYSTEM_INFO,
), patch(
"homeassistant.components.ruckus_unleashed.RuckusUnleashedDataUpdateCoordinator._async_update_data",
side_effect=ConnectionError,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_SETUP_RETRY
|
from collections import OrderedDict
from homeassistant import helpers
def test_extract_domain_configs():
"""Test the extraction of domain configuration."""
config = {
"zone": None,
"zoner": None,
"zone ": None,
"zone Hallo": None,
"zone 100": None,
}
assert {"zone", "zone Hallo", "zone 100"} == set(
helpers.extract_domain_configs(config, "zone")
)
def test_config_per_platform():
"""Test config per platform method."""
config = OrderedDict(
[
("zone", {"platform": "hello"}),
("zoner", None),
("zone Hallo", [1, {"platform": "hello 2"}]),
("zone 100", None),
]
)
assert [
("hello", config["zone"]),
(None, 1),
("hello 2", config["zone Hallo"][1]),
] == list(helpers.config_per_platform(config, "zone"))
|
import asyncio
import logging
import queue
import pytest
import homeassistant.util.logging as logging_util
from tests.async_mock import patch
def test_sensitive_data_filter():
"""Test the logging sensitive data filter."""
log_filter = logging_util.HideSensitiveDataFilter("mock_sensitive")
clean_record = logging.makeLogRecord({"msg": "clean log data"})
log_filter.filter(clean_record)
assert clean_record.msg == "clean log data"
sensitive_record = logging.makeLogRecord({"msg": "mock_sensitive log"})
log_filter.filter(sensitive_record)
assert sensitive_record.msg == "******* log"
async def test_logging_with_queue_handler():
"""Test logging with HomeAssistantQueueHandler."""
simple_queue = queue.SimpleQueue() # type: ignore
handler = logging_util.HomeAssistantQueueHandler(simple_queue)
log_record = logging.makeLogRecord({"msg": "Test Log Record"})
handler.emit(log_record)
with pytest.raises(asyncio.CancelledError), patch.object(
handler, "enqueue", side_effect=asyncio.CancelledError
):
handler.emit(log_record)
with patch.object(handler, "emit") as emit_mock:
handler.handle(log_record)
emit_mock.assert_called_once()
with patch.object(handler, "filter") as filter_mock, patch.object(
handler, "emit"
) as emit_mock:
filter_mock.return_value = False
handler.handle(log_record)
emit_mock.assert_not_called()
with patch.object(handler, "enqueue", side_effect=OSError), patch.object(
handler, "handleError"
) as mock_handle_error:
handler.emit(log_record)
mock_handle_error.assert_called_once()
handler.close()
assert simple_queue.get_nowait().msg == "Test Log Record"
assert simple_queue.empty()
async def test_migrate_log_handler(hass):
"""Test migrating log handlers."""
logging_util.async_activate_log_queue_handler(hass)
assert len(logging.root.handlers) == 1
assert isinstance(logging.root.handlers[0], logging_util.HomeAssistantQueueHandler)
@pytest.mark.no_fail_on_log_exception
async def test_async_create_catching_coro(hass, caplog):
"""Test exception logging of wrapped coroutine."""
async def job():
raise Exception("This is a bad coroutine")
hass.async_create_task(logging_util.async_create_catching_coro(job()))
await hass.async_block_till_done()
assert "This is a bad coroutine" in caplog.text
assert "in test_async_create_catching_coro" in caplog.text
|
from datetime import timedelta
import logging
from homeassistant.const import ATTR_BATTERY_LEVEL, LIGHT_LUX, PERCENTAGE, TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=60)
ATTR_RSSI_LEVEL = "rssi_level"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Tahoma controller devices."""
if discovery_info is None:
return
controller = hass.data[TAHOMA_DOMAIN]["controller"]
devices = []
for device in hass.data[TAHOMA_DOMAIN]["devices"]["sensor"]:
devices.append(TahomaSensor(device, controller))
add_entities(devices, True)
class TahomaSensor(TahomaDevice, Entity):
"""Representation of a Tahoma Sensor."""
def __init__(self, tahoma_device, controller):
"""Initialize the sensor."""
self.current_value = None
self._available = False
super().__init__(tahoma_device, controller)
@property
def state(self):
"""Return the name of the sensor."""
return self.current_value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self.tahoma_device.type == "io:TemperatureIOSystemSensor":
return TEMP_CELSIUS
if self.tahoma_device.type == "io:SomfyContactIOSystemSensor":
return None
if self.tahoma_device.type == "io:SomfyBasicContactIOSystemSensor":
return None
if self.tahoma_device.type == "io:LightIOSystemSensor":
return LIGHT_LUX
if self.tahoma_device.type == "Humidity Sensor":
return PERCENTAGE
if self.tahoma_device.type == "rtds:RTDSContactSensor":
return None
if self.tahoma_device.type == "rtds:RTDSMotionSensor":
return None
if (
self.tahoma_device.type
== "somfythermostat:SomfyThermostatTemperatureSensor"
):
return TEMP_CELSIUS
if self.tahoma_device.type == "somfythermostat:SomfyThermostatHumiditySensor":
return PERCENTAGE
def update(self):
"""Update the state."""
self.controller.get_states([self.tahoma_device])
if self.tahoma_device.type == "io:LightIOSystemSensor":
self.current_value = self.tahoma_device.active_states["core:LuminanceState"]
self._available = bool(
self.tahoma_device.active_states.get("core:StatusState") == "available"
)
if self.tahoma_device.type == "io:SomfyContactIOSystemSensor":
self.current_value = self.tahoma_device.active_states["core:ContactState"]
self._available = bool(
self.tahoma_device.active_states.get("core:StatusState") == "available"
)
if self.tahoma_device.type == "io:SomfyBasicContactIOSystemSensor":
self.current_value = self.tahoma_device.active_states["core:ContactState"]
self._available = bool(
self.tahoma_device.active_states.get("core:StatusState") == "available"
)
if self.tahoma_device.type == "rtds:RTDSContactSensor":
self.current_value = self.tahoma_device.active_states["core:ContactState"]
self._available = True
if self.tahoma_device.type == "rtds:RTDSMotionSensor":
self.current_value = self.tahoma_device.active_states["core:OccupancyState"]
self._available = True
if self.tahoma_device.type == "io:TemperatureIOSystemSensor":
self.current_value = round(
float(self.tahoma_device.active_states["core:TemperatureState"]), 1
)
self._available = True
if (
self.tahoma_device.type
== "somfythermostat:SomfyThermostatTemperatureSensor"
):
self.current_value = float(
f"{self.tahoma_device.active_states['core:TemperatureState']:.2f}"
)
self._available = True
if self.tahoma_device.type == "somfythermostat:SomfyThermostatHumiditySensor":
self.current_value = float(
f"{self.tahoma_device.active_states['core:RelativeHumidityState']:.2f}"
)
self._available = True
        _LOGGER.debug("Update %s, value: %s", self._name, self.current_value)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attr = {}
super_attr = super().device_state_attributes
if super_attr is not None:
attr.update(super_attr)
if "core:RSSILevelState" in self.tahoma_device.active_states:
attr[ATTR_RSSI_LEVEL] = self.tahoma_device.active_states[
"core:RSSILevelState"
]
if "core:SensorDefectState" in self.tahoma_device.active_states:
attr[ATTR_BATTERY_LEVEL] = self.tahoma_device.active_states[
"core:SensorDefectState"
]
return attr
@property
def available(self):
"""Return True if entity is available."""
return self._available
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import mnist_benchmark
from perfkitbenchmarker.sample import Sample
class Inception3BenchmarkTestCase(unittest.TestCase,
test_util.SamplesTestMixin):
def setUp(self):
path = os.path.join(os.path.dirname(__file__), '..', 'data',
'inception3_output.txt')
with open(path) as fp:
self.contents = fp.read()
self.metadata_input = {'num_examples_per_epoch': 1251.1,
'train_batch_size': 1024}
self.metadata_output = {'num_examples_per_epoch': 1251.1,
'train_batch_size': 1024, 'step': 4000,
'epoch': 3.197186475901207, 'elapsed_seconds': 0}
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testTrainResults(self):
samples = mnist_benchmark.MakeSamplesFromTrainOutput(
self.metadata_input, self.contents, 0, 4000)
golden = [
Sample('Loss', 5.7193503, '', self.metadata_output),
Sample('Global Steps Per Second', 1.4384171428571428,
'global_steps/sec', self.metadata_output),
Sample('Examples Per Second', 1472.9414285714283,
'examples/sec', self.metadata_output)
]
self.assertEqual(samples, golden)
if __name__ == '__main__':
unittest.main()
|
from unittest import mock
import pytest
from gi.repository import GtkSource
@pytest.mark.parametrize("text, newline, expected_text", [
# For the following tests, newlines and text match
# Basic CRLF tests
("ree\r\neee", GtkSource.NewlineType.CR_LF, 'ree'),
("ree\r\neee\r\n", GtkSource.NewlineType.CR_LF, 'ree\r\neee'),
    # Basic CR tests
    ("ree\reee", GtkSource.NewlineType.CR, 'ree'),
    ("ree\reee\r", GtkSource.NewlineType.CR, 'ree\reee'),
    # Basic LF tests
    ("ree\neee", GtkSource.NewlineType.LF, 'ree'),
    ("ree\neee\n", GtkSource.NewlineType.LF, 'ree\neee'),
# Mismatched newline and text
("ree\r\neee", GtkSource.NewlineType.CR, 'ree'),
# Mismatched newline types within text
("ree\r\neee\n", GtkSource.NewlineType.CR_LF, 'ree\r\neee'),
("ree\r\neee\nqqq", GtkSource.NewlineType.CR_LF, 'ree\r\neee'),
("ree\r\neee\nqqq\r\n", GtkSource.NewlineType.CR_LF, 'ree\r\neee\nqqq'),
])
def test_delete_last_line_crlf(text, newline, expected_text):
import meld.meldbuffer
from meld.filediff import FileDiff
from meld.matchers.myers import DiffChunk
filediff = mock.Mock(FileDiff)
with mock.patch('meld.meldbuffer.bind_settings', mock.DEFAULT):
meldbuffer = meld.meldbuffer.MeldBuffer()
meldbuffer.set_text(text)
def make_last_line_chunk(buf):
end = buf.get_line_count()
last = end - 1
return DiffChunk('delete', last, end, last, end)
start, end = meldbuffer.get_bounds()
buf_text = meldbuffer.get_text(start, end, False)
print(repr(buf_text))
with mock.patch.object(
meldbuffer.data.sourcefile,
'get_newline_type', return_value=newline):
filediff.textbuffer = [meldbuffer]
filediff.textview = [mock.Mock()]
FileDiff.delete_chunk(filediff, 0, make_last_line_chunk(meldbuffer))
start, end = meldbuffer.get_bounds()
buf_text = meldbuffer.get_text(start, end, False)
print(repr(buf_text))
assert buf_text == expected_text
|
from django.db import migrations
def update_index(apps, schema_editor):
if schema_editor.connection.vendor != "postgresql":
return
    # This ensures that the pg_trgm extension is loaded into the session.
    # Without it, the ALTER ROLE below fails unless we're running as a
    # superuser (which is allowed to set nonexistent parameters, so a
    # missing extension wouldn't matter).
# See https://www.postgresql.org/message-id/6376.1533675236%40sss.pgh.pa.us
schema_editor.execute("SELECT show_limit()")
settings = schema_editor.connection.settings_dict
schema_editor.execute(
"ALTER ROLE {} SET pg_trgm.similarity_threshold = 0.5".format(
schema_editor.quote_name(settings.get("ALTER_ROLE", settings["USER"]))
)
)
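    # The rendered statement looks like, for example (the role name comes
    # from the connection settings):
    #   ALTER ROLE "weblate" SET pg_trgm.similarity_threshold = 0.5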
class Migration(migrations.Migration):
dependencies = [
("memory", "0007_use_trigram"),
]
operations = [
migrations.RunPython(
update_index, migrations.RunPython.noop, elidable=False, atomic=False
)
]
|
import keras
from keras.activations import softmax
from keras.initializers import RandomUniform
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine import hyper_spaces
class ANMM(BaseModel):
"""
ANMM Model.
Examples:
>>> model = ANMM()
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_embedding=True)
params.add(Param(
name='dropout_rate', value=0.1,
desc="The dropout rate.",
hyper_space=hyper_spaces.quniform(0, 1, 0.05)
))
params.add(Param(
name='num_layers', value=2,
desc="Number of hidden layers in the MLP "
"layer."
))
params.add(Param(
name='hidden_sizes', value=[30, 30],
desc="Number of hidden size for each hidden"
" layer"
))
return params
def build(self):
"""
Build model structure.
aNMM model based on bin weighting and query term attentions
"""
# query is [batch_size, left_text_len]
# doc is [batch_size, right_text_len, bin_num]
query, doc = self._make_inputs()
embedding = self._make_embedding_layer()
q_embed = embedding(query)
q_attention = keras.layers.Dense(
1, kernel_initializer=RandomUniform(), use_bias=False)(q_embed)
q_text_len = self._params['input_shapes'][0][0]
q_attention = keras.layers.Lambda(
lambda x: softmax(x, axis=1),
output_shape=(q_text_len,)
)(q_attention)
d_bin = keras.layers.Dropout(
rate=self._params['dropout_rate'])(doc)
for layer_id in range(self._params['num_layers'] - 1):
d_bin = keras.layers.Dense(
self._params['hidden_sizes'][layer_id],
kernel_initializer=RandomUniform())(d_bin)
d_bin = keras.layers.Activation('tanh')(d_bin)
d_bin = keras.layers.Dense(
self._params['hidden_sizes'][self._params['num_layers'] - 1])(
d_bin)
d_bin = keras.layers.Reshape((q_text_len,))(d_bin)
q_attention = keras.layers.Reshape((q_text_len,))(q_attention)
score = keras.layers.Dot(axes=[1, 1])([d_bin, q_attention])
x_out = self._make_output_layer()(score)
self._backend = keras.Model(inputs=[query, doc], outputs=x_out)
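# A minimal usage sketch (assuming the MatchZoo BaseModel/ParamTable API
# shown above; the parameter values are illustrative, not tuned):
#
#     model = ANMM()
#     model.params['num_layers'] = 3
#     model.params['hidden_sizes'] = [30, 30, 30]  # one size per layer
#     model.guess_and_fill_missing_params(verbose=0)
#     model.build()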
|
from typing import List, Optional
from aioesphomeapi import FanInfo, FanSpeed, FanState
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import (
EsphomeEntity,
esphome_map_enum,
esphome_state_property,
platform_async_setup_entry,
)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up ESPHome fans based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="fan",
info_type=FanInfo,
entity_type=EsphomeFan,
state_type=FanState,
)
@esphome_map_enum
def _fan_speeds():
return {
FanSpeed.LOW: SPEED_LOW,
FanSpeed.MEDIUM: SPEED_MEDIUM,
FanSpeed.HIGH: SPEED_HIGH,
}
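# esphome_map_enum wraps the mapping above for two-way lookups used below:
# _fan_speeds.from_esphome(FanSpeed.LOW) -> SPEED_LOW, and
# _fan_speeds.from_hass(SPEED_LOW) -> FanSpeed.LOW.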
class EsphomeFan(EsphomeEntity, FanEntity):
"""A fan implementation for ESPHome."""
@property
def _static_info(self) -> FanInfo:
return super()._static_info
@property
def _state(self) -> Optional[FanState]:
return super()._state
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
await self._client.fan_command(
self._static_info.key, speed=_fan_speeds.from_hass(speed)
)
async def async_turn_on(self, speed: Optional[str] = None, **kwargs) -> None:
"""Turn on the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
data = {"key": self._static_info.key, "state": True}
if speed is not None:
data["speed"] = _fan_speeds.from_hass(speed)
await self._client.fan_command(**data)
async def async_turn_off(self, **kwargs) -> None:
"""Turn off the fan."""
await self._client.fan_command(key=self._static_info.key, state=False)
async def async_oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
await self._client.fan_command(
key=self._static_info.key, oscillating=oscillating
)
# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
# pylint: disable=invalid-overridden-method
@esphome_state_property
def is_on(self) -> Optional[bool]:
"""Return true if the entity is on."""
return self._state.state
@esphome_state_property
def speed(self) -> Optional[str]:
"""Return the current speed."""
if not self._static_info.supports_speed:
return None
return _fan_speeds.from_esphome(self._state.speed)
@esphome_state_property
    def oscillating(self) -> Optional[bool]:
"""Return the oscillation state."""
if not self._static_info.supports_oscillation:
return None
return self._state.oscillating
@property
def speed_list(self) -> Optional[List[str]]:
"""Get the list of available speeds."""
if not self._static_info.supports_speed:
return None
return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
@property
def supported_features(self) -> int:
"""Flag supported features."""
flags = 0
if self._static_info.supports_oscillation:
flags |= SUPPORT_OSCILLATE
if self._static_info.supports_speed:
flags |= SUPPORT_SET_SPEED
return flags
|
_DEB_REPO_FILE = '/etc/apt/sources.list.d/azure-cli.list'
_DEB_REPO_KEY = 'https://packages.microsoft.com/keys/microsoft.asc'
_DEB_REPO = ('deb [arch=amd64] https://packages.microsoft.com/repos/azure-cli/ '
'{az_repo} main')
# RedHat info
_YUM_REPO_FILE = '/etc/yum.repos.d/azure-cli.repo'
_YUM_REPO_KEY = 'https://packages.microsoft.com/keys/microsoft.asc'
_YUM_REPO = """[azure-cli]
name=Azure CLI
baseurl=https://packages.microsoft.com/yumrepos/azure-cli
enabled=1
gpgcheck=1
gpgkey={key}
""".format(key=_YUM_REPO_KEY)
def AptInstall(vm):
"""Installs the azure-cli package on the VM for Debian systems.
Args:
vm: Virtual Machine to install on.
"""
vm.Install('python')
vm.Install('lsb_release')
vm.Install('curl')
vm.InstallPackages('apt-transport-https')
az_repo, _ = vm.RemoteCommand('lsb_release -cs')
_CreateFile(vm, _DEB_REPO.format(az_repo=az_repo.strip()), _DEB_REPO_FILE)
vm.RemoteCommand(
'curl -L {key} | sudo apt-key add -'.format(key=_DEB_REPO_KEY))
vm.RemoteCommand('sudo apt-get update')
vm.InstallPackages('azure-cli')
def YumInstall(vm):
"""Installs the azure-cli package on the VM for RedHat systems.
Args:
vm: Virtual Machine to install on.
"""
_CreateFile(vm, _YUM_REPO, _YUM_REPO_FILE)
vm.RemoteCommand('sudo rpm --import {key}'.format(key=_YUM_REPO_KEY))
vm.InstallPackages('azure-cli')
def _CreateFile(vm, content, file_path):
"""Creates the repository file on the remote server.
Args:
vm: Remote virtual machine.
content: Text to put into the file.
file_path: Path to text output file.
"""
vm.RemoteCommand('echo "{content}" | sudo tee {file_path}'.format(
content=content, file_path=file_path))
|
from socket import timeout as TimeoutError # noqa
from amqp import ChannelError, ConnectionError, ResourceError
__all__ = (
'reraise', 'KombuError', 'OperationalError',
'NotBoundError', 'MessageStateError', 'TimeoutError',
'LimitExceeded', 'ConnectionLimitExceeded',
'ChannelLimitExceeded', 'ConnectionError', 'ChannelError',
'VersionMismatch', 'SerializerNotInstalled', 'ResourceError',
'SerializationError', 'EncodeError', 'DecodeError', 'HttpError',
    'InconsistencyError', 'ContentDisallowed',
)
def reraise(tp, value, tb=None):
"""Reraise exception."""
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
class KombuError(Exception):
"""Common subclass for all Kombu exceptions."""
class OperationalError(KombuError):
"""Recoverable message transport connection error."""
class SerializationError(KombuError):
"""Failed to serialize/deserialize content."""
class EncodeError(SerializationError):
"""Cannot encode object."""
class DecodeError(SerializationError):
"""Cannot decode object."""
class NotBoundError(KombuError):
"""Trying to call channel dependent method on unbound entity."""
class MessageStateError(KombuError):
"""The message has already been acknowledged."""
class LimitExceeded(KombuError):
"""Limit exceeded."""
class ConnectionLimitExceeded(LimitExceeded):
"""Maximum number of simultaneous connections exceeded."""
class ChannelLimitExceeded(LimitExceeded):
"""Maximum number of simultaneous channels exceeded."""
class VersionMismatch(KombuError):
"""Library dependency version mismatch."""
class SerializerNotInstalled(KombuError):
"""Support for the requested serialization type is not installed."""
class ContentDisallowed(SerializerNotInstalled):
"""Consumer does not allow this content-type."""
class InconsistencyError(ConnectionError):
"""Data or environment has been found to be inconsistent.
Depending on the cause it may be possible to retry the operation.
"""
class HttpError(Exception):
"""HTTP Client Error."""
def __init__(self, code, message=None, response=None):
self.code = code
self.message = message
self.response = response
super().__init__(code, message, response)
def __str__(self):
return 'HTTP {0.code}: {0.message}'.format(self)
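# A minimal usage sketch for reraise(): translate a low-level error into a
# Kombu error while preserving the original traceback. (Illustrative only;
# the failing call here is simulated.)
def _example_reraise():
    try:
        raise OSError('connection reset by peer')
    except OSError as exc:
        reraise(OperationalError, OperationalError(str(exc)),
                exc.__traceback__)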
|
import random
from collections import namedtuple
from pathlib import Path
import weakref
import pytest
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core import config as config_module, drivers
__all__ = [
"override_data_path",
"coroutine",
"driver",
"config",
"config_fr",
"red",
"guild_factory",
"empty_guild",
"empty_channel",
"empty_member",
"empty_message",
"empty_role",
"empty_user",
"member_factory",
"user_factory",
"ctx",
]
@pytest.fixture(autouse=True)
def override_data_path(tmpdir):
from redbot.core import data_manager
data_manager.basic_config = data_manager.basic_config_default
data_manager.basic_config["DATA_PATH"] = str(tmpdir)
@pytest.fixture()
def coroutine():
async def some_coro(*args, **kwargs):
return args, kwargs
return some_coro
@pytest.fixture()
def driver(tmpdir_factory):
import uuid
rand = str(uuid.uuid4())
path = Path(str(tmpdir_factory.mktemp(rand)))
return drivers.get_driver("PyTest", str(random.randint(1, 999999)), data_path_override=path)
@pytest.fixture()
def config(driver):
config_module._config_cache = weakref.WeakValueDictionary()
conf = Config(cog_name="PyTest", unique_identifier=driver.unique_cog_identifier, driver=driver)
yield conf
@pytest.fixture()
def config_fr(driver):
"""
    Mocked config object with force_registration enabled.
"""
config_module._config_cache = weakref.WeakValueDictionary()
conf = Config(
cog_name="PyTest",
unique_identifier=driver.unique_cog_identifier,
driver=driver,
force_registration=True,
)
yield conf
# region Dpy Mocks
@pytest.fixture()
def guild_factory():
mock_guild = namedtuple("Guild", "id members")
class GuildFactory:
def get(self):
return mock_guild(random.randint(1, 999999999), [])
return GuildFactory()
@pytest.fixture()
def empty_guild(guild_factory):
return guild_factory.get()
@pytest.fixture(scope="module")
def empty_channel():
mock_channel = namedtuple("Channel", "id")
return mock_channel(random.randint(1, 999999999))
@pytest.fixture(scope="module")
def empty_role():
mock_role = namedtuple("Role", "id")
return mock_role(random.randint(1, 999999999))
@pytest.fixture()
def member_factory(guild_factory):
mock_member = namedtuple("Member", "id guild display_name")
class MemberFactory:
def get(self):
return mock_member(random.randint(1, 999999999), guild_factory.get(), "Testing_Name")
return MemberFactory()
@pytest.fixture()
def empty_member(member_factory):
return member_factory.get()
@pytest.fixture()
def user_factory():
mock_user = namedtuple("User", "id")
class UserFactory:
def get(self):
return mock_user(random.randint(1, 999999999))
return UserFactory()
@pytest.fixture()
def empty_user(user_factory):
return user_factory.get()
@pytest.fixture(scope="module")
def empty_message():
mock_msg = namedtuple("Message", "content")
return mock_msg("No content.")
@pytest.fixture()
def ctx(empty_member, empty_channel, empty_message, red):
mock_ctx = namedtuple("Context", "author guild channel message bot")
return mock_ctx(empty_member, empty_member.guild, empty_channel, empty_message, red)
# endregion
# region Red Mock
@pytest.fixture()
def red(config_fr):
from redbot.core.cli import parse_cli_flags
cli_flags = parse_cli_flags(["ignore_me"])
description = "Red v3 - Alpha"
Config.get_core_conf = lambda *args, **kwargs: config_fr
red = Red(cli_flags=cli_flags, description=description, dm_help=None, owner_ids=set())
yield red
# endregion
|
import numpy as np
from ..source_estimate import SourceEstimate, VolSourceEstimate
from ..source_space import _ensure_src
from ..fixes import rng_uniform
from ..utils import check_random_state, warn, _check_option, fill_doc
from ..label import Label
from ..surface import _compute_nearest
@fill_doc
def select_source_in_label(src, label, random_state=None, location='random',
subject=None, subjects_dir=None, surf='sphere'):
"""Select source positions using a label.
Parameters
----------
src : list of dict
The source space.
label : Label
The label.
%(random_state)s
location : str
The label location to choose. Can be 'random' (default) or 'center'
to use :func:`mne.Label.center_of_mass` (restricting to vertices
both in the label and in the source space). Note that for 'center'
mode the label values are used as weights.
.. versionadded:: 0.13
subject : str | None
The subject the label is defined for.
Only used with ``location='center'``.
.. versionadded:: 0.13
%(subjects_dir)s
.. versionadded:: 0.13
surf : str
The surface to use for Euclidean distance center of mass
finding. The default here is "sphere", which finds the center
of mass on the spherical surface to help avoid potential issues
with cortical folding.
.. versionadded:: 0.13
Returns
-------
lh_vertno : list
Selected source coefficients on the left hemisphere.
rh_vertno : list
Selected source coefficients on the right hemisphere.
"""
lh_vertno = list()
rh_vertno = list()
_check_option('location', location, ['random', 'center'])
rng = check_random_state(random_state)
if label.hemi == 'lh':
vertno = lh_vertno
hemi_idx = 0
else:
vertno = rh_vertno
hemi_idx = 1
src_sel = np.intersect1d(src[hemi_idx]['vertno'], label.vertices)
if location == 'random':
idx = src_sel[rng_uniform(rng)(0, len(src_sel), 1)[0]]
else: # 'center'
idx = label.center_of_mass(
subject, restrict_vertices=src_sel, subjects_dir=subjects_dir,
surf=surf)
vertno.append(idx)
return lh_vertno, rh_vertno
@fill_doc
def simulate_sparse_stc(src, n_dipoles, times,
data_fun=lambda t: 1e-7 * np.sin(20 * np.pi * t),
labels=None, random_state=None, location='random',
subject=None, subjects_dir=None, surf='sphere'):
"""Generate sparse (n_dipoles) sources time courses from data_fun.
    This function randomly selects ``n_dipoles`` vertices in the whole
    cortex, or a single vertex in each label (chosen randomly within the
    label, or at its center) if ``labels is not None``. It uses
    ``data_fun`` to generate waveforms for each vertex.
Parameters
----------
src : instance of SourceSpaces
The source space.
n_dipoles : int
Number of dipoles to simulate.
times : array
Time array.
data_fun : callable
Function to generate the waveforms. The default is a 100 nAm, 10 Hz
sinusoid as ``1e-7 * np.sin(20 * pi * t)``. The function should take
as input the array of time samples in seconds and return an array of
the same length containing the time courses.
labels : None | list of Label
The labels. The default is None, otherwise its size must be n_dipoles.
%(random_state)s
location : str
The label location to choose. Can be 'random' (default) or 'center'
to use :func:`mne.Label.center_of_mass`. Note that for 'center'
mode the label values are used as weights.
.. versionadded:: 0.13
subject : str | None
The subject the label is defined for.
Only used with ``location='center'``.
.. versionadded:: 0.13
%(subjects_dir)s
.. versionadded:: 0.13
surf : str
The surface to use for Euclidean distance center of mass
finding. The default here is "sphere", which finds the center
of mass on the spherical surface to help avoid potential issues
with cortical folding.
.. versionadded:: 0.13
Returns
-------
stc : SourceEstimate
The generated source time courses.
See Also
--------
simulate_raw
simulate_evoked
simulate_stc
Notes
-----
.. versionadded:: 0.10.0
"""
rng = check_random_state(random_state)
src = _ensure_src(src, verbose=False)
subject_src = src._subject
if subject is None:
subject = subject_src
elif subject_src is not None and subject != subject_src:
raise ValueError('subject argument (%s) did not match the source '
'space subject_his_id (%s)' % (subject, subject_src))
data = np.zeros((n_dipoles, len(times)))
for i_dip in range(n_dipoles):
data[i_dip, :] = data_fun(times)
if labels is None:
# can be vol or surface source space
offsets = np.linspace(0, n_dipoles, len(src) + 1).astype(int)
n_dipoles_ss = np.diff(offsets)
# don't use .choice b/c not on old numpy
vs = [s['vertno'][np.sort(rng.permutation(np.arange(s['nuse']))[:n])]
for n, s in zip(n_dipoles_ss, src)]
datas = data
elif n_dipoles > len(labels):
raise ValueError('Number of labels (%d) smaller than n_dipoles (%d) '
'is not allowed.' % (len(labels), n_dipoles))
else:
if n_dipoles != len(labels):
warn('The number of labels is different from the number of '
'dipoles. %s dipole(s) will be generated.'
% min(n_dipoles, len(labels)))
labels = labels[:n_dipoles] if n_dipoles < len(labels) else labels
vertno = [[], []]
lh_data = [np.empty((0, data.shape[1]))]
rh_data = [np.empty((0, data.shape[1]))]
for i, label in enumerate(labels):
lh_vertno, rh_vertno = select_source_in_label(
src, label, rng, location, subject, subjects_dir, surf)
vertno[0] += lh_vertno
vertno[1] += rh_vertno
if len(lh_vertno) != 0:
lh_data.append(data[i][np.newaxis])
elif len(rh_vertno) != 0:
rh_data.append(data[i][np.newaxis])
else:
raise ValueError('No vertno found.')
vs = [np.array(v) for v in vertno]
datas = [np.concatenate(d) for d in [lh_data, rh_data]]
# need to sort each hemi by vertex number
for ii in range(2):
order = np.argsort(vs[ii])
vs[ii] = vs[ii][order]
if len(order) > 0: # fix for old numpy
datas[ii] = datas[ii][order]
datas = np.concatenate(datas)
tmin, tstep = times[0], np.diff(times[:2])[0]
assert datas.shape == data.shape
cls = SourceEstimate if len(vs) == 2 else VolSourceEstimate
stc = cls(datas, vertices=vs, tmin=tmin, tstep=tstep, subject=subject)
return stc
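# A minimal usage sketch for simulate_sparse_stc (assuming ``src`` is a
# surface source space loaded with mne.read_source_spaces):
#
#     times = np.arange(600) / 1000.  # 0.6 s sampled at 1 kHz
#     stc = simulate_sparse_stc(src, n_dipoles=2, times=times,
#                               random_state=42)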
def simulate_stc(src, labels, stc_data, tmin, tstep, value_fun=None,
allow_overlap=False):
"""Simulate sources time courses from waveforms and labels.
This function generates a source estimate with extended sources by
filling the labels with the waveforms given in stc_data.
Parameters
----------
src : instance of SourceSpaces
The source space.
labels : list of Label
The labels.
stc_data : array, shape (n_labels, n_times)
The waveforms.
tmin : float
The beginning of the timeseries.
tstep : float
The time step (1 / sampling frequency).
value_fun : callable | None
Function to apply to the label values to obtain the waveform
scaling for each vertex in the label. If None (default), uniform
scaling is used.
allow_overlap : bool
Allow overlapping labels or not. Default value is False.
.. versionadded:: 0.18
Returns
-------
stc : SourceEstimate
The generated source time courses.
See Also
--------
simulate_raw
simulate_evoked
simulate_sparse_stc
"""
if len(labels) != len(stc_data):
raise ValueError('labels and stc_data must have the same length')
vertno = [[], []]
stc_data_extended = [[], []]
hemi_to_ind = {'lh': 0, 'rh': 1}
for i, label in enumerate(labels):
hemi_ind = hemi_to_ind[label.hemi]
src_sel = np.intersect1d(src[hemi_ind]['vertno'],
label.vertices)
if len(src_sel) == 0:
idx = src[hemi_ind]['inuse'].astype('bool')
xhs = src[hemi_ind]['rr'][idx]
rr = src[hemi_ind]['rr'][label.vertices]
closest_src = _compute_nearest(xhs, rr)
src_sel = src[hemi_ind]['vertno'][np.unique(closest_src)]
if value_fun is not None:
idx_sel = np.searchsorted(label.vertices, src_sel)
values_sel = np.array([value_fun(v) for v in
label.values[idx_sel]])
data = np.outer(values_sel, stc_data[i])
else:
data = np.tile(stc_data[i], (len(src_sel), 1))
# If overlaps are allowed, deal with them
if allow_overlap:
# Search for duplicate vertex indices
# in the existing vertex matrix vertex.
duplicates = []
for src_ind, vertex_ind in enumerate(src_sel):
ind = np.where(vertex_ind == vertno[hemi_ind])[0]
if len(ind) > 0:
assert (len(ind) == 1)
# Add the new data to the existing one
stc_data_extended[hemi_ind][ind[0]] += data[src_ind]
duplicates.append(src_ind)
# Remove the duplicates from both data and selected vertices
data = np.delete(data, duplicates, axis=0)
src_sel = list(np.delete(np.array(src_sel), duplicates))
# Extend the existing list instead of appending it so that we can
# index its elements
vertno[hemi_ind].extend(src_sel)
stc_data_extended[hemi_ind].extend(np.atleast_2d(data))
vertno = [np.array(v) for v in vertno]
if not allow_overlap:
for v, hemi in zip(vertno, ('left', 'right')):
d = len(v) - len(np.unique(v))
if d > 0:
raise RuntimeError('Labels had %s overlaps in the %s '
'hemisphere, '
'they must be non-overlapping' % (d, hemi))
# the data is in the order left, right
data = list()
for i in range(2):
if len(stc_data_extended[i]) != 0:
stc_data_extended[i] = np.vstack(stc_data_extended[i])
# Order the indices of each hemisphere
idx = np.argsort(vertno[i])
data.append(stc_data_extended[i][idx])
vertno[i] = vertno[i][idx]
stc = SourceEstimate(np.concatenate(data), vertices=vertno, tmin=tmin,
tstep=tstep, subject=src._subject)
return stc
class SourceSimulator(object):
"""Class to generate simulated Source Estimates.
Parameters
----------
src : instance of SourceSpaces
Source space.
tstep : float
Time step between successive samples in data. Default is 0.001 sec.
duration : float | None
Time interval during which the simulation takes place in seconds.
If None, it is computed using existing events and waveform lengths.
Attributes
----------
duration : float
The duration of the simulation in seconds.
n_times : int
The number of time samples of the simulation.
"""
def __init__(self, src, tstep=1e-3, duration=None):
self._src = src
self._tstep = tstep
self._labels = []
self._waveforms = []
self._events = np.empty((0, 3), dtype=int)
self._duration = duration
self._last_samples = []
self._chk_duration = 1000
@property
def duration(self):
"""Duration of the simulation"""
# If not, the precomputed maximum last sample is used
if self._duration is None:
return np.max(self._last_samples) * self._tstep
return self._duration
@property
def n_times(self):
"""Number of time samples in the simulation"""
return int(self.duration / self._tstep)
def add_data(self, label, waveform, events):
"""Add data to the simulation.
Data should be added in the form of a triplet of
Label (Where) - Waveform(s) (What) - Event(s) (When)
Parameters
----------
label : instance of Label
The label (as created for example by mne.read_label). If the label
does not match any sources in the SourceEstimate, a ValueError is
raised.
waveform : array, shape (n_times,) or (n_events, n_times) | list
The waveform(s) describing the activity on the label vertices.
If list, it must have the same length as events.
events : array of int, shape (n_events, 3)
Events associated to the waveform(s) to specify when the activity
should occur.
"""
if not isinstance(label, Label):
            raise ValueError('label must be a Label, '
                             'not %s' % type(label))
# If it is not a list then make it one
if not isinstance(waveform, list) and np.ndim(waveform) == 2:
waveform = list(waveform)
if not isinstance(waveform, list) and np.ndim(waveform) == 1:
waveform = [waveform]
if len(waveform) == 1:
waveform = waveform * len(events)
# The length is either equal to the length of events, or 1
if len(waveform) != len(events):
raise ValueError('Number of waveforms and events should match or '
'there should be a single waveform (%d != %d).' %
(len(waveform), len(events)))
# Update the maximum duration possible based on the events
self._labels.extend([label] * len(events))
self._waveforms.extend(waveform)
self._events = np.vstack([self._events, events])
# First sample per waveform is the first column of events
# Last is computed below
self._last_samples = np.array([self._events[i, 0] + len(w)
for i, w in enumerate(self._waveforms)])
def get_stim_channel(self, start_sample=0, stop_sample=None):
"""Get the stim channel from the provided data.
Returns the stim channel data according to the simulation parameters
which should be added through function add_data. If both start_sample
and stop_sample are not specified, the entire duration is used.
Parameters
----------
start_sample : int
First sample in chunk. Default is 0.
stop_sample : int | None
            The stop sample of the returned data. This sample is not part of
            the output, to follow slicing semantics. If None, then all
            samples past start_sample are returned.
Returns
-------
stim_data : ndarray of int, shape (n_samples,)
The stimulation channel data.
"""
if stop_sample is None:
stop_sample = self.n_times
n_samples = stop_sample - start_sample
# Initialize the stim data array
stim_data = np.zeros(n_samples, dtype=int)
# Select only events in the time chunk
stim_ind = np.where(np.logical_and(
self._events[:, 0] >= start_sample,
self._events[:, 0] < stop_sample))[0]
if len(stim_ind) > 0:
relative_ind = self._events[stim_ind, 0].astype(int) - start_sample
stim_data[relative_ind] = self._events[stim_ind, 2]
return stim_data
def get_stc(self, start_sample=0, stop_sample=None):
"""Simulate a SourceEstimate from the provided data.
Returns a SourceEstimate object constructed according to the simulation
parameters which should be added through function add_data. If both
start_sample and stop_sample are not specified, the entire duration is
used.
Parameters
----------
start_sample : int
First sample in chunk. Default is 0.
stop_sample : int | None
The stop sample of the returned stc. This sample is not part of the
            output, to follow slicing semantics. If None, then all samples
            past start_sample are returned.
Returns
-------
stc : SourceEstimate object
The generated source time courses.
"""
if len(self._labels) == 0:
raise ValueError('No simulation parameters were found. Please use '
'function add_data to add simulation parameters.')
if stop_sample is None:
stop_sample = self.n_times
n_samples = stop_sample - start_sample
# Initialize the stc_data array
stc_data = np.zeros((len(self._labels), n_samples))
# Select only the indices that have events in the time chunk
ind = np.where(np.logical_and(self._last_samples >= start_sample,
self._events[:, 0] < stop_sample))[0]
# Loop only over the items that are in the time chunk
subset_waveforms = [self._waveforms[i] for i in ind]
for i, (waveform, event) in enumerate(zip(subset_waveforms,
self._events[ind])):
# We retrieve the first and last sample of each waveform
# According to the corresponding event
wf_start = event[0]
wf_stop = self._last_samples[ind[i]]
# Recover the indices of the event that should be in the chunk
waveform_ind = np.in1d(np.arange(wf_start, wf_stop),
np.arange(start_sample, stop_sample))
# Recover the indices that correspond to the overlap
stc_ind = np.in1d(np.arange(start_sample, stop_sample),
np.arange(wf_start, wf_stop))
# add the resulting waveform chunk to the corresponding label
stc_data[ind[i]][stc_ind] += waveform[waveform_ind]
stc = simulate_stc(self._src, self._labels, stc_data,
start_sample * self._tstep, self._tstep,
allow_overlap=True)
return stc
def __iter__(self):
"""Iterate over 1 second STCs."""
# Arbitrary chunk size, can be modified later to something else
# Loop over chunks of 1 second - or, maximum sample size.
# Can be modified to a different value.
n_times = self.n_times
for start_sample in range(0, n_times, self._chk_duration):
stop_sample = min(start_sample + self._chk_duration, n_times)
yield (self.get_stc(start_sample, stop_sample),
self.get_stim_channel(start_sample, stop_sample))
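# A minimal usage sketch for SourceSimulator (assuming ``src`` is a
# SourceSpaces instance and ``label`` an mne.Label for the same subject):
#
#     sim = SourceSimulator(src, tstep=1e-3)
#     waveform = 1e-9 * np.sin(2 * np.pi * 10 * np.arange(100) * 1e-3)
#     events = np.array([[0, 0, 1], [500, 0, 1]])  # two onsets, event id 1
#     sim.add_data(label, waveform, events)
#     stc = sim.get_stc()  # or stream chunks: for stc, stim in sim: ...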
|
import html
import json
import os
import time
import textwrap
import urllib
import collections
import secrets
from typing import TypeVar, Callable, Dict, List, Optional, Union, Sequence, Tuple
from PyQt5.QtCore import QUrlQuery, QUrl
import qutebrowser
from qutebrowser.browser import pdfjs, downloads, history
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
objreg, standarddir)
from qutebrowser.qt import sip
pyeval_output = ":pyeval was never called"
spawn_output = ":spawn was never called"
csrf_token = None
_HANDLERS = {}
class Error(Exception):
"""Exception for generic errors on a qute:// page."""
class NotFoundError(Error):
"""Raised when the given URL was not found."""
class SchemeOSError(Error):
"""Raised when there was an OSError inside a handler."""
class UrlInvalidError(Error):
"""Raised when an invalid URL was opened."""
class RequestDeniedError(Error):
"""Raised when the request is forbidden."""
class Redirect(Exception):
"""Exception to signal a redirect should happen.
Attributes:
url: The URL to redirect to, as a QUrl.
"""
def __init__(self, url: QUrl):
super().__init__(url.toDisplayString())
self.url = url
# Return value: (mimetype, data) (encoded as utf-8 if a str is returned)
_HandlerRet = Tuple[str, Union[str, bytes]]
_Handler = TypeVar('_Handler', bound=Callable[[QUrl], _HandlerRet])
class add_handler: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to register a qute://* URL handler.
Attributes:
_name: The 'foo' part of qute://foo
"""
def __init__(self, name):
self._name = name
self._function: Optional[Callable] = None
def __call__(self, function: _Handler) -> _Handler:
self._function = function
_HANDLERS[self._name] = self.wrapper
return function
def wrapper(self, *args, **kwargs):
"""Call the underlying function."""
assert self._function is not None
return self._function(*args, **kwargs)
def data_for_url(url: QUrl) -> Tuple[str, bytes]:
"""Get the data to show for the given URL.
Args:
url: The QUrl to show.
Return:
A (mimetype, data) tuple.
"""
norm_url = url.adjusted(
QUrl.NormalizePathSegments | # type: ignore[arg-type]
QUrl.StripTrailingSlash)
if norm_url != url:
raise Redirect(norm_url)
path = url.path()
host = url.host()
query = url.query()
# A url like "qute:foo" is split as "scheme:path", not "scheme:host".
log.misc.debug("url: {}, path: {}, host {}".format(
url.toDisplayString(), path, host))
if not path or not host:
new_url = QUrl()
new_url.setScheme('qute')
# When path is absent, e.g. qute://help (with no trailing slash)
if host:
new_url.setHost(host)
# When host is absent, e.g. qute:help
else:
new_url.setHost(path)
new_url.setPath('/')
if query:
new_url.setQuery(query)
if new_url.host(): # path was a valid host
raise Redirect(new_url)
try:
handler = _HANDLERS[host]
except KeyError:
raise NotFoundError("No handler found for {}".format(
url.toDisplayString()))
try:
mimetype, data = handler(url)
except OSError as e:
raise SchemeOSError(e)
assert mimetype is not None, url
if mimetype == 'text/html' and isinstance(data, str):
# We let handlers return HTML as text
data = data.encode('utf-8', errors='xmlcharrefreplace')
return mimetype, data
@add_handler('bookmarks')
def qute_bookmarks(_url: QUrl) -> _HandlerRet:
"""Handler for qute://bookmarks. Display all quickmarks / bookmarks."""
bookmarks = sorted(objreg.get('bookmark-manager').marks.items(),
key=lambda x: x[1]) # Sort by title
quickmarks = sorted(objreg.get('quickmark-manager').marks.items(),
key=lambda x: x[0]) # Sort by name
src = jinja.render('bookmarks.html',
title='Bookmarks',
bookmarks=bookmarks,
quickmarks=quickmarks)
return 'text/html', src
@add_handler('tabs')
def qute_tabs(_url: QUrl) -> _HandlerRet:
"""Handler for qute://tabs. Display information about all open tabs."""
tabs: Dict[str, List[Tuple[str, str]]] = collections.defaultdict(list)
for win_id, window in objreg.window_registry.items():
if sip.isdeleted(window):
continue
tabbed_browser = objreg.get('tabbed-browser',
scope='window',
window=win_id)
for tab in tabbed_browser.widgets():
if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]:
urlstr = tab.url().toDisplayString()
tabs[str(win_id)].append((tab.title(), urlstr))
src = jinja.render('tabs.html',
title='Tabs',
tab_list_by_window=tabs)
return 'text/html', src
def history_data(
start_time: float,
    offset: Optional[int] = None
) -> Sequence[Dict[str, Union[str, int]]]:
"""Return history data.
Arguments:
start_time: select history starting from this timestamp.
offset: number of items to skip
"""
# history atimes are stored as ints, ensure start_time is not a float
start_time = int(start_time)
if offset is not None:
entries = history.web_history.entries_before(start_time, limit=1000,
offset=offset)
else:
# end is 24hrs earlier than start
end_time = start_time - 24*60*60
entries = history.web_history.entries_between(end_time, start_time)
return [{"url": e.url,
"title": html.escape(e.title) or html.escape(e.url),
"time": e.atime} for e in entries]
@add_handler('history')
def qute_history(url: QUrl) -> _HandlerRet:
"""Handler for qute://history. Display and serve history."""
if url.path() == '/data':
q_offset = QUrlQuery(url).queryItemValue("offset")
try:
offset = int(q_offset) if q_offset else None
except ValueError:
raise UrlInvalidError("Query parameter offset is invalid")
# Use start_time in query or current time.
q_start_time = QUrlQuery(url).queryItemValue("start_time")
try:
start_time = float(q_start_time) if q_start_time else time.time()
except ValueError:
raise UrlInvalidError("Query parameter start_time is invalid")
return 'text/html', json.dumps(history_data(start_time, offset))
else:
return 'text/html', jinja.render(
'history.html',
title='History',
gap_interval=config.val.history_gap_interval
)
@add_handler('javascript')
def qute_javascript(url: QUrl) -> _HandlerRet:
"""Handler for qute://javascript.
Return content of file given as query parameter.
"""
path = url.path()
if path:
path = "javascript" + os.sep.join(path.split('/'))
return 'text/html', utils.read_file(path, binary=False)
else:
raise UrlInvalidError("No file specified")
@add_handler('pyeval')
def qute_pyeval(_url: QUrl) -> _HandlerRet:
"""Handler for qute://pyeval."""
src = jinja.render('pre.html', title='pyeval', content=pyeval_output)
return 'text/html', src
@add_handler('spawn-output')
def qute_spawn_output(_url: QUrl) -> _HandlerRet:
"""Handler for qute://spawn-output."""
src = jinja.render('pre.html', title='spawn output', content=spawn_output)
return 'text/html', src
@add_handler('version')
def qute_version(_url: QUrl) -> _HandlerRet:
"""Handler for qute://version."""
src = jinja.render('version.html', title='Version info',
version=version.version_info(),
copyright=qutebrowser.__copyright__)
return 'text/html', src
@add_handler('log')
def qute_log(url: QUrl) -> _HandlerRet:
"""Handler for qute://log.
There are three query parameters:
- level: The minimum log level to print.
For example, qute://log?level=warning prints warnings and errors.
Level can be one of: vdebug, debug, info, warning, error, critical.
- plain: If given (and not 'false'), plaintext is shown.
- logfilter: A filter string like the --logfilter commandline argument
accepts.
"""
query = QUrlQuery(url)
plain = (query.hasQueryItem('plain') and
query.queryItemValue('plain').lower() != 'false')
if log.ram_handler is None:
content = "Log output was disabled." if plain else None
else:
level = query.queryItemValue('level')
if not level:
level = 'vdebug'
filter_str = query.queryItemValue('logfilter')
try:
logfilter = (log.LogFilter.parse(filter_str, only_debug=False)
if filter_str else None)
except log.InvalidLogFilterError as e:
raise UrlInvalidError(e)
content = log.ram_handler.dump_log(html=not plain,
level=level, logfilter=logfilter)
template = 'pre.html' if plain else 'log.html'
src = jinja.render(template, title='log', content=content)
return 'text/html', src
@add_handler('gpl')
def qute_gpl(_url: QUrl) -> _HandlerRet:
"""Handler for qute://gpl. Return HTML content as string."""
return 'text/html', utils.read_file('html/license.html')
def _asciidoc_fallback_path(html_path: str) -> Optional[str]:
"""Fall back to plaintext asciidoc if the HTML is unavailable."""
path = html_path.replace('.html', '.asciidoc')
try:
return utils.read_file(path)
except OSError:
return None
@add_handler('help')
def qute_help(url: QUrl) -> _HandlerRet:
"""Handler for qute://help."""
urlpath = url.path()
if not urlpath or urlpath == '/':
urlpath = 'index.html'
else:
urlpath = urlpath.lstrip('/')
if not docutils.docs_up_to_date(urlpath):
message.error("Your documentation is outdated! Please re-run "
"scripts/asciidoc2html.py.")
path = 'html/doc/{}'.format(urlpath)
if not urlpath.endswith('.html'):
try:
bdata = utils.read_file(path, binary=True)
except OSError as e:
raise SchemeOSError(e)
mimetype = utils.guess_mimetype(urlpath)
return mimetype, bdata
try:
data = utils.read_file(path)
except OSError:
asciidoc = _asciidoc_fallback_path(path)
if asciidoc is None:
raise
preamble = textwrap.dedent("""
There was an error loading the documentation!
This most likely means the documentation was not generated
properly. If you are running qutebrowser from the git repository,
please (re)run scripts/asciidoc2html.py and reload this page.
If you're running a released version this is a bug, please use
:report to report it.
Falling back to the plaintext version.
---------------------------------------------------------------
""")
return 'text/plain', (preamble + asciidoc).encode('utf-8')
else:
return 'text/html', data
def _qute_settings_set(url: QUrl) -> _HandlerRet:
"""Handler for qute://settings/set."""
query = QUrlQuery(url)
option = query.queryItemValue('option', QUrl.FullyDecoded)
value = query.queryItemValue('value', QUrl.FullyDecoded)
# https://github.com/qutebrowser/qutebrowser/issues/727
if option == 'content.javascript.enabled' and value == 'false':
msg = ("Refusing to disable javascript via qute://settings "
"as it needs javascript support.")
message.error(msg)
return 'text/html', b'error: ' + msg.encode('utf-8')
try:
config.instance.set_str(option, value, save_yaml=True)
return 'text/html', b'ok'
except configexc.Error as e:
message.error(str(e))
return 'text/html', b'error: ' + str(e).encode('utf-8')
@add_handler('settings')
def qute_settings(url: QUrl) -> _HandlerRet:
"""Handler for qute://settings. View/change qute configuration."""
global csrf_token
if url.path() == '/set':
if url.password() != csrf_token:
message.error("Invalid CSRF token for qute://settings!")
raise RequestDeniedError("Invalid CSRF token!")
return _qute_settings_set(url)
# Requests to qute://settings/set should only be allowed from
# qute://settings. As an additional security precaution, we generate a CSRF
# token to use here.
csrf_token = secrets.token_urlsafe()
src = jinja.render('settings.html', title='settings',
configdata=configdata,
confget=config.instance.get_str,
csrf_token=csrf_token)
return 'text/html', src
@add_handler('bindings')
def qute_bindings(_url: QUrl) -> _HandlerRet:
"""Handler for qute://bindings. View keybindings."""
bindings = {}
defaults = config.val.bindings.default
config_modes = set(defaults.keys()).union(config.val.bindings.commands)
config_modes.remove('normal')
modes = ['normal'] + sorted(config_modes)
for mode in modes:
bindings[mode] = config.key_instance.get_bindings_for(mode)
src = jinja.render('bindings.html', title='Bindings',
bindings=bindings)
return 'text/html', src
@add_handler('back')
def qute_back(url: QUrl) -> _HandlerRet:
"""Handler for qute://back.
Simple page to free ram / lazy load a site, goes back on focusing the tab.
"""
src = jinja.render(
'back.html',
title='Suspended: ' + urllib.parse.unquote(url.fragment()))
return 'text/html', src
@add_handler('configdiff')
def qute_configdiff(url: QUrl) -> _HandlerRet:
"""Handler for qute://configdiff."""
if url.path() == '/old':
try:
return 'text/html', configdiff.get_diff()
except OSError as e:
error = (b'Failed to read old config: ' +
str(e.strerror).encode('utf-8'))
return 'text/plain', error
else:
data = config.instance.dump_userconfig().encode('utf-8')
return 'text/plain', data
@add_handler('pastebin-version')
def qute_pastebin_version(_url: QUrl) -> _HandlerRet:
"""Handler that pastebins the version string."""
version.pastebin_version()
return 'text/plain', b'Paste called.'
def _pdf_path(filename: str) -> str:
"""Get the path of a temporary PDF file."""
return os.path.join(downloads.temp_download_manager.get_tmpdir().name,
filename)
@add_handler('pdfjs')
def qute_pdfjs(url: QUrl) -> _HandlerRet:
"""Handler for qute://pdfjs.
Return the pdf.js viewer or redirect to original URL if the file does not
exist.
"""
if url.path() == '/file':
filename = QUrlQuery(url).queryItemValue('filename')
if not filename:
raise UrlInvalidError("Missing filename")
if '/' in filename or os.sep in filename:
raise RequestDeniedError("Path separator in filename.")
path = _pdf_path(filename)
with open(path, 'rb') as f:
data = f.read()
mimetype = utils.guess_mimetype(filename, fallback=True)
return mimetype, data
if url.path() == '/web/viewer.html':
query = QUrlQuery(url)
filename = query.queryItemValue("filename")
if not filename:
raise UrlInvalidError("Missing filename")
path = _pdf_path(filename)
if not os.path.isfile(path):
source = query.queryItemValue('source')
if not source: # This may happen with old URLs stored in history
raise UrlInvalidError("Missing source")
raise Redirect(QUrl(source))
data = pdfjs.generate_pdfjs_page(filename, url)
return 'text/html', data
try:
data = pdfjs.get_pdfjs_res(url.path())
except pdfjs.PDFJSNotFound as e:
# Logging as the error might get lost otherwise since we're not showing
# the error page if a single asset is missing. This way we don't lose
# information, as the failed pdfjs requests are still in the log.
log.misc.warning(
"pdfjs resource requested but not found: {}".format(e.path))
raise NotFoundError("Can't find pdfjs resource '{}'".format(e.path))
else:
mimetype = utils.guess_mimetype(url.fileName(), fallback=True)
return mimetype, data
@add_handler('warning')
def qute_warning(url: QUrl) -> _HandlerRet:
"""Handler for qute://warning."""
path = url.path()
if path == '/webkit':
src = jinja.render('warning-webkit.html',
title='QtWebKit backend warning')
elif path == '/sessions':
src = jinja.render('warning-sessions.html',
title='Qt 5.15 sessions warning',
datadir=standarddir.data(),
sep=os.sep)
else:
raise NotFoundError("Invalid warning page {}".format(path))
return 'text/html', src
|
import functools
import hashlib
from flask import jsonify, request, make_response
from sandman2.exception import BadRequestException
def etag(func):
"""Return a decorator that generates proper ETag values for a response.
:param func: view function
"""
@functools.wraps(func)
def wrapped(*args, **kwargs):
"""Call the view function and generate an ETag value, checking the
headers to determine what response to send."""
# only for HEAD and GET requests
        assert request.method in ['HEAD', 'GET'],\
            '@etag is only supported for GET and HEAD requests'
response = func(*args, **kwargs)
response = make_response(response)
etag_value = '"' + hashlib.md5(response.get_data()).hexdigest() + '"'
response.headers['ETag'] = etag_value
if_match = request.headers.get('If-Match')
if_none_match = request.headers.get('If-None-Match')
if if_match:
etag_list = [tag.strip() for tag in if_match.split(',')]
if etag_value not in etag_list and '*' not in etag_list:
response = precondition_failed()
elif if_none_match:
etag_list = [tag.strip() for tag in if_none_match.split(',')]
if etag_value in etag_list or '*' in etag_list:
response = not_modified()
return response
return wrapped
def not_modified():
"""Return an HTTP 304 response if the resource hasn't been modified based
on the ETag value."""
    response = jsonify({'status': 304, 'error': 'not modified'})
response.status_code = 304
return response
def precondition_failed():
"""Return an HTTP 412 if no ETags match on an If-Match."""
response = jsonify({'status': 412, 'error': 'precondition failed'})
response.status_code = 412
return response
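# A minimal usage sketch for @etag (hypothetical Flask view; not part of
# sandman2 itself):
#
#     @app.route('/ping')
#     @etag
#     def ping():
#         return jsonify({'pong': True})
#
# A client that repeats the request with ``If-None-Match`` set to the ETag
# it received gets a 304 response instead of the full body.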
def validate_fields(func):
"""A decorator to automatically detect missing required fields from
json data."""
@functools.wraps(func)
def decorated(instance, *args, **kwargs):
"""The decorator function."""
data = request.get_json(force=True, silent=True)
if not data:
raise BadRequestException('No data received from request')
for key in data:
if key not in (
instance.__model__.required() +
instance.__model__.optional()):
raise BadRequestException('Unknown field [{}]'.format(key))
missing = set(instance.__model__.required()) - set(data)
if missing:
message = 'The following required fields are missing: ' + ', '.join(missing)
raise BadRequestException(message)
return func(instance, *args, **kwargs)
return decorated
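# ``validate_fields`` is intended to wrap methods on sandman2 service
# classes: ``instance.__model__`` is expected to expose ``required()`` and
# ``optional()`` column-name lists, which drive the checks above.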
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_VARIABLES,
CONF_NAME,
CONF_RESOURCE,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
HTTP_OK,
)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
CONF_FUNCTIONS = "functions"
CONF_PINS = "pins"
DEFAULT_NAME = "aREST sensor"
PIN_VARIABLE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RESOURCE): cv.url,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PINS, default={}): vol.Schema(
{cv.string: PIN_VARIABLE_SCHEMA}
),
vol.Optional(CONF_MONITORED_VARIABLES, default={}): vol.Schema(
{cv.string: PIN_VARIABLE_SCHEMA}
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the aREST sensor."""
resource = config[CONF_RESOURCE]
var_conf = config[CONF_MONITORED_VARIABLES]
pins = config[CONF_PINS]
try:
response = requests.get(resource, timeout=10).json()
except requests.exceptions.MissingSchema:
_LOGGER.error(
"Missing resource or schema in configuration. Add http:// to your URL"
)
return False
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device at %s", resource)
return False
arest = ArestData(resource)
def make_renderer(value_template):
"""Create a renderer based on variable_template value."""
if value_template is None:
return lambda value: value
value_template.hass = hass
def _render(value):
try:
return value_template.async_render({"value": value}, parse_result=False)
except TemplateError:
_LOGGER.exception("Error parsing value")
return value
return _render
dev = []
if var_conf is not None:
for variable, var_data in var_conf.items():
if variable not in response["variables"]:
_LOGGER.error("Variable: %s does not exist", variable)
continue
renderer = make_renderer(var_data.get(CONF_VALUE_TEMPLATE))
dev.append(
ArestSensor(
arest,
resource,
config.get(CONF_NAME, response[CONF_NAME]),
var_data.get(CONF_NAME, variable),
variable=variable,
unit_of_measurement=var_data.get(CONF_UNIT_OF_MEASUREMENT),
renderer=renderer,
)
)
if pins is not None:
for pinnum, pin in pins.items():
renderer = make_renderer(pin.get(CONF_VALUE_TEMPLATE))
dev.append(
ArestSensor(
ArestData(resource, pinnum),
resource,
config.get(CONF_NAME, response[CONF_NAME]),
pin.get(CONF_NAME),
pin=pinnum,
unit_of_measurement=pin.get(CONF_UNIT_OF_MEASUREMENT),
renderer=renderer,
)
)
add_entities(dev, True)
class ArestSensor(Entity):
"""Implementation of an aREST sensor for exposed variables."""
def __init__(
self,
arest,
resource,
location,
name,
variable=None,
pin=None,
unit_of_measurement=None,
renderer=None,
):
"""Initialize the sensor."""
self.arest = arest
self._resource = resource
self._name = f"{location.title()} {name.title()}"
self._variable = variable
self._pin = pin
self._state = None
self._unit_of_measurement = unit_of_measurement
self._renderer = renderer
if self._pin is not None:
request = requests.get(f"{self._resource}/mode/{self._pin}/i", timeout=10)
if request.status_code != HTTP_OK:
_LOGGER.error("Can't set mode of %s", self._resource)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
values = self.arest.data
if "error" in values:
return values["error"]
value = self._renderer(values.get("value", values.get(self._variable, None)))
return value
def update(self):
"""Get the latest data from aREST API."""
self.arest.update()
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.arest.available
class ArestData:
"""The Class for handling the data retrieval for variables."""
def __init__(self, resource, pin=None):
"""Initialize the data object."""
self._resource = resource
self._pin = pin
self.data = {}
self.available = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from aREST device."""
try:
if self._pin is None:
response = requests.get(self._resource, timeout=10)
self.data = response.json()["variables"]
else:
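                # Pin ids like "A0" are read via the analog endpoint.
                # Integer pin ids are not subscriptable, so self._pin[0]
                # raises TypeError and the digital endpoint is used instead.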
try:
if str(self._pin[0]) == "A":
response = requests.get(
f"{self._resource}/analog/{self._pin[1:]}", timeout=10
)
self.data = {"value": response.json()["return_value"]}
except TypeError:
response = requests.get(
f"{self._resource}/digital/{self._pin}", timeout=10
)
self.data = {"value": response.json()["return_value"]}
self.available = True
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device %s", self._resource)
self.available = False
|
from homeassistant import auth
from homeassistant.setup import async_setup_component
from tests.common import ensure_auth_manager_loaded
BASE_CONFIG = [
{
"name": "Example",
"type": "insecure_example",
"users": [
{"username": "test-user", "password": "test-pass", "name": "Test Name"}
],
}
]
EMPTY_CONFIG = []
async def async_setup_auth(
hass,
aiohttp_client,
provider_configs=BASE_CONFIG,
module_configs=EMPTY_CONFIG,
setup_api=False,
):
"""Set up authentication and create an HTTP client."""
hass.auth = await auth.auth_manager_from_config(
hass, provider_configs, module_configs
)
ensure_auth_manager_loaded(hass.auth)
await async_setup_component(hass, "auth", {})
if setup_api:
await async_setup_component(hass, "api", {})
return await aiohttp_client(hass.http.app)
|
import os
import importlib
import pytest
import cherrypy
from cherrypy.test import helper
curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
class RoutesDispatchTest(helper.CPWebCase):
"""Routes dispatcher test suite."""
@staticmethod
def setup_server():
"""Set up cherrypy test instance."""
try:
importlib.import_module('routes')
except ImportError:
pytest.skip('Install routes to test RoutesDispatcher code')
class Dummy:
def index(self):
return 'I said good day!'
class City:
def __init__(self, name):
self.name = name
self.population = 10000
@cherrypy.config(**{
'tools.response_headers.on': True,
'tools.response_headers.headers': [
('Content-Language', 'en-GB'),
],
})
def index(self, **kwargs):
return 'Welcome to %s, pop. %s' % (self.name, self.population)
def update(self, **kwargs):
self.population = kwargs['pop']
return 'OK'
d = cherrypy.dispatch.RoutesDispatcher()
d.connect(action='index', name='hounslow', route='/hounslow',
controller=City('Hounslow'))
d.connect(
name='surbiton', route='/surbiton', controller=City('Surbiton'),
action='index', conditions=dict(method=['GET']))
d.mapper.connect('/surbiton', controller='surbiton',
action='update', conditions=dict(method=['POST']))
d.connect('main', ':action', controller=Dummy())
conf = {'/': {'request.dispatch': d}}
cherrypy.tree.mount(root=None, config=conf)
def test_Routes_Dispatch(self):
"""Check that routes package based URI dispatching works correctly."""
self.getPage('/hounslow')
self.assertStatus('200 OK')
self.assertBody('Welcome to Hounslow, pop. 10000')
self.getPage('/foo')
self.assertStatus('404 Not Found')
self.getPage('/surbiton')
self.assertStatus('200 OK')
self.assertBody('Welcome to Surbiton, pop. 10000')
self.getPage('/surbiton', method='POST', body='pop=1327')
self.assertStatus('200 OK')
self.assertBody('OK')
self.getPage('/surbiton')
self.assertStatus('200 OK')
self.assertHeader('Content-Language', 'en-GB')
self.assertBody('Welcome to Surbiton, pop. 1327')
|
from __future__ import division
import unittest
import numpy as np
from chainercv.evaluations import calc_semantic_segmentation_confusion
from chainercv.evaluations import calc_semantic_segmentation_iou
from chainercv.evaluations import eval_semantic_segmentation
from chainercv.utils import testing
@testing.parameterize(
{'pred_labels': iter(np.repeat([[[1, 1, 0], [0, 0, 1]]], 2, axis=0)),
'gt_labels': iter(np.repeat([[[1, 0, 0], [0, -1, 1]]], 2, axis=0)),
'iou': np.array([4 / 6, 4 / 6]),
'pixel_accuracy': 4 / 5,
'class_accuracy': np.array([2 / 3, 2 / 2]),
},
{'pred_labels': np.array([[[0, 0, 0], [0, 0, 0]]]),
'gt_labels': np.array([[[1, 1, 1], [1, 1, 1]]]),
'iou': np.array([0, 0]),
'pixel_accuracy': 0 / 6,
'class_accuracy': np.array([np.nan, 0])
}
)
class TestEvalSemanticSegmentation(unittest.TestCase):
def test_eval_semantic_segmentation(self):
result = eval_semantic_segmentation(
self.pred_labels, self.gt_labels)
np.testing.assert_equal(result['iou'], self.iou)
np.testing.assert_equal(result['pixel_accuracy'], self.pixel_accuracy)
np.testing.assert_equal(result['class_accuracy'], self.class_accuracy)
np.testing.assert_equal(result['miou'], np.nanmean(self.iou))
np.testing.assert_equal(
result['mean_class_accuracy'], np.nanmean(self.class_accuracy))
class TestCalcSemanticSegmentationConfusion(unittest.TestCase):
def test_calc_semantic_segmentation_confusion(self):
n_class = 2
pred_labels = np.random.randint(0, n_class, size=(10, 16, 16))
gt_labels = np.random.randint(-1, n_class, size=(10, 16, 16))
expected = np.zeros((n_class, n_class), dtype=np.int64)
expected[0, 0] = np.sum(
np.logical_and(gt_labels == 0, pred_labels == 0))
expected[0, 1] = np.sum(
np.logical_and(gt_labels == 0, pred_labels == 1))
expected[1, 0] = np.sum(
np.logical_and(gt_labels == 1, pred_labels == 0))
expected[1, 1] = np.sum(
np.logical_and(gt_labels == 1, pred_labels == 1))
confusion = calc_semantic_segmentation_confusion(
pred_labels, gt_labels)
np.testing.assert_equal(confusion, expected)
def test_calc_semantic_segmentation_confusion_shape(self):
n_class = 30
pred_labels = np.random.randint(0, n_class, size=(2, 3, 3))
gt_labels = np.random.randint(-1, n_class, size=(2, 3, 3))
confusion = calc_semantic_segmentation_confusion(
pred_labels, gt_labels)
size = (np.max((pred_labels + 1, gt_labels + 1)))
self.assertEqual(confusion.shape, (size, size))
class TestCalcSemanticSegmentationIou(unittest.TestCase):
n_class = 2
def test_calc_semantic_segmentation_iou(self):
c = np.random.randint(0, 100, size=(self.n_class, self.n_class))
expected = np.array(
[c[0, 0] / (c[0, 0] + c[0, 1] + c[1, 0]),
c[1, 1] / (c[1, 1] + c[1, 0] + c[0, 1])])
iou = calc_semantic_segmentation_iou(c)
np.testing.assert_equal(iou, expected)
testing.run_module(__name__, __file__)
|
from PyQt5.QtCore import QObject, QEvent, Qt, QTimer
from qutebrowser.config import config
from qutebrowser.utils import message, log, usertypes, qtutils
from qutebrowser.misc import objects
from qutebrowser.keyinput import modeman
class ChildEventFilter(QObject):
"""An event filter re-adding TabEventFilter on ChildEvent.
This is needed because QtWebEngine likes to randomly change its
focusProxy...
FIXME:qtwebengine Add a test for this happening
Attributes:
_filter: The event filter to install.
_widget: The widget expected to send out childEvents.
"""
def __init__(self, *, eventfilter, widget=None, parent=None):
super().__init__(parent)
self._filter = eventfilter
self._widget = widget
def eventFilter(self, obj, event):
"""Act on ChildAdded events."""
if event.type() == QEvent.ChildAdded:
child = event.child()
log.misc.debug("{} got new child {}, installing filter"
.format(obj, child))
# Additional sanity check, but optional
if self._widget is not None:
assert obj is self._widget
child.installEventFilter(self._filter)
elif event.type() == QEvent.ChildRemoved:
child = event.child()
log.misc.debug("{}: removed child {}".format(obj, child))
return False
class TabEventFilter(QObject):
"""Handle mouse/keyboard events on a tab.
Attributes:
_tab: The browsertab object this filter is installed on.
_handlers: A dict of handler functions for the handled events.
_ignore_wheel_event: Whether to ignore the next wheelEvent.
_check_insertmode_on_release: Whether an insertmode check should be
done when the mouse is released.
"""
def __init__(self, tab, *, parent=None):
super().__init__(parent)
self._tab = tab
self._handlers = {
QEvent.MouseButtonPress: self._handle_mouse_press,
QEvent.MouseButtonRelease: self._handle_mouse_release,
QEvent.Wheel: self._handle_wheel,
QEvent.KeyRelease: self._handle_key_release,
}
self._ignore_wheel_event = False
self._check_insertmode_on_release = False
def _handle_mouse_press(self, e):
"""Handle pressing of a mouse button.
Args:
e: The QMouseEvent.
Return:
True if the event should be filtered, False otherwise.
"""
is_rocker_gesture = (config.val.input.mouse.rocker_gestures and
e.buttons() == Qt.LeftButton | Qt.RightButton)
if e.button() in [Qt.XButton1, Qt.XButton2] or is_rocker_gesture:
self._mousepress_backforward(e)
return True
self._ignore_wheel_event = True
pos = e.pos()
if pos.x() < 0 or pos.y() < 0:
log.mouse.warning("Ignoring invalid click at {}".format(pos))
return False
if e.button() != Qt.NoButton:
self._tab.elements.find_at_pos(pos, self._mousepress_insertmode_cb)
return False
def _handle_mouse_release(self, _e):
"""Handle releasing of a mouse button.
Args:
e: The QMouseEvent.
Return:
True if the event should be filtered, False otherwise.
"""
# We want to make sure we check the focus element after the WebView is
# updated completely.
QTimer.singleShot(0, self._mouserelease_insertmode)
return False
def _handle_wheel(self, e):
"""Zoom on Ctrl-Mousewheel.
Args:
e: The QWheelEvent.
Return:
True if the event should be filtered, False otherwise.
"""
if self._ignore_wheel_event:
# See https://github.com/qutebrowser/qutebrowser/issues/395
self._ignore_wheel_event = False
return True
# Don't allow scrolling while hinting
mode = modeman.instance(self._tab.win_id).mode
if mode == usertypes.KeyMode.hint:
return True
elif e.modifiers() & Qt.ControlModifier:
if mode == usertypes.KeyMode.passthrough:
return False
divider = config.val.zoom.mouse_divider
if divider == 0:
# Disable mouse zooming
return True
factor = self._tab.zoom.factor() + (e.angleDelta().y() / divider)
if factor < 0:
return True
perc = int(100 * factor)
message.info("Zoom level: {}%".format(perc), replace=True)
self._tab.zoom.set_factor(factor)
return True
return False
def _handle_key_release(self, e):
"""Ignore repeated key release events going to the website.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-77208
Args:
e: The QKeyEvent.
Return:
True if the event should be filtered, False otherwise.
"""
return (e.isAutoRepeat() and
not qtutils.version_check('5.14', compiled=False) and
objects.backend == usertypes.Backend.QtWebEngine)
def _mousepress_insertmode_cb(self, elem):
"""Check if the clicked element is editable."""
if elem is None:
# Something didn't work out, let's find the focus element after
# a mouse release.
log.mouse.debug("Got None element, scheduling check on "
"mouse release")
self._check_insertmode_on_release = True
return
if elem.is_editable():
log.mouse.debug("Clicked editable element!")
if config.val.input.insert_mode.auto_enter:
modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
'click', only_if_normal=True)
else:
log.mouse.debug("Clicked non-editable element!")
if config.val.input.insert_mode.auto_leave:
modeman.leave(self._tab.win_id, usertypes.KeyMode.insert,
'click', maybe=True)
def _mouserelease_insertmode(self):
"""If we have an insertmode check scheduled, handle it."""
if not self._check_insertmode_on_release:
return
self._check_insertmode_on_release = False
def mouserelease_insertmode_cb(elem):
"""Callback which gets called from JS."""
if elem is None:
log.mouse.debug("Element vanished!")
return
if elem.is_editable():
log.mouse.debug("Clicked editable element (delayed)!")
modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
'click-delayed', only_if_normal=True)
else:
log.mouse.debug("Clicked non-editable element (delayed)!")
if config.val.input.insert_mode.auto_leave:
modeman.leave(self._tab.win_id, usertypes.KeyMode.insert,
'click-delayed', maybe=True)
self._tab.elements.find_focused(mouserelease_insertmode_cb)
def _mousepress_backforward(self, e):
"""Handle back/forward mouse button presses.
Args:
e: The QMouseEvent.
Return:
True if the event should be filtered, False otherwise.
"""
if (not config.val.input.mouse.back_forward_buttons and
e.button() in [Qt.XButton1, Qt.XButton2]):
# Back and forward on mice are disabled
return
if e.button() in [Qt.XButton1, Qt.LeftButton]:
# Back button on mice which have it, or rocker gesture
if self._tab.history.can_go_back():
self._tab.history.back()
else:
message.error("At beginning of history.")
elif e.button() in [Qt.XButton2, Qt.RightButton]:
# Forward button on mice which have it, or rocker gesture
if self._tab.history.can_go_forward():
self._tab.history.forward()
else:
message.error("At end of history.")
def eventFilter(self, obj, event):
"""Filter events going to a QWeb(Engine)View.
Return:
True if the event should be filtered, False otherwise.
"""
evtype = event.type()
if evtype not in self._handlers:
return False
if obj is not self._tab.private_api.event_target():
log.mouse.debug("Ignoring {} to {}".format(
event.__class__.__name__, obj))
return False
return self._handlers[evtype](event)
|
import os
import pytest
import sh
from molecule import config
from molecule.dependency import gilt
@pytest.fixture
def _patched_gilt_has_requirements_file(mocker):
m = mocker.patch('molecule.dependency.gilt.Gilt._has_requirements_file')
m.return_value = True
return m
@pytest.fixture
def _dependency_section_data():
return {
'dependency': {
'name': 'gilt',
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'bar',
}
}
}
# NOTE(retr0h): The use of the `patched_config_validate` fixture disables
# config.Config._validate from executing, preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(_dependency_section_data, patched_config_validate,
config_instance):
return gilt.Gilt(config_instance)
@pytest.fixture
def gilt_config(_instance):
return os.path.join(_instance._config.scenario.directory, 'gilt.yml')
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_default_options_property(gilt_config, _instance):
x = {'config': gilt_config}
assert x == _instance.default_options
def test_default_env_property(_instance):
assert 'MOLECULE_FILE' in _instance.default_env
assert 'MOLECULE_INVENTORY_FILE' in _instance.default_env
assert 'MOLECULE_SCENARIO_DIRECTORY' in _instance.default_env
assert 'MOLECULE_INSTANCE_CONFIG' in _instance.default_env
@pytest.mark.parametrize(
'config_instance', ['_dependency_section_data'], indirect=True)
def test_name_property(_instance):
assert 'gilt' == _instance.name
def test_enabled_property(_instance):
assert _instance.enabled
@pytest.mark.parametrize(
'config_instance', ['_dependency_section_data'], indirect=True)
def test_options_property(gilt_config, _instance):
x = {'config': gilt_config, 'foo': 'bar'}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_dependency_section_data'], indirect=True)
def test_options_property_handles_cli_args(gilt_config, _instance):
_instance._config.args = {'debug': True}
x = {'config': gilt_config, 'foo': 'bar', 'debug': True}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_dependency_section_data'], indirect=True)
def test_env_property(_instance):
assert 'bar' == _instance.env['FOO']
@pytest.mark.parametrize(
'config_instance', ['_dependency_section_data'], indirect=True)
def test_bake(gilt_config, _instance):
_instance.bake()
x = [
str(sh.gilt), '--foo=bar', '--config={}'.format(gilt_config), 'overlay'
]
result = str(_instance._sh_command).split()
assert sorted(x) == sorted(result)
def test_execute(patched_run_command, _patched_gilt_has_requirements_file,
patched_logger_success, _instance):
_instance._sh_command = 'patched-command'
_instance.execute()
patched_run_command.assert_called_once_with('patched-command', debug=False)
msg = 'Dependency completed successfully.'
patched_logger_success.assert_called_once_with(msg)
def test_execute_does_not_execute_when_disabled(
patched_run_command, patched_logger_warn, _instance):
_instance._config.config['dependency']['enabled'] = False
_instance.execute()
assert not patched_run_command.called
msg = 'Skipping, dependency is disabled.'
patched_logger_warn.assert_called_once_with(msg)
def test_execute_does_not_execute_when_no_requirements_file(
patched_run_command, _patched_gilt_has_requirements_file,
patched_logger_warn, _instance):
_patched_gilt_has_requirements_file.return_value = False
_instance.execute()
assert not patched_run_command.called
msg = 'Skipping, missing the requirements file.'
patched_logger_warn.assert_called_once_with(msg)
def test_execute_bakes(patched_run_command, gilt_config,
_patched_gilt_has_requirements_file, _instance):
_instance.execute()
assert _instance._sh_command is not None
assert 1 == patched_run_command.call_count
def test_executes_catches_and_exits_return_code(
patched_run_command, _patched_gilt_has_requirements_file, _instance):
patched_run_command.side_effect = sh.ErrorReturnCode_1(sh.gilt, b'', b'')
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
def test_config_file(_instance, gilt_config):
assert gilt_config == _instance._config_file()
def test_has_requirements_file(_instance):
assert not _instance._has_requirements_file()
|
import logging
from urllib.parse import ParseResult, urlparse
from requests.exceptions import HTTPError, Timeout
from sunwatcher.solarlog.solarlog import SolarLog
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_HOST, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from .const import DEFAULT_HOST, DEFAULT_NAME, DOMAIN, SCAN_INTERVAL, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Import YAML configuration when available."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=dict(config)
)
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Add solarlog entry."""
host_entry = entry.data[CONF_HOST]
url = urlparse(host_entry, "http")
netloc = url.netloc or url.path
path = url.path if url.netloc else ""
url = ParseResult("http", netloc, path, *url[3:])
host = url.geturl()
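    # e.g. a bare "solar-log.local" (hypothetical host, no scheme) parses with
    # an empty netloc, so the path is promoted to netloc and the rebuilt URL
    # becomes "http://solar-log.local".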
platform_name = entry.title
try:
api = await hass.async_add_executor_job(SolarLog, host)
_LOGGER.debug("Connected to Solar-Log device, setting up entries")
except (OSError, HTTPError, Timeout):
_LOGGER.error(
"Could not connect to Solar-Log device at %s, check host ip address", host
)
return
# Create solarlog data service which will retrieve and update the data.
data = await hass.async_add_executor_job(SolarlogData, hass, api, host)
# Create a new sensor for each sensor type.
entities = []
for sensor_key in SENSOR_TYPES:
sensor = SolarlogSensor(entry.entry_id, platform_name, sensor_key, data)
entities.append(sensor)
async_add_entities(entities, True)
return True
class SolarlogSensor(Entity):
"""Representation of a Sensor."""
def __init__(self, entry_id, platform_name, sensor_key, data):
"""Initialize the sensor."""
self.platform_name = platform_name
self.sensor_key = sensor_key
self.data = data
self.entry_id = entry_id
self._state = None
self._json_key = SENSOR_TYPES[self.sensor_key][0]
self._label = SENSOR_TYPES[self.sensor_key][1]
self._unit_of_measurement = SENSOR_TYPES[self.sensor_key][2]
self._icon = SENSOR_TYPES[self.sensor_key][3]
@property
def unique_id(self):
"""Return the unique id."""
return f"{self.entry_id}_{self.sensor_key}"
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.platform_name} {self._label}"
@property
def unit_of_measurement(self):
"""Return the state of the sensor."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the sensor icon."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Get the latest data from the sensor and update the state."""
self.data.update()
self._state = self.data.data[self._json_key]
class SolarlogData:
"""Get and update the latest data."""
def __init__(self, hass, api, host):
"""Initialize the data object."""
self.api = api
self.hass = hass
self.host = host
self.update = Throttle(SCAN_INTERVAL)(self._update)
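        # ``update`` is the public, rate-limited entry point: Throttle wraps
        # ``_update`` so calls arriving within SCAN_INTERVAL become no-ops
        # instead of hammering the Solar-Log device on every entity update.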
self.data = {}
def _update(self):
"""Update the data from the SolarLog device."""
try:
self.api = SolarLog(self.host)
response = self.api.time
_LOGGER.debug(
"Connection to Solarlog successful. Retrieving latest Solarlog update of %s",
response,
)
except (OSError, Timeout, HTTPError):
_LOGGER.error("Connection error, Could not retrieve data, skipping update")
return
try:
self.data["TIME"] = self.api.time
self.data["powerAC"] = self.api.power_ac
self.data["powerDC"] = self.api.power_dc
self.data["voltageAC"] = self.api.voltage_ac
self.data["voltageDC"] = self.api.voltage_dc
self.data["yieldDAY"] = self.api.yield_day / 1000
self.data["yieldYESTERDAY"] = self.api.yield_yesterday / 1000
self.data["yieldMONTH"] = self.api.yield_month / 1000
self.data["yieldYEAR"] = self.api.yield_year / 1000
self.data["yieldTOTAL"] = self.api.yield_total / 1000
self.data["consumptionAC"] = self.api.consumption_ac
self.data["consumptionDAY"] = self.api.consumption_day / 1000
self.data["consumptionYESTERDAY"] = self.api.consumption_yesterday / 1000
self.data["consumptionMONTH"] = self.api.consumption_month / 1000
self.data["consumptionYEAR"] = self.api.consumption_year / 1000
self.data["consumptionTOTAL"] = self.api.consumption_total / 1000
self.data["totalPOWER"] = self.api.total_power
self.data["alternatorLOSS"] = self.api.alternator_loss
self.data["CAPACITY"] = round(self.api.capacity * 100, 0)
self.data["EFFICIENCY"] = round(self.api.efficiency * 100, 0)
self.data["powerAVAILABLE"] = self.api.power_available
self.data["USAGE"] = self.api.usage
_LOGGER.debug("Updated Solarlog overview data: %s", self.data)
except AttributeError:
_LOGGER.error("Missing details data in Solarlog response")
|
from pykwb import kwb
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
DEFAULT_RAW = False
DEFAULT_NAME = "KWB"
MODE_SERIAL = 0
MODE_TCP = 1
CONF_TYPE = "type"
CONF_RAW = "raw"
SERIAL_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_RAW, default=DEFAULT_RAW): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_DEVICE): cv.string,
vol.Required(CONF_TYPE): "serial",
}
)
ETHERNET_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_RAW, default=DEFAULT_RAW): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_TYPE): "tcp",
}
)
PLATFORM_SCHEMA = vol.Schema(vol.Any(SERIAL_SCHEMA, ETHERNET_SCHEMA))
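# Illustrative YAML matching the two schemas above (host/port/device values
# are made up):
#
#   sensor:
#     - platform: kwb
#       type: tcp
#       host: 192.168.1.10
#       port: 23
#     - platform: kwb
#       type: serial
#       device: /dev/ttyUSB0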
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the KWB component."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
device = config.get(CONF_DEVICE)
connection_type = config.get(CONF_TYPE)
raw = config.get(CONF_RAW)
client_name = config.get(CONF_NAME)
if connection_type == "serial":
easyfire = kwb.KWBEasyfire(MODE_SERIAL, "", 0, device)
elif connection_type == "tcp":
easyfire = kwb.KWBEasyfire(MODE_TCP, host, port)
else:
return False
easyfire.run_thread()
sensors = []
for sensor in easyfire.get_sensors():
        if sensor.sensor_type != kwb.PROP_SENSOR_RAW or raw:
sensors.append(KWBSensor(easyfire, sensor, client_name))
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: easyfire.stop_thread())
add_entities(sensors)
class KWBSensor(Entity):
"""Representation of a KWB Easyfire sensor."""
def __init__(self, easyfire, sensor, client_name):
"""Initialize the KWB sensor."""
self._easyfire = easyfire
self._sensor = sensor
self._client_name = client_name
self._name = self._sensor.name
@property
def name(self):
"""Return the name."""
return f"{self._client_name} {self._name}"
@property
def available(self) -> bool:
"""Return if sensor is available."""
return self._sensor.available
@property
def state(self):
"""Return the state of value."""
if self._sensor.value is not None and self._sensor.available:
return self._sensor.value
return None
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._sensor.unit_of_measurement
|
import pickle
import pytest
from itertools import count
from unittest.mock import Mock
from case import mock
from kombu.utils import functional as utils
from kombu.utils.functional import (
ChannelPromise, LRUCache, fxrange, fxrangemax, memoize, lazy,
maybe_evaluate, maybe_list, reprcall, reprkwargs, retry_over_time,
accepts_argument,
)
class test_ChannelPromise:
def test_repr(self):
obj = Mock(name='cb')
assert 'promise' in repr(ChannelPromise(obj))
obj.assert_not_called()
class test_shufflecycle:
def test_shuffles(self):
prev_repeat, utils.repeat = utils.repeat, Mock()
try:
utils.repeat.return_value = list(range(10))
values = {'A', 'B', 'C'}
cycle = utils.shufflecycle(values)
seen = set()
for i in range(10):
next(cycle)
utils.repeat.assert_called_with(None)
assert seen.issubset(values)
with pytest.raises(StopIteration):
next(cycle)
next(cycle)
finally:
utils.repeat = prev_repeat
def double(x):
return x * 2
class test_LRUCache:
def test_expires(self):
limit = 100
x = LRUCache(limit=limit)
slots = list(range(limit * 2))
for i in slots:
x[i] = i
assert list(x.keys()) == list(slots[limit:])
assert x.items()
assert x.values()
def test_is_pickleable(self):
x = LRUCache(limit=10)
x.update(luke=1, leia=2)
y = pickle.loads(pickle.dumps(x))
        assert y.limit == x.limit
assert y == x
def test_update_expires(self):
limit = 100
x = LRUCache(limit=limit)
slots = list(range(limit * 2))
for i in slots:
x.update({i: i})
assert list(x.keys()) == list(slots[limit:])
def test_least_recently_used(self):
x = LRUCache(3)
x[1], x[2], x[3] = 1, 2, 3
        assert list(x.keys()) == [1, 2, 3]
x[4], x[5] = 4, 5
        assert list(x.keys()) == [3, 4, 5]
# access 3, which makes it the last used key.
x[3]
x[6] = 6
        assert list(x.keys()) == [5, 3, 6]
x[7] = 7
        assert list(x.keys()) == [3, 6, 7]
def test_update_larger_than_cache_size(self):
x = LRUCache(2)
x.update({x: x for x in range(100)})
        assert list(x.keys()) == [98, 99]
def test_items(self):
c = LRUCache()
c.update(a=1, b=2, c=3)
assert list(c.items())
def test_incr(self):
c = LRUCache()
c.update(a='1')
c.incr('a')
assert c['a'] == '2'
def test_memoize():
counter = count(1)
@memoize(maxsize=2)
def x(i):
return next(counter)
assert x(1) == 1
assert x(1) == 1
assert x(2) == 2
assert x(3) == 3
assert x(1) == 4
x.clear()
assert x(3) == 5
class test_lazy:
def test__str__(self):
assert (str(lazy(lambda: 'the quick brown fox')) ==
'the quick brown fox')
def test__repr__(self):
assert repr(lazy(lambda: 'fi fa fo')).strip('u') == "'fi fa fo'"
def test_evaluate(self):
assert lazy(lambda: 2 + 2)() == 4
assert lazy(lambda x: x * 4, 2) == 8
assert lazy(lambda x: x * 8, 2)() == 16
def test_cmp(self):
assert lazy(lambda: 10) == lazy(lambda: 10)
assert lazy(lambda: 10) != lazy(lambda: 20)
def test__reduce__(self):
x = lazy(double, 4)
y = pickle.loads(pickle.dumps(x))
assert x() == y()
def test__deepcopy__(self):
from copy import deepcopy
x = lazy(double, 4)
y = deepcopy(x)
assert x._fun == y._fun
assert x._args == y._args
assert x() == y()
@pytest.mark.parametrize('obj,expected', [
(lazy(lambda: 10), 10),
(20, 20),
])
def test_maybe_evaluate(obj, expected):
assert maybe_evaluate(obj) == expected
class test_retry_over_time:
class Predicate(Exception):
pass
def setup(self):
self.index = 0
def myfun(self):
if self.index < 9:
raise self.Predicate()
return 42
def errback(self, exc, intervals, retries):
interval = next(intervals)
sleepvals = (None, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 16.0)
self.index += 1
assert interval == sleepvals[self.index]
return interval
@mock.sleepdeprived(module=utils)
def test_simple(self):
prev_count, utils.count = utils.count, Mock()
try:
utils.count.return_value = list(range(1))
x = retry_over_time(self.myfun, self.Predicate,
errback=None, interval_max=14)
assert x is None
utils.count.return_value = list(range(10))
cb = Mock()
x = retry_over_time(self.myfun, self.Predicate,
errback=self.errback, callback=cb,
interval_max=14)
assert x == 42
assert self.index == 9
cb.assert_called_with()
finally:
utils.count = prev_count
def test_retry_timeout(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
errback=self.errback, interval_max=14, timeout=1
)
assert self.index == 1
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
errback=None, timeout=1,
)
@mock.sleepdeprived(module=utils)
def test_retry_zero(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=0, errback=self.errback, interval_max=14,
)
assert self.index == 0
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=0, errback=None, interval_max=14,
)
@mock.sleepdeprived(module=utils)
def test_retry_once(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=1, errback=self.errback, interval_max=14,
)
assert self.index == 1
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=1, errback=None, interval_max=14,
)
@mock.sleepdeprived(module=utils)
def test_retry_always(self):
Predicate = self.Predicate
class Fun:
def __init__(self):
self.calls = 0
def __call__(self, *args, **kwargs):
try:
if self.calls >= 10:
return 42
raise Predicate()
finally:
self.calls += 1
fun = Fun()
assert retry_over_time(
fun, self.Predicate,
max_retries=None, errback=None, interval_max=14) == 42
assert fun.calls == 11
@pytest.mark.parametrize('obj,expected', [
(None, None),
(1, [1]),
([1, 2, 3], [1, 2, 3]),
])
def test_maybe_list(obj, expected):
assert maybe_list(obj) == expected
def test_fxrange__no_repeatlast():
assert list(fxrange(1.0, 3.0, 1.0)) == [1.0, 2.0, 3.0]
@pytest.mark.parametrize('args,expected', [
((1.0, 3.0, 1.0, 30.0),
[1.0, 2.0, 3.0, 3.0, 3.0, 3.0,
3.0, 3.0, 3.0, 3.0, 3.0]),
((1.0, None, 1.0, 30.0),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]),
])
def test_fxrangemax(args, expected):
assert list(fxrangemax(*args)) == expected
def test_reprkwargs():
assert reprkwargs({'foo': 'bar', 1: 2, 'k': 'v'})
def test_reprcall():
assert reprcall('add', (2, 2), {'copy': True})
class test_accepts_arg:
def function(self, foo, bar, baz="baz"):
pass
def test_valid_argument(self):
assert accepts_argument(self.function, 'self')
assert accepts_argument(self.function, 'foo')
assert accepts_argument(self.function, 'baz')
def test_invalid_argument(self):
assert not accepts_argument(self.function, 'random_argument')
def test_raise_exception(self):
with pytest.raises(Exception):
accepts_argument(None, 'foo')
|
import unittest
from trashcli.fs import FileSystemReader
from trashcli.fs import mkdirs
from trashcli.fs import has_sticky_bit
from .files import require_empty_dir, make_empty_file, set_sticky_bit
import os
class TestWithInSandbox(unittest.TestCase):
def test_mkdirs_with_default_mode(self):
mkdirs("sandbox/test-dir/sub-dir")
assert os.path.isdir("sandbox/test-dir/sub-dir")
def test_has_sticky_bit_returns_true(self):
make_empty_file("sandbox/sticky")
run('chmod +t sandbox/sticky')
assert has_sticky_bit('sandbox/sticky')
def test_has_sticky_bit_returns_false(self):
make_empty_file("sandbox/non-sticky")
run('chmod -t sandbox/non-sticky')
assert not has_sticky_bit("sandbox/non-sticky")
def setUp(self):
require_empty_dir('sandbox')
is_sticky_dir = FileSystemReader().is_sticky_dir
class Test_is_sticky_dir(unittest.TestCase):
def test_dir_non_sticky(self):
        mkdirs('sandbox/dir')
        assert not is_sticky_dir('sandbox/dir')
def test_dir_sticky(self):
        mkdirs('sandbox/dir')
        set_sticky_bit('sandbox/dir')
assert is_sticky_dir('sandbox/dir')
def test_non_dir_but_sticky(self):
        make_empty_file('sandbox/dir')
set_sticky_bit('sandbox/dir')
assert not is_sticky_dir('sandbox/dir')
def setUp(self):
require_empty_dir('sandbox')
def run(command):
import subprocess
assert subprocess.call(command.split()) == 0
|
from django.conf import settings
from weblate.machinery.base import (
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
YOUDAO_API_ROOT = "https://openapi.youdao.com/api"
class YoudaoTranslation(MachineTranslation):
"""Youdao Zhiyun API machine translation support."""
name = "Youdao Zhiyun"
max_score = 90
# Map codes used by Youdao to codes used by Weblate
language_map = {"zh_Hans": "zh-CHS", "zh": "zh-CHS", "en": "EN"}
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_YOUDAO_ID is None:
raise MissingConfiguration("Youdao Translate requires app key")
if settings.MT_YOUDAO_SECRET is None:
raise MissingConfiguration("Youdao Translate requires app secret")
def download_languages(self):
"""List of supported languages."""
return [
"zh-CHS",
"ja",
"EN", # Officially youdao uses uppercase for en
"ko",
"fr",
"ru",
"pt",
"es",
"vi",
"de",
"ar",
"id",
]
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
salt, sign = self.signed_salt(
settings.MT_YOUDAO_ID, settings.MT_YOUDAO_SECRET, text
)
response = self.request(
"get",
YOUDAO_API_ROOT,
params={
"q": text,
"_from": source,
"to": language,
"appKey": settings.MT_YOUDAO_ID,
"salt": salt,
"sign": sign,
},
)
payload = response.json()
if int(payload["errorCode"]) != 0:
raise MachineTranslationError("Error code: {}".format(payload["errorCode"]))
translation = payload["translation"][0]
yield {
"text": translation,
"quality": self.max_score,
"service": self.name,
"source": text,
}
|
from miio import DeviceException
from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.components.xiaomi_miio import config_flow, const
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN
from tests.async_mock import Mock, patch
ZEROCONF_NAME = "name"
ZEROCONF_PROP = "properties"
ZEROCONF_MAC = "mac"
TEST_HOST = "1.2.3.4"
TEST_TOKEN = "12345678901234567890123456789012"
TEST_NAME = "Test_Gateway"
TEST_MODEL = "model5"
TEST_MAC = "ab:cd:ef:gh:ij:kl"
TEST_GATEWAY_ID = TEST_MAC
TEST_HARDWARE_VERSION = "AB123"
TEST_FIRMWARE_VERSION = "1.2.3_456"
TEST_ZEROCONF_NAME = "lumi-gateway-v3_miio12345678._miio._udp.local."
TEST_SUB_DEVICE_LIST = []
def get_mock_info(
model=TEST_MODEL,
mac_address=TEST_MAC,
hardware_version=TEST_HARDWARE_VERSION,
firmware_version=TEST_FIRMWARE_VERSION,
):
"""Return a mock gateway info instance."""
gateway_info = Mock()
gateway_info.model = model
gateway_info.mac_address = mac_address
gateway_info.hardware_version = hardware_version
gateway_info.firmware_version = firmware_version
return gateway_info
async def test_config_flow_step_user_no_device(hass):
"""Test config flow, user step with no device selected."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "no_device_selected"}
async def test_config_flow_step_gateway_connect_error(hass):
"""Test config flow, gateway connection error."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{config_flow.CONF_GATEWAY: True},
)
assert result["type"] == "form"
assert result["step_id"] == "gateway"
assert result["errors"] == {}
with patch(
"homeassistant.components.xiaomi_miio.gateway.gateway.Gateway.info",
side_effect=DeviceException({}),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: TEST_HOST, CONF_NAME: TEST_NAME, CONF_TOKEN: TEST_TOKEN},
)
assert result["type"] == "form"
assert result["step_id"] == "gateway"
assert result["errors"] == {"base": "cannot_connect"}
async def test_config_flow_gateway_success(hass):
"""Test a successful config flow."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{config_flow.CONF_GATEWAY: True},
)
assert result["type"] == "form"
assert result["step_id"] == "gateway"
assert result["errors"] == {}
mock_info = get_mock_info()
with patch(
"homeassistant.components.xiaomi_miio.gateway.gateway.Gateway.info",
return_value=mock_info,
), patch(
"homeassistant.components.xiaomi_miio.gateway.gateway.Gateway.discover_devices",
return_value=TEST_SUB_DEVICE_LIST,
), patch(
"homeassistant.components.xiaomi_miio.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: TEST_HOST, CONF_NAME: TEST_NAME, CONF_TOKEN: TEST_TOKEN},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
config_flow.CONF_FLOW_TYPE: config_flow.CONF_GATEWAY,
CONF_HOST: TEST_HOST,
CONF_TOKEN: TEST_TOKEN,
"model": TEST_MODEL,
"mac": TEST_MAC,
}
async def test_zeroconf_gateway_success(hass):
"""Test a successful zeroconf discovery of a gateway."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={
zeroconf.ATTR_HOST: TEST_HOST,
ZEROCONF_NAME: TEST_ZEROCONF_NAME,
ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC},
},
)
assert result["type"] == "form"
assert result["step_id"] == "gateway"
assert result["errors"] == {}
mock_info = get_mock_info()
with patch(
"homeassistant.components.xiaomi_miio.gateway.gateway.Gateway.info",
return_value=mock_info,
), patch(
"homeassistant.components.xiaomi_miio.gateway.gateway.Gateway.discover_devices",
return_value=TEST_SUB_DEVICE_LIST,
), patch(
"homeassistant.components.xiaomi_miio.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_NAME: TEST_NAME, CONF_TOKEN: TEST_TOKEN},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
config_flow.CONF_FLOW_TYPE: config_flow.CONF_GATEWAY,
CONF_HOST: TEST_HOST,
CONF_TOKEN: TEST_TOKEN,
"model": TEST_MODEL,
"mac": TEST_MAC,
}
async def test_zeroconf_unknown_device(hass):
"""Test a failed zeroconf discovery because of a unknown device."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={
zeroconf.ATTR_HOST: TEST_HOST,
ZEROCONF_NAME: "not-a-xiaomi-miio-device",
ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC},
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_xiaomi_miio"
async def test_zeroconf_no_data(hass):
"""Test a failed zeroconf discovery because of no data."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}, data={}
)
assert result["type"] == "abort"
assert result["reason"] == "not_xiaomi_miio"
async def test_zeroconf_missing_data(hass):
"""Test a failed zeroconf discovery because of missing data."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={zeroconf.ATTR_HOST: TEST_HOST, ZEROCONF_NAME: TEST_ZEROCONF_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "not_xiaomi_miio"
|
import io
import logging
import re
from zipfile import ZipFile
from babelfish import Language, language_converters
from guessit import guessit
from requests import Session
from . import ParserBeautifulSoup, Provider
from ..cache import EPISODE_EXPIRATION_TIME, SHOW_EXPIRATION_TIME, region
from ..exceptions import ProviderError
from ..matches import guess_matches
from ..subtitle import Subtitle, fix_line_ending
from ..video import Episode
logger = logging.getLogger(__name__)
language_converters.register('tvsubtitles = subliminal.converters.tvsubtitles:TVsubtitlesConverter')
link_re = re.compile(r'^(?P<series>.+?)(?: \(?\d{4}\)?| \((?:US|UK)\))? \((?P<first_year>\d{4})-\d{4}\)$')
episode_id_re = re.compile(r'^episode-\d+\.html$')
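# link_re is meant to match suggestion titles such as "Doctor Who (2005-2012)"
# or "The Office (US) (2005-2013)" (illustrative examples), capturing the bare
# series name and the first year of the run.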
class TVsubtitlesSubtitle(Subtitle):
"""TVsubtitles Subtitle."""
provider_name = 'tvsubtitles'
def __init__(self, language, page_link, subtitle_id, series, season, episode, year, rip, release):
super(TVsubtitlesSubtitle, self).__init__(language, page_link=page_link)
self.subtitle_id = subtitle_id
self.series = series
self.season = season
self.episode = episode
self.year = year
self.rip = rip
self.release = release
@property
def id(self):
return str(self.subtitle_id)
@property
def info(self):
return self.release or self.rip
def get_matches(self, video):
matches = guess_matches(video, {
'title': self.series,
'season': self.season,
'episode': self.episode,
'year': self.year,
'release_group': self.release
})
# other properties
if self.release:
matches |= guess_matches(video, guessit(self.release, {'type': 'episode'}), partial=True)
if self.rip:
matches |= guess_matches(video, guessit(self.rip, {'type': 'episode'}), partial=True)
return matches
class TVsubtitlesProvider(Provider):
"""TVsubtitles Provider."""
    languages = {Language('por', 'BR')} | {Language(lang) for lang in [
'ara', 'bul', 'ces', 'dan', 'deu', 'ell', 'eng', 'fin', 'fra', 'hun', 'ita', 'jpn', 'kor', 'nld', 'pol', 'por',
'ron', 'rus', 'spa', 'swe', 'tur', 'ukr', 'zho'
]}
video_types = (Episode,)
server_url = 'http://www.tvsubtitles.net/'
subtitle_class = TVsubtitlesSubtitle
def __init__(self):
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
def terminate(self):
self.session.close()
@region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME)
def search_show_id(self, series, year=None):
"""Search the show id from the `series` and `year`.
:param str series: series of the episode.
:param year: year of the series, if any.
:type year: int
:return: the show id, if any.
:rtype: int
"""
# make the search
logger.info('Searching show id for %r', series)
r = self.session.post(self.server_url + 'search.php', data={'q': series}, timeout=10)
r.raise_for_status()
# get the series out of the suggestions
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
show_id = None
for suggestion in soup.select('div.left li div a[href^="/tvshow-"]'):
match = link_re.match(suggestion.text)
if not match:
logger.error('Failed to match %s', suggestion.text)
continue
if match.group('series').lower() == series.lower():
if year is not None and int(match.group('first_year')) != year:
logger.debug('Year does not match')
continue
show_id = int(suggestion['href'][8:-5])
logger.debug('Found show id %d', show_id)
break
return show_id
@region.cache_on_arguments(expiration_time=EPISODE_EXPIRATION_TIME)
def get_episode_ids(self, show_id, season):
"""Get episode ids from the show id and the season.
:param int show_id: show id.
:param int season: season of the episode.
:return: episode ids per episode number.
:rtype: dict
"""
# get the page of the season of the show
logger.info('Getting the page of show id %d, season %d', show_id, season)
        r = self.session.get(self.server_url + 'tvshow-%d-%d.html' % (show_id, season), timeout=10)
        r.raise_for_status()
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
# loop over episode rows
episode_ids = {}
for row in soup.select('table#table5 tr'):
# skip rows that do not have a link to the episode page
if not row('a', href=episode_id_re):
continue
# extract data from the cells
cells = row('td')
episode = int(cells[0].text.split('x')[1])
episode_id = int(cells[1].a['href'][8:-5])
episode_ids[episode] = episode_id
if episode_ids:
logger.debug('Found episode ids %r', episode_ids)
else:
logger.warning('No episode ids found')
return episode_ids
def query(self, show_id, series, season, episode, year=None):
# get the episode ids
episode_ids = self.get_episode_ids(show_id, season)
if episode not in episode_ids:
logger.error('Episode %d not found', episode)
return []
# get the episode page
logger.info('Getting the page for episode %d', episode_ids[episode])
        r = self.session.get(self.server_url + 'episode-%d.html' % episode_ids[episode], timeout=10)
        r.raise_for_status()
soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser'])
# loop over subtitles rows
subtitles = []
for row in soup.select('.subtitlen'):
# read the item
language = Language.fromtvsubtitles(row.h5.img['src'][13:-4])
subtitle_id = int(row.parent['href'][10:-5])
page_link = self.server_url + 'subtitle-%d.html' % subtitle_id
rip = row.find('p', title='rip').text.strip() or None
release = row.find('h5').text.strip() or None
subtitle = self.subtitle_class(language, page_link, subtitle_id, series, season, episode, year, rip,
release)
logger.debug('Found subtitle %s', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
# lookup show_id
titles = [video.series] + video.alternative_series
show_id = None
for title in titles:
show_id = self.search_show_id(title, video.year)
if show_id is not None:
break
# query for subtitles with the show_id
if show_id is not None:
subtitles = [s for s in self.query(show_id, title, video.season, video.episode, video.year)
if s.language in languages and s.episode == video.episode]
if subtitles:
return subtitles
else:
logger.error('No show id found for %r (%r)', video.series, {'year': video.year})
return []
def download_subtitle(self, subtitle):
# download as a zip
logger.info('Downloading subtitle %r', subtitle)
r = self.session.get(self.server_url + 'download-%d.html' % subtitle.subtitle_id, timeout=10)
r.raise_for_status()
# open the zip
with ZipFile(io.BytesIO(r.content)) as zf:
if len(zf.namelist()) > 1:
raise ProviderError('More than one file to unzip')
subtitle.content = fix_line_ending(zf.read(zf.namelist()[0]))
|
import pytest
from homeassistant.components import switch
from homeassistant.components.mochad import switch as mochad
from homeassistant.setup import async_setup_component
import tests.async_mock as mock
@pytest.fixture(autouse=True)
def pymochad_mock():
"""Mock pymochad."""
with mock.patch("homeassistant.components.mochad.switch.device"), mock.patch(
"homeassistant.components.mochad.switch.MochadException"
):
yield
@pytest.fixture
def switch_mock(hass):
"""Mock switch."""
controller_mock = mock.MagicMock()
dev_dict = {"address": "a1", "name": "fake_switch"}
return mochad.MochadSwitch(hass, controller_mock, dev_dict)
async def test_setup_adds_proper_devices(hass):
"""Test if setup adds devices."""
good_config = {
"mochad": {},
"switch": {
"platform": "mochad",
"devices": [{"name": "Switch1", "address": "a1"}],
},
}
assert await async_setup_component(hass, switch.DOMAIN, good_config)
async def test_name(switch_mock):
"""Test the name."""
assert "fake_switch" == switch_mock.name
async def test_turn_on(switch_mock):
"""Test turn_on."""
switch_mock.turn_on()
switch_mock.switch.send_cmd.assert_called_once_with("on")
async def test_turn_off(switch_mock):
"""Test turn_off."""
switch_mock.turn_off()
switch_mock.switch.send_cmd.assert_called_once_with("off")
|
from itertools import chain
from warnings import warn
import numpy as np
import numbers
from pgmpy.estimators import ParameterEstimator
from pgmpy.factors.discrete import TabularCPD
from pgmpy.models import BayesianModel
class BayesianEstimator(ParameterEstimator):
def __init__(self, model, data, **kwargs):
"""
Class used to compute parameters for a model using Bayesian Parameter Estimation.
See `MaximumLikelihoodEstimator` for constructor parameters.
"""
if not isinstance(model, BayesianModel):
raise NotImplementedError(
"Bayesian Parameter Estimation is only implemented for BayesianModel"
)
super(BayesianEstimator, self).__init__(model, data, **kwargs)
def get_parameters(
self, prior_type="BDeu", equivalent_sample_size=5, pseudo_counts=None
):
"""
Method to estimate the model parameters (CPDs).
Parameters
----------
prior_type: 'dirichlet', 'BDeu', or 'K2'
            string indicating which type of prior to use for the model parameters.
- If 'prior_type' is 'dirichlet', the following must be provided:
'pseudo_counts' = dirichlet hyperparameters; a single number or a dict containing, for each
variable, a 2-D array of the shape (node_card, product of parents_card) with a "virtual"
count for each variable state in the CPD, that is added to the state counts.
(lexicographic ordering of states assumed)
- If 'prior_type' is 'BDeu', then an 'equivalent_sample_size'
must be specified instead of 'pseudo_counts'. This is equivalent to
'prior_type=dirichlet' and using uniform 'pseudo_counts' of
`equivalent_sample_size/(node_cardinality*np.prod(parents_cardinalities))` for each node.
'equivalent_sample_size' can either be a numerical value or a dict that specifies
the size for each variable separately.
- A prior_type of 'K2' is a shorthand for 'dirichlet' + setting every pseudo_count to 1,
regardless of the cardinality of the variable.
Returns
-------
parameters: list
List of TabularCPDs, one for each variable of the model
Examples
--------
>>> import numpy as np
>>> import pandas as pd
>>> from pgmpy.models import BayesianModel
>>> from pgmpy.estimators import BayesianEstimator
>>> values = pd.DataFrame(np.random.randint(low=0, high=2, size=(1000, 4)),
... columns=['A', 'B', 'C', 'D'])
>>> model = BayesianModel([('A', 'B'), ('C', 'B'), ('C', 'D')])
>>> estimator = BayesianEstimator(model, values)
>>> estimator.get_parameters(prior_type='BDeu', equivalent_sample_size=5)
[<TabularCPD representing P(C:2) at 0x7f7b534251d0>,
<TabularCPD representing P(B:2 | C:2, A:2) at 0x7f7b4dfd4da0>,
<TabularCPD representing P(A:2) at 0x7f7b4dfd4fd0>,
<TabularCPD representing P(D:2 | C:2) at 0x7f7b4df822b0>]
"""
parameters = []
for node in self.model.nodes():
_equivalent_sample_size = (
equivalent_sample_size[node]
if isinstance(equivalent_sample_size, dict)
else equivalent_sample_size
)
if isinstance(pseudo_counts, numbers.Real):
_pseudo_counts = pseudo_counts
else:
_pseudo_counts = pseudo_counts[node] if pseudo_counts else None
cpd = self.estimate_cpd(
node,
prior_type=prior_type,
equivalent_sample_size=_equivalent_sample_size,
pseudo_counts=_pseudo_counts,
)
parameters.append(cpd)
return parameters
    def estimate_cpd(
        self, node, prior_type="BDeu", pseudo_counts=None, equivalent_sample_size=5
    ):
"""
Method to estimate the CPD for a given variable.
Parameters
----------
node: int, string (any hashable python object)
The name of the variable for which the CPD is to be estimated.
prior_type: 'dirichlet', 'BDeu', 'K2',
            string indicating which type of prior to use for the model parameters.
- If 'prior_type' is 'dirichlet', the following must be provided:
'pseudo_counts' = dirichlet hyperparameters; a single number or 2-D array
of shape (node_card, product of parents_card) with a "virtual" count for
each variable state in the CPD. The virtual counts are added to the
actual state counts found in the data. (if a list is provided, a
lexicographic ordering of states is assumed)
- If 'prior_type' is 'BDeu', then an 'equivalent_sample_size'
must be specified instead of 'pseudo_counts'. This is equivalent to
'prior_type=dirichlet' and using uniform 'pseudo_counts' of
`equivalent_sample_size/(node_cardinality*np.prod(parents_cardinalities))`.
- A prior_type of 'K2' is a shorthand for 'dirichlet' + setting every
pseudo_count to 1, regardless of the cardinality of the variable.
Returns
-------
CPD: TabularCPD
Examples
--------
>>> import pandas as pd
>>> from pgmpy.models import BayesianModel
>>> from pgmpy.estimators import BayesianEstimator
>>> data = pd.DataFrame(data={'A': [0, 0, 1], 'B': [0, 1, 0], 'C': [1, 1, 0]})
>>> model = BayesianModel([('A', 'C'), ('B', 'C')])
>>> estimator = BayesianEstimator(model, data)
        >>> cpd_C = estimator.estimate_cpd('C', prior_type="dirichlet",
        ...                                pseudo_counts=[[1, 1, 1, 1], [2, 2, 2, 2]])
>>> print(cpd_C)
        ╒══════╤══════╤══════╤══════╤════════════════════╕
        │ A    │ A(0) │ A(0) │ A(1) │ A(1)               │
        ├──────┼──────┼──────┼──────┼────────────────────┤
        │ B    │ B(0) │ B(1) │ B(0) │ B(1)               │
        ├──────┼──────┼──────┼──────┼────────────────────┤
        │ C(0) │ 0.25 │ 0.25 │ 0.5  │ 0.3333333333333333 │
        ├──────┼──────┼──────┼──────┼────────────────────┤
        │ C(1) │ 0.75 │ 0.75 │ 0.5  │ 0.6666666666666666 │
        ╘══════╧══════╧══════╧══════╧════════════════════╛
"""
node_cardinality = len(self.state_names[node])
parents = sorted(self.model.get_parents(node))
parents_cardinalities = [len(self.state_names[parent]) for parent in parents]
cpd_shape = (node_cardinality, np.prod(parents_cardinalities, dtype=int))
prior_type = prior_type.lower()
        # Warn if pseudo_counts were specified without prior_type="dirichlet"
        if pseudo_counts is not None and prior_type != "dirichlet":
            warn(
                f"pseudo_counts specified with {prior_type} prior; they will be ignored. Use the dirichlet prior to specify pseudo_counts."
            )
if prior_type == "k2":
pseudo_counts = np.ones(cpd_shape, dtype=int)
elif prior_type == "bdeu":
alpha = float(equivalent_sample_size) / (
node_cardinality * np.prod(parents_cardinalities)
)
pseudo_counts = np.ones(cpd_shape, dtype=float) * alpha
elif prior_type == "dirichlet":
if isinstance(pseudo_counts, numbers.Real):
pseudo_counts = np.ones(cpd_shape, dtype=int) * pseudo_counts
else:
pseudo_counts = np.array(pseudo_counts)
if pseudo_counts.shape != cpd_shape:
raise ValueError(
f"The shape of pseudo_counts for the node: {node} must be of shape: {str(cpd_shape)}"
)
        else:
            raise ValueError(
                f"prior_type must be 'dirichlet', 'BDeu' or 'K2'. Got: {prior_type!r}"
            )
state_counts = self.state_counts(node)
bayesian_counts = state_counts + pseudo_counts
cpd = TabularCPD(
node,
node_cardinality,
np.array(bayesian_counts),
evidence=parents,
evidence_card=parents_cardinalities,
state_names={var: self.state_names[var] for var in chain([node], parents)},
)
cpd.normalize()
return cpd
|
import copy
from datetime import timedelta
import json
from hatasmota.utils import (
get_topic_stat_status,
get_topic_tele_sensor,
get_topic_tele_will,
)
import pytest
from homeassistant import config_entries
from homeassistant.components import sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNKNOWN
from homeassistant.util import dt
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message, async_fire_time_changed
DEFAULT_SENSOR_CONFIG = {
"sn": {
"Time": "2020-09-25T12:47:15",
"DHT11": {"Temperature": None},
"TempUnit": "C",
}
}
INDEXED_SENSOR_CONFIG = {
"sn": {
"Time": "2020-09-25T12:47:15",
"ENERGY": {
"TotalStartTime": "2018-11-23T15:33:47",
"Total": 0.017,
"TotalTariff": [0.000, 0.017],
"Yesterday": 0.000,
"Today": 0.002,
"ExportActive": 0.000,
"ExportTariff": [0.000, 0.000],
"Period": 0.00,
"Power": 0.00,
"ApparentPower": 7.84,
"ReactivePower": -7.21,
"Factor": 0.39,
"Frequency": 50.0,
"Voltage": 234.31,
"Current": 0.039,
"ImportActive": 12.580,
"ImportReactive": 0.002,
"ExportReactive": 39.131,
"PhaseAngle": 290.45,
},
}
}
NESTED_SENSOR_CONFIG = {
"sn": {
"Time": "2020-03-03T00:00:00+00:00",
"TX23": {
"Speed": {"Act": 14.8, "Avg": 8.5, "Min": 12.2, "Max": 14.8},
"Dir": {
"Card": "WSW",
"Deg": 247.5,
"Avg": 266.1,
"AvgCard": "W",
"Range": 0,
},
},
"SpeedUnit": "km/h",
}
}
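# The configs above cover the three discovery shapes exercised by the tests
# below: a plain sensor (DHT11.Temperature), list-indexed sensors
# (ENERGY.TotalTariff[0]/[1] surface as ..._totaltariff_0/_totaltariff_1
# entities) and nested sensors (TX23.Speed.Act becomes
# sensor.tasmota_tx23_speed_act).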
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"DHT11":{"Temperature":20.5}}'
)
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "20.5"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS8",
'{"StatusSNS":{"DHT11":{"Temperature":20.0}}}',
)
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "20.0"
async def test_nested_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"TX23":{"Speed":{"Act":"12.3"}}}'
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "12.3"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS8",
'{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"}}}}',
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "23.4"
async def test_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"TotalTariff":[1.2,3.4]}}'
)
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "3.4"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS8",
'{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}',
)
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "7.8"
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
entity_reg = await hass.helpers.entity_registry.async_get_registry()
# Pre-enable the status sensor
entity_reg.async_get_or_create(
sensor.DOMAIN,
"tasmota",
"00000049A3BC_status_sensor_status_sensor_status_signal",
suggested_object_id="tasmota_status",
disabled_by=None,
)
config = copy.deepcopy(DEFAULT_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_status")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_status")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test pushed state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"Wifi":{"Signal":20.5}}'
)
state = hass.states.get("sensor.tasmota_status")
assert state.state == "20.5"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS11",
'{"StatusSTS":{"Wifi":{"Signal":20.0}}}',
)
state = hass.states.get("sensor.tasmota_status")
assert state.state == "20.0"
async def test_attributes(hass, mqtt_mock, setup_tasmota):
"""Test correct attributes for sensors."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = {
"sn": {
"DHT11": {"Temperature": None},
"Beer": {"CarbonDioxide": None},
"TempUnit": "C",
}
}
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.attributes.get("device_class") == "temperature"
assert state.attributes.get("friendly_name") == "Tasmota DHT11 Temperature"
assert state.attributes.get("icon") is None
assert state.attributes.get("unit_of_measurement") == "C"
state = hass.states.get("sensor.tasmota_beer_CarbonDioxide")
assert state.attributes.get("device_class") is None
assert state.attributes.get("friendly_name") == "Tasmota Beer CarbonDioxide"
assert state.attributes.get("icon") == "mdi:molecule-co2"
assert state.attributes.get("unit_of_measurement") == "ppm"
async def test_nested_sensor_attributes(hass, mqtt_mock, setup_tasmota):
"""Test correct attributes for sensors."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.attributes.get("device_class") is None
assert state.attributes.get("friendly_name") == "Tasmota TX23 Speed Act"
assert state.attributes.get("icon") is None
assert state.attributes.get("unit_of_measurement") == "km/h"
state = hass.states.get("sensor.tasmota_tx23_dir_avg")
assert state.attributes.get("device_class") is None
assert state.attributes.get("friendly_name") == "Tasmota TX23 Dir Avg"
assert state.attributes.get("icon") is None
assert state.attributes.get("unit_of_measurement") == " "
async def test_indexed_sensor_attributes(hass, mqtt_mock, setup_tasmota):
"""Test correct attributes for sensors."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = {
"sn": {
"Dummy1": {"Temperature": [None, None]},
"Dummy2": {"CarbonDioxide": [None, None]},
"TempUnit": "C",
}
}
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_dummy1_temperature_0")
assert state.attributes.get("device_class") == "temperature"
assert state.attributes.get("friendly_name") == "Tasmota Dummy1 Temperature 0"
assert state.attributes.get("icon") is None
assert state.attributes.get("unit_of_measurement") == "C"
state = hass.states.get("sensor.tasmota_dummy2_carbondioxide_1")
assert state.attributes.get("device_class") is None
assert state.attributes.get("friendly_name") == "Tasmota Dummy2 CarbonDioxide 1"
assert state.attributes.get("icon") == "mdi:molecule-co2"
assert state.attributes.get("unit_of_measurement") == "ppm"
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_enable_status_sensor(hass, mqtt_mock, setup_tasmota):
"""Test enabling status sensor."""
entity_reg = await hass.helpers.entity_registry.async_get_registry()
config = copy.deepcopy(DEFAULT_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_signal")
assert state is None
entry = entity_reg.async_get("sensor.tasmota_signal")
assert entry.disabled
assert entry.disabled_by == "integration"
# Enable the status sensor
updated_entry = entity_reg.async_update_entity(
"sensor.tasmota_signal", disabled_by=None
)
assert updated_entry != entry
assert updated_entry.disabled is False
await hass.async_block_till_done()
async_fire_time_changed(
hass,
dt.utcnow()
+ timedelta(
seconds=config_entries.EntityRegistryDisabledHandler.RELOAD_AFTER_UPDATE_DELAY
+ 1
),
)
await hass.async_block_till_done()
# Fake re-send of retained discovery message
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_signal")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_signal")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async def test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test availability after MQTT disconnection."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
await help_test_availability_when_connection_lost(
hass,
mqtt_client_mock,
mqtt_mock,
sensor.DOMAIN,
config,
sensor_config,
"tasmota_dht11_temperature",
)
async def test_availability(hass, mqtt_mock, setup_tasmota):
"""Test availability."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
await help_test_availability(
hass,
mqtt_mock,
sensor.DOMAIN,
config,
sensor_config,
"tasmota_dht11_temperature",
)
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test availability discovery update."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
await help_test_availability_discovery_update(
hass,
mqtt_mock,
sensor.DOMAIN,
config,
sensor_config,
"tasmota_dht11_temperature",
)
async def test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test polling after MQTT connection (re)established."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
poll_topic = "tasmota_49A3BC/cmnd/STATUS"
await help_test_availability_poll_state(
hass,
mqtt_client_mock,
mqtt_mock,
sensor.DOMAIN,
config,
poll_topic,
"8",
sensor_config,
)
async def test_discovery_removal_sensor(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered sensor."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config1 = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
await help_test_discovery_removal(
hass,
mqtt_mock,
caplog,
sensor.DOMAIN,
config,
config,
sensor_config1,
{},
"tasmota_dht11_temperature",
"Tasmota DHT11 Temperature",
)
async def test_discovery_update_unchanged_sensor(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test update of discovered sensor."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
with patch(
"homeassistant.components.tasmota.sensor.TasmotaSensor.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass,
mqtt_mock,
caplog,
sensor.DOMAIN,
config,
discovery_update,
sensor_config,
"tasmota_dht11_temperature",
"Tasmota DHT11 Temperature",
)
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
unique_id = f"{DEFAULT_CONFIG['mac']}_sensor_sensor_DHT11_Temperature"
await help_test_discovery_device_remove(
hass, mqtt_mock, sensor.DOMAIN, unique_id, config, sensor_config
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
topics = [
get_topic_tele_sensor(config),
get_topic_stat_status(config, 8),
get_topic_tele_will(config),
]
await help_test_entity_id_update_subscriptions(
hass,
mqtt_mock,
sensor.DOMAIN,
config,
topics,
sensor_config,
"tasmota_dht11_temperature",
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test MQTT discovery update when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
await help_test_entity_id_update_discovery_update(
hass,
mqtt_mock,
sensor.DOMAIN,
config,
sensor_config,
"tasmota_dht11_temperature",
)
|
import posixpath
from perfkitbenchmarker import data
from perfkitbenchmarker import linux_packages
# TODO: Make collection interval configurable.
INTERVAL = 10
SCRIPT_NAME = 'build_collectd.sh.j2'
COLLECTD_URL = ('https://github.com/collectd/collectd/archive/'
'collectd-5.5.0.tar.gz')
BUILD_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'collectd-build')
CSV_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'collectd-csv')
PREFIX = posixpath.join(linux_packages.INSTALL_DIR, 'collectd')
PID_FILE = posixpath.join(PREFIX, 'var', 'run', 'collectd.pid')
def _Install(vm):
context = {
'collectd_url': COLLECTD_URL,
'build_dir': BUILD_DIR,
'root_dir': PREFIX,
'csv_dir': CSV_DIR,
'interval': INTERVAL}
remote_path = posixpath.join(
linux_packages.INSTALL_DIR,
posixpath.splitext(posixpath.basename(SCRIPT_NAME))[0])
vm.RenderTemplate(data.ResourcePath(SCRIPT_NAME),
remote_path, context=context)
vm.RemoteCommand('bash ' + remote_path)
def _Uninstall(vm):
vm.RemoteCommand('kill $(cat {0})'.format(PID_FILE), ignore_failure=True)
def YumInstall(vm):
"""Installs collectd on 'vm'."""
_Install(vm)
def AptInstall(vm):
"""Installs collectd on 'vm'."""
_Install(vm)
def AptUninstall(vm):
"""Stops collectd on 'vm'."""
_Uninstall(vm)
def YumUninstall(vm):
"""Stops collectd on 'vm'."""
_Uninstall(vm)
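if __name__ == '__main__':
    # Minimal illustrative sketch (not part of PerfKit Benchmarker): FakeVm is
    # a hypothetical stand-in that implements only the method _Uninstall calls
    # on a VM object, so the issued command can be inspected without SSH
    # access to a real machine.
    class FakeVm(object):
        def RemoteCommand(self, cmd, ignore_failure=False):
            print('would run on VM: %s' % cmd)

    _Uninstall(FakeVm())  # prints the kill command targeting PID_FILE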
|
import pytest
from unittest.mock import Mock
from kombu import Connection, Consumer, Exchange, Producer, Queue
from kombu.message import Message
from kombu.transport.base import (
StdChannel, Transport, Management, to_rabbitmq_queue_arguments,
)
@pytest.mark.parametrize('args,input,expected', [
({}, {'message_ttl': 20}, {'x-message-ttl': 20000}),
({}, {'message_ttl': None}, {}),
({'foo': 'bar'}, {'expires': 30.3}, {'x-expires': 30300, 'foo': 'bar'}),
({'x-expires': 3}, {'expires': 4}, {'x-expires': 4000}),
({}, {'max_length': 10}, {'x-max-length': 10}),
({}, {'max_length_bytes': 1033}, {'x-max-length-bytes': 1033}),
({}, {'max_priority': 303}, {'x-max-priority': 303}),
])
def test_rabbitmq_queue_arguments(args, input, expected):
assert to_rabbitmq_queue_arguments(args, **input) == expected
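# Supplementary check (illustrative) restating the table above: duration
# options (message_ttl/expires) are given in seconds and converted to integer
# milliseconds, while counts and byte sizes pass through unchanged.
def test_rabbitmq_queue_arguments_seconds_to_ms():
    assert to_rabbitmq_queue_arguments({}, message_ttl=0.5, max_length=2) == {
        'x-message-ttl': 500, 'x-max-length': 2}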
class test_StdChannel:
def setup(self):
self.conn = Connection('memory://')
self.channel = self.conn.channel()
self.channel.queues.clear()
self.conn.connection.state.clear()
def test_Consumer(self):
q = Queue('foo', Exchange('foo'))
cons = self.channel.Consumer(q)
assert isinstance(cons, Consumer)
assert cons.channel is self.channel
def test_Producer(self):
prod = self.channel.Producer()
assert isinstance(prod, Producer)
assert prod.channel is self.channel
def test_interface_get_bindings(self):
with pytest.raises(NotImplementedError):
StdChannel().get_bindings()
def test_interface_after_reply_message_received(self):
assert StdChannel().after_reply_message_received(Queue('foo')) is None
class test_Message:
def setup(self):
self.conn = Connection('memory://')
self.channel = self.conn.channel()
self.message = Message(channel=self.channel, delivery_tag=313)
def test_postencode(self):
m = Message('FOO', channel=self.channel, postencode='ccyzz')
with pytest.raises(LookupError):
m._reraise_error()
m.ack()
def test_ack_respects_no_ack_consumers(self):
self.channel.no_ack_consumers = {'abc'}
self.message.delivery_info['consumer_tag'] = 'abc'
ack = self.channel.basic_ack = Mock()
self.message.ack()
assert self.message._state != 'ACK'
ack.assert_not_called()
def test_ack_missing_consumer_tag(self):
self.channel.no_ack_consumers = {'abc'}
self.message.delivery_info = {}
ack = self.channel.basic_ack = Mock()
self.message.ack()
ack.assert_called_with(self.message.delivery_tag, multiple=False)
def test_ack_not_no_ack(self):
self.channel.no_ack_consumers = set()
self.message.delivery_info['consumer_tag'] = 'abc'
ack = self.channel.basic_ack = Mock()
self.message.ack()
ack.assert_called_with(self.message.delivery_tag, multiple=False)
def test_ack_log_error_when_no_error(self):
ack = self.message.ack = Mock()
self.message.ack_log_error(Mock(), KeyError)
ack.assert_called_with(multiple=False)
def test_ack_log_error_when_error(self):
ack = self.message.ack = Mock()
ack.side_effect = KeyError('foo')
logger = Mock()
self.message.ack_log_error(logger, KeyError)
ack.assert_called_with(multiple=False)
logger.critical.assert_called()
assert "Couldn't ack" in logger.critical.call_args[0][0]
def test_reject_log_error_when_no_error(self):
reject = self.message.reject = Mock()
self.message.reject_log_error(Mock(), KeyError, requeue=True)
reject.assert_called_with(requeue=True)
def test_reject_log_error_when_error(self):
reject = self.message.reject = Mock()
reject.side_effect = KeyError('foo')
logger = Mock()
self.message.reject_log_error(logger, KeyError)
reject.assert_called_with(requeue=False)
logger.critical.assert_called()
assert "Couldn't reject" in logger.critical.call_args[0][0]
class test_interface:
def test_establish_connection(self):
with pytest.raises(NotImplementedError):
Transport(None).establish_connection()
def test_close_connection(self):
with pytest.raises(NotImplementedError):
Transport(None).close_connection(None)
def test_create_channel(self):
with pytest.raises(NotImplementedError):
Transport(None).create_channel(None)
def test_close_channel(self):
with pytest.raises(NotImplementedError):
Transport(None).close_channel(None)
def test_drain_events(self):
with pytest.raises(NotImplementedError):
Transport(None).drain_events(None)
def test_heartbeat_check(self):
Transport(None).heartbeat_check(Mock(name='connection'))
def test_driver_version(self):
assert Transport(None).driver_version()
def test_register_with_event_loop(self):
Transport(None).register_with_event_loop(
Mock(name='connection'), Mock(name='loop'),
)
def test_unregister_from_event_loop(self):
Transport(None).unregister_from_event_loop(
Mock(name='connection'), Mock(name='loop'),
)
def test_manager(self):
assert Transport(None).manager
class test_Management:
def test_get_bindings(self):
m = Management(Mock(name='transport'))
with pytest.raises(NotImplementedError):
m.get_bindings()
|
import threading
from json import loads, dumps
from queue import Empty
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import sessionmaker
from kombu.transport import virtual
from kombu.utils import cached_property
from kombu.utils.encoding import bytes_to_str
from .models import (ModelBase, Queue as QueueBase, Message as MessageBase,
class_registry, metadata)
VERSION = (1, 1, 0)
__version__ = '.'.join(map(str, VERSION))
_MUTEX = threading.RLock()
class Channel(virtual.Channel):
"""The channel class."""
_session = None
_engines = {} # engine cache
def __init__(self, connection, **kwargs):
self._configure_entity_tablenames(connection.client.transport_options)
super().__init__(connection, **kwargs)
def _configure_entity_tablenames(self, opts):
self.queue_tablename = opts.get('queue_tablename', 'kombu_queue')
self.message_tablename = opts.get('message_tablename', 'kombu_message')
#
# Define the model definitions. This registers the declarative
# classes with the active SQLAlchemy metadata object. This *must* be
# done prior to the ``create_engine`` call.
#
self.queue_cls and self.message_cls
def _engine_from_config(self):
conninfo = self.connection.client
transport_options = conninfo.transport_options.copy()
transport_options.pop('queue_tablename', None)
transport_options.pop('message_tablename', None)
return create_engine(conninfo.hostname, **transport_options)
def _open(self):
conninfo = self.connection.client
if conninfo.hostname not in self._engines:
with _MUTEX:
if conninfo.hostname in self._engines:
# Engine was created while we were waiting to
# acquire the lock.
return self._engines[conninfo.hostname]
engine = self._engine_from_config()
Session = sessionmaker(bind=engine)
metadata.create_all(engine)
self._engines[conninfo.hostname] = engine, Session
return self._engines[conninfo.hostname]
@property
def session(self):
if self._session is None:
_, Session = self._open()
self._session = Session()
return self._session
def _get_or_create(self, queue):
obj = self.session.query(self.queue_cls) \
.filter(self.queue_cls.name == queue).first()
if not obj:
with _MUTEX:
obj = self.session.query(self.queue_cls) \
.filter(self.queue_cls.name == queue).first()
if obj:
# Queue was created while we were waiting to
# acquire the lock.
return obj
obj = self.queue_cls(queue)
self.session.add(obj)
try:
self.session.commit()
except OperationalError:
self.session.rollback()
return obj
def _new_queue(self, queue, **kwargs):
self._get_or_create(queue)
def _put(self, queue, payload, **kwargs):
obj = self._get_or_create(queue)
message = self.message_cls(dumps(payload), obj)
self.session.add(message)
try:
self.session.commit()
except OperationalError:
self.session.rollback()
def _get(self, queue):
obj = self._get_or_create(queue)
if self.session.bind.name == 'sqlite':
self.session.execute('BEGIN IMMEDIATE TRANSACTION')
try:
msg = self.session.query(self.message_cls) \
.with_for_update() \
.filter(self.message_cls.queue_id == obj.id) \
.filter(self.message_cls.visible != False) \
.order_by(self.message_cls.sent_at) \
.order_by(self.message_cls.id) \
.limit(1) \
.first()
if msg:
msg.visible = False
return loads(bytes_to_str(msg.payload))
raise Empty()
finally:
self.session.commit()
def _query_all(self, queue):
obj = self._get_or_create(queue)
return self.session.query(self.message_cls) \
.filter(self.message_cls.queue_id == obj.id)
def _purge(self, queue):
count = self._query_all(queue).delete(synchronize_session=False)
try:
self.session.commit()
except OperationalError:
self.session.rollback()
return count
def _size(self, queue):
return self._query_all(queue).count()
def _declarative_cls(self, name, base, ns):
if name not in class_registry:
with _MUTEX:
if name in class_registry:
# Class was registered while we were waiting to
# acquire the lock.
return class_registry[name]
return type(str(name), (base, ModelBase), ns)
return class_registry[name]
@cached_property
def queue_cls(self):
return self._declarative_cls(
'Queue',
QueueBase,
{'__tablename__': self.queue_tablename}
)
@cached_property
def message_cls(self):
return self._declarative_cls(
'Message',
MessageBase,
{'__tablename__': self.message_tablename}
)
class Transport(virtual.Transport):
"""The transport class."""
Channel = Channel
can_parse_url = True
default_port = 0
driver_type = 'sql'
driver_name = 'sqlalchemy'
connection_errors = (OperationalError, )
def driver_version(self):
import sqlalchemy
return sqlalchemy.__version__
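if __name__ == '__main__':
    # Minimal usage sketch, assuming kombu is installed and using an
    # in-memory SQLite DSN (any SQLAlchemy URL should work the same way).
    # The custom table names are illustrative; they default to kombu_queue
    # and kombu_message as configured in _configure_entity_tablenames().
    from kombu import Connection

    with Connection(
            'sqlalchemy+sqlite:///:memory:',
            transport_options={'queue_tablename': 'my_queues',
                               'message_tablename': 'my_messages'}) as conn:
        queue = conn.SimpleQueue('demo')
        queue.put({'hello': 'world'})
        print(queue.get(block=True, timeout=1).payload)  # {'hello': 'world'}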
|
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from cmsplugin_cascade.bootstrap4.mixins import BootstrapUtilities
CASCADE_PLUGINS = getattr(settings, 'SHOP_CASCADE_PLUGINS',
['auth', 'breadcrumb', 'catalog', 'cart', 'checkout', 'extensions', 'order', 'processbar', 'search']
)
def set_defaults(config):
config.setdefault('plugins_with_extra_mixins', {})
config.setdefault('plugins_with_extra_render_templates', {})
config['plugins_with_extra_mixins'].setdefault('ShopReorderButtonPlugin', BootstrapUtilities(
BootstrapUtilities.margins, BootstrapUtilities.floats,
))
config['plugins_with_extra_mixins'].setdefault('ShopCancelOrderButtonPlugin', BootstrapUtilities(
BootstrapUtilities.margins, BootstrapUtilities.floats,
))
config['plugins_with_extra_mixins'].setdefault('ShopProceedButton', BootstrapUtilities(
BootstrapUtilities.margins, BootstrapUtilities.floats,
))
config['plugins_with_extra_mixins'].setdefault('ShopLeftExtension', BootstrapUtilities(
BootstrapUtilities.paddings,
))
config['plugins_with_extra_mixins'].setdefault('ShopRightExtension', BootstrapUtilities(
BootstrapUtilities.paddings,
))
config['plugins_with_extra_mixins'].setdefault('ShopAddToCartPlugin', BootstrapUtilities(
BootstrapUtilities.margins,
))
config['plugins_with_extra_mixins'].setdefault('BootstrapButtonPlugin', BootstrapUtilities(
BootstrapUtilities.floats,
))
config['plugins_with_extra_render_templates'].setdefault('BootstrapButtonPlugin', [
('shop/button.html', _("Responsive Feedback")),
('cascade/bootstrap4/button.html', _("Default")),
])
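# Usage sketch (illustrative): set_defaults() is expected to be called with
# the project's CMSPLUGIN_CASCADE settings dict during startup. Because every
# entry is applied through setdefault(), mixins that a project configures
# explicitly are never overwritten:
#
#     CMSPLUGIN_CASCADE = {}
#     set_defaults(CMSPLUGIN_CASCADE)
#     assert 'ShopAddToCartPlugin' in CMSPLUGIN_CASCADE['plugins_with_extra_mixins']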
|
import unittest
from urwid import text_layout
from urwid.compat import B
import urwid
class CalcBreaksTest(object):
def cbtest(self, width, exp):
result = text_layout.default_layout.calculate_text_segments(
B(self.text), width, self.mode )
assert len(result) == len(exp), repr((result, exp))
for l,e in zip(result, exp):
end = l[-1][-1]
assert end == e, repr((result,exp))
def test(self):
for width, exp in self.do:
self.cbtest( width, exp )
class CalcBreaksCharTest(CalcBreaksTest, unittest.TestCase):
mode = 'any'
text = "abfghsdjf askhtrvs\naltjhgsdf ljahtshgf"
# tests
do = [
( 100, [18,38] ),
( 6, [6, 12, 18, 25, 31, 37, 38] ),
( 10, [10, 18, 29, 38] ),
]
class CalcBreaksDBCharTest(CalcBreaksTest, unittest.TestCase):
def setUp(self):
urwid.set_encoding("euc-jp")
mode = 'any'
text = "abfgh\xA1\xA1j\xA1\xA1xskhtrvs\naltjhgsdf\xA1\xA1jahtshgf"
# tests
do = [
( 10, [10, 18, 28, 38] ),
( 6, [5, 11, 17, 18, 25, 31, 37, 38] ),
( 100, [18, 38]),
]
class CalcBreaksWordTest(CalcBreaksTest, unittest.TestCase):
mode = 'space'
text = "hello world\nout there. blah"
# tests
do = [
( 10, [5, 11, 22, 27] ),
( 5, [5, 11, 17, 22, 27] ),
( 100, [11, 27] ),
]
class CalcBreaksWordTest2(CalcBreaksTest, unittest.TestCase):
mode = 'space'
text = "A simple set of words, really...."
do = [
( 10, [8, 15, 22, 33]),
( 17, [15, 33]),
( 13, [12, 22, 33]),
]
class CalcBreaksDBWordTest(CalcBreaksTest, unittest.TestCase):
def setUp(self):
urwid.set_encoding("euc-jp")
mode = 'space'
text = "hel\xA1\xA1 world\nout-\xA1\xA1tre blah"
# tests
do = [
( 10, [5, 11, 21, 26] ),
( 5, [5, 11, 16, 21, 26] ),
( 100, [11, 26] ),
]
class CalcBreaksUTF8Test(CalcBreaksTest, unittest.TestCase):
def setUp(self):
urwid.set_encoding("utf-8")
mode = 'space'
text = '\xe6\x9b\xbf\xe6\xb4\xbc\xe6\xb8\x8e\xe6\xba\x8f\xe6\xbd\xba'
do = [
(4, [6, 12, 15] ),
(10, [15] ),
(5, [6, 12, 15] ),
]
class CalcBreaksCantDisplayTest(unittest.TestCase):
def test(self):
urwid.set_encoding("euc-jp")
self.assertRaises(text_layout.CanNotDisplayText,
text_layout.default_layout.calculate_text_segments,
B('\xA1\xA1'), 1, 'space' )
urwid.set_encoding("utf-8")
self.assertRaises(text_layout.CanNotDisplayText,
text_layout.default_layout.calculate_text_segments,
B('\xe9\xa2\x96'), 1, 'space' )
class SubsegTest(unittest.TestCase):
def setUp(self):
urwid.set_encoding("euc-jp")
def st(self, seg, text, start, end, exp):
text = B(text)
s = urwid.LayoutSegment(seg)
result = s.subseg( text, start, end )
assert result == exp, "Expected %r, got %r"%(exp,result)
def test1_padding(self):
self.st( (10, None), "", 0, 8, [(8, None)] )
self.st( (10, None), "", 2, 10, [(8, None)] )
self.st( (10, 0), "", 3, 7, [(4, 0)] )
self.st( (10, 0), "", 0, 20, [(10, 0)] )
def test2_text(self):
self.st( (10, 0, B("1234567890")), "", 0, 8, [(8,0,B("12345678"))] )
self.st( (10, 0, B("1234567890")), "", 2, 10, [(8,0,B("34567890"))] )
self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 2, 8,
[(6, 0, B("\xA1\xA156\xA1\xA1"))] )
self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 3, 8,
[(5, 0, B(" 56\xA1\xA1"))] )
self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 2, 7,
[(5, 0, B("\xA1\xA156 "))] )
self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 3, 7,
[(4, 0, B(" 56 "))] )
self.st( (10, 0, B("12\xA1\xA156\xA1\xA190")), "", 0, 20,
[(10, 0, B("12\xA1\xA156\xA1\xA190"))] )
def test3_range(self):
t = "1234567890"
self.st( (10, 0, 10), t, 0, 8, [(8, 0, 8)] )
self.st( (10, 0, 10), t, 2, 10, [(8, 2, 10)] )
self.st( (6, 2, 8), t, 1, 6, [(5, 3, 8)] )
self.st( (6, 2, 8), t, 0, 5, [(5, 2, 7)] )
self.st( (6, 2, 8), t, 1, 5, [(4, 3, 7)] )
t = "12\xA1\xA156\xA1\xA190"
self.st( (10, 0, 10), t, 0, 8, [(8, 0, 8)] )
self.st( (10, 0, 10), t, 2, 10, [(8, 2, 10)] )
self.st( (6, 2, 8), t, 1, 6, [(1, 3), (4, 4, 8)] )
self.st( (6, 2, 8), t, 0, 5, [(4, 2, 6), (1, 6)] )
self.st( (6, 2, 8), t, 1, 5, [(1, 3), (2, 4, 6), (1, 6)] )
class CalcTranslateTest(object):
def setUp(self):
urwid.set_encoding("utf-8")
def test1_left(self):
result = urwid.default_layout.layout( self.text,
self.width, 'left', self.mode)
assert result == self.result_left, result
def test2_right(self):
result = urwid.default_layout.layout( self.text,
self.width, 'right', self.mode)
assert result == self.result_right, result
def test3_center(self):
result = urwid.default_layout.layout( self.text,
self.width, 'center', self.mode)
assert result == self.result_center, result
class CalcTranslateCharTest(CalcTranslateTest, unittest.TestCase):
text = "It's out of control!\nYou've got to"
mode = 'any'
width = 15
result_left = [
[(15, 0, 15)],
[(5, 15, 20), (0, 20)],
[(13, 21, 34), (0, 34)]]
result_right = [
[(15, 0, 15)],
[(10, None), (5, 15, 20), (0,20)],
[(2, None), (13, 21, 34), (0,34)]]
result_center = [
[(15, 0, 15)],
[(5, None), (5, 15, 20), (0,20)],
[(1, None), (13, 21, 34), (0,34)]]
class CalcTranslateWordTest(CalcTranslateTest, unittest.TestCase):
text = "It's out of control!\nYou've got to"
mode = 'space'
width = 14
result_left = [
[(11, 0, 11), (0, 11)],
[(8, 12, 20), (0, 20)],
[(13, 21, 34), (0, 34)]]
result_right = [
[(3, None), (11, 0, 11), (0, 11)],
[(6, None), (8, 12, 20), (0, 20)],
[(1, None), (13, 21, 34), (0, 34)]]
result_center = [
[(2, None), (11, 0, 11), (0, 11)],
[(3, None), (8, 12, 20), (0, 20)],
[(1, None), (13, 21, 34), (0, 34)]]
class CalcTranslateWordTest2(CalcTranslateTest, unittest.TestCase):
text = "It's out of control!\nYou've got to "
mode = 'space'
width = 14
result_left = [
[(11, 0, 11), (0, 11)],
[(8, 12, 20), (0, 20)],
[(14, 21, 35), (0, 35)]]
result_right = [
[(3, None), (11, 0, 11), (0, 11)],
[(6, None), (8, 12, 20), (0, 20)],
[(14, 21, 35), (0, 35)]]
result_center = [
[(2, None), (11, 0, 11), (0, 11)],
[(3, None), (8, 12, 20), (0, 20)],
[(14, 21, 35), (0, 35)]]
class CalcTranslateWordTest3(CalcTranslateTest, unittest.TestCase):
def setUp(self):
urwid.set_encoding('utf-8')
text = B('\xe6\x9b\xbf\xe6\xb4\xbc\n\xe6\xb8\x8e\xe6\xba\x8f\xe6\xbd\xba')
width = 10
mode = 'space'
result_left = [
[(4, 0, 6), (0, 6)],
[(6, 7, 16), (0, 16)]]
result_right = [
[(6, None), (4, 0, 6), (0, 6)],
[(4, None), (6, 7, 16), (0, 16)]]
result_center = [
[(3, None), (4, 0, 6), (0, 6)],
[(2, None), (6, 7, 16), (0, 16)]]
class CalcTranslateWordTest4(CalcTranslateTest, unittest.TestCase):
text = ' Die Gedank'
width = 3
mode = 'space'
result_left = [
[(0, 0)],
[(3, 1, 4), (0, 4)],
[(3, 5, 8)],
[(3, 8, 11), (0, 11)]]
result_right = [
[(3, None), (0, 0)],
[(3, 1, 4), (0, 4)],
[(3, 5, 8)],
[(3, 8, 11), (0, 11)]]
result_center = [
[(2, None), (0, 0)],
[(3, 1, 4), (0, 4)],
[(3, 5, 8)],
[(3, 8, 11), (0, 11)]]
class CalcTranslateWordTest5(CalcTranslateTest, unittest.TestCase):
text = ' Word.'
width = 3
mode = 'space'
result_left = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
result_right = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
result_center = [[(3, 0, 3)], [(3, 3, 6), (0, 6)]]
class CalcTranslateClipTest(CalcTranslateTest, unittest.TestCase):
text = "It's out of control!\nYou've got to\n\nturn it off!!!"
mode = 'clip'
width = 14
result_left = [
[(20, 0, 20), (0, 20)],
[(13, 21, 34), (0, 34)],
[(0, 35)],
[(14, 36, 50), (0, 50)]]
result_right = [
[(-6, None), (20, 0, 20), (0, 20)],
[(1, None), (13, 21, 34), (0, 34)],
[(14, None), (0, 35)],
[(14, 36, 50), (0, 50)]]
result_center = [
[(-3, None), (20, 0, 20), (0, 20)],
[(1, None), (13, 21, 34), (0, 34)],
[(7, None), (0, 35)],
[(14, 36, 50), (0, 50)]]
class CalcTranslateCantDisplayTest(CalcTranslateTest, unittest.TestCase):
text = B('Hello\xe9\xa2\x96')
mode = 'space'
width = 1
result_left = [[]]
result_right = [[]]
result_center = [[]]
class CalcPosTest(unittest.TestCase):
def setUp(self):
self.text = "A" * 27
self.trans = [
[(2,None),(7,0,7),(0,7)],
[(13,8,21),(0,21)],
[(3,None),(5,22,27),(0,27)]]
self.mytests = [(1,0, 0), (2,0, 0), (11,0, 7),
(-3,1, 8), (-2,1, 8), (1,1, 9), (31,1, 21),
(1,2, 22), (11,2, 27) ]
    def test(self):
for x,y, expected in self.mytests:
got = text_layout.calc_pos( self.text, self.trans, x, y )
assert got == expected, "%r got:%r expected:%r" % ((x, y), got,
expected)
class Pos2CoordsTest(unittest.TestCase):
pos_list = [5, 9, 20, 26]
text = "1234567890" * 3
mytests = [
( [[(15,0,15)], [(15,15,30),(0,30)]],
[(5,0),(9,0),(5,1),(11,1)] ),
( [[(9,0,9)], [(12,9,21)], [(9,21,30),(0,30)]],
[(5,0),(0,1),(11,1),(5,2)] ),
( [[(2,None), (15,0,15)], [(2,None), (15,15,30),(0,30)]],
[(7,0),(11,0),(7,1),(13,1)] ),
( [[(3, 6, 9),(0,9)], [(5, 20, 25),(0,25)]],
[(0,0),(3,0),(0,1),(5,1)] ),
( [[(10, 0, 10),(0,10)]],
[(5,0),(9,0),(10,0),(10,0)] ),
]
def test(self):
for t, answer in self.mytests:
for pos,a in zip(self.pos_list,answer) :
r = text_layout.calc_coords( self.text, t, pos)
assert r==a, "%r got: %r expected: %r"%(t,r,a)
|
from homeassistant.const import SERVICE_RELOAD
from homeassistant.helpers.reload import async_reload_integration_platforms
from .const import DOMAIN, EVENT_TEMPLATE_RELOADED, PLATFORMS
async def async_setup_reload_service(hass):
"""Create the reload service for the template domain."""
if hass.services.has_service(DOMAIN, SERVICE_RELOAD):
return
async def _reload_config(call):
"""Reload the template platform config."""
await async_reload_integration_platforms(hass, DOMAIN, PLATFORMS)
hass.bus.async_fire(EVENT_TEMPLATE_RELOADED, context=call.context)
hass.helpers.service.async_register_admin_service(
DOMAIN, SERVICE_RELOAD, _reload_config
)
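# Usage note (illustrative): each template platform awaits
# async_setup_reload_service(hass) during its setup, and the has_service()
# guard above makes those repeated calls idempotent. Calling the service then
# reloads every platform in PLATFORMS and fires EVENT_TEMPLATE_RELOADED:
#
#     await hass.services.async_call(DOMAIN, SERVICE_RELOAD, blocking=True)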
|
import logging
from datetime import datetime, timedelta
from pymongo.errors import OperationFailure
logger = logging.getLogger(__name__)
CACHE_COLL = 'cache'
CACHE_DB = 'meta_db'
CACHE_SETTINGS = 'settings'
CACHE_SETTINGS_KEY = 'cache'
"""
Sample settings collection entry (the collection name comes from CACHE_SETTINGS):
meta_db.settings.insertOne({"type": "cache", "enabled": true, "cache_expiry": 600})
meta_db.settings.find(): { "_id" : ObjectId("5cd5388b9fddfbe6e968f11b"), "type": "cache", "enabled" : false, "cache_expiry" : 600 }
"""
DEFAULT_CACHE_EXPIRY = 3600
class Cache:
def __init__(self, client, cache_expiry=DEFAULT_CACHE_EXPIRY, cache_db=CACHE_DB, cache_col=CACHE_COLL):
self._client = client
self._cachedb = client[cache_db]
self._cachecol = None
try:
if cache_col not in self._cachedb.list_collection_names():
self._cachedb.create_collection(cache_col).create_index("date", expireAfterSeconds=cache_expiry)
except OperationFailure as op:
logging.debug("This is fine if you are not admin. The collection should already be created for you: %s", op)
self._cachecol = self._cachedb[cache_col]
def _get_cache_settings(self):
try:
return self._cachedb[CACHE_SETTINGS].find_one({'type': CACHE_SETTINGS_KEY})
except OperationFailure as op:
logging.debug("Cannot access %s in db: %s. Error: %s" % (CACHE_SETTINGS, CACHE_DB, op))
return None
def set_caching_state(self, enabled):
"""
Used to enable or disable the caching globally
:return:
"""
if not isinstance(enabled, bool):
logging.error("Enabled should be a boolean type.")
return
if CACHE_SETTINGS not in self._cachedb.list_collection_names():
logging.info("Creating %s collection for cache settings" % CACHE_SETTINGS)
self._cachedb[CACHE_SETTINGS].insert_one({
'type': CACHE_SETTINGS_KEY,
'enabled': enabled,
'cache_expiry': DEFAULT_CACHE_EXPIRY
})
else:
self._cachedb[CACHE_SETTINGS].update_one({'type': CACHE_SETTINGS_KEY}, {'$set': {'enabled': enabled}})
logging.info("Caching set to: %s" % enabled)
def _is_not_expired(self, cached_data, newer_than_secs):
        # Use the expiry period from the settings (or the default) unless overridden by the function argument.
if newer_than_secs:
expiry_period = newer_than_secs
else:
cache_settings = self._get_cache_settings()
expiry_period = cache_settings['cache_expiry'] if cache_settings else DEFAULT_CACHE_EXPIRY
return datetime.utcnow() < cached_data['date'] + timedelta(seconds=expiry_period)
def get(self, key, newer_than_secs=None):
"""
:param key: Key for the dataset. eg. list_libraries.
        :param newer_than_secs: Maximum tolerable staleness in seconds, or None to accept
                                any cached data that has not passed the configured expiry.
        :return: The cached data, or None if no non-stale data is present in the cache.
"""
try:
            if self._cachecol is None:
                # Collection not created, or no permissions to read from it.
                # (pymongo collections do not support truth-value testing, so
                # compare against None explicitly.)
                return None
cached_data = self._cachecol.find_one({"type": key})
# Check that there is data in cache and it's not stale.
if cached_data and self._is_not_expired(cached_data, newer_than_secs):
return cached_data['data']
except OperationFailure as op:
logging.warning("Could not read from cache due to: %s. Ask your admin to give read permissions on %s:%s",
op, CACHE_DB, CACHE_COLL)
return None
def set(self, key, data):
try:
self._cachecol.update_one(
{"type": key},
{"$set": {"type": key, "date": datetime.utcnow(), "data": data}},
upsert=True
)
except OperationFailure as op:
logging.debug("This operation is to be run with admin permissions. Should be fine: %s", op)
def append(self, key, append_data):
try:
self._cachecol.update_one(
{'type': key},
{
# Add to set will not add the same library again to the list unlike set.
'$addToSet': {'data': append_data},
'$setOnInsert': {'type': key, 'date': datetime.utcnow()}
},
upsert=True
)
except OperationFailure as op:
logging.debug("Admin is required to append to the cache: %s", op)
def delete_item_from_key(self, key, item):
try:
            self._cachecol.update_one(
{'type': key},
{"$pull": {"data": item}}
)
except OperationFailure as op:
logging.debug("Admin is required to remove from cache: %s", op)
def update_item_for_key(self, key, old, new):
# This op is not atomic, but given the rarity of renaming a lib, it should not cause issues.
self.delete_item_from_key(key, old)
self.append(key, new)
def is_caching_enabled(self, cache_enabled_in_env):
cache_settings = self._get_cache_settings()
        # Caching is enabled unless explicitly disabled, either by an env variable or by the settings document in Mongo.
if cache_settings and not cache_settings['enabled']:
return False
        # Disabling via the Mongo setting takes precedence over the env variable.
if not cache_enabled_in_env:
return False
return True
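if __name__ == '__main__':
    # Minimal usage sketch; the connection URI and cache key below are
    # illustrative assumptions. Entries written via set() carry a 'date'
    # field and expire through the TTL index created in __init__ (when
    # permissions allow the index to be created).
    from pymongo import MongoClient

    cache = Cache(MongoClient('mongodb://localhost:27017'))
    cache.set('list_libraries', ['lib1', 'lib2'])
    print(cache.get('list_libraries'))  # ['lib1', 'lib2'] while fresh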
|
import logging
from typing import Optional
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_UNIT_SYSTEM_IMPERIAL,
LENGTH_KILOMETERS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.util import dt
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from .const import (
ATTR_ACTIVITY,
ATTR_DISTANCE,
ATTR_EXTERNAL_ID,
ATTR_HAZARDS,
DEFAULT_ICON,
DOMAIN,
FEED,
)
_LOGGER = logging.getLogger(__name__)
ATTR_LAST_UPDATE = "feed_last_update"
ATTR_LAST_UPDATE_SUCCESSFUL = "feed_last_update_successful"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the GeoNet NZ Volcano Feed platform."""
manager = hass.data[DOMAIN][FEED][entry.entry_id]
@callback
def async_add_sensor(feed_manager, external_id, unit_system):
"""Add sensor entity from feed."""
new_entity = GeonetnzVolcanoSensor(
entry.entry_id, feed_manager, external_id, unit_system
)
_LOGGER.debug("Adding sensor %s", new_entity)
async_add_entities([new_entity], True)
manager.listeners.append(
async_dispatcher_connect(
hass, manager.async_event_new_entity(), async_add_sensor
)
)
hass.async_create_task(manager.async_update())
_LOGGER.debug("Sensor setup done")
class GeonetnzVolcanoSensor(Entity):
"""This represents an external event with GeoNet NZ Volcano feed data."""
def __init__(self, config_entry_id, feed_manager, external_id, unit_system):
"""Initialize entity with data from feed entry."""
self._config_entry_id = config_entry_id
self._feed_manager = feed_manager
self._external_id = external_id
self._unit_system = unit_system
self._title = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._alert_level = None
self._activity = None
self._hazards = None
self._feed_last_update = None
self._feed_last_update_successful = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_update = async_dispatcher_connect(
self.hass,
f"geonetnz_volcano_update_{self._external_id}",
self._update_callback,
)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed from hass."""
if self._remove_signal_update:
self._remove_signal_update()
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for GeoNet NZ Volcano feed location events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
last_update = self._feed_manager.last_update()
last_update_successful = self._feed_manager.last_update_successful()
if feed_entry:
self._update_from_feed(feed_entry, last_update, last_update_successful)
def _update_from_feed(self, feed_entry, last_update, last_update_successful):
"""Update the internal state from the provided feed entry."""
self._title = feed_entry.title
# Convert distance if not metric system.
if self._unit_system == CONF_UNIT_SYSTEM_IMPERIAL:
self._distance = round(
IMPERIAL_SYSTEM.length(feed_entry.distance_to_home, LENGTH_KILOMETERS),
1,
)
else:
self._distance = round(feed_entry.distance_to_home, 1)
self._latitude = round(feed_entry.coordinates[0], 5)
self._longitude = round(feed_entry.coordinates[1], 5)
self._attribution = feed_entry.attribution
self._alert_level = feed_entry.alert_level
self._activity = feed_entry.activity
self._hazards = feed_entry.hazards
self._feed_last_update = dt.as_utc(last_update) if last_update else None
self._feed_last_update_successful = (
dt.as_utc(last_update_successful) if last_update_successful else None
)
@property
def state(self):
"""Return the state of the sensor."""
return self._alert_level
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return DEFAULT_ICON
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return f"Volcano {self._title}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "alert level"
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_ATTRIBUTION, self._attribution),
(ATTR_ACTIVITY, self._activity),
(ATTR_HAZARDS, self._hazards),
(ATTR_LONGITUDE, self._longitude),
(ATTR_LATITUDE, self._latitude),
(ATTR_DISTANCE, self._distance),
(ATTR_LAST_UPDATE, self._feed_last_update),
(ATTR_LAST_UPDATE_SUCCESSFUL, self._feed_last_update_successful),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
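# Note on the filter above: "if value or isinstance(value, bool)" keeps
# meaningful falsy booleans such as False while dropping unset values like
# None, '' and []. A numeric 0/0.0 (for example a zero distance) would be
# omitted as well.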
|
import time
import functools
from hashlib import md5
from urllib.request import parse_http_list, parse_keqv_list
import cherrypy
from cherrypy._cpcompat import ntob, tonative
__author__ = 'visteya'
__date__ = 'April 2009'
def md5_hex(s):
return md5(ntob(s, 'utf-8')).hexdigest()
qop_auth = 'auth'
qop_auth_int = 'auth-int'
valid_qops = (qop_auth, qop_auth_int)
valid_algorithms = ('MD5', 'MD5-sess')
FALLBACK_CHARSET = 'ISO-8859-1'
DEFAULT_CHARSET = 'UTF-8'
def TRACE(msg):
cherrypy.log(msg, context='TOOLS.AUTH_DIGEST')
# Three helper functions for users of the tool, providing three variants
# of get_ha1() functions for three different kinds of credential stores.
def get_ha1_dict_plain(user_password_dict):
"""Returns a get_ha1 function which obtains a plaintext password from a
dictionary of the form: {username : password}.
If you want a simple dictionary-based authentication scheme, with plaintext
passwords, use get_ha1_dict_plain(my_userpass_dict) as the value for the
get_ha1 argument to digest_auth().
"""
def get_ha1(realm, username):
password = user_password_dict.get(username)
if password:
return md5_hex('%s:%s:%s' % (username, realm, password))
return None
return get_ha1
def get_ha1_dict(user_ha1_dict):
"""Returns a get_ha1 function which obtains a HA1 password hash from a
dictionary of the form: {username : HA1}.
If you want a dictionary-based authentication scheme, but with
pre-computed HA1 hashes instead of plain-text passwords, use
get_ha1_dict(my_userha1_dict) as the value for the get_ha1
argument to digest_auth().
"""
def get_ha1(realm, username):
return user_ha1_dict.get(username)
return get_ha1
def get_ha1_file_htdigest(filename):
"""Returns a get_ha1 function which obtains a HA1 password hash from a
flat file with lines of the same format as that produced by the Apache
htdigest utility. For example, for realm 'wonderland', username 'alice',
and password '4x5istwelve', the htdigest line would be::
alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c
If you want to use an Apache htdigest file as the credentials store,
then use get_ha1_file_htdigest(my_htdigest_file) as the value for the
    get_ha1 argument to digest_auth(). It is recommended that the filename
    argument be an absolute path, so the lookup does not depend on the
    process's current working directory.
"""
    def get_ha1(realm, username):
        result = None
        # Scan the htdigest file for a matching "username:realm:HA1" line.
        with open(filename, 'r') as f:
            for line in f:
                u, r, ha1 = line.rstrip().split(':')
                if u == username and r == realm:
                    result = ha1
                    break
        return result
return get_ha1
def synthesize_nonce(s, key, timestamp=None):
"""Synthesize a nonce value which resists spoofing and can be checked
for staleness. Returns a string suitable as the value for 'nonce' in
the www-authenticate header.
s
A string related to the resource, such as the hostname of the server.
key
A secret string known only to the server.
timestamp
An integer seconds-since-the-epoch timestamp
"""
if timestamp is None:
timestamp = int(time.time())
h = md5_hex('%s:%s:%s' % (timestamp, s, key))
nonce = '%s:%s' % (timestamp, h)
return nonce
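# Example (illustrative): with timestamp 1234567890 the synthesized nonce is
#     '1234567890:' + md5_hex('1234567890:<s>:<key>')
# a plaintext timestamp joined to a hash that only the key holder can
# reproduce; validate_nonce() recomputes the hash and is_nonce_stale() reads
# the timestamp back out.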
def H(s):
"""The hash function H"""
return md5_hex(s)
def _try_decode_header(header, charset):
global FALLBACK_CHARSET
for enc in (charset, FALLBACK_CHARSET):
try:
return tonative(ntob(tonative(header, 'latin1'), 'latin1'), enc)
except ValueError as ve:
last_err = ve
else:
raise last_err
class HttpDigestAuthorization(object):
"""
Parses a Digest Authorization header and performs
re-calculation of the digest.
"""
scheme = 'digest'
def errmsg(self, s):
return 'Digest Authorization header: %s' % s
@classmethod
def matches(cls, header):
scheme, _, _ = header.partition(' ')
return scheme.lower() == cls.scheme
def __init__(
self, auth_header, http_method,
debug=False, accept_charset=DEFAULT_CHARSET[:],
):
self.http_method = http_method
self.debug = debug
if not self.matches(auth_header):
raise ValueError('Authorization scheme is not "Digest"')
self.auth_header = _try_decode_header(auth_header, accept_charset)
scheme, params = self.auth_header.split(' ', 1)
# make a dict of the params
items = parse_http_list(params)
paramsd = parse_keqv_list(items)
self.realm = paramsd.get('realm')
self.username = paramsd.get('username')
self.nonce = paramsd.get('nonce')
self.uri = paramsd.get('uri')
self.method = paramsd.get('method')
self.response = paramsd.get('response') # the response digest
self.algorithm = paramsd.get('algorithm', 'MD5').upper()
self.cnonce = paramsd.get('cnonce')
self.opaque = paramsd.get('opaque')
self.qop = paramsd.get('qop') # qop
self.nc = paramsd.get('nc') # nonce count
# perform some correctness checks
if self.algorithm not in valid_algorithms:
raise ValueError(
self.errmsg("Unsupported value for algorithm: '%s'" %
self.algorithm))
has_reqd = (
self.username and
self.realm and
self.nonce and
self.uri and
self.response
)
if not has_reqd:
raise ValueError(
self.errmsg('Not all required parameters are present.'))
if self.qop:
if self.qop not in valid_qops:
raise ValueError(
self.errmsg("Unsupported value for qop: '%s'" % self.qop))
if not (self.cnonce and self.nc):
raise ValueError(
self.errmsg('If qop is sent then '
'cnonce and nc MUST be present'))
else:
if self.cnonce or self.nc:
raise ValueError(
self.errmsg('If qop is not sent, '
'neither cnonce nor nc can be present'))
def __str__(self):
return 'authorization : %s' % self.auth_header
def validate_nonce(self, s, key):
"""Validate the nonce.
Returns True if nonce was generated by synthesize_nonce() and the
timestamp is not spoofed, else returns False.
s
A string related to the resource, such as the hostname of
the server.
key
A secret string known only to the server.
Both s and key must be the same values which were used to synthesize
the nonce we are trying to validate.
"""
try:
timestamp, hashpart = self.nonce.split(':', 1)
s_timestamp, s_hashpart = synthesize_nonce(
s, key, timestamp).split(':', 1)
is_valid = s_hashpart == hashpart
if self.debug:
TRACE('validate_nonce: %s' % is_valid)
return is_valid
except ValueError: # split() error
pass
return False
def is_nonce_stale(self, max_age_seconds=600):
"""Returns True if a validated nonce is stale. The nonce contains a
timestamp in plaintext and also a secure hash of the timestamp.
You should first validate the nonce to ensure the plaintext
timestamp is not spoofed.
"""
try:
timestamp, hashpart = self.nonce.split(':', 1)
if int(timestamp) + max_age_seconds > int(time.time()):
return False
except ValueError: # int() error
pass
if self.debug:
TRACE('nonce is stale')
return True
def HA2(self, entity_body=''):
"""Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
# RFC 2617 3.2.2.3
# If the "qop" directive's value is "auth" or is unspecified,
# then A2 is:
# A2 = method ":" digest-uri-value
#
# If the "qop" value is "auth-int", then A2 is:
# A2 = method ":" digest-uri-value ":" H(entity-body)
if self.qop is None or self.qop == 'auth':
a2 = '%s:%s' % (self.http_method, self.uri)
elif self.qop == 'auth-int':
a2 = '%s:%s:%s' % (self.http_method, self.uri, H(entity_body))
else:
# in theory, this should never happen, since I validate qop in
# __init__()
raise ValueError(self.errmsg('Unrecognized value for qop!'))
return H(a2)
def request_digest(self, ha1, entity_body=''):
"""Calculates the Request-Digest. See :rfc:`2617` section 3.2.2.1.
ha1
The HA1 string obtained from the credentials store.
entity_body
If 'qop' is set to 'auth-int', then A2 includes a hash
of the "entity body". The entity body is the part of the
message which follows the HTTP headers. See :rfc:`2617` section
4.3. This refers to the entity the user agent sent in the
request which has the Authorization header. Typically GET
requests don't have an entity, and POST requests do.
"""
ha2 = self.HA2(entity_body)
# Request-Digest -- RFC 2617 3.2.2.1
if self.qop:
req = '%s:%s:%s:%s:%s' % (
self.nonce, self.nc, self.cnonce, self.qop, ha2)
else:
req = '%s:%s' % (self.nonce, ha2)
# RFC 2617 3.2.2.2
#
# If the "algorithm" directive's value is "MD5" or is unspecified,
# then A1 is:
# A1 = unq(username-value) ":" unq(realm-value) ":" passwd
#
# If the "algorithm" directive's value is "MD5-sess", then A1 is
# calculated only once - on the first request by the client following
# receipt of a WWW-Authenticate challenge from the server.
# A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
# ":" unq(nonce-value) ":" unq(cnonce-value)
if self.algorithm == 'MD5-sess':
ha1 = H('%s:%s:%s' % (ha1, self.nonce, self.cnonce))
digest = H('%s:%s' % (ha1, req))
return digest
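# Illustrative self-check (not part of the CherryPy tool): reproduces the
# worked example from RFC 2617 section 3.5 (user 'Mufasa', realm
# 'testrealm@host.com', password 'Circle Of Life') to show how HA2() and
# request_digest() combine. All parameter values are copied from the RFC.
def _rfc2617_example_self_test():
    header = (
        'Digest username="Mufasa", realm="testrealm@host.com", '
        'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", uri="/dir/index.html", '
        'qop=auth, nc=00000001, cnonce="0a4f113b", '
        'response="6629fae49393a05397450978507c4ef1", '
        'opaque="5ccc069c403ebaf9f0171e9517f40e41"'
    )
    auth = HttpDigestAuthorization(header, 'GET')
    ha1 = md5_hex('Mufasa:testrealm@host.com:Circle Of Life')
    assert auth.request_digest(ha1) == auth.response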
def _get_charset_declaration(charset):
global FALLBACK_CHARSET
charset = charset.upper()
return (
(', charset="%s"' % charset)
if charset != FALLBACK_CHARSET
else ''
)
def www_authenticate(
realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
stale=False, accept_charset=DEFAULT_CHARSET[:],
):
"""Constructs a WWW-Authenticate header for Digest authentication."""
if qop not in valid_qops:
raise ValueError("Unsupported value for qop: '%s'" % qop)
if algorithm not in valid_algorithms:
raise ValueError("Unsupported value for algorithm: '%s'" % algorithm)
HEADER_PATTERN = (
'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"%s%s'
)
if nonce is None:
nonce = synthesize_nonce(realm, key)
stale_param = ', stale="true"' if stale else ''
charset_declaration = _get_charset_declaration(accept_charset)
return HEADER_PATTERN % (
realm, nonce, algorithm, qop, stale_param, charset_declaration,
)
def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
"""A CherryPy tool that hooks at before_handler to perform
HTTP Digest Access Authentication, as specified in :rfc:`2617`.
If the request has an 'authorization' header with a 'Digest' scheme,
this tool authenticates the credentials supplied in that header.
If the request has no 'authorization' header, or if it does but the
scheme is not "Digest", or if authentication fails, the tool sends
a 401 response with a 'WWW-Authenticate' Digest header.
realm
A string containing the authentication realm.
get_ha1
A callable that looks up a username in a credentials store
and returns the HA1 string, which is defined in the RFC to be
MD5(username : realm : password). The function's signature is:
``get_ha1(realm, username)``
where username is obtained from the request's 'authorization' header.
If username is not found in the credentials store, get_ha1() returns
None.
key
A secret string known only to the server, used in the synthesis
of nonces.
"""
request = cherrypy.serving.request
auth_header = request.headers.get('authorization')
respond_401 = functools.partial(
_respond_401, realm, key, accept_charset, debug)
if not HttpDigestAuthorization.matches(auth_header or ''):
respond_401()
msg = 'The Authorization header could not be parsed.'
with cherrypy.HTTPError.handle(ValueError, 400, msg):
auth = HttpDigestAuthorization(
auth_header, request.method,
debug=debug, accept_charset=accept_charset,
)
if debug:
TRACE(str(auth))
if not auth.validate_nonce(realm, key):
respond_401()
ha1 = get_ha1(realm, auth.username)
if ha1 is None:
respond_401()
# note that for request.body to be available we need to
# hook in at before_handler, not on_start_resource like
# 3.1.x digest_auth does.
digest = auth.request_digest(ha1, entity_body=request.body)
if digest != auth.response:
respond_401()
# authenticated
if debug:
TRACE('digest matches auth.response')
# Now check if nonce is stale.
# The choice of ten minutes' lifetime for nonce is somewhat
# arbitrary
if auth.is_nonce_stale(max_age_seconds=600):
respond_401(stale=True)
request.login = auth.username
if debug:
TRACE('authentication of %s successful' % auth.username)
def _respond_401(realm, key, accept_charset, debug, **kwargs):
"""
Respond with 401 status and a WWW-Authenticate header
"""
header = www_authenticate(
realm, key,
accept_charset=accept_charset,
**kwargs
)
if debug:
TRACE(header)
cherrypy.serving.response.headers['WWW-Authenticate'] = header
raise cherrypy.HTTPError(
401, 'You are not authorized to access that resource')
|
from bson.json_util import dumps, loads
from bson.objectid import ObjectId
from flask import Blueprint, request, Response
from app.commons import build_response
from app.commons.utils import update_document
from app.entities.models import Entity
entities_blueprint = Blueprint('entities_blueprint', __name__,
url_prefix='/entities')
@entities_blueprint.route('/', methods=['POST'])
def create_entity():
"""
    Create an entity from the provided json
:return:
"""
content = request.get_json(silent=True)
entity = Entity()
entity.name = content.get("name")
entity.entity_values = []
try:
entity_id = entity.save()
except Exception as e:
return build_response.build_json({"error": str(e)})
return build_response.build_json({
"_id": str(entity_id.id)
})
@entities_blueprint.route('/')
def read_entities():
"""
find list of entities
:return:
"""
intents = Entity.objects.only('name', 'id')
return build_response.sent_json(intents.to_json())
@entities_blueprint.route('/<id>')
def read_entity(id):
"""
    Find details for the entity with the given id
:param id:
:return:
"""
return Response(
response=dumps(Entity.objects.get(
id=ObjectId(id)).to_mongo().to_dict()),
status=200, mimetype="application/json")
@entities_blueprint.route('/<id>', methods=['PUT'])
def update_entity(id):
"""
    Update an entity from the provided json
:param id:
:return:
"""
json_data = loads(request.get_data())
entity = Entity.objects.get(id=ObjectId(id))
entity = update_document(entity, json_data)
entity.save()
return build_response.sent_ok()
@entities_blueprint.route('/<id>', methods=['DELETE'])
def delete_entity(id):
"""
    Delete an entity
:param id:
:return:
"""
Entity.objects.get(id=ObjectId(id)).delete()
return build_response.sent_ok()
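# Usage sketch (illustrative; the Flask app below is an assumption, the real
# application factory lives elsewhere in this project):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(entities_blueprint)
#
#     # POST   /entities/      {"name": "city"}  ->  {"_id": "<ObjectId>"}
#     # GET    /entities/                        ->  JSON list of {name, _id}
#     # PUT    /entities/<id>  {...}             ->  OK response
#     # DELETE /entities/<id>                    ->  OK response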
|
import asyncio
from homeassistant.components import cloud, notify as hass_notify
from homeassistant.components.webhook import (
async_register as webhook_register,
async_unregister as webhook_unregister,
)
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.helpers import device_registry as dr, discovery
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
ATTR_DEVICE_ID,
ATTR_DEVICE_NAME,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_OS_VERSION,
CONF_CLOUDHOOK_URL,
DATA_BINARY_SENSOR,
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_DEVICES,
DATA_SENSOR,
DATA_STORE,
DOMAIN,
STORAGE_KEY,
STORAGE_VERSION,
)
from .helpers import savable_state
from .http_api import RegistrationsView
from .webhook import handle_webhook
PLATFORMS = "sensor", "binary_sensor", "device_tracker"
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the mobile app component."""
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
app_config = await store.async_load()
if app_config is None:
app_config = {
DATA_BINARY_SENSOR: {},
DATA_CONFIG_ENTRIES: {},
DATA_DELETED_IDS: [],
DATA_SENSOR: {},
}
hass.data[DOMAIN] = {
DATA_BINARY_SENSOR: app_config.get(DATA_BINARY_SENSOR, {}),
DATA_CONFIG_ENTRIES: {},
DATA_DELETED_IDS: app_config.get(DATA_DELETED_IDS, []),
DATA_DEVICES: {},
DATA_SENSOR: app_config.get(DATA_SENSOR, {}),
DATA_STORE: store,
}
hass.http.register_view(RegistrationsView())
for deleted_id in hass.data[DOMAIN][DATA_DELETED_IDS]:
try:
webhook_register(
hass, DOMAIN, "Deleted Webhook", deleted_id, handle_webhook
)
except ValueError:
pass
hass.async_create_task(
discovery.async_load_platform(hass, "notify", DOMAIN, {}, config)
)
return True
async def async_setup_entry(hass, entry):
"""Set up a mobile_app entry."""
registration = entry.data
webhook_id = registration[CONF_WEBHOOK_ID]
hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id] = entry
device_registry = await dr.async_get_registry(hass)
device = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, registration[ATTR_DEVICE_ID])},
manufacturer=registration[ATTR_MANUFACTURER],
model=registration[ATTR_MODEL],
name=registration[ATTR_DEVICE_NAME],
sw_version=registration[ATTR_OS_VERSION],
)
hass.data[DOMAIN][DATA_DEVICES][webhook_id] = device
registration_name = f"Mobile App: {registration[ATTR_DEVICE_NAME]}"
webhook_register(hass, DOMAIN, registration_name, webhook_id, handle_webhook)
for domain in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, domain)
)
await hass_notify.async_reload(hass, DOMAIN)
return True
async def async_unload_entry(hass, entry):
"""Unload a mobile app entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
webhook_id = entry.data[CONF_WEBHOOK_ID]
webhook_unregister(hass, webhook_id)
del hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id]
await hass_notify.async_reload(hass, DOMAIN)
return True
async def async_remove_entry(hass, entry):
"""Cleanup when entry is removed."""
hass.data[DOMAIN][DATA_DELETED_IDS].append(entry.data[CONF_WEBHOOK_ID])
store = hass.data[DOMAIN][DATA_STORE]
await store.async_save(savable_state(hass))
if CONF_CLOUDHOOK_URL in entry.data:
try:
await cloud.async_delete_cloudhook(hass, entry.data[CONF_WEBHOOK_ID])
except cloud.CloudNotAvailable:
pass
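# Design note (derived from the code above): async_unload_entry runs the
# per-platform unload coroutines concurrently via asyncio.gather and only
# deregisters the webhook and reloads notify once every platform unloaded
# cleanly; if any platform fails to unload, the entry stays loaded.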
|
import pytest
from molecule.model import schema_v2
@pytest.fixture
def _model_driver_section_data():
return {
'driver': {
'name': 'docker',
'provider': {
'name': None,
},
'options': {
'managed': True,
'foo': 'bar',
},
'ssh_connection_options': [
'foo',
'bar',
],
'safe_files': [
'foo',
'bar',
],
}
}
@pytest.mark.parametrize(
'_config', ['_model_driver_section_data'], indirect=True)
def test_driver(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_errors_section_data():
return {
'driver': {
'name': int(),
'provider': {
'name': int(),
'foo': 'bar',
},
'options': {
'managed': str(),
},
'ssh_connection_options': [
int(),
],
'safe_files': [
int(),
],
}
}
@pytest.mark.parametrize(
'_config', ['_model_driver_errors_section_data'], indirect=True)
def test_driver_has_errors(_config):
x = {
'driver': [{
'safe_files': [{
0: ['must be of string type'],
}],
'options': [{
'managed': ['must be of boolean type']
}],
'ssh_connection_options': [{
0: ['must be of string type'],
}],
'name': ['must be of string type'],
'provider': [{
'name': ['must be of string type'],
}],
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_provider_name_nullable_section_data():
return {
'driver': {
'provider': {
'name': None,
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_driver_provider_name_nullable_section_data'],
indirect=True)
def test_driver_provider_name_nullable(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_provider_name_not_nullable_when_vagrant_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': None,
},
}
}
@pytest.mark.parametrize(
'_config',
['_model_driver_provider_name_not_nullable_when_vagrant_section_data'],
indirect=True)
def test_driver_provider_name_not_nullable_when_vagrant_driver(_config):
x = {
'driver': [{
'provider': [{
'name': ['unallowed value None', 'null value not allowed']
}]
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_provider_allows_virtualbox_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'virtualbox',
},
}
}
@pytest.fixture
def _model_driver_provider_allows_vmware_fusion_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'vmware_fusion',
},
}
}
@pytest.fixture
def _model_driver_provider_allows_vmware_workstation_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'vmware_workstation',
},
}
}
@pytest.fixture
def _model_driver_provider_allows_vmware_desktop_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'vmware_desktop',
},
}
}
@pytest.fixture
def _model_driver_provider_allows_parallels_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'parallels',
},
}
}
@pytest.fixture
def _model_driver_provider_allows_libvirt_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'libvirt',
},
}
}
@pytest.mark.parametrize(
'_config', [
('_model_driver_provider_allows_virtualbox_section_data'),
('_model_driver_provider_allows_vmware_fusion_section_data'),
('_model_driver_provider_allows_vmware_workstation_section_data'),
('_model_driver_provider_allows_vmware_desktop_section_data'),
('_model_driver_provider_allows_parallels_section_data'),
('_model_driver_provider_allows_libvirt_section_data'),
],
indirect=True)
def test_driver_provider_allows_name(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_provider_errors_invalid_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': str(),
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_driver_provider_errors_invalid_section_data'],
indirect=True)
def test_driver_invalid_provider_name_has_errors(_config):
x = {
'driver': [{
'provider': [{
'name': ['unallowed value ']
}],
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_driver_allows_azure_section_data():
return {
'driver': {
'name': 'azure',
}
}
@pytest.fixture
def _model_driver_allows_delegated_section_data():
return {
'driver': {
'name': 'delegated',
}
}
@pytest.fixture
def _model_driver_allows_digitalocean_section_data():
return {
'driver': {
'name': 'digitalocean',
}
}
@pytest.fixture
def _model_driver_allows_docker_section_data():
return {
'driver': {
'name': 'docker',
}
}
@pytest.fixture
def _model_driver_allows_ec2_section_data():
return {
'driver': {
'name': 'ec2',
}
}
@pytest.fixture
def _model_driver_allows_gce_section_data():
return {
'driver': {
'name': 'gce',
}
}
@pytest.fixture
def _model_driver_allows_lxc_section_data():
return {
'driver': {
'name': 'lxc',
}
}
@pytest.fixture
def _model_driver_allows_lxd_section_data():
return {
'driver': {
'name': 'lxd',
}
}
@pytest.fixture
def _model_driver_allows_openstack_section_data():
return {
'driver': {
'name': 'openstack',
}
}
@pytest.fixture
def _model_driver_allows_vagrant_section_data():
return {
'driver': {
'name': 'vagrant',
}
}
@pytest.mark.parametrize(
'_config', [
('_model_driver_allows_azure_section_data'),
('_model_driver_allows_delegated_section_data'),
('_model_driver_allows_digitalocean_section_data'),
('_model_driver_allows_docker_section_data'),
('_model_driver_allows_ec2_section_data'),
('_model_driver_allows_gce_section_data'),
('_model_driver_allows_lxc_section_data'),
('_model_driver_allows_lxd_section_data'),
('_model_driver_allows_openstack_section_data'),
('_model_driver_allows_vagrant_section_data'),
],
indirect=True)
def test_driver_allows_name(_config):
assert {} == schema_v2.validate(_config)
|
from unittest import TestCase
import io
import pandas as pd
from scattertext.CorpusFromParsedDocuments import CorpusFromParsedDocuments
from scattertext.WhitespaceNLP import whitespace_nlp_with_sentences
from scattertext.diachronic.DiachronicTermMiner import DiachronicTermMiner
class TestDiachronicTermMiner(TestCase):
@classmethod
def setUpClass(cls):
        df = pd.read_csv(io.StringIO("publish_date,headline_text,publish_yearmonth,publish_month\n20150409,rural sa rural reporter the tale of two orchards,201504,04\n20111206,roar get ulsan in champions league draw,201112,12\n20101201,130m annual cost to run desal plant,201012,12\n20040802,farmers worried about wto agreement loopholes,200408,08\n20170808,same sex marriage plebiscite attempt expected to be blocked,201708,08\n20130621,executives spend a night on the streets to experience homelessn,201306,06\n20070613,nsw govt signs pollution reduction agreement with,200706,06\n20060209,nt doctors show support for abortion drug,200602,02\n20130718,crash driver sought by police,201307,07\n20061119,howard disputes blairs iraq comments,200611,11\n20070725,german reporter released in afghanistan,200707,07\n20120224,hammer heal to coach kings,201202,02\n20090428,written apology over holocaust denial,200904,04\n20141024,unions hand tasmanian government alternative savings plan,201410,10\n20061118,shark gets some pride back,200611,11\n20130206,older watson concerned for jobe bombers,201302,02\n20140430,forum to showcase mid west mining developments,201404,04\n20140429,former wa treasurer buswell admits to driving offences,201404,04\n20070621,weather to determine sports fields opening,200706,06\n20140803,travel blamed for increasing rate of hiv in wa,201408,08\n20050715,stuey takes aim at green jersey,200507,07\n20061219,public urged to help combat fruit fly threat,200612,12\n20040302,robben chooses chelsea over united,200403,03\n20030820,jury to continue deliberations in hanson fraud,200308,08\n20030323,baghdads military facilities targeted in latest,200303,03\n20140417,an india holds biggest day of voting,201404,04\n20050102,car bomb attack kills 18 iraqi national guards,200501,01\n20080818,citation boosts vietnam veterans day significance,200808,08\n20131111,wenceslas magun speaks to pacific beat,201311,11\n20130325,an vanuatu gets new pm,201303,03\n20160423,woman killed in crash with stobie pole,201604,04\n20091006,message spread that attacks not tolerated brumby,200910,10\n20040707,iraq adopts new security laws,200407,07\n20030916,poland gets record case of the blues,200309,09\n20040406,jordan sentences eight to death over diplomat,200404,04\n20101022,arnold to relish cox plate pressure,201010,10\n20130610,lack of data creates concern over true extent of medical errors,201306,06\n20060317,labor warns on minority government,200603,03\n20100808,labor to ban truants from playing sport,201008,08\n20071210,sharks spotted in esperance port,200712,12\n20041224,aust troops to celebrate christmas in iraq,200412,12\n20090819,jail term for rsl theft,200908,08\n20070408,closer am1nodisplay,200704,04\n20161114,nt man jailed for crimes against children,201611,11\n20051003,union warns ir changes threaten australian way of,200510,10\n20041007,afghan children lose high court battle against,200410,10\n20130506,parkinsons test sought,201305,05\n20110329,police accused of not probing brutality claim,201103,03\n20090828,cairns trip ends in top end lsd bust,200908,08\n20120816,coroner criticises ambulance 'ramping',201208,08\n20130121,new recruits for womens cycling team,201301,01\n20050203,uni to hold tropical science precinct talks,200502,02\n20041110,jetstar asia prepares for launch,200411,11\n20090326,mccreadie granted immunity,200903,03\n20170821,one killed in france after car crashes into bus shelters,201708,08\n20081031,gambhir handed one test ban,200810,10\n20150527,school communities unsettled about prospect of school closures,201505,05\n20050829,man accused of ramming car with children inside,200508,08\n20130821,van park owner pursues legal options over free,201308,08\n20060406,national network to track pseudoephedrine sales,200604,04\n20040708,big sports complex planned near maitland,200407,07\n20100714,ex afl player paid nearly 80k to conman,201007,07\n20120711,victory retain milligans services,201207,07\n20080221,bad weather delays dalrymple bay coal terminal,200802,02\n20151021,govt department tests scales get what paid for,201510,10\n20090208,battered jets sign italian striker vignaroli,200902,02\n20130205,capital hill monday 4 february 2013,201302,02\n20161013,medicinal cannabis register considered tasmania,201610,10\n20041202,underwood sworn in as chief justice,200412,12\n20110701,rta heeds call for pedestrian safety upgrade,201107,07\n20120723,miners say cost of business too high,201207,07\n20090103,funding secures more aerial shark patrols,200901,01\n20170603,were australias first people nomadic,201706,06\n20031019,tributes pour in for spanish writer montalban,200310,10\n20080301,interview ricky ponting,200803,03\n20100831,forlan at the double for atletico,201008,08\n20060907,lawyers say vizards silence is unfair to hilliard,200609,09\n20060524,shoulder troubles for roddick ahead of french,200605,05\n20080809,tennis form guide mens singles,200808,08\n20171206,family of betty dixon still asking questions as cold case ends,201712,12\n20080715,fed court overturns annoying ban,200807,07\n20120131,rare earth industry developing rapidly,201201,01\n20131117,tremlett prior set to start for england,201311,11\n20121114,eltons latest book explores brothers relationship,201211,11\n20070316,evans a man of honesty and integrity,200703,03\n20040908,financial lobby criticises labor tax package,200409,09\n20030604,health service urged to review gp anaesthetist,200306,06\n20030410,restrictions for melbourne as water cost rises,200304,04\n20161022,pamela anderson speaks out about pornographys numbing effects,201610,10\n20120804,fire warning,201208,08\n20110329,paramedic gives evidence at road crash murder trial,201103,03\n20160711,response to labor mp call to ban fracking in south west,201607,07\n20111007,health razor gang disbands early,201110,10\n20141023,acid attacks on women spark protests in iran,201410,10\n20100401,mp airs fears for forestry jobs,201004,04\n20121124,interview rianna ponting,201211,11\n20120820,tony burke talks with four corners,201208,08\n20100815,20 million affected by pakistan floods,201008,08\n20091222,china planning to execute briton next week,200912,12\n20100819,woman granted bail over torso in bush find,201008,08\n20091103,christmas island locals forgotten in asylum debate,200911,11\n20071027,eden monaro headed for labor poll,200710,10\n20121027,alleged hijackers flown to sri lanka to face charges,201210,10\n20160320,powerlifting: watch a benchpress; a deadlift and a,201603,03\n20130913,new york jets' mark sanchez facing season ending shoulder sur,201309,09\n20120324,we have to put bligh legacy behind us,201203,03\n20050524,budget sees return of investment properties tax,200505,05\n20101117,germany increases security amid terrorist threat,201011,11\n20150713,newcastle man in coma after drunken argument,201507,07\n20140812,titans need help in afl battle,201408,08\n20170119,vegemite back in australian hands,201701,01\n20070508,utai out cutler in for dogs,200705,05\n20160818,artists opens up world of picture book illustrations,201608,08\n20150731,north queensland ports urge ports bill fine tuning,201507,07\n20060623,wimmera sheep sales increase,200606,06\n20120105,opposition queries extra senior bureaucrats,201201,01\n20120514,hume result,201205,05\n20070909,victorians going green,200709,09\n20121113,broken hill baby birds back in their nests,201211,11\n20111023,drunk driving police,201110,10\n20070806,four arrested over safe breaks,200708,08\n20131214,sri lanka retain twenty20 number one ranking,201312,12\n20061122,sydney tourism snubs regional areas,200611,11\n20070512,curbishley confident of players resolve,200705,05\n20050924,ten killed in gaza hamas rally blast,200509,09\n20080804,police dig for baby 12 years on,200808,08\n20090602,centenary show for gin gin,200906,06\n20090426,g20 ministers still cautious on global economy swan,200904,04\n20080918,david kidman from ferrier hodgson talks about the,200809,09\n20091101,beauty with a twist,200911,11\n20091203,henderson talks up brave 2030 plan,200912,12\n20070913,power in no rush to decide political future,200709,09\n20091209,swine flu far milder than feared,200912,12\n20091216,us house of reps honours miles davis album,200912,12\n20160816,two dead in crash on eyre highway near balladonia,201608,08\n20091022,worms linked to coeliac relief,200910,10\n20140401,wafarmers urges growers to decrease debt,201404,04\n20121115,fmg diversifies into oil and gas,201211,11\n20040121,leaders may need to resolve trade talks,200401,01\n20081207,tasmanians urged to spend within their means,200812,12\n20140822,sa police join search for missing warrnambool man,201408,08\n20051219,company fined after explosions injured workers,200512,12\n20081013,thai queen to attend protesters funeral,200810,10\n20111124,global stocks close,201111,11\n20051221,aquaculture group upset with course axing,200512,12\n20121224,somali troops end hostages' three year ordeal,201212,12\n20090804,bligh vows to refer email row to cmc,200908,08\n20100714,appointed to healths top job,201007,07\n20100128,remote schools low on my school site,201001,01\n20140505,festival visitors get taste for regions produce,201405,05\n20030413,canegrowers push for ethanol mix in all petrol,200304,04\n20110409,clarke ton helps aussies to victory,201104,04\n20151207,police seek witnesses to fatal tintinara road crash,201512,12\n20041013,tax relief tipped for wa home buyers,200410,10\n20050312,bulls charge towards home final,200503,03\n20151125,three men dead in perth workplace accidents,201511,11\n20160516,federal government considers assistance package dairy farmers,201605,05\n20130523,minister jeanette powell outlines strategy for victoria's abo,201305,05\n20140919,jackson primary school censorship,201409,09\n20090909,russians behind cyber crime says afp,200909,09\n20030709,indias congress considers coalition to oust bjp,200307,07\n20050425,council plans memorial to grassby,200504,04\n20090810,slovak mine blast traps 19 miners,200908,08\n20121123,some tourism operators say no to schoolies,201211,11\n20150507,australian farming families the feature of a new,201505,05\n20120322,young roos,201203,03\n20101206,katich has scans on achilles injury,201012,12\n20070627,pricey sydney tops census again,200706,06\n20060319,opals enjoy another big win,200603,03\n20160318,albany residents to be quizzed over muttonbird reserve,201603,03\n20150902,china fta senator colbeck trade,201509,09\n20160609,greyhound racing nsw charges 179 trainers owners,201606,06\n20060220,internet smss blamed for big crowd at party,200602,02\n20031203,renison mine to remain closed,200312,12\n20151215,newcastle giving tree finished for 2015,201512,12\n20070707,afp release five doctors after questioning,200707,07\n20121130,an bangladesh inspections,201211,11\n20121008,man quizzed over high speed chase,201210,10\n20080409,lennon under fire over kons resignation,200804,04\n20130510,compo concerns,201305,05\n20150730,police plead for clues to tenterden road crash,201507,07\n20081014,an open and shut case for nw road,200810,10\n20100511,scott daughters settle estate fight,201005,05\n20080523,suitability of hensons images depends on context,200805,05\n20060622,aged care group restructures decision making,200606,06\n20150204,nff wants banks to pass on interest rate cut to farmers,201502,02\n20041118,govts urged to act on commuter train service,200411,11\n20030323,worldwide protests demand peace,200303,03\n20040601,gillespie talks up worth of zimbabwe series,200406,06\n20050506,tribunal cracks down on video evidence,200505,05\n20151021,police make arrest missing mother linda sidon gold coast,201510,10\n20121012,scientists uncover mystery of ball lightning,201210,10\n20140430,encouraging girls in engineering jpbs,201404,04\n20160816,woman charged over assault of victorian labor mp jane garrett,201608,08\n20140224,cattle saleyards canteen ladies,201402,02\n20080726,final showdown looms for tour,200807,07\n20111229,pesce a rising tide of chaos,201112,12\n20040426,former us ambassador doubts iraq wmd focus,200404,04\n20080603,evicted aborigines finish training in sydney,200806,06\n20070412,cadets to attend sandakan dawn service,200704,04\n20100425,red shirts discarded ahead of crackdown,201004,04\n20070625,four to appear in court over coolgardie burglary,200706,06\n20140812,nrn graincorp ceo,201408,08\n20101230,interview michael clarke,201012,12\n20110506,workers to mine tafe for education needs,201105,05\n20130912,wafl player has bail varied to play,201309,09\n20120809,simpson elected murray irrigation shareholder,201208,08\n20121206,ice blamed for crime spike,201212,12\n20080622,opec divided on saudi summit and production boost,200806,06\n20050513,heroin bust in adelaide,200505,05\n20051004,nrma highlights need for pacific highway attention,200510,10\n20110706,public quizzed about closed inlet,201107,07\n20150225,herbicide resistance peter newman,201502,02\n20050216,push for second kakadu uranium mine,200502,02\n20040314,murali set to join warne in 500 wicket club,200403,03\n20131104,soil carbon climate change,201311,11\n20100208,the wwfs paul gamblin says a report should put,201002,02\n20040922,indonesian presidential hopeful plans peace in aceh,200409,09\n20170405,bushfire emergency downgraded near esperance in wa,201704,04\n20120724,injured sea birds washing up inland,201207,07\n20160729,donald trump v hillary clinton star power of the conventions,201607,07\n20120522,impact of bomb blasts on the brain,201205,05\n20140811,israel palestine agree to 72 hour cease fire in gaza,201408,08\n20130610,14yos accused of armed robbery,201306,06\n20051114,mp says tafe fees soaring,200511,11\n20050419,woolworths sales up more than 14pc,200504,04\n20080907,peter leek breaks butterfly world record,200809,09\n20080426,jones trickett set new world records,200804,04\n20041224,karzai removes warlords from afghan cabinet,200412,12\n20120329,no confidence showdown looming,201203,03\n20110114,brazil floods mudslides kill hundreds,201101,01\n20160918,hospital parking fees petition gains support on change org,201609,09\n20140716,china gdp growth hits expectations,201407,07\n20071206,pasha findings prompt port review,200712,12\n20080627,pigeons smuggle drugs phones into rio prison,200806,06\n20071228,plucky india fights back in melbourne,200712,12\n20150419,thousands in germany protest against ttip europe us trade deal,201504,04\n20100112,rain sets up new crop for cane farmers,201001,01\n20110110,peter andre named hardest working singer,201101,01\n20120830,search becomes rescue as asylum boat found,201208,08\n20050715,manslaughter charge dropped in bondage case,200507,07\n20120822,laurie daley interview,201208,08\n20030601,williams silent on sydney ji unit claim,200306,06\n20060226,govt offers to buy back sydney harbour fishing,200602,02\n20061115,reward offered to catch roo shooter,200611,11\n20121128,report suggests turnaround for struggling boxed,201211,11\n20081024,november execution for bali bombers,200810,10\n20040513,ethnic sounds unite eurovision,200405,05\n20111128,murray darling authority chairman craig knowles,201111,11\n20160122,brisbane artist helps fans pay tribute to idols through nail art,201601,01\n20120821,australia too complacent,201208,08\n20070829,rudd pressures howard to pick election date,200708,08\n20171203,cooper cronk goes out on top announcing retirement from rep,201712,12\n20140212,oz shares surge after ceo announces departure,201402,02\n20060630,council happy to receive community funds for,200606,06\n20131113,lifeline helping miners prevent suicide,201311,11\n20100701,authorities fear grass fires deliberately lit,201007,07\n20040827,family hires security guard for protection,200408,08\n20110315,contempt of court charge against paper dropped,201103,03\n20030416,full text 13 point plan for iraq,200304,04\n20090704,nrl interview neil henry,200907,07\n20120306,sa courts,201203,03\n20060119,australia west indies postpone 2007 test series,200601,01\n20140603,bosnia finalises cup squad,201406,06\n20121127,victorian government backs down on scrapping fruit,201211,11\n20050131,perth kalgoorlie line set to reopen on weekend,200501,01\n20150428,chile volcano calbuco economy 600 million tourism eruption,201504,04\n20130313,grain prices rabobank,201303,03\n20140415,fia upholds ricciardo disqualification,201404,04\n20100425,pies embarrass dons on big stage,201004,04\n20120213,shining path leader captured,201202,02\n20160715,rescue plane goes down in goldfields hunt for missing man,201607,07\n20110901,storm wont appeal blairs ban,201109,09\n20131108,today tonight twist in gittany trial,201311,11\n20070413,tour boat profits blown away,200704,04\n20170921,farmers open the farm gate to combat carrot glut,201709,09\n20130507,qdo resignation,201305,05\n20060531,australian teams join quake aid efforts,200605,05\n20110705,bartos the public service numbers game,201107,07\n20060705,patient no shows end specialist medical service,200607,07\n20150804,multi million dollar northern farming system project,201508,08\n20171229,china foreign ministry denies claims its still,201712,12\n20110807,masterchef winner,201108,08\n20161006,for better or worse: four corners,201610,10\n20070308,rsl investigates veterans home care service,200703,03\n20090212,keane at the double for ireland,200902,02\n20080102,pakistan issues photos of bhutto death offers,200801,01\n20121113,pair charged following police shooting,201211,11\n20040304,hope for business chamber turnaround,200403,03\n20050226,cabinet to consider nightclub lock out plan,200502,02\n20061220,illawarra schools do well in hsc,200612,12\n20121112,data reveals strong regional rental markets,201211,11\n20060629,teen found safe after missing in bush for three,200606,06\n20060110,star studded field confirmed for johnnie walker,200601,01\n20120113,abc sport,201201,01\n20140702,trade balance slumps to near 2 billion deficit on fall in iron,201407,07\n20090928,star to be born again,200909,09\n20100712,experts warn against growing diabetes threat,201007,07\n20031212,rampaging roy wins cultural recognition,200312,12\n20081221,chinese warships to join anti piracy force,200812,12\n20040603,mayor highlights hidden amalgamation costs,200406,06\n20091013,locals threaten to block kokoda over crash compo,200910,10\n20081211,connex told to fix industrial dispute,200812,12\n20141204,ronja huon aquaculture salmon,201412,12\n20161102,private investor interest in henty pub,201611,11\n20100324,councils face off over oakajee,201003,03\n20160407,the peasant prince,201604,04\n20171018,daphne caruana galizias son accuses malta pm of complicity,201710,10\n20151012,barns risky detention policy,201510,10\n20130102,under age drinking a big problem in manning great lakes,201301,01\n20150918,the rbas advice for the us fed on hiking rates,201509,09\n20151027,adelaide bite baseballer's assault charge may be dropped,201510,10\n20070207,survey normal govt procedure says minister,200702,02\n20170324,anz joins the rush to raise home loan interest rates,201703,03\n20110214,work to start on new adelaide airport parking,201102,02\n20130309,interview johnathan thurston,201303,03\n20101206,west coast abalone season winds up,201012,12\n20110705,westhoff injury gives cornes his chance,201107,07\n20100930,pyne sent from chamber for hopeless jibe,201009,09\n20120515,rocks to tackle foreshore erosion woes,201205,05\n20101217,storm threat eases in south east queensland,201012,12\n20041017,richmond slips away from anthony,200410,10\n20070910,rare nsw plant faces extinction,200709,09\n20140602,clunies ross science award for gravity separator,201406,06\n20090713,angelita pires on trial for conspiracy,200907,07\n20070916,nt comes to grips with alcohol bans,200709,09\n20040929,tourism award nomination for pioneer settlement,200409,09\n20100223,australia v west indies innings highlights,201002,02\n20080508,people must be across risks and benefits of gm,200805,05\n20080624,goodes accepts ban,200806,06\n20030619,capriati and rubin win at eastbourne,200306,06\n20100610,youth job agency to close doors,201006,06\n20051110,call made to cut infrastructure project red tape,200511,11\n20130530,adam scott not planning to sue over anchoring,201305,05\n20041216,toxicologist calls for more drink spiking evidence,200412,12\n20110605,police find teen detention centre escapee,201106,06\n20060727,memorial to honour murdered sisters,200607,07\n20150908,jason day heads presidents cup team to take on us in october,201509,09\n20040702,icc confirms postponement of zimbabwe tests,200407,07\n20120413,philips bob brown,201204,04\n20080318,newcastle building society passes on rate rise,200803,03\n20121121,emma roberts avery wines,201211,11\n20101218,vics take innings points,201012,12\n20130514,nt cattle sold to vic,201305,05\n20101122,art world welcomes indigenous recruits,201011,11\n20130227,hough eyeing off moscow berth,201302,02\n20120718,an thai military outpost and village attacked,201207,07\n20110331,labors downfall the machine and the split,201103,03\n20150715,tonga pm casts doubt on country's ability to host pacific games,201507,07\n20141002,accc approves sale of acttab to tabcorp group,201410,10\n20050930,hope for power station to attract new industries,200509,09\n20140317,hamelin wake,201403,03\n20101013,11 jailed over van gogh theft,201010,10\n20090418,20 hostages freed from pirate mother ship,200904,04\n20131121,probe into 2011 police shooting in coffs harbour still incomple,201311,11\n20090920,torres double gets liverpool home,200909,09\n20100502,mayfair holding firm at quail hollow,201005,05\n20041106,samarra car bombs kill 8 wound 20,200411,11\n20080923,ses under pressure as storms hit riverina,200809,09\n20150528,australians unaware they have chronic kidney disease report,201505,05\n20080929,court hears torres strait seas claim,200809,09\n20141118,abortion row erupts between coalition candidates in ballarat,201411,11\n20090211,tornado kills 8 people in oklahoma,200902,02\n20170623,danny noonan ex afl player jailed for stealing from clients,201706,06\n20151104,efficient housing a focus for aboriginal land council's new w,201511,11\n20070416,missing elderly man found safe,200704,04\n20060607,council includes road repair funds in draft budget,200606,06\n20090903,cba feels wrath over storm collapse,200909,09\n20121209,marquez knocks out pacquiao,201212,12\n20090619,sharks fraud claims parents charged,200906,06\n20121219,ambulance reforms written off by paramedic's union,201212,12\n20151221,water sharing arrangement could be fast tracked due to contamin,201512,12\n20070514,viduka in no rush to decide future,200705,05\n20100212,penn universitys climategate findings,201002,02\n20051014,bikers ride honours sheene,200510,10\n20090201,hotter drier january,200902,02\n20091231,capital fireworks to bring in new year,200912,12\n20150327,joeys to be released into the wild after adelaide bushfires,201503,03\n20100223,amcor profit beats expectations,201002,02\n20040813,sex charges highlight need for workplace education,200408,08\n20030326,libs claim south coast seat,200303,03\n20060502,federal govt to fund airport security upgrade,200605,05\n20100710,yacht murder case begins,201007,07\n20070305,carpenter vows to force grill out of alp,200703,03\n20051006,us senate moves to ban prisoner torture,200510,10\n20121223,tendulkar retires from odis,201212,12\n20141003,nobel peace summit 'suspended' over dalai lama visa row,201410,10\n20050601,schumacher dismisses quit questions,200506,06\n20040921,parents shy away from meningococcal vaccinations,200409,09\n20121023,prince charles australian travel plans revealed,201210,10\n20140509,new mental health centre to help patients,201405,05\n20030929,lisbie hat trick stuns liverpool,200309,09\n20060202,awb kickbacks scandal puts govt under us pressure,200602,02\n20050909,man killed in head on crash,200509,09\n20130725,nrn ag minister shepp,201307,07\n20070807,croydon council delivers budget,200708,08\n20121102,an worldbank earmarks $245m for burma,201211,11\n20110523,doubt behind the aggression,201105,05\n20100826,interview brett kimmorley,201008,08\n20040703,new disease threatens qld citrus crops,200407,07\n20080522,man charged with assaulting girls wanted in qld,200805,05\n20140709,mining ojbection legislation changes,201407,07\n20160308,efforts to get more women to become truck drivers in tasmania,201603,03\n20040706,crackdown on overseas trained country doctors,200407,07\n20151119,national rural news,201511,11\n20140321,sydney light rail extension to open next week,201403,03\n20151211,doris fenbows killer alexis katsis jailed for 15 years,201512,12\n20111012,waca ceo wood resigns,201110,10\n20060824,program cuts childhood obesity rate researchers say,200608,08\n20140130,hospital forced to use surge capacity beds on regular basis,201401,01\n20101006,red cross opens doors in kalgoorlie boulder,201010,10\n20030716,boyle praises freeman as best of her generation,200307,07\n20131114,ract takes over federal groups' tourism ventures,201311,11\n20170529,queensland government to play ball over adani loan: treasurer,201705,05\n20151021,milky way galaxy star forming clouds,201510,10\n20120511,van egmond admits informal talks about leaving jets,201205,05\n20110718,more groundwater trials at mount zero,201107,07\n20051212,angel wins murgon by election,200512,12\n20100301,record rain fills heart of australia,201003,03\n20090727,council to sign algae biodiesel agreement,200907,07\n20121207,uninterrupted grain harvest nears end,201212,12\n20160824,wesfarmers richard goyder defends business council,201608,08\n20051017,briefings to be held for would be councillors,200510,10\n20040623,hobart prepares for jim bacons funeral,200406,06\n20070807,second suspected foot and mouth outbreak in britain,200708,08\n20101010,qr national float details unveiled,201010,10\n20060919,brock funeral begins in melbourne,200609,09\n20170620,family road trip tells burke and wills story through theatre,201706,06\n20151109,china and australia to share antarctic sea ice research,201511,11\n20141119,victoria beats south australia in shield,201411,11\n20150930,tas country hour wednesday 21 september 2015,201509,09\n20141015,consumer sentiment negative in westpac survey,201410,10\n20090719,india can make its own decisions clinton says,200907,07\n20140320,council urged to crack down on illegal holiday,201403,03\n20080925,dog attacks policewoman in boulder,200809,09\n20080123,springborg attempting to rebadge the national,200801,01\n20050120,houses crack in canadian cold spell,200501,01\n20130923,mining company discovers second cement spill in sugarloaf,201309,09\n20031108,us jobs figures fail to bolster markets,200311,11\n20110519,boat tragedy video released,201105,05\n20121102,call for review of water concessions,201211,11\n20120616,interview michael maguire,201206,06\n20030413,death toll rises on nsw roads,200304,04\n20110330,no verdict in airport caterer drug case,201103,03\n20100921,study to probe field days value,201009,09\n20100912,resilience will help say dogs,201009,09\n20110607,boaties rescue sparks emergency beacon reminder,201106,06\n20110628,robinson re signs with reds,201106,06\n20040110,fleming ton seals kiwi win,200401,01\n20111123,holden recalls diesel cars,201111,11\n20041012,china may sign fta with nz first,200410,10\n20130417,new radar,201304,04\n20140304,nsw country hour 4 march 2014,201403,03\n20060807,stanhope rejects tax discrepancy claims,200608,08\n20070308,downpour cancels bemboka show,200703,03\n20160718,toowoomba south lnp david janetzki claims victory in by election,201607,07\n20101208,flood peak fears ease in rockhampton,201012,12\n20050525,dumped car not linked to missing schoolboy police,200505,05\n20071115,second stage of vegie industry water saving,200711,11\n20080908,aust paralympic swimmers miss out on medals,200809,09\n20150622,geelong star kills another dolphin prompting fishery closure,201506,06\n20050417,ofc backs socceroos asian move,200504,04\n20150316,islamic state militants claim attack on checkpoint in libya,201503,03\n20080729,luhrmann on transformative experiences,200807,07\n20111115,man jailed over beer bottle glassing,201111,11\n20051031,windies coach denies players have attitude problem,200510,10\n20101119,court jails driver for running down man,201011,11\n20110503,pakistan embarrassed by intelligence failure,201105,05\n20071121,security camera funding pledge for mackay,200711,11\n20110104,police suspect careless campers behind bushfire,201101,01\n20150825,san francisco coach attempts to hose down hayne hype,201508,08\n20030315,hewitt still top dog,200303,03\n20131227,ukraine protesters rally after journalist bashed,201312,12\n20080423,bryce bligh address students at brisbane anzac,200804,04\n20080902,domestic markets flat despite interest rate cut,200809,09\n20080113,bligh approval soars to 68pc,200801,01\n20080303,southern road fatality,200803,03\n20160127,tunarama 2016 highlights port lincoln,201601,01\n20141223,warner will be ready for boxing day test,201412,12\n20150707,75yo fraser coast woman dies after suspected,201507,07\n20090515,rees urges players to come forward,200905,05\n20140311,smith agrees to four year extension at storm,201403,03\n20120511,black caviar prepares for australian finale,201205,05\n20160929,sa weather fuel shortages eyre peninsula residents stranded,201609,09\n20151209,north coast victims tell stolen generations inquiry more suppor,201512,12\n20141204,ebola global toll rises further as virus spreads in sierra leone,201412,12\n20071101,bryan cousins lashes out at media,200711,11\n20070211,clashes flare again over jerusalem mosque,200702,02\n20101220,blisters and pimples clog 000,201012,12\n20140731,australian medical association regional queenslanders obese,201407,07\n20080213,apology welcome reconciliation the next goal tas,200802,02\n20050916,two injured in skydiving accident,200509,09\n20151211,captain of honduras soccer team shot dead,201512,12\n20090102,israels labour rebounds in polls after gaza blitz,200901,01\n20111115,karumba barra centre could close,201111,11\n20090826,nelson proud of saving propellant factory,200908,08\n20130330,couple wanted over sydney diamond heist,201303,03\n20090501,mp demands more police to fill shortages,200905,05\n20141010,glenn hall re signs with north queensland cowboys,201410,10\n20140425,projections illuminate anzacs,201404,04\n"))
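        # Tokenize each headline with the lightweight whitespace NLP and build
        # a corpus with one category per publication year-month.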
df['parse'] = df.headline_text.apply(whitespace_nlp_with_sentences)
df['publish_yearmonth'] = df['publish_yearmonth'].astype(str)
df['publish_month'] = df['publish_month'].astype(str)
cls.corpus = (CorpusFromParsedDocuments(df,
category_col='publish_yearmonth',
parsed_col='parse')
.build())
def test_setup(self):
DiachronicTermMiner(self.corpus)
with self.assertRaises(Exception):
DiachronicTermMiner(self.corpus, timesteps_to_lag=3999)
DiachronicTermMiner(self.corpus, timesteps_to_lag=2)
with self.assertRaises(Exception):
DiachronicTermMiner(self.corpus, start_category='asd')
with self.assertRaises(Exception):
DiachronicTermMiner(self.corpus, start_category='200304')
DiachronicTermMiner(self.corpus, start_category='201404')
with self.assertRaises(Exception):
DiachronicTermMiner(self.corpus, seasonality_column='publish_montha')
DiachronicTermMiner(self.corpus, seasonality_column='publish_month')
def test_get_terms_to_display(self):
df = DiachronicTermMiner(self.corpus, num_terms=10).get_display_dataframe()
        self.assertEqual(list(df.columns), ['term', 'variable', 'frequency', 'trending'])
        self.assertEqual(len(set(df.term)), 10)
df = DiachronicTermMiner(self.corpus, num_terms=20).get_display_dataframe()
        self.assertEqual(len(set(df.term)), 20)
def test_get_visualization(self):
try:
import altair
        except ImportError:
return
DiachronicTermMiner(self.corpus, num_terms=10).visualize()
|
import logging
import unittest
import numpy as np
from gensim.corpora.mmcorpus import MmCorpus
from gensim.models import logentropy_model
from gensim.test.utils import datapath, get_tmpfile
class TestLogEntropyModel(unittest.TestCase):
TEST_CORPUS = [[(1, 1.0)], [], [(0, 0.5), (2, 1.0)], []]
def setUp(self):
self.corpus_small = MmCorpus(datapath('test_corpus_small.mm'))
self.corpus_ok = MmCorpus(datapath('test_corpus_ok.mm'))
self.corpus_empty = []
def test_generator_fail(self):
"""Test creating a model using a generator as input; should fail."""
def get_generator(test_corpus=TestLogEntropyModel.TEST_CORPUS):
for test_doc in test_corpus:
yield test_doc
self.assertRaises(ValueError, logentropy_model.LogEntropyModel, corpus=get_generator())
def test_empty_fail(self):
"""Test creating a model using an empty input; should fail."""
self.assertRaises(ValueError, logentropy_model.LogEntropyModel, corpus=self.corpus_empty)
def testTransform(self):
# create the transformation model
model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=False)
# transform one document
doc = list(self.corpus_ok)[0]
transformed = model[doc]
expected = [
(0, 0.3748900964125389),
(1, 0.30730215324230725),
(3, 1.20941755462856)
]
self.assertTrue(np.allclose(transformed, expected))
def testPersistence(self):
fname = get_tmpfile('gensim_models_logentry.tst')
model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=True)
model.save(fname)
model2 = logentropy_model.LogEntropyModel.load(fname)
self.assertTrue(model.entr == model2.entr)
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec]))
def testPersistenceCompressed(self):
fname = get_tmpfile('gensim_models_logentry.tst.gz')
model = logentropy_model.LogEntropyModel(self.corpus_ok, normalize=True)
model.save(fname)
model2 = logentropy_model.LogEntropyModel.load(fname, mmap=None)
self.assertTrue(model.entr == model2.entr)
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec]))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
from datetime import timedelta
import logging
from typing import Optional
from geojson_client.usgs_earthquake_hazards_program_feed import (
UsgsEarthquakeHazardsProgramFeedManager,
)
import voluptuous as vol
from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
ATTR_ALERT = "alert"
ATTR_EXTERNAL_ID = "external_id"
ATTR_MAGNITUDE = "magnitude"
ATTR_PLACE = "place"
ATTR_STATUS = "status"
ATTR_TIME = "time"
ATTR_TYPE = "type"
ATTR_UPDATED = "updated"
CONF_FEED_TYPE = "feed_type"
CONF_MINIMUM_MAGNITUDE = "minimum_magnitude"
DEFAULT_MINIMUM_MAGNITUDE = 0.0
DEFAULT_RADIUS_IN_KM = 50.0
DEFAULT_UNIT_OF_MEASUREMENT = LENGTH_KILOMETERS
SCAN_INTERVAL = timedelta(minutes=5)
SIGNAL_DELETE_ENTITY = "usgs_earthquakes_feed_delete_{}"
SIGNAL_UPDATE_ENTITY = "usgs_earthquakes_feed_update_{}"
SOURCE = "usgs_earthquakes_feed"
VALID_FEED_TYPES = [
"past_hour_significant_earthquakes",
"past_hour_m45_earthquakes",
"past_hour_m25_earthquakes",
"past_hour_m10_earthquakes",
"past_hour_all_earthquakes",
"past_day_significant_earthquakes",
"past_day_m45_earthquakes",
"past_day_m25_earthquakes",
"past_day_m10_earthquakes",
"past_day_all_earthquakes",
"past_week_significant_earthquakes",
"past_week_m45_earthquakes",
"past_week_m25_earthquakes",
"past_week_m10_earthquakes",
"past_week_all_earthquakes",
"past_month_significant_earthquakes",
"past_month_m45_earthquakes",
"past_month_m25_earthquakes",
"past_month_m10_earthquakes",
"past_month_all_earthquakes",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FEED_TYPE): vol.In(VALID_FEED_TYPES),
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
vol.Optional(
CONF_MINIMUM_MAGNITUDE, default=DEFAULT_MINIMUM_MAGNITUDE
): cv.positive_float,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the USGS Earthquake Hazards Program Feed platform."""
scan_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
feed_type = config[CONF_FEED_TYPE]
coordinates = (
config.get(CONF_LATITUDE, hass.config.latitude),
config.get(CONF_LONGITUDE, hass.config.longitude),
)
radius_in_km = config[CONF_RADIUS]
minimum_magnitude = config[CONF_MINIMUM_MAGNITUDE]
# Initialize the entity manager.
feed = UsgsEarthquakesFeedEntityManager(
hass,
add_entities,
scan_interval,
coordinates,
feed_type,
radius_in_km,
minimum_magnitude,
)
def start_feed_manager(event):
"""Start feed manager."""
feed.startup()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
class UsgsEarthquakesFeedEntityManager:
"""Feed Entity Manager for USGS Earthquake Hazards Program feed."""
def __init__(
self,
hass,
add_entities,
scan_interval,
coordinates,
feed_type,
radius_in_km,
minimum_magnitude,
):
"""Initialize the Feed Entity Manager."""
self._hass = hass
self._feed_manager = UsgsEarthquakeHazardsProgramFeedManager(
self._generate_entity,
self._update_entity,
self._remove_entity,
coordinates,
feed_type,
filter_radius=radius_in_km,
filter_minimum_magnitude=minimum_magnitude,
)
self._add_entities = add_entities
self._scan_interval = scan_interval
def startup(self):
"""Start up this manager."""
self._feed_manager.update()
self._init_regular_updates()
def _init_regular_updates(self):
"""Schedule regular updates at the specified interval."""
track_time_interval(
self._hass, lambda now: self._feed_manager.update(), self._scan_interval
)
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
def _generate_entity(self, external_id):
"""Generate new entity."""
new_entity = UsgsEarthquakesEvent(self, external_id)
# Add new entities to HA.
self._add_entities([new_entity], True)
def _update_entity(self, external_id):
"""Update entity."""
dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY.format(external_id))
def _remove_entity(self, external_id):
"""Remove entity."""
dispatcher_send(self._hass, SIGNAL_DELETE_ENTITY.format(external_id))
class UsgsEarthquakesEvent(GeolocationEvent):
"""This represents an external event with USGS Earthquake data."""
def __init__(self, feed_manager, external_id):
"""Initialize entity with data from feed entry."""
self._feed_manager = feed_manager
self._external_id = external_id
self._name = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._place = None
self._magnitude = None
self._time = None
self._updated = None
self._status = None
self._type = None
self._alert = None
self._remove_signal_delete = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_delete = async_dispatcher_connect(
self.hass,
SIGNAL_DELETE_ENTITY.format(self._external_id),
self._delete_callback,
)
self._remove_signal_update = async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_ENTITY.format(self._external_id),
self._update_callback,
)
@callback
def _delete_callback(self):
"""Remove this entity."""
self._remove_signal_delete()
self._remove_signal_update()
self.hass.async_create_task(self.async_remove())
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for USGS Earthquake events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
if feed_entry:
self._update_from_feed(feed_entry)
def _update_from_feed(self, feed_entry):
"""Update the internal state from the provided feed entry."""
self._name = feed_entry.title
self._distance = feed_entry.distance_to_home
self._latitude = feed_entry.coordinates[0]
self._longitude = feed_entry.coordinates[1]
self._attribution = feed_entry.attribution
self._place = feed_entry.place
self._magnitude = feed_entry.magnitude
self._time = feed_entry.time
self._updated = feed_entry.updated
self._status = feed_entry.status
self._type = feed_entry.type
self._alert = feed_entry.alert
@property
def icon(self):
"""Return the icon to use in the frontend."""
return "mdi:pulse"
@property
def source(self) -> str:
"""Return source value of this external event."""
return SOURCE
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return self._name
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return DEFAULT_UNIT_OF_MEASUREMENT
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_PLACE, self._place),
(ATTR_MAGNITUDE, self._magnitude),
(ATTR_TIME, self._time),
(ATTR_UPDATED, self._updated),
(ATTR_STATUS, self._status),
(ATTR_TYPE, self._type),
(ATTR_ALERT, self._alert),
(ATTR_ATTRIBUTION, self._attribution),
):
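            # Keep boolean False values but drop other falsy/empty values.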
if value or isinstance(value, bool):
attributes[key] = value
return attributes
|
revision = "a02a678ddc25"
down_revision = "8ae67285ff14"
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"rotation_policies",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=True),
sa.Column("days", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.add_column(
"certificates", sa.Column("rotation_policy_id", sa.Integer(), nullable=True)
)
op.create_foreign_key(
None, "certificates", "rotation_policies", ["rotation_policy_id"], ["id"]
)
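    # Data migration: seed a default 30-day rotation policy and point all
    # existing certificates at it.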
conn = op.get_bind()
stmt = text("insert into rotation_policies (days, name) values (:days, :name)")
stmt = stmt.bindparams(days=30, name="default")
conn.execute(stmt)
stmt = text("select id from rotation_policies where name=:name")
stmt = stmt.bindparams(name="default")
rotation_policy_id = conn.execute(stmt).fetchone()[0]
stmt = text("update certificates set rotation_policy_id=:rotation_policy_id")
stmt = stmt.bindparams(rotation_policy_id=rotation_policy_id)
conn.execute(stmt)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "certificates", type_="foreignkey")
op.drop_column("certificates", "rotation_policy_id")
op.drop_index(
"certificate_replacement_associations_ix",
table_name="certificate_replacement_associations",
)
op.create_index(
"certificate_replacement_associations_ix",
"certificate_replacement_associations",
["replaced_certificate_id", "certificate_id"],
unique=True,
)
op.drop_table("rotation_policies")
# ### end Alembic commands ###
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
)
from tests.components.homekit_controller.common import setup_test_component
TEMPERATURE = ("temperature", "temperature.current")
HUMIDITY = ("humidity", "relative-humidity.current")
LIGHT_LEVEL = ("light", "light-level.current")
CARBON_DIOXIDE_LEVEL = ("carbon-dioxide", "carbon-dioxide.level")
BATTERY_LEVEL = ("battery", "battery-level")
CHARGING_STATE = ("battery", "charging-state")
LO_BATT = ("battery", "status-lo-batt")
def create_temperature_sensor_service(accessory):
"""Define temperature characteristics."""
service = accessory.add_service(ServicesTypes.TEMPERATURE_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.TEMPERATURE_CURRENT)
cur_state.value = 0
def create_humidity_sensor_service(accessory):
"""Define humidity characteristics."""
service = accessory.add_service(ServicesTypes.HUMIDITY_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT)
cur_state.value = 0
def create_light_level_sensor_service(accessory):
"""Define light level characteristics."""
service = accessory.add_service(ServicesTypes.LIGHT_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.LIGHT_LEVEL_CURRENT)
cur_state.value = 0
def create_carbon_dioxide_level_sensor_service(accessory):
"""Define carbon dioxide level characteristics."""
service = accessory.add_service(ServicesTypes.CARBON_DIOXIDE_SENSOR)
cur_state = service.add_char(CharacteristicsTypes.CARBON_DIOXIDE_LEVEL)
cur_state.value = 0
def create_battery_level_sensor(accessory):
"""Define battery level characteristics."""
service = accessory.add_service(ServicesTypes.BATTERY_SERVICE)
cur_state = service.add_char(CharacteristicsTypes.BATTERY_LEVEL)
cur_state.value = 100
low_battery = service.add_char(CharacteristicsTypes.STATUS_LO_BATT)
low_battery.value = 0
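    # Charging state: 0 = not charging, 1 = charging.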
charging_state = service.add_char(CharacteristicsTypes.CHARGING_STATE)
charging_state.value = 0
return service
async def test_temperature_sensor_read_state(hass, utcnow):
"""Test reading the state of a HomeKit temperature sensor accessory."""
helper = await setup_test_component(
hass, create_temperature_sensor_service, suffix="temperature"
)
helper.characteristics[TEMPERATURE].value = 10
state = await helper.poll_and_get_state()
assert state.state == "10"
helper.characteristics[TEMPERATURE].value = 20
state = await helper.poll_and_get_state()
assert state.state == "20"
assert state.attributes["device_class"] == DEVICE_CLASS_TEMPERATURE
async def test_humidity_sensor_read_state(hass, utcnow):
"""Test reading the state of a HomeKit humidity sensor accessory."""
helper = await setup_test_component(
hass, create_humidity_sensor_service, suffix="humidity"
)
helper.characteristics[HUMIDITY].value = 10
state = await helper.poll_and_get_state()
assert state.state == "10"
helper.characteristics[HUMIDITY].value = 20
state = await helper.poll_and_get_state()
assert state.state == "20"
assert state.attributes["device_class"] == DEVICE_CLASS_HUMIDITY
async def test_light_level_sensor_read_state(hass, utcnow):
"""Test reading the state of a HomeKit temperature sensor accessory."""
helper = await setup_test_component(
hass, create_light_level_sensor_service, suffix="light_level"
)
helper.characteristics[LIGHT_LEVEL].value = 10
state = await helper.poll_and_get_state()
assert state.state == "10"
helper.characteristics[LIGHT_LEVEL].value = 20
state = await helper.poll_and_get_state()
assert state.state == "20"
assert state.attributes["device_class"] == DEVICE_CLASS_ILLUMINANCE
async def test_carbon_dioxide_level_sensor_read_state(hass, utcnow):
"""Test reading the state of a HomeKit carbon dioxide sensor accessory."""
helper = await setup_test_component(
hass, create_carbon_dioxide_level_sensor_service, suffix="co2"
)
helper.characteristics[CARBON_DIOXIDE_LEVEL].value = 10
state = await helper.poll_and_get_state()
assert state.state == "10"
helper.characteristics[CARBON_DIOXIDE_LEVEL].value = 20
state = await helper.poll_and_get_state()
assert state.state == "20"
async def test_battery_level_sensor(hass, utcnow):
"""Test reading the state of a HomeKit battery level sensor."""
helper = await setup_test_component(
hass, create_battery_level_sensor, suffix="battery"
)
helper.characteristics[BATTERY_LEVEL].value = 100
state = await helper.poll_and_get_state()
assert state.state == "100"
assert state.attributes["icon"] == "mdi:battery"
helper.characteristics[BATTERY_LEVEL].value = 20
state = await helper.poll_and_get_state()
assert state.state == "20"
assert state.attributes["icon"] == "mdi:battery-20"
assert state.attributes["device_class"] == DEVICE_CLASS_BATTERY
async def test_battery_charging(hass, utcnow):
"""Test reading the state of a HomeKit battery's charging state."""
helper = await setup_test_component(
hass, create_battery_level_sensor, suffix="battery"
)
helper.characteristics[BATTERY_LEVEL].value = 0
helper.characteristics[CHARGING_STATE].value = 1
state = await helper.poll_and_get_state()
assert state.attributes["icon"] == "mdi:battery-outline"
helper.characteristics[BATTERY_LEVEL].value = 20
state = await helper.poll_and_get_state()
assert state.attributes["icon"] == "mdi:battery-charging-20"
async def test_battery_low(hass, utcnow):
"""Test reading the state of a HomeKit battery's low state."""
helper = await setup_test_component(
hass, create_battery_level_sensor, suffix="battery"
)
helper.characteristics[LO_BATT].value = 0
helper.characteristics[BATTERY_LEVEL].value = 1
state = await helper.poll_and_get_state()
assert state.attributes["icon"] == "mdi:battery-10"
helper.characteristics[LO_BATT].value = 1
state = await helper.poll_and_get_state()
assert state.attributes["icon"] == "mdi:battery-alert"
|
from weblate.fonts.models import FONT_STORAGE, Font
from weblate.utils.celery import app
@app.task(trail=False)
def cleanup_font_files():
"""Remove stale fonts."""
try:
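        # Storage.listdir() returns (directories, files); keep only the files.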
files = FONT_STORAGE.listdir(".")[1]
except OSError:
return
for name in files:
if name == "fonts.conf":
continue
if not Font.objects.filter(font=name).exists():
FONT_STORAGE.delete(name)
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(
3600 * 24, cleanup_font_files.s(), name="font-files-cleanup"
)
|
import io
from vcr.stubs import VCRHTTPResponse
def test_response_should_have_headers_field():
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"access-control-allow-credentials": ["true"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"access-control-allow-origin": ["*"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers is not None
def test_response_headers_should_be_equal_to_msg():
recorded_response = {
"status": {"message": b"OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers == response.msg
def test_response_headers_should_have_correct_values():
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["10806"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers.get("content-length") == "10806"
assert response.headers.get("date") == "Fri, 24 Oct 2014 18:35:37 GMT"
def test_response_parses_correctly_and_fp_attribute_error_is_not_thrown():
"""
Regression test for https://github.com/kevin1024/vcrpy/issues/440
:return:
"""
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"access-control-allow-credentials": ["true"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"access-control-allow-origin": ["*"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {
"string": b"\nPMID- 19416910\nOWN - NLM\nSTAT- MEDLINE\nDA - 20090513\nDCOM- "
b"20090622\nLR - "
b"20141209\nIS - 1091-6490 (Electronic)\nIS - 0027-8424 (Linking)\nVI - "
b"106\nIP - "
b"19\nDP - 2009 May 12\nTI - Genetic dissection of histone deacetylase "
b"requirement in "
b"tumor cells.\nPG - 7751-5\nLID - 10.1073/pnas.0903139106 [doi]\nAB - "
b"Histone "
b"deacetylase inhibitors (HDACi) represent a new group of drugs currently\n "
b" being "
b"tested in a wide variety of clinical applications. They are especially\n "
b" effective "
b"in preclinical models of cancer where they show antiproliferative\n "
b"action in many "
b"different types of cancer cells. Recently, the first HDACi was\n "
b"approved for the "
b"treatment of cutaneous T cell lymphomas. Most HDACi currently in\n "
b"clinical "
},
}
vcr_response = VCRHTTPResponse(recorded_response)
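    # Iterating the response through a buffered text stream exercises the code
    # path that previously raised AttributeError on the missing fp attribute.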
handle = io.TextIOWrapper(io.BufferedReader(vcr_response), encoding="utf-8")
handle = iter(handle)
articles = [line for line in handle]
assert len(articles) > 1
|
from pylutron_caseta import OCCUPANCY_GROUP_OCCUPIED
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OCCUPANCY,
BinarySensorEntity,
)
from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta binary_sensor platform.
Adds occupancy groups from the Caseta bridge associated with the
config_entry as binary_sensor entities.
"""
entities = []
bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id]
occupancy_groups = bridge.occupancy_groups
for occupancy_group in occupancy_groups.values():
entity = LutronOccupancySensor(occupancy_group, bridge)
entities.append(entity)
async_add_entities(entities, True)
class LutronOccupancySensor(LutronCasetaDevice, BinarySensorEntity):
"""Representation of a Lutron occupancy group."""
@property
def device_class(self):
"""Flag supported features."""
return DEVICE_CLASS_OCCUPANCY
@property
def is_on(self):
"""Return the brightness of the light."""
return self._device["status"] == OCCUPANCY_GROUP_OCCUPIED
async def async_added_to_hass(self):
"""Register callbacks."""
self._smartbridge.add_occupancy_subscriber(
self.device_id, self.async_write_ha_state
)
@property
def device_id(self):
"""Return the device ID used for calling pylutron_caseta."""
return self._device["occupancy_group_id"]
@property
def unique_id(self):
"""Return a unique identifier."""
return f"occupancygroup_{self.device_id}"
@property
def device_info(self):
"""Return the device info.
Sensor entities are aggregated from one or more physical
sensors by each room. Therefore, there shouldn't be devices
related to any sensor entities.
"""
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"device_id": self.device_id}
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from . import LUTRON_CONTROLLER, LUTRON_DEVICES, LutronDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Lutron lights."""
devs = []
for (area_name, device) in hass.data[LUTRON_DEVICES]["light"]:
dev = LutronLight(area_name, device, hass.data[LUTRON_CONTROLLER])
devs.append(dev)
add_entities(devs, True)
def to_lutron_level(level):
"""Convert the given Home Assistant light level (0-255) to Lutron (0.0-100.0)."""
return float((level * 100) / 255)
def to_hass_level(level):
"""Convert the given Lutron (0.0-100.0) light level to Home Assistant (0-255)."""
return int((level * 255) / 100)
class LutronLight(LutronDevice, LightEntity):
"""Representation of a Lutron Light, including dimmable."""
def __init__(self, area_name, lutron_device, controller):
"""Initialize the light."""
self._prev_brightness = None
super().__init__(area_name, lutron_device, controller)
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
@property
def brightness(self):
"""Return the brightness of the light."""
new_brightness = to_hass_level(self._lutron_device.last_level())
if new_brightness != 0:
self._prev_brightness = new_brightness
return new_brightness
def turn_on(self, **kwargs):
"""Turn the light on."""
if ATTR_BRIGHTNESS in kwargs and self._lutron_device.is_dimmable:
brightness = kwargs[ATTR_BRIGHTNESS]
elif self._prev_brightness == 0:
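            # The light was last seen fully off; restore to half brightness.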
brightness = 255 / 2
else:
brightness = self._prev_brightness
self._prev_brightness = brightness
self._lutron_device.level = to_lutron_level(brightness)
def turn_off(self, **kwargs):
"""Turn the light off."""
self._lutron_device.level = 0
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"lutron_integration_id": self._lutron_device.id}
@property
def is_on(self):
"""Return true if device is on."""
return self._lutron_device.last_level() > 0
def update(self):
"""Call when forcing a refresh of the device."""
if self._prev_brightness is None:
self._prev_brightness = to_hass_level(self._lutron_device.level)
|
import urllib2
from urllib import urlencode

try:
from xml.etree import ElementTree
except ImportError:
ElementTree = None
try:
    # ParseError lives in xml.etree.ElementTree (Python >= 2.7); fall back
    # to the base Exception on interpreters where it does not exist.
    from xml.etree.ElementTree import ParseError as ETParseError
except ImportError:
    ETParseError = Exception

import diamond.collector


class KafkaCollector(diamond.collector.Collector):

    ATTRIBUTE_TYPES = {
'double': float,
'float': float,
'int': int,
'java.lang.Object': float,
'long': long,
}

    def get_default_config_help(self):
config_help = super(KafkaCollector, self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
})
return config_help

    def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(KafkaCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 8082,
'path': 'kafka',
})
return config

    def _get(self, path, query_args=None):
if not path.startswith('/'):
path = '/' + path
qargs = {'template': 'identity'}
if query_args:
qargs.update(query_args)
url = 'http://%s:%i%s?%s' % (
self.config['host'], int(self.config['port']),
path, urlencode(qargs))
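        # The resulting URL looks like (a hypothetical example; the query
        # values are URL-encoded by urlencode):
        #   http://127.0.0.1:8082/serverbydomain?template=identity&querynames=...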
try:
response = urllib2.urlopen(url)
except urllib2.URLError as err:
self.log.error("%s: %s", url, err)
return None
try:
return ElementTree.fromstring(response.read())
except ETParseError:
self.log.error("Unable to parse response from mx4j")
return None

    def get_mbeans(self, pattern):
query_args = {'querynames': pattern}
mbeans = self._get('/serverbydomain', query_args)
if mbeans is None:
return
found_beans = set()
for mbean in mbeans.getiterator(tag='MBean'):
objectname = mbean.get('objectname')
if objectname:
found_beans.add(objectname)
return found_beans

    def query_mbean(self, objectname, key_prefix=None):
query_args = {
'objectname': objectname,
'operations': False,
'constructors': False,
'notifications': False,
}
attributes = self._get('/mbean', query_args)
if attributes is None:
return
if key_prefix is None:
# Could be 1 or 2 = in the string
# java.lang:type=Threading
# "kafka.controller":type="ControllerStats",
# name="LeaderElectionRateAndTimeMs"
split_num = objectname.count('=')
for i in range(split_num):
if i == 0:
key_prefix = objectname.split('=')[1]
if '"' in key_prefix:
key_prefix = key_prefix.split('"')[1]
if "," in key_prefix:
key_prefix = key_prefix.split(',')[0]
elif i > 0:
key = objectname.split('=')[i + 1]
if key:
if '"' in key:
key = key.split('"')[1]
key_prefix = key_prefix + '.' + key
key_prefix = key_prefix.replace(",", ".")
metrics = {}
for attrib in attributes.getiterator(tag='Attribute'):
atype = attrib.get('type')
ptype = self.ATTRIBUTE_TYPES.get(atype)
if not ptype:
continue
try:
value = ptype(attrib.get('value'))
except ValueError:
                # This can happen frequently, so log at debug level to
                # avoid flooding the logs.
                self.log.debug('Unable to parse the value for %s in %s',
                               atype, objectname)
continue
name = '.'.join([key_prefix, attrib.get('name')])
# Some prefixes and attributes could have spaces, thus we must
# sanitize them
name = name.replace(' ', '')
metrics[name] = value
return metrics
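
    # As an illustration (hypothetical values, not from the original file),
    # query_mbean('java.lang:type=Threading') could return something like
    # {'Threading.ThreadCount': 42, 'Threading.PeakThreadCount': 58}.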

    def collect(self):
if ElementTree is None:
self.log.error('Failed to import xml.etree.ElementTree')
return
# Get list of gatherable stats
query_list = [
'*kafka*:*',
'java.lang:type=GarbageCollector,name=*',
'java.lang:type=Threading'
]
mbeans = set()
for pattern in query_list:
            match = self.get_mbeans(pattern)
            # get_mbeans() returns None when the query itself failed.
            if match:
                mbeans.update(match)
metrics = {}
# Query each one for stats
for mbean in mbeans:
if mbean is None:
continue
            stats = self.query_mbean(mbean)
            if stats is None:
                self.log.error('Failed to get stats for %s', mbean)
                continue
            metrics.update(stats)
# Publish stats
for metric, value in metrics.iteritems():
self.publish(metric, value)
|
import sys
from flexx.util.testing import run_tests_if_main, skipif, skip, raises
from flexx.event.both_tester import run_in_both, this_is_js
from flexx import event
loop = event.loop


class MyObject(event.Component):

@event.emitter
def foo(self, v):
if not isinstance(v, (int, float)):
raise TypeError('Foo emitter expects a number.')
return dict(value=float(v))

    @event.emitter
def bar(self, v):
return dict(value=float(v)+1) # note plus 1

    @event.emitter
def wrong(self, v):
return float(v) # does not return a dict
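
    # NOTE: emitters are expected to return a dict (the event to emit);
    # ``wrong`` deliberately violates this so the tests below can check
    # that a TypeError is raised.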

    @event.reaction('foo')
def on_foo(self, *events):
print('foo', ', '.join([str(ev.value) for ev in events]))

    @event.reaction('bar')
def on_bar(self, *events):
print('bar', ', '.join([str(ev.value) for ev in events]))


class MyObject2(MyObject):

@event.emitter
def bar(self, v):
return super().bar(v + 10)


class MyObject3(MyObject):

@event.reaction('foo', mode='greedy')
def on_foo(self, *events):
print('foo', ', '.join([str(ev.value) for ev in events]))

    @event.reaction('bar', mode='greedy')
def on_bar(self, *events):
print('bar', ', '.join([str(ev.value) for ev in events]))


@run_in_both(MyObject)
def test_emitter_ok():
"""
foo 3.2
foo 3.2, 3.3
bar 4.8, 4.9
bar 4.9
"""
m = MyObject()
with loop:
m.foo(3.2)
with loop:
m.foo(3.2)
m.foo(3.3)
with loop:
m.bar(3.8)
m.bar(3.9)
with loop:
m.bar(3.9)


@run_in_both(MyObject2)
def test_emitter_overloading(): # and super()
"""
bar 14.2, 15.5
"""
m = MyObject2()
with loop:
m.bar(3.2)
m.bar(4.5)


@run_in_both(MyObject)
def test_emitter_order():
"""
foo 3.1, 3.2
bar 6.3, 6.4
foo 3.5, 3.6
bar 6.7, 6.8
bar 6.9, 6.9
"""
m = MyObject()
# Even though we emit foo 4 times between two event loop iterations,
# they are only grouped as much as to preserve order. This was not
# the case before the 2017 Flexx refactoring.
with loop:
m.foo(3.1)
m.foo(3.2)
m.bar(5.3)
m.bar(5.4)
m.foo(3.5)
m.foo(3.6)
m.bar(5.7)
m.bar(5.8)
# The last two occur after an event loop iter, so these cannot be grouped
# with the previous.
with loop:
m.bar(5.9)
m.bar(5.9)


@run_in_both(MyObject3)
def test_emitter_order_greedy():
"""
foo 3.1, 3.2, 3.5, 3.6
bar 6.3, 6.4, 6.7, 6.8
bar 6.9, 6.9
"""
m = MyObject3()
    # In greedy mode, all events for the same reaction are joined into a
    # single call, even when that breaks their relative order with events
    # handled by other reactions (contrast with test_emitter_order above).
with loop:
m.foo(3.1)
m.foo(3.2)
m.bar(5.3)
m.bar(5.4)
m.foo(3.5)
m.foo(3.6)
m.bar(5.7)
m.bar(5.8)
# The last two occur after an event loop iter, so these cannot be grouped
# with the previous.
with loop:
m.bar(5.9)
m.bar(5.9)


@run_in_both(MyObject)
def test_emitter_fail():
"""
fail TypeError
fail TypeError
fail ValueError
"""
m = MyObject()
try:
m.wrong(1.1)
except TypeError:
print('fail TypeError')
try:
m.foo('bla')
except TypeError:
print('fail TypeError')
try:
m.emit('bla:x')
except ValueError:
print('fail ValueError')


## Meta-ish tests that are similar for property/emitter/action/reaction


@run_in_both(MyObject)
def test_emitter_not_settable():
"""
fail AttributeError
"""
m = MyObject()
try:
m.foo = 3
except AttributeError:
print('fail AttributeError')


# We cannot prevent deletion in JS, otherwise we cannot overload.
def test_emitter_python_only():
m = MyObject()
# Emitter decorator needs proper callable
with raises(TypeError):
event.emitter(3)
if '__pypy__' in sys.builtin_module_names:
pass # skip
else:
with raises(TypeError):
event.emitter(isinstance)
# Check type of the instance attribute
assert isinstance(m.foo, event._emitter.Emitter)
# Cannot set or delete an emitter
with raises(AttributeError):
m.foo = 3
with raises(AttributeError):
del m.foo
# Repr and docs
assert 'emitter' in repr(m.__class__.foo).lower()
assert 'emitter' in repr(m.foo).lower()
assert 'foo' in repr(m.foo)


run_tests_if_main()
|
import os
import shutil
import time
from urllib import parse, request
from urllib.error import HTTPError, URLError
from .progressbar import ProgressBar
from .numerics import hashfunc
from .misc import sizeof_fmt
from ._logging import logger, verbose


# Adapted from nilearn


def _get_http(url, temp_file_name, initial_size, timeout, verbose_bool):
"""Safely (resume a) download to a file from http(s)."""
# Actually do the reading
response = None
extra = ''
if initial_size > 0:
logger.debug(' Resuming at %s' % (initial_size,))
req = request.Request(
url, headers={'Range': 'bytes=%s-' % (initial_size,)})
try:
response = request.urlopen(req, timeout=timeout)
content_range = response.info().get('Content-Range', None)
if (content_range is None or not content_range.startswith(
'bytes %s-' % (initial_size,))):
raise IOError('Server does not support resuming')
except (KeyError, HTTPError, URLError, IOError):
initial_size = 0
response = None
else:
extra = ', resuming at %s' % (sizeof_fmt(initial_size),)
if response is None:
response = request.urlopen(request.Request(url), timeout=timeout)
file_size = int(response.headers.get('Content-Length', '0').strip())
file_size += initial_size
url = response.geturl()
logger.info('Downloading %s (%s%s)' % (url, sizeof_fmt(file_size), extra))
del url
mode = 'ab' if initial_size > 0 else 'wb'
progress = ProgressBar(file_size, initial_size, unit='B',
mesg='Downloading', unit_scale=True,
unit_divisor=1024)
del file_size
chunk_size = 8192 # 2 ** 13
with open(temp_file_name, mode) as local_file:
while True:
t0 = time.time()
chunk = response.read(chunk_size)
            dt = time.time() - t0
            # Adapt the chunk size to the connection speed: grow it when
            # reads come back quickly, shrink it (but never below 8 kB)
            # when they are slow.
            if dt < 0.01:
                chunk_size *= 2
            elif dt > 0.1 and chunk_size > 8192:
                chunk_size = chunk_size // 2
if not chunk:
break
local_file.write(chunk)
progress.update_with_increment_value(len(chunk))


@verbose
def _fetch_file(url, file_name, print_destination=True, resume=True,
hash_=None, timeout=30., hash_type='md5', verbose=None):
"""Load requested file, downloading it if needed or requested.
Parameters
----------
url: string
The url of file to be downloaded.
file_name: string
Name, along with the path, of where downloaded file will be saved.
print_destination: bool, optional
If true, destination of where file was saved will be printed after
download finishes.
resume: bool, optional
If true, try to resume partially downloaded files.
hash_ : str | None
The hash of the file to check. If None, no checking is
performed.
timeout : float
The URL open timeout.
hash_type : str
The type of hashing to use such as "md5" or "sha1"
%(verbose)s
"""
# Adapted from NISL:
# https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
if hash_ is not None and (not isinstance(hash_, str) or
len(hash_) != 32) and hash_type == 'md5':
raise ValueError('Bad hash value given, should be a 32-character '
'string:\n%s' % (hash_,))
temp_file_name = file_name + ".part"
verbose_bool = (logger.level <= 20) # 20 is info
scheme = parse.urlparse(url).scheme
if scheme not in ('http', 'https'):
raise NotImplementedError('Cannot use scheme %r' % (scheme,))
try:
# Triage resume
if not os.path.exists(temp_file_name):
resume = False
if resume:
with open(temp_file_name, 'rb', buffering=0) as local_file:
local_file.seek(0, 2)
initial_size = local_file.tell()
del local_file
else:
initial_size = 0
_get_http(url, temp_file_name, initial_size, timeout, verbose_bool)
# check hash sum eg md5sum
if hash_ is not None:
logger.info('Verifying hash %s.' % (hash_,))
hashsum = hashfunc(temp_file_name, hash_type=hash_type)
if hash_ != hashsum:
raise RuntimeError('Hash mismatch for downloaded file %s, '
'expected %s but got %s'
% (temp_file_name, hash_, hashsum))
shutil.move(temp_file_name, file_name)
if print_destination is True:
logger.info('File saved as %s.\n' % file_name)
except Exception:
logger.error('Error while fetching file %s.'
' Dataset fetching aborted.' % url)
raise


def _url_to_local_path(url, path):
"""Mirror a url path in a local destination (keeping folder structure)."""
destination = parse.urlparse(url).path
# First char should be '/', and it needs to be discarded
if len(destination) < 2 or destination[0] != '/':
raise ValueError('Invalid URL')
destination = os.path.join(path, request.url2pathname(destination)[1:])
return destination
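
# For instance (a hypothetical illustration, not part of the original
# module), on a POSIX system:
#
#     _url_to_local_path('https://example.com/data/sub/file.bin', '/tmp/mirror')
#
# returns '/tmp/mirror/data/sub/file.bin', mirroring the URL's folder
# structure under the destination path.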
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import patch
from diamond.collector import Collector
from puppetagent import PuppetAgentCollector

##########################################################################


def run_only_if_yaml_is_available(func):
try:
import yaml
except ImportError:
yaml = None
pred = lambda: yaml is not None
return run_only(func, pred)


class TestPuppetAgentCollector(CollectorTestCase):

    def setUp(self):
config = get_collector_config('PuppetAgentCollector', {
'interval': 10,
'yaml_path': self.getFixturePath('last_run_summary.yaml')
})
self.collector = PuppetAgentCollector(config, None)

    def test_import(self):
self.assertTrue(PuppetAgentCollector)

    @run_only_if_yaml_is_available
@patch.object(Collector, 'publish')
def test(self, publish_mock):
self.collector.collect()
metrics = {
'changes.total': 1,
'events.failure': 0,
'events.success': 1,
'events.total': 1,
'resources.changed': 1,
'resources.failed': 0,
'resources.failed_to_restart': 0,
'resources.out_of_sync': 1,
'resources.restarted': 0,
'resources.scheduled': 0,
'resources.skipped': 6,
'resources.total': 439,
'time.anchor': 0.009641,
'time.augeas': 1.286514,
'time.config_retrieval': 8.06442093849182,
'time.cron': 0.00089,
'time.exec': 9.780635,
'time.file': 1.729348,
'time.filebucket': 0.000633,
'time.firewall': 0.007807,
'time.group': 0.013421,
'time.last_run': 1377125556,
'time.mailalias': 0.000335,
'time.mount': 0.002749,
'time.package': 1.831337,
'time.resources': 0.000371,
'time.service': 0.734021,
'time.ssh_authorized_key': 0.017625,
'time.total': 23.5117989384918,
'time.user': 0.02927,
'version.config': 1377123965,
}
unpublished_metrics = {
'version.puppet': '2.7.14',
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
self.assertUnpublishedMany(publish_mock, unpublished_metrics)

##########################################################################
if __name__ == "__main__":
unittest.main()
|