from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl.testing import absltest
class ClassA(absltest.TestCase):
"""Helper test case A for absltest_filtering_test."""
def testA(self):
sys.stderr.write('\nclass A test A\n')
def testB(self):
sys.stderr.write('\nclass A test B\n')
def testC(self):
sys.stderr.write('\nclass A test C\n')
class ClassB(absltest.TestCase):
"""Helper test case B for absltest_filtering_test."""
def testA(self):
sys.stderr.write('\nclass B test A\n')
def testB(self):
sys.stderr.write('\nclass B test B\n')
def testC(self):
sys.stderr.write('\nclass B test C\n')
def testD(self):
sys.stderr.write('\nclass B test D\n')
def testE(self):
sys.stderr.write('\nclass B test E\n')
self.fail('Force failure')
if __name__ == '__main__':
absltest.main()
|
revision = "1ae8e3104db8"
down_revision = "a02a678ddc25"
from alembic import op
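# Extends the public.log_type enum with "create_cert" and "update_cert" on
# upgrade; downgrade restores the original single-value enum.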
def upgrade():
op.sync_enum_values(
"public", "log_type", ["key_view"], ["create_cert", "key_view", "update_cert"]
)
def downgrade():
op.sync_enum_values(
"public", "log_type", ["create_cert", "key_view", "update_cert"], ["key_view"]
)
|
from __future__ import absolute_import, division, print_function
class Infinity(object):
def __repr__(self):
return "Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __neg__(self):
return NegativeInfinity
Infinity = Infinity()
class NegativeInfinity(object):
def __repr__(self):
return "-Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return True
def __le__(self, other):
return True
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return False
def __ge__(self, other):
return False
def __neg__(self):
return Infinity
NegativeInfinity = NegativeInfinity()
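# These singletons order above and below every other value through Python's
# reflected comparisons, so they can serve as sort sentinels. A quick sketch:
#
#     sorted([2, Infinity, NegativeInfinity, 7])
#     # -> [NegativeInfinity, 2, 7, Infinity]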
|
from lark import Lark
from lark.parsers.earley_forest import ForestVisitor, TreeForestTransformer
class TokenPrioritizer(ForestVisitor):
def visit_symbol_node_in(self, node):
# visit the entire forest by returning node.children
return node.children
def visit_packed_node_in(self, node):
return node.children
def visit_symbol_node_out(self, node):
priority = 0
for child in node.children:
# Tokens do not have a priority attribute
# count them as -1
priority += getattr(child, 'priority', -1)
node.priority = priority
def visit_packed_node_out(self, node):
priority = 0
for child in node.children:
priority += getattr(child, 'priority', -1)
node.priority = priority
def on_cycle(self, node, path):
raise Exception("Oops, we encountered a cycle.")
grammar = """
start: hello " " world | hello_world
hello: "Hello"
world: "World"
hello_world: "Hello World"
"""
parser = Lark(grammar, parser='earley', ambiguity='forest')
forest = parser.parse("Hello World")
print("Default prioritizer:")
tree = TreeForestTransformer(resolve_ambiguity=True).transform(forest)
print(tree.pretty())
forest = parser.parse("Hello World")
print("Custom prioritizer:")
tree = TreeForestTransformer(resolve_ambiguity=True, prioritizer=TokenPrioritizer()).transform(forest)
print(tree.pretty())
# Output:
#
# Default prioritizer:
# start
# hello Hello
#
# world World
#
# Custom prioritizer:
# start
# hello_world Hello World
|
import cherrypy
from cherrypy.test import helper
from cherrypy._json import json
json_out = cherrypy.config(**{'tools.json_out.on': True})
json_in = cherrypy.config(**{'tools.json_in.on': True})
class JsonTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root(object):
@cherrypy.expose
def plain(self):
return 'hello'
@cherrypy.expose
@json_out
def json_string(self):
return 'hello'
@cherrypy.expose
@json_out
def json_list(self):
return ['a', 'b', 42]
@cherrypy.expose
@json_out
def json_dict(self):
return {'answer': 42}
@cherrypy.expose
@json_in
def json_post(self):
if cherrypy.request.json == [13, 'c']:
return 'ok'
else:
return 'nok'
@cherrypy.expose
@json_out
@cherrypy.config(**{'tools.caching.on': True})
def json_cached(self):
return 'hello there'
root = Root()
cherrypy.tree.mount(root)
def test_json_output(self):
if json is None:
self.skip('json not found')
return
self.getPage('/plain')
self.assertBody('hello')
self.getPage('/json_string')
self.assertBody('"hello"')
self.getPage('/json_list')
self.assertBody('["a", "b", 42]')
self.getPage('/json_dict')
self.assertBody('{"answer": 42}')
def test_json_input(self):
if json is None:
self.skip('json not found')
return
body = '[13, "c"]'
headers = [('Content-Type', 'application/json'),
('Content-Length', str(len(body)))]
self.getPage('/json_post', method='POST', headers=headers, body=body)
self.assertBody('ok')
body = '[13, "c"]'
headers = [('Content-Type', 'text/plain'),
('Content-Length', str(len(body)))]
self.getPage('/json_post', method='POST', headers=headers, body=body)
self.assertStatus(415, 'Expected an application/json content type')
body = '[13, -]'
headers = [('Content-Type', 'application/json'),
('Content-Length', str(len(body)))]
self.getPage('/json_post', method='POST', headers=headers, body=body)
self.assertStatus(400, 'Invalid JSON document')
def test_cached(self):
if json is None:
self.skip('json not found')
return
self.getPage('/json_cached')
self.assertStatus(200, '"hello there"')
self.getPage('/json_cached')  # 2nd time to hit cache
self.assertStatus(200, '"hello there"')
|
from __future__ import print_function
import os
import sys
import argparse
_stash = globals()['_stash']
def construct_indices_from_list_spec(list_spec):
# Note unlike python, cut's indices start from 1
indices = []
for fld in list_spec.split(','):
if '-' in fld:
sidx, eidx = fld.split('-')
sidx = int(sidx) - 1
eidx = int(eidx) # -1 + 1 because base is 1 and eidx is inclusive
else:
sidx = int(fld) - 1
eidx = sidx + 1
indices.append((sidx, eidx))
return indices
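# For example, the spec '1,3-5' yields [(0, 1), (2, 5)]: zero-based,
# half-open slices covering field 1 and fields 3 through 5.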
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('-d', '--delimiter', nargs='?', metavar='DELIM', help='use DELIM instead of SPACE for field delimiter')
ap.add_argument('-f', '--fields', required=True, metavar='LIST', help='select only these fields')
ap.add_argument('files', nargs='*', help='files to cut')
ns = ap.parse_args(args)
indices = construct_indices_from_list_spec(ns.fields)
for infields in _stash.libcore.input_stream(ns.files):
if infields[0] is None:
_, filename, e = infields
print('%s: %s' % (filename, repr(e)))
else:
line, filename, lineno = infields
fields = line.split(ns.delimiter)
if len(fields) == 1:
print(fields[0])
else:
out = ' '.join((' '.join(fields[sidx:eidx]) for sidx, eidx in indices))
print(out)
if __name__ == '__main__':
main(sys.argv[1:])
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_COLD,
DOMAIN,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import AqualinkEntity
from .const import DOMAIN as AQUALINK_DOMAIN
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up discovered binary sensors."""
devs = []
for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]:
devs.append(HassAqualinkBinarySensor(dev))
async_add_entities(devs, True)
class HassAqualinkBinarySensor(AqualinkEntity, BinarySensorEntity):
"""Representation of a binary sensor."""
@property
def name(self) -> str:
"""Return the name of the binary sensor."""
return self.dev.label
@property
def is_on(self) -> bool:
"""Return whether the binary sensor is on or not."""
return self.dev.is_on
@property
def device_class(self) -> str:
"""Return the class of the binary sensor."""
if self.name == "Freeze Protection":
return DEVICE_CLASS_COLD
return None
|
import os.path
import coverage
from tests.coveragetest import CoverageTest
from tests.helpers import CheckUniqueFilenames
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
# Trace one file, but not the other. CheckUniqueFilenames will assert
# that _should_trace hasn't been called twice for the same file.
cov = coverage.Coverage(include=["f1.py"])
should_trace_hook = CheckUniqueFilenames.hook(cov, '_should_trace')
# Import the Python file, executing it.
self.start_import_stop(cov, "f2")
# Double-check that our files were checked.
abs_files = set(os.path.abspath(f) for f in should_trace_hook.filenames)
self.assertIn(os.path.abspath("f1.py"), abs_files)
self.assertIn(os.path.abspath("f2.py"), abs_files)
|
import string
import itertools
import textwrap
import pytest
from PyQt5.QtGui import QKeySequence
from qutebrowser.keyinput import basekeyparser
from qutebrowser.keyinput import keyutils
from unit.keyinput import test_keyutils
@pytest.mark.parametrize('entered, configured, match_type',
test_keyutils.TestKeySequence.MATCH_TESTS)
def test_matches_single(entered, configured, match_type):
entered = keyutils.KeySequence.parse(entered)
configured = keyutils.KeySequence.parse(configured)
trie = basekeyparser.BindingTrie()
trie[configured] = "eeloo"
command = "eeloo" if match_type == QKeySequence.ExactMatch else None
result = basekeyparser.MatchResult(match_type=match_type,
command=command,
sequence=entered)
assert trie.matches(entered) == result
def test_str():
bindings = {
keyutils.KeySequence.parse('a'): 'cmd-a',
keyutils.KeySequence.parse('ba'): 'cmd-ba',
keyutils.KeySequence.parse('bb'): 'cmd-bb',
keyutils.KeySequence.parse('cax'): 'cmd-cax',
keyutils.KeySequence.parse('cby'): 'cmd-cby',
}
trie = basekeyparser.BindingTrie()
trie.update(bindings)
expected = """
a:
=> cmd-a
b:
a:
=> cmd-ba
b:
=> cmd-bb
c:
a:
x:
=> cmd-cax
b:
y:
=> cmd-cby
"""
assert str(trie) == textwrap.dedent(expected).lstrip('\n')
@pytest.mark.parametrize('configured, expected', [
([],
# null match
[('a', QKeySequence.NoMatch),
('', QKeySequence.NoMatch)]),
(['abcd'],
[('abcd', QKeySequence.ExactMatch),
('abc', QKeySequence.PartialMatch)]),
(['aa', 'ab', 'ac', 'ad'],
[('ac', QKeySequence.ExactMatch),
('a', QKeySequence.PartialMatch),
('f', QKeySequence.NoMatch),
('acd', QKeySequence.NoMatch)]),
(['aaaaaaab', 'aaaaaaac', 'aaaaaaad'],
[('aaaaaaab', QKeySequence.ExactMatch),
('z', QKeySequence.NoMatch)]),
(string.ascii_letters,
[('a', QKeySequence.ExactMatch),
('!', QKeySequence.NoMatch)]),
])
def test_matches_tree(configured, expected, benchmark):
trie = basekeyparser.BindingTrie()
trie.update({keyutils.KeySequence.parse(keys): "eeloo"
for keys in configured})
def run():
for entered, match_type in expected:
sequence = keyutils.KeySequence.parse(entered)
command = ("eeloo" if match_type == QKeySequence.ExactMatch
else None)
result = basekeyparser.MatchResult(match_type=match_type,
command=command,
sequence=sequence)
assert trie.matches(sequence) == result
benchmark(run)
@pytest.mark.parametrize('configured', [
['a'],
itertools.permutations('asdfghjkl', 3)
])
def test_bench_create(configured, benchmark):
bindings = {keyutils.KeySequence.parse(keys): "dres"
for keys in configured}
def run():
trie = basekeyparser.BindingTrie()
trie.update(bindings)
benchmark(run)
|
import logging
from openzwavemqtt.const import ATTR_CODE_SLOT
from openzwavemqtt.exceptions import BaseOZWError
from openzwavemqtt.util.lock import clear_usercode, set_usercode
import voluptuous as vol
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
ATTR_USERCODE = "usercode"
SERVICE_SET_USERCODE = "set_usercode"
SERVICE_GET_USERCODE = "get_usercode"
SERVICE_CLEAR_USERCODE = "clear_usercode"
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave lock from config entry."""
@callback
def async_add_lock(value):
"""Add Z-Wave Lock."""
lock = ZWaveLock(value)
async_add_entities([lock])
hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
async_dispatcher_connect(hass, f"{DOMAIN}_new_{LOCK_DOMAIN}", async_add_lock)
)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_SET_USERCODE,
{
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
vol.Required(ATTR_USERCODE): cv.string,
},
"async_set_usercode",
)
platform.async_register_entity_service(
SERVICE_CLEAR_USERCODE,
{vol.Required(ATTR_CODE_SLOT): vol.Coerce(int)},
"async_clear_usercode",
)
def _call_util_lock_function(function, *args):
"""Call an openzwavemqtt.util.lock function and return success of call."""
try:
function(*args)
except BaseOZWError as err:
_LOGGER.error("%s: %s", type(err), err.args[0])
return False
return True
class ZWaveLock(ZWaveDeviceEntity, LockEntity):
"""Representation of a Z-Wave lock."""
@property
def is_locked(self):
"""Return a boolean for the state of the lock."""
return bool(self.values.primary.value)
async def async_lock(self, **kwargs):
"""Lock the lock."""
self.values.primary.send_value(True)
async def async_unlock(self, **kwargs):
"""Unlock the lock."""
self.values.primary.send_value(False)
@callback
def async_set_usercode(self, code_slot, usercode):
"""Set the usercode to index X on the lock."""
if _call_util_lock_function(
set_usercode, self.values.primary.node, code_slot, usercode
):
_LOGGER.debug("User code at slot %s set", code_slot)
@callback
def async_clear_usercode(self, code_slot):
"""Clear usercode in slot X on the lock."""
if _call_util_lock_function(
clear_usercode, self.values.primary.node, code_slot
):
_LOGGER.info("Usercode at slot %s is cleared", code_slot)
|
from homeassistant.components.roku.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.components.roku import setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_config_entry_not_ready(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Roku configuration entry not ready."""
entry = await setup_integration(hass, aioclient_mock, error=True)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the Roku configuration entry unloading."""
with patch(
"homeassistant.components.roku.media_player.async_setup_entry",
return_value=True,
), patch(
"homeassistant.components.roku.remote.async_setup_entry",
return_value=True,
):
entry = await setup_integration(hass, aioclient_mock)
assert hass.data[DOMAIN][entry.entry_id]
assert entry.state == ENTRY_STATE_LOADED
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.entry_id not in hass.data[DOMAIN]
assert entry.state == ENTRY_STATE_NOT_LOADED
|
import numpy as np
import time
import numbers
from ..parallel import parallel_func
from ..fixes import BaseEstimator, is_classifier, _get_check_scoring
from ..utils import logger, warn, fill_doc
class LinearModel(BaseEstimator):
"""Compute and store patterns from linear models.
The linear model coefficients (filters) are used to extract discriminant
neural sources from the measured data. This class computes the
corresponding patterns of these linear filters to make them more
interpretable :footcite:`HaufeEtAl2014`.
Parameters
----------
model : object | None
A linear model from scikit-learn with a fit method
that updates a ``coef_`` attribute.
If None the model will be LogisticRegression.
Attributes
----------
filters_ : ndarray, shape ([n_targets], n_features)
If fit, the filters used to decompose the data.
patterns_ : ndarray, shape ([n_targets], n_features)
If fit, the patterns used to restore M/EEG signals.
See Also
--------
CSP
mne.preprocessing.ICA
mne.preprocessing.Xdawn
Notes
-----
.. versionadded:: 0.10
References
----------
.. footbibliography::
"""
def __init__(self, model=None): # noqa: D102
if model is None:
from sklearn.linear_model import LogisticRegression
model = LogisticRegression(solver='liblinear')
self.model = model
self._estimator_type = getattr(model, "_estimator_type", None)
def fit(self, X, y, **fit_params):
"""Estimate the coefficients of the linear model.
Save the coefficients in the attribute ``filters_`` and
compute the attribute ``patterns_``.
Parameters
----------
X : array, shape (n_samples, n_features)
The training input samples to estimate the linear coefficients.
y : array, shape (n_samples, [n_targets])
The target values.
**fit_params : dict of string -> object
Parameters to pass to the fit method of the estimator.
Returns
-------
self : instance of LinearModel
Returns the modified instance.
"""
X, y = np.asarray(X), np.asarray(y)
if X.ndim != 2:
raise ValueError('LinearModel only accepts 2-dimensional X, got '
'%s instead.' % (X.shape,))
if y.ndim > 2:
raise ValueError('LinearModel only accepts up to 2-dimensional y, '
'got %s instead.' % (y.shape,))
# fit the Model
self.model.fit(X, y, **fit_params)
# Computes patterns using Haufe's trick: A = Cov_X . W . Precision_Y
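# For a single target, Precision_Y is a scalar that would only rescale the
# pattern, so inv_Y stays 1.; for multi-target y it is the pseudo-inverse
# of the target covariance, computed below.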
inv_Y = 1.
X = X - X.mean(0, keepdims=True)
if y.ndim == 2 and y.shape[1] != 1:
y = y - y.mean(0, keepdims=True)
inv_Y = np.linalg.pinv(np.cov(y.T))
self.patterns_ = np.cov(X.T).dot(self.filters_.T.dot(inv_Y)).T
return self
@property
def filters_(self):
if hasattr(self.model, 'coef_'):
# Standard Linear Model
filters = self.model.coef_
elif hasattr(self.model.best_estimator_, 'coef_'):
# Linear Model with GridSearchCV
filters = self.model.best_estimator_.coef_
else:
raise ValueError('model does not have a `coef_` attribute.')
if filters.ndim == 2 and filters.shape[0] == 1:
filters = filters[0]
return filters
def transform(self, X):
"""Transform the data using the linear model.
Parameters
----------
X : array, shape (n_samples, n_features)
The data to transform.
Returns
-------
y_pred : array, shape (n_samples,)
The predicted targets.
"""
return self.model.transform(X)
def fit_transform(self, X, y):
"""Fit the data and transform it using the linear model.
Parameters
----------
X : array, shape (n_samples, n_features)
The training input samples to estimate the linear coefficients.
y : array, shape (n_samples,)
The target values.
Returns
-------
y_pred : array, shape (n_samples,)
The predicted targets.
"""
return self.fit(X, y).transform(X)
def predict(self, X):
"""Compute predictions of y from X.
Parameters
----------
X : array, shape (n_samples, n_features)
The data used to compute the predictions.
Returns
-------
y_pred : array, shape (n_samples,)
The predictions.
"""
return self.model.predict(X)
def predict_proba(self, X):
"""Compute probabilistic predictions of y from X.
Parameters
----------
X : array, shape (n_samples, n_features)
The data used to compute the predictions.
Returns
-------
y_pred : array, shape (n_samples, n_classes)
The probabilities.
"""
return self.model.predict_proba(X)
def decision_function(self, X):
"""Compute distance from the decision function of y from X.
Parameters
----------
X : array, shape (n_samples, n_features)
The data used to compute the predictions.
Returns
-------
y_pred : array, shape (n_samples, n_classes)
The distances.
"""
return self.model.decision_function(X)
def score(self, X, y):
"""Score the linear model computed on the given test data.
Parameters
----------
X : array, shape (n_samples, n_features)
The data to transform.
y : array, shape (n_samples,)
The target values.
Returns
-------
score : float
Score of the linear model.
"""
return self.model.score(X, y)
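# A minimal usage sketch for LinearModel on synthetic data (assumed shapes),
# showing the fitted ``filters_`` and Haufe-transformed ``patterns_``:
#
#     rng = np.random.RandomState(0)
#     X = rng.randn(100, 5)
#     y = (X[:, 0] > 0).astype(int)
#     model = LinearModel().fit(X, y)
#     model.filters_.shape, model.patterns_.shape  # -> (5,), (5,)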
def _set_cv(cv, estimator=None, X=None, y=None):
"""Set the default CV depending on whether clf is classifier/regressor."""
# Detect whether classification or regression
if estimator in ['classifier', 'regressor']:
est_is_classifier = estimator == 'classifier'
else:
est_is_classifier = is_classifier(estimator)
# Setup CV
from sklearn import model_selection as models
from sklearn.model_selection import (check_cv, StratifiedKFold, KFold)
if isinstance(cv, (int, np.int64)):
XFold = StratifiedKFold if est_is_classifier else KFold
cv = XFold(n_splits=cv)
elif isinstance(cv, str):
if not hasattr(models, cv):
raise ValueError('Unknown cross-validation')
cv = getattr(models, cv)
cv = cv()
cv = check_cv(cv=cv, y=y, classifier=est_is_classifier)
# Extract train and test set to retrieve them at predict time
if hasattr(cv, 'split'):
cv_splits = [(train, test) for train, test in
cv.split(X=np.zeros_like(y), y=y)]
else:
# XXX support sklearn.cross_validation cv
cv_splits = [(train, test) for train, test in cv]
if not np.all([len(train) for train, _ in cv_splits]):
raise ValueError('Some folds do not have any train epochs.')
return cv, cv_splits
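# Sketch (hypothetical X, y): _set_cv(5, 'classifier', X=X, y=y) returns a
# StratifiedKFold instance plus the materialized (train, test) index pairs.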
def _check_estimator(estimator, get_params=True):
"""Check whether an object has the methods required by sklearn."""
valid_methods = ('predict', 'transform', 'predict_proba',
'decision_function')
if (
(not hasattr(estimator, 'fit')) or
(not any(hasattr(estimator, method) for method in valid_methods))
):
raise ValueError('estimator must be a scikit-learn transformer or '
'an estimator with the fit and a predict-like (e.g. '
'predict_proba) or a transform method.')
if get_params and not hasattr(estimator, 'get_params'):
raise ValueError('estimator must be a scikit-learn transformer or an '
'estimator with the get_params method that allows '
'cloning.')
def _get_inverse_funcs(estimator, terminal=True):
"""Retrieve the inverse functions of an pipeline or an estimator."""
inverse_func = [False]
if hasattr(estimator, 'steps'):
# if pipeline, retrieve all steps by nesting
inverse_func = list()
for _, est in estimator.steps:
inverse_func.extend(_get_inverse_funcs(est, terminal=False))
elif hasattr(estimator, 'inverse_transform'):
# if not pipeline attempt to retrieve inverse function
inverse_func = [estimator.inverse_transform]
# If terminal node, check that the last estimator is a classifier,
# and remove it from the transformers.
if terminal:
last_is_estimator = inverse_func[-1] is False
all_invertible = False not in inverse_func[:-1]
if last_is_estimator and all_invertible:
# keep all inverse transformation and remove last estimation
inverse_func = inverse_func[:-1]
else:
inverse_func = list()
return inverse_func
def get_coef(estimator, attr='filters_', inverse_transform=False):
"""Retrieve the coefficients of an estimator ending with a Linear Model.
This is typically useful to retrieve "spatial filters" or "spatial
patterns" of decoding models :footcite:`HaufeEtAl2014`.
Parameters
----------
estimator : object | None
An estimator from scikit-learn.
attr : str
The name of the coefficient attribute to retrieve, typically
``'filters_'`` (default) or ``'patterns_'``.
inverse_transform : bool
If True, returns the coefficients after inverse transforming them with
the transformer steps of the estimator.
Returns
-------
coef : array
The coefficients.
References
----------
.. footbibliography::
"""
# Get the coefficients of the last estimator in case of nested pipeline
est = estimator
while hasattr(est, 'steps'):
est = est.steps[-1][1]
squeeze_first_dim = False
# If SlidingEstimator, loop across estimators
if hasattr(est, 'estimators_'):
coef = list()
for this_est in est.estimators_:
coef.append(get_coef(this_est, attr, inverse_transform))
coef = np.transpose(coef)
coef = coef[np.newaxis] # fake a sample dimension
squeeze_first_dim = True
elif not hasattr(est, attr):
raise ValueError('This estimator does not have a %s attribute:\n%s'
% (attr, est))
else:
coef = getattr(est, attr)
if coef.ndim == 1:
coef = coef[np.newaxis]
squeeze_first_dim = True
# inverse pattern e.g. to get back physical units
if inverse_transform:
if not hasattr(estimator, 'steps') and not hasattr(est, 'estimators_'):
raise ValueError('inverse_transform can only be applied onto '
'pipeline estimators.')
# The inverse_transform parameter will call this method on any
# estimator contained in the pipeline, in reverse order.
for inverse_func in _get_inverse_funcs(estimator)[::-1]:
coef = inverse_func(coef)
if squeeze_first_dim:
coef = coef[0]
return coef
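# A sketch, assuming sklearn's make_pipeline and StandardScaler: for a fitted
# pipeline ending in LinearModel,
#
#     clf = make_pipeline(StandardScaler(), LinearModel()).fit(X, y)
#     patterns = get_coef(clf, 'patterns_', inverse_transform=True)
#
# returns the patterns projected back through the scaler, i.e. in the
# original data units.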
@fill_doc
def cross_val_multiscore(estimator, X, y=None, groups=None, scoring=None,
cv=None, n_jobs=1, verbose=0, fit_params=None,
pre_dispatch='2*n_jobs'):
"""Evaluate a score by cross-validation.
Parameters
----------
estimator : instance of sklearn.base.BaseEstimator
The object to use to fit the data.
Must implement the 'fit' method.
X : array-like, shape (n_samples, n_dimensional_features)
The data to fit. Can be, for example, a list or an array of at least
two dimensions.
y : array-like, shape (n_samples, n_targets)
The target variable to try to predict in the case of
supervised learning.
groups : array-like, with shape (n_samples,)
Group labels for the samples used while splitting the dataset into
train/test set.
scoring : str, callable | None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
Note that when using an estimator which inherently returns
multidimensional output - in particular, SlidingEstimator
or GeneralizingEstimator - you should set the scorer
there, not here.
cv : int, cross-validation generator | iterable
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross validation,
- integer, to specify the number of folds in a ``(Stratified)KFold``,
- An object to be used as a cross-validation generator.
- An iterable yielding train, test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass,
:class:`sklearn.model_selection.StratifiedKFold` is used. In all
other cases, :class:`sklearn.model_selection.KFold` is used.
%(n_jobs)s
verbose : int, optional
The verbosity level.
fit_params : dict, optional
Parameters to pass to the fit method of the estimator.
pre_dispatch : int, or str, optional
Controls the number of jobs that get dispatched during parallel
execution. Reducing this number can be useful to avoid an
explosion of memory consumption when more jobs get dispatched
than CPUs can process. This parameter can be:
- None, in which case all the jobs are immediately
created and spawned. Use this for lightweight and
fast-running jobs, to avoid delays due to on-demand
spawning of the jobs
- An int, giving the exact number of total jobs that are
spawned
- A string, giving an expression as a function of n_jobs,
as in '2*n_jobs'
Returns
-------
scores : array of float, shape (n_splits,) | shape (n_splits, n_scores)
Array of scores of the estimator for each run of the cross validation.
"""
# This code is copied from sklearn
from sklearn.base import clone
from sklearn.utils import indexable
from sklearn.model_selection._split import check_cv
check_scoring = _get_check_scoring()
X, y, groups = indexable(X, y, groups)
cv = check_cv(cv, y, classifier=is_classifier(estimator))
cv_iter = list(cv.split(X, y, groups))
scorer = check_scoring(estimator, scoring=scoring)
# We clone the estimator to make sure that all the folds are
# independent, and that it is pickle-able.
# Note: this parallelization is implemented using MNE Parallel
parallel, p_func, n_jobs = parallel_func(_fit_and_score, n_jobs,
pre_dispatch=pre_dispatch)
scores = parallel(p_func(clone(estimator), X, y, scorer, train, test,
0, None, fit_params)
for train, test in cv_iter)
return np.array(scores)[:, 0, ...] # flatten over joblib output.
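# Sketch (synthetic X, y): one score per fold,
#
#     scores = cross_val_multiscore(LinearModel(), X, y, cv=5, n_jobs=1)
#
# scores has shape (n_splits,); estimators with multidimensional output
# (e.g. SlidingEstimator) yield (n_splits, n_scores) instead.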
def _fit_and_score(estimator, X, y, scorer, train, test, verbose,
parameters, fit_params, return_train_score=False,
return_parameters=False, return_n_test_samples=False,
return_times=False, error_score='raise'):
"""Fit estimator and compute scores for a given dataset split."""
# This code is adapted from sklearn
from ..fixes import _check_fit_params
from sklearn.utils.metaestimators import _safe_split
from sklearn.utils.validation import _num_samples
if verbose > 1:
if parameters is None:
msg = ''
else:
msg = '%s' % (', '.join('%s=%s' % (k, v)
for k, v in parameters.items()))
print("[CV] %s %s" % (msg, (64 - len(msg)) * '.'))
# Adjust length of sample weights
fit_params = fit_params if fit_params is not None else {}
fit_params = _check_fit_params(X, fit_params, train)
if parameters is not None:
estimator.set_params(**parameters)
start_time = time.time()
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, y_test = _safe_split(estimator, X, y, test, train)
try:
if y_train is None:
estimator.fit(X_train, **fit_params)
else:
estimator.fit(X_train, y_train, **fit_params)
except Exception as e:
# Note fit time as time until error
fit_time = time.time() - start_time
score_time = 0.0
if error_score == 'raise':
raise
elif isinstance(error_score, numbers.Number):
test_score = error_score
if return_train_score:
train_score = error_score
warn("Classifier fit failed. The score on this train-test"
" partition for these parameters will be set to %f. "
"Details: \n%r" % (error_score, e))
else:
raise ValueError("error_score must be the string 'raise' or a"
" numeric value. (Hint: if using 'raise', please"
" make sure that it has been spelled correctly.)")
else:
fit_time = time.time() - start_time
test_score = _score(estimator, X_test, y_test, scorer)
score_time = time.time() - start_time - fit_time
if return_train_score:
train_score = _score(estimator, X_train, y_train, scorer)
if verbose > 2:
msg += ", score=%f" % test_score
if verbose > 1:
total_time = score_time + fit_time
end_msg = "%s, total=%s" % (msg, logger.short_format_time(total_time))
print("[CV] %s %s" % ((64 - len(end_msg)) * '.', end_msg))
ret = [train_score, test_score] if return_train_score else [test_score]
if return_n_test_samples:
ret.append(_num_samples(X_test))
if return_times:
ret.extend([fit_time, score_time])
if return_parameters:
ret.append(parameters)
return ret
def _score(estimator, X_test, y_test, scorer):
"""Compute the score of an estimator on a given test set.
This code is the same as sklearn.model_selection._validation._score
but accepts to output arrays instead of floats.
"""
if y_test is None:
score = scorer(estimator, X_test)
else:
score = scorer(estimator, X_test, y_test)
if hasattr(score, 'item'):
try:
# e.g. unwrap memmapped scalars
score = score.item()
except ValueError:
# non-scalar?
pass
return score
|
from homeassistant import config_entries
from homeassistant.components.volumio.config_flow import CannotConnectError
from homeassistant.components.volumio.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
TEST_SYSTEM_INFO = {"id": "1111-1111-1111-1111", "name": "TestVolumio"}
TEST_CONNECTION = {
"host": "1.1.1.1",
"port": 3000,
}
TEST_DISCOVERY = {
"host": "1.1.1.1",
"port": 3000,
"properties": {"volumioName": "discovered", "UUID": "2222-2222-2222-2222"},
}
TEST_DISCOVERY_RESULT = {
"host": TEST_DISCOVERY["host"],
"port": TEST_DISCOVERY["port"],
"id": TEST_DISCOVERY["properties"]["UUID"],
"name": TEST_DISCOVERY["properties"]["volumioName"],
}
async def test_form(hass):
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
return_value=TEST_SYSTEM_INFO,
), patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "TestVolumio"
assert result2["data"] == {**TEST_SYSTEM_INFO, **TEST_CONNECTION}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_updates_unique_id(hass):
"""Test a duplicate id aborts and updates existing entry."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_SYSTEM_INFO["id"],
data={
"host": "dummy",
"port": 11,
"name": "dummy",
"id": TEST_SYSTEM_INFO["id"],
},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
return_value=TEST_SYSTEM_INFO,
), patch("homeassistant.components.volumio.async_setup", return_value=True), patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
assert entry.data == {**TEST_SYSTEM_INFO, **TEST_CONNECTION}
async def test_empty_system_info(hass):
"""Test old volumio versions with empty system info."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
return_value={},
), patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == TEST_CONNECTION["host"]
assert result2["data"] == {
"host": TEST_CONNECTION["host"],
"port": TEST_CONNECTION["port"],
"name": TEST_CONNECTION["host"],
"id": None,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=CannotConnectError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_exception(hass):
"""Test we handle generic error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
async def test_discovery(hass):
"""Test discovery flow works."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
return_value=TEST_SYSTEM_INFO,
), patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == TEST_DISCOVERY_RESULT["name"]
assert result2["data"] == TEST_DISCOVERY_RESULT
assert result2["result"]
assert result2["result"].unique_id == TEST_DISCOVERY_RESULT["id"]
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_discovery_cannot_connect(hass):
"""Test discovery aborts if cannot connect."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=CannotConnectError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
assert result2["type"] == "abort"
assert result2["reason"] == "cannot_connect"
async def test_discovery_duplicate_data(hass):
"""Test discovery aborts if same mDNS packet arrives."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "discovery_confirm"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress"
async def test_discovery_updates_unique_id(hass):
"""Test a duplicate discovery id aborts and updates existing entry."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_DISCOVERY_RESULT["id"],
data={
"host": "dummy",
"port": 11,
"name": "dummy",
"id": TEST_DISCOVERY_RESULT["id"],
},
state=config_entries.ENTRY_STATE_SETUP_RETRY,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data == TEST_DISCOVERY_RESULT
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
from pyramid.response import Response
from pyramid.view import view_config
from paasta_tools.api import settings
from paasta_tools.api.views.exception import ApiFailure
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.kubernetes_tools import KubernetesDeploymentConfig
from paasta_tools.marathon_tools import MarathonServiceConfig
@view_config(route_name="service.autoscaler.get", request_method="GET", renderer="json")
def get_autoscaler_count(request):
service = request.swagger_data.get("service")
instance = request.swagger_data.get("instance")
cluster = settings.cluster
soa_dir = settings.soa_dir
instance_config = get_instance_config(service, instance, cluster, soa_dir)
if not isinstance(
instance_config, (KubernetesDeploymentConfig, MarathonServiceConfig)
):
error_message = (
f"Autoscaling is not supported for {service}.{instance} because instance type is not "
f"marathon or kubernetes."
)
raise ApiFailure(error_message, 501)
response_body = {
"desired_instances": instance_config.get_instances(),
"calculated_instances": instance_config.get_instances(with_limit=False),
}
return Response(json_body=response_body, status_code=200)
@view_config(
route_name="service.autoscaler.post", request_method="POST", renderer="json"
)
def update_autoscaler_count(request):
service = request.swagger_data.get("service")
instance = request.swagger_data.get("instance")
cluster = settings.cluster
soa_dir = settings.soa_dir
desired_instances = request.swagger_data.get("json_body")["desired_instances"]
if not isinstance(desired_instances, int):
error_message = 'The provided body does not have an integer value for "desired_instances": {}'.format(
request.swagger_data.get("json_body")
)
raise ApiFailure(error_message, 500)
instance_config = get_instance_config(service, instance, cluster, soa_dir, True)
if not isinstance(
instance_config, (KubernetesDeploymentConfig, MarathonServiceConfig)
):
error_message = (
f"Autoscaling is not supported for {service}.{instance} because instance type is not "
f"marathon or kubernetes."
)
raise ApiFailure(error_message, 501)
max_instances = instance_config.get_max_instances()
if max_instances is None:
error_message = f"Autoscaling is not enabled for {service}.{instance}"
raise ApiFailure(error_message, 404)
min_instances = instance_config.get_min_instances()
status = "SUCCESS"
if desired_instances > max_instances:
desired_instances = max_instances
status = (
"WARNING desired_instances is greater than max_instances %d" % max_instances
)
elif desired_instances < min_instances:
desired_instances = min_instances
status = (
"WARNING desired_instances is less than min_instances %d" % min_instances
)
try:
if isinstance(instance_config, KubernetesDeploymentConfig):
instance_config.set_autoscaled_instances(
instance_count=desired_instances, kube_client=settings.kubernetes_client
)
else:
instance_config.set_autoscaled_instances(instance_count=desired_instances)
except Exception as err:
raise ApiFailure(err, 500)
response_body = {"desired_instances": desired_instances, "status": status}
return Response(json_body=response_body, status_code=202)
|
from __future__ import division
from collections import defaultdict
from datetime import timedelta
import glob
import json
import logging
import os
import re
from appdirs import AppDirs
from babelfish import Error as BabelfishError, Language
import click
from dogpile.cache.backends.file import AbstractFileLock
from dogpile.util.readwrite_lock import ReadWriteMutex
from six.moves import configparser
from subliminal import (AsyncProviderPool, Episode, Movie, Video, __version__, check_video, compute_score, get_scores,
provider_manager, refine, refiner_manager, region, save_subtitles, scan_video, scan_videos)
from subliminal.core import ARCHIVE_EXTENSIONS, search_external_subtitles
logger = logging.getLogger(__name__)
class MutexLock(AbstractFileLock):
""":class:`MutexLock` is a thread-based rw lock based on :class:`dogpile.core.ReadWriteMutex`."""
def __init__(self, filename):
self.mutex = ReadWriteMutex()
def acquire_read_lock(self, wait):
ret = self.mutex.acquire_read_lock(wait)
return wait or ret
def acquire_write_lock(self, wait):
ret = self.mutex.acquire_write_lock(wait)
return wait or ret
def release_read_lock(self):
return self.mutex.release_read_lock()
def release_write_lock(self):
return self.mutex.release_write_lock()
class Config(object):
"""A :class:`~configparser.ConfigParser` wrapper to store configuration.
Interaction with the configuration is done with the properties.
:param str path: path to the configuration file.
"""
def __init__(self, path):
#: Path to the configuration file
self.path = path
#: The underlying configuration object
self.config = configparser.SafeConfigParser()
self.config.add_section('general')
self.config.set('general', 'languages', json.dumps(['en']))
self.config.set('general', 'providers', json.dumps(sorted([p.name for p in provider_manager])))
self.config.set('general', 'refiners', json.dumps(sorted([r.name for r in refiner_manager])))
self.config.set('general', 'single', str(0))
self.config.set('general', 'embedded_subtitles', str(1))
self.config.set('general', 'age', str(int(timedelta(weeks=2).total_seconds())))
self.config.set('general', 'hearing_impaired', str(1))
self.config.set('general', 'min_score', str(0))
def read(self):
"""Read the configuration from :attr:`path`"""
self.config.read(self.path)
def write(self):
"""Write the configuration to :attr:`path`"""
with open(self.path, 'w') as f:
self.config.write(f)
@property
def languages(self):
return {Language.fromietf(l) for l in json.loads(self.config.get('general', 'languages'))}
@languages.setter
def languages(self, value):
self.config.set('general', 'languages', json.dumps(sorted([str(l) for l in value])))
@property
def providers(self):
return json.loads(self.config.get('general', 'providers'))
@providers.setter
def providers(self, value):
self.config.set('general', 'providers', json.dumps(sorted([p.lower() for p in value])))
@property
def refiners(self):
return json.loads(self.config.get('general', 'refiners'))
@refiners.setter
def refiners(self, value):
self.config.set('general', 'refiners', json.dumps([r.lower() for r in value]))
@property
def single(self):
return self.config.getboolean('general', 'single')
@single.setter
def single(self, value):
self.config.set('general', 'single', str(int(value)))
@property
def embedded_subtitles(self):
return self.config.getboolean('general', 'embedded_subtitles')
@embedded_subtitles.setter
def embedded_subtitles(self, value):
self.config.set('general', 'embedded_subtitles', str(int(value)))
@property
def age(self):
return timedelta(seconds=self.config.getint('general', 'age'))
@age.setter
def age(self, value):
self.config.set('general', 'age', str(int(value.total_seconds())))
@property
def hearing_impaired(self):
return self.config.getboolean('general', 'hearing_impaired')
@hearing_impaired.setter
def hearing_impaired(self, value):
self.config.set('general', 'hearing_impaired', str(int(value)))
@property
def min_score(self):
return self.config.getfloat('general', 'min_score')
@min_score.setter
def min_score(self, value):
self.config.set('general', 'min_score', str(value))
@property
def provider_configs(self):
rv = {}
for provider in provider_manager:
if self.config.has_section(provider.name):
rv[provider.name] = {k: v for k, v in self.config.items(provider.name)}
return rv
@provider_configs.setter
def provider_configs(self, value):
# loop over provider configurations
for provider, config in value.items():
# create the corresponding section if necessary
if not self.config.has_section(provider):
self.config.add_section(provider)
# add config options
for k, v in config.items():
self.config.set(provider, k, v)
@property
def refiner_configs(self):
rv = {}
for refiner in refiner_manager:
if self.config.has_section(refiner.name):
rv[refiner.name] = {k: v for k, v in self.config.items(refiner.name)}
return rv
@refiner_configs.setter
def refiner_configs(self, value):
# loop over refiner configurations
for refiner, config in value.items():
# create the corresponding section if necessary
if not self.config.has_section(refiner):
self.config.add_section(refiner)
# add config options
for k, v in config.items():
self.config.set(refiner, k, v)
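# A minimal usage sketch (hypothetical path): load, tweak and persist the
# configuration through the typed properties defined above.
#
#     config = Config('/tmp/subliminal.ini')
#     config.read()
#     config.languages = {Language.fromietf('en'), Language.fromietf('pt-BR')}
#     config.single = True
#     config.write()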
class LanguageParamType(click.ParamType):
""":class:`~click.ParamType` for languages that returns a :class:`~babelfish.language.Language`"""
name = 'language'
def convert(self, value, param, ctx):
try:
return Language.fromietf(value)
except BabelfishError:
self.fail('%s is not a valid language' % value)
LANGUAGE = LanguageParamType()
class AgeParamType(click.ParamType):
""":class:`~click.ParamType` for age strings that returns a :class:`~datetime.timedelta`
An age string is in the form `number + identifier` with possible identifiers:
* ``w`` for weeks
* ``d`` for days
* ``h`` for hours
The form can be specified multiple times but only with that identifier ordering. For example:
* ``1w2d4h`` for 1 week, 2 days and 4 hours
* ``2w`` for 2 weeks
* ``3w6h`` for 3 weeks and 6 hours
"""
name = 'age'
def convert(self, value, param, ctx):
match = re.match(r'^(?:(?P<weeks>\d+?)w)?(?:(?P<days>\d+?)d)?(?:(?P<hours>\d+?)h)?$', value)
if not match:
self.fail('%s is not a valid age' % value)
return timedelta(**{k: int(v) for k, v in match.groupdict(0).items()})
AGE = AgeParamType()
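# For example, AGE.convert('1w2d4h', None, None) yields
# timedelta(weeks=1, days=2, hours=4), per the regular expression above.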
PROVIDER = click.Choice(sorted(provider_manager.names()))
REFINER = click.Choice(sorted(refiner_manager.names()))
dirs = AppDirs('subliminal')
cache_file = 'subliminal.dbm'
config_file = 'config.ini'
@click.group(context_settings={'max_content_width': 100}, epilog='Suggestions and bug reports are greatly appreciated: '
'https://github.com/Diaoul/subliminal/')
@click.option('--addic7ed', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='Addic7ed configuration.')
@click.option('--legendastv', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD', help='LegendasTV configuration.')
@click.option('--opensubtitles', type=click.STRING, nargs=2, metavar='USERNAME PASSWORD',
help='OpenSubtitles configuration.')
@click.option('--omdb', type=click.STRING, nargs=1, metavar='APIKEY', help='OMDB API key.')
@click.option('--cache-dir', type=click.Path(writable=True, file_okay=False), default=dirs.user_cache_dir,
show_default=True, expose_value=True, help='Path to the cache directory.')
@click.option('--debug', is_flag=True, help='Print useful information for debugging subliminal and for reporting bugs.')
@click.version_option(__version__)
@click.pass_context
def subliminal(ctx, addic7ed, legendastv, opensubtitles, omdb, cache_dir, debug):
"""Subtitles, faster than your thoughts."""
# create cache directory
try:
os.makedirs(cache_dir)
except OSError:
if not os.path.isdir(cache_dir):
raise
# configure cache
region.configure('dogpile.cache.dbm', expiration_time=timedelta(days=30),
arguments={'filename': os.path.join(cache_dir, cache_file), 'lock_factory': MutexLock})
# configure logging
if debug:
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
logging.getLogger('subliminal').addHandler(handler)
logging.getLogger('subliminal').setLevel(logging.DEBUG)
ctx.obj = {
'provider_configs': {},
'refiner_configs': {}
}
# provider configs
if addic7ed:
ctx.obj['provider_configs']['addic7ed'] = {'username': addic7ed[0], 'password': addic7ed[1]}
if legendastv:
ctx.obj['provider_configs']['legendastv'] = {'username': legendastv[0], 'password': legendastv[1]}
if opensubtitles:
ctx.obj['provider_configs']['opensubtitles'] = {'username': opensubtitles[0], 'password': opensubtitles[1]}
ctx.obj['provider_configs']['opensubtitlesvip'] = {'username': opensubtitles[0], 'password': opensubtitles[1]}
# refiner configs
if omdb:
ctx.obj['refiner_configs']['omdb'] = {'apikey': omdb}
@subliminal.command()
@click.option('--clear-subliminal', is_flag=True, help='Clear subliminal\'s cache. Use this ONLY if your cache is '
'corrupted or if you experience issues.')
@click.pass_context
def cache(ctx, clear_subliminal):
"""Cache management."""
if clear_subliminal:
for file in glob.glob(os.path.join(ctx.parent.params['cache_dir'], cache_file) + '*'):
os.remove(file)
click.echo('Subliminal\'s cache cleared.')
else:
click.echo('Nothing done.')
@subliminal.command()
@click.option('-l', '--language', type=LANGUAGE, required=True, multiple=True, help='Language as IETF code, '
'e.g. en, pt-BR (can be used multiple times).')
@click.option('-p', '--provider', type=PROVIDER, multiple=True, help='Provider to use (can be used multiple times).')
@click.option('-r', '--refiner', type=REFINER, multiple=True, help='Refiner to use (can be used multiple times).')
@click.option('-a', '--age', type=AGE, help='Filter videos newer than AGE, e.g. 12h, 1w2d.')
@click.option('-d', '--directory', type=click.STRING, metavar='DIR', help='Directory where to save subtitles, '
'default is next to the video file.')
@click.option('-e', '--encoding', type=click.STRING, metavar='ENC', help='Subtitle file encoding, default is to '
'preserve original encoding.')
@click.option('-s', '--single', is_flag=True, default=False, help='Save subtitle without language code in the file '
'name, i.e. use .srt extension. Do not use this unless your media player requires it.')
@click.option('-f', '--force', is_flag=True, default=False, help='Force download even if a subtitle already exists.')
@click.option('-hi', '--hearing-impaired', is_flag=True, default=False, help='Prefer hearing impaired subtitles.')
@click.option('-m', '--min-score', type=click.IntRange(0, 100), default=0, help='Minimum score for a subtitle '
'to be downloaded (0 to 100).')
@click.option('-w', '--max-workers', type=click.IntRange(1, 50), default=None, help='Maximum number of threads to use.')
@click.option('-z/-Z', '--archives/--no-archives', default=True, show_default=True, help='Scan archives for videos '
'(supported extensions: %s).' % ', '.join(ARCHIVE_EXTENSIONS))
@click.option('-v', '--verbose', count=True, help='Increase verbosity.')
@click.argument('path', type=click.Path(), required=True, nargs=-1)
@click.pass_obj
def download(obj, provider, refiner, language, age, directory, encoding, single, force, hearing_impaired, min_score,
max_workers, archives, verbose, path):
"""Download best subtitles.
PATH can be a directory containing videos, a video file path or a video file name. It can be used multiple times.
If an existing subtitle is detected (external or embedded) in the correct language, the download is skipped for
the associated video.
"""
# process parameters
language = set(language)
# scan videos
videos = []
ignored_videos = []
errored_paths = []
with click.progressbar(path, label='Collecting videos', item_show_func=lambda p: p or '') as bar:
for p in bar:
logger.debug('Collecting path %s', p)
# non-existing
if not os.path.exists(p):
try:
video = Video.fromname(p)
except Exception:
logger.exception('Unexpected error while collecting non-existing path %s', p)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(search_external_subtitles(video.name, directory=directory).values())
if check_video(video, languages=language, age=age, undefined=single):
refine(video, episode_refiners=refiner, movie_refiners=refiner,
refiner_configs=obj['refiner_configs'],
embedded_subtitles=not force, providers=provider, languages=language)
videos.append(video)
continue
# directories
if os.path.isdir(p):
try:
scanned_videos = scan_videos(p, age=age, archives=archives)
except Exception:
logger.exception('Unexpected error while collecting directory path %s', p)
errored_paths.append(p)
continue
for video in scanned_videos:
if not force:
video.subtitle_languages |= set(search_external_subtitles(video.name,
directory=directory).values())
if check_video(video, languages=language, age=age, undefined=single):
refine(video, episode_refiners=refiner, movie_refiners=refiner,
refiner_configs=obj['refiner_configs'], embedded_subtitles=not force,
providers=provider, languages=language)
videos.append(video)
else:
ignored_videos.append(video)
continue
# other inputs
try:
video = scan_video(p)
except Exception:
logger.exception('Unexpected error while collecting path %s', p)
errored_paths.append(p)
continue
if not force:
video.subtitle_languages |= set(search_external_subtitles(video.name, directory=directory).values())
if check_video(video, languages=language, age=age, undefined=single):
refine(video, episode_refiners=refiner, movie_refiners=refiner,
refiner_configs=obj['refiner_configs'], embedded_subtitles=not force,
providers=provider, languages=language)
videos.append(video)
else:
ignored_videos.append(video)
# output errored paths
if verbose > 0:
for p in errored_paths:
click.secho('%s errored' % p, fg='red')
# output ignored videos
if verbose > 1:
for video in ignored_videos:
click.secho('%s ignored - subtitles: %s / age: %d day%s' % (
os.path.split(video.name)[1],
', '.join(str(s) for s in video.subtitle_languages) or 'none',
video.age.days,
's' if video.age.days > 1 else ''
), fg='yellow')
# report collected videos
click.echo('%s video%s collected / %s video%s ignored / %s error%s' % (
click.style(str(len(videos)), bold=True, fg='green' if videos else None),
's' if len(videos) > 1 else '',
click.style(str(len(ignored_videos)), bold=True, fg='yellow' if ignored_videos else None),
's' if len(ignored_videos) > 1 else '',
click.style(str(len(errored_paths)), bold=True, fg='red' if errored_paths else None),
's' if len(errored_paths) > 1 else '',
))
# exit if no video collected
if not videos:
return
# download best subtitles
downloaded_subtitles = defaultdict(list)
with AsyncProviderPool(max_workers=max_workers, providers=provider, provider_configs=obj['provider_configs']) as p:
with click.progressbar(videos, label='Downloading subtitles',
item_show_func=lambda v: os.path.split(v.name)[1] if v is not None else '') as bar:
for v in bar:
scores = get_scores(v)
subtitles = p.download_best_subtitles(p.list_subtitles(v, language - v.subtitle_languages),
v, language, min_score=scores['hash'] * min_score / 100,
hearing_impaired=hearing_impaired, only_one=single)
downloaded_subtitles[v] = subtitles
if p.discarded_providers:
click.secho('Some providers have been discarded due to unexpected errors: %s' %
', '.join(p.discarded_providers), fg='yellow')
# save subtitles
total_subtitles = 0
for v, subtitles in downloaded_subtitles.items():
saved_subtitles = save_subtitles(v, subtitles, single=single, directory=directory, encoding=encoding)
total_subtitles += len(saved_subtitles)
if verbose > 0:
click.echo('%s subtitle%s downloaded for %s' % (click.style(str(len(saved_subtitles)), bold=True),
's' if len(saved_subtitles) > 1 else '',
os.path.split(v.name)[1]))
if verbose > 1:
for s in saved_subtitles:
matches = s.get_matches(v)
score = compute_score(s, v)
# score color
score_color = None
scores = get_scores(v)
if isinstance(v, Movie):
if score < scores['title']:
score_color = 'red'
elif score < scores['title'] + scores['year'] + scores['release_group']:
score_color = 'yellow'
else:
score_color = 'green'
elif isinstance(v, Episode):
if score < scores['series'] + scores['season'] + scores['episode']:
score_color = 'red'
elif score < scores['series'] + scores['season'] + scores['episode'] + scores['release_group']:
score_color = 'yellow'
else:
score_color = 'green'
# scale score from 0 to 100 taking out preferences
scaled_score = score
if s.hearing_impaired == hearing_impaired:
scaled_score -= scores['hearing_impaired']
scaled_score *= 100 / scores['hash']
# echo some nice colored output
click.echo(' - [{score}] {language} subtitle from {provider_name} (match on {matches})'.format(
score=click.style('{:5.1f}'.format(scaled_score), fg=score_color, bold=score >= scores['hash']),
language=s.language.name if s.language.country is None else '%s (%s)' % (s.language.name,
s.language.country.name),
provider_name=s.provider_name,
matches=', '.join(sorted(matches, key=scores.get, reverse=True))
))
if verbose == 0:
click.echo('Downloaded %s subtitle%s' % (click.style(str(total_subtitles), bold=True),
's' if total_subtitles > 1 else ''))
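# Worked example of the score scaling above (weights are hypothetical; the
# real values come from get_scores()): with scores = {'hash': 360,
# 'hearing_impaired': 9}, a subtitle scoring 351 whose hearing_impaired flag
# matches the preference is scaled as (351 - 9) * 100 / 360 == 95.0 and
# printed as [ 95.0].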
|
import unittest
import numpy as np
import pandas as pd
from pgmpy.models.BayesianModel import BayesianModel
from pgmpy.inference.CausalInference import CausalInference
class TestCausalGraphMethods(unittest.TestCase):
def setUp(self):
self.game = BayesianModel(
[("A", "X"), ("A", "B"), ("C", "B"), ("C", "Y"), ("X", "Y"), ("B", "X")]
)
self.inference = CausalInference(self.game)
def test_is_d_separated(self):
self.assertFalse(self.inference._is_d_separated("X", "Y", Z=None))
self.assertTrue(self.inference._is_d_separated("B", "Y", Z=("C", "X")))
def test_backdoor_validation(self):
        self.assertTrue(
            self.inference.is_valid_backdoor_adjustment_set("X", "Y", Z="C")
        )
class TestBackdoorPaths(unittest.TestCase):
"""
These tests are drawn from games presented in The Book of Why by Judea Pearl. See the Jupyter Notebook called
Causal Games in the examples folder for further explanation about each of these.
"""
def test_game1(self):
game1 = BayesianModel([("X", "A"), ("A", "Y"), ("A", "B")])
inference = CausalInference(game1)
self.assertTrue(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(deconfounders, frozenset())
def test_game2(self):
game2 = BayesianModel(
[
("X", "E"),
("E", "Y"),
("A", "B"),
("A", "X"),
("B", "C"),
("D", "B"),
("D", "E"),
]
)
inference = CausalInference(game2)
self.assertTrue(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(deconfounders, frozenset())
def test_game3(self):
game3 = BayesianModel(
[("X", "Y"), ("X", "A"), ("B", "A"), ("B", "Y"), ("B", "X")]
)
inference = CausalInference(game3)
self.assertFalse(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(deconfounders, frozenset({frozenset({"B"})}))
def test_game4(self):
game4 = BayesianModel([("A", "X"), ("A", "B"), ("C", "B"), ("C", "Y")])
inference = CausalInference(game4)
self.assertTrue(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(deconfounders, frozenset())
def test_game5(self):
game5 = BayesianModel(
[("A", "X"), ("A", "B"), ("C", "B"), ("C", "Y"), ("X", "Y"), ("B", "X")]
)
inference = CausalInference(game5)
self.assertFalse(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(
deconfounders, frozenset({frozenset({"C"}), frozenset({"A", "B"})})
)
def test_game6(self):
game6 = BayesianModel(
[
("X", "F"),
("C", "X"),
("A", "C"),
("A", "D"),
("B", "D"),
("B", "E"),
("D", "X"),
("D", "Y"),
("E", "Y"),
("F", "Y"),
]
)
inference = CausalInference(game6)
self.assertFalse(inference.is_valid_backdoor_adjustment_set("X", "Y"))
deconfounders = inference.get_all_backdoor_adjustment_sets("X", "Y")
self.assertEqual(
deconfounders,
frozenset(
{
frozenset({"C", "D"}),
frozenset({"A", "D"}),
frozenset({"D", "E"}),
frozenset({"B", "D"}),
}
),
)
class TestEstimator(unittest.TestCase):
def test_create_estimator(self):
game1 = BayesianModel([("X", "A"), ("A", "Y"), ("A", "B")])
data = pd.DataFrame(
np.random.randint(2, size=(1000, 4)), columns=["X", "A", "B", "Y"]
)
inference = CausalInference(model=game1)
ate = inference.estimate_ate("X", "Y", data=data, estimator_type="linear")
self.assertAlmostEqual(ate, 0, places=0)
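# A minimal interactive sketch of the API these tests exercise; it mirrors
# test_game3 above, so the expected outputs are taken from that test.
if __name__ == "__main__":
    model = BayesianModel(
        [("X", "Y"), ("X", "A"), ("B", "A"), ("B", "Y"), ("B", "X")]
    )
    inference = CausalInference(model)
    print(inference.is_valid_backdoor_adjustment_set("X", "Y"))  # False
    print(inference.get_all_backdoor_adjustment_sets("X", "Y"))  # frozenset({frozenset({'B'})})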
|
import logging
import pytest
from homeassistant.components.input_boolean import CONF_INITIAL, DOMAIN, is_on
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_NAME,
SERVICE_RELOAD,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, CoreState, State
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import mock_component, mock_restore_cache
_LOGGER = logging.getLogger(__name__)
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": [{"id": "from_storage", "name": "from storage"}]},
}
else:
hass_storage[DOMAIN] = items
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
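# storage_setup returns an async factory instead of doing the setup itself,
# so the tests below can opt into custom storage contents or extra YAML
# config, e.g. `await storage_setup(config={DOMAIN: {"from_yaml": None}})`.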
async def test_config(hass):
"""Test config."""
invalid_configs = [None, 1, {}, {"name with space": None}]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_methods(hass):
"""Test is_on, turn_on, turn_off methods."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {"test_1": None}})
entity_id = "input_boolean.test_1"
assert not is_on(hass, entity_id)
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id})
await hass.async_block_till_done()
assert is_on(hass, entity_id)
await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}
)
await hass.async_block_till_done()
assert not is_on(hass, entity_id)
await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id})
await hass.async_block_till_done()
assert is_on(hass, entity_id)
async def test_config_options(hass):
"""Test configuration options."""
count_start = len(hass.states.async_entity_ids())
_LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_1": None,
"test_2": {"name": "Hello World", "icon": "mdi:work", "initial": True},
}
},
)
_LOGGER.debug("ENTITIES: %s", hass.states.async_entity_ids())
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_boolean.test_1")
state_2 = hass.states.get("input_boolean.test_2")
assert state_1 is not None
assert state_2 is not None
assert STATE_OFF == state_1.state
assert ATTR_ICON not in state_1.attributes
assert ATTR_FRIENDLY_NAME not in state_1.attributes
assert STATE_ON == state_2.state
assert "Hello World" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work" == state_2.attributes.get(ATTR_ICON)
async def test_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass,
(
State("input_boolean.b1", "on"),
State("input_boolean.b2", "off"),
State("input_boolean.b3", "on"),
),
)
hass.state = CoreState.starting
mock_component(hass, "recorder")
await async_setup_component(hass, DOMAIN, {DOMAIN: {"b1": None, "b2": None}})
state = hass.states.get("input_boolean.b1")
assert state
assert state.state == "on"
state = hass.states.get("input_boolean.b2")
assert state
assert state.state == "off"
async def test_initial_state_overrules_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass, (State("input_boolean.b1", "on"), State("input_boolean.b2", "off"))
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {"b1": {CONF_INITIAL: False}, "b2": {CONF_INITIAL: True}}},
)
state = hass.states.get("input_boolean.b1")
assert state
assert state.state == "off"
state = hass.states.get("input_boolean.b2")
assert state
assert state.state == "on"
async def test_input_boolean_context(hass, hass_admin_user):
"""Test that input_boolean context works."""
assert await async_setup_component(
hass, "input_boolean", {"input_boolean": {"ac": {CONF_INITIAL: True}}}
)
state = hass.states.get("input_boolean.ac")
assert state is not None
await hass.services.async_call(
"input_boolean",
"turn_off",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("input_boolean.ac")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
async def test_reload(hass, hass_admin_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
ent_reg = await entity_registry.async_get_registry(hass)
_LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_1": None,
"test_2": {"name": "Hello World", "icon": "mdi:work", "initial": True},
}
},
)
_LOGGER.debug("ENTITIES: %s", hass.states.async_entity_ids())
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_boolean.test_1")
state_2 = hass.states.get("input_boolean.test_2")
state_3 = hass.states.get("input_boolean.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is None
assert STATE_ON == state_2.state
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is None
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: {
"test_2": {
"name": "Hello World reloaded",
"icon": "mdi:work_reloaded",
"initial": False,
},
"test_3": None,
}
},
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_boolean.test_1")
state_2 = hass.states.get("input_boolean.test_2")
state_3 = hass.states.get("input_boolean.test_3")
assert state_1 is None
assert state_2 is not None
assert state_3 is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is not None
assert STATE_ON == state_2.state # reload is not supposed to change entity state
assert "Hello World reloaded" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work_reloaded" == state_2.attributes.get(ATTR_ICON)
async def test_load_person_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == STATE_OFF
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(config={DOMAIN: {"from_yaml": None}})
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == STATE_OFF
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_EDITABLE)
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(config={DOMAIN: {"from_yaml": None}})
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_setup_no_config(hass, hass_admin_user):
"""Test component setup with no config."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(hass, DOMAIN, {})
with patch(
"homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start == len(hass.states.async_entity_ids())
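# Note on the reload tests above: SERVICE_RELOAD re-reads the YAML
# configuration through homeassistant.config.load_yaml_config_file, so
# patching that function simulates an edited config file without touching
# disk.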
|
import chainer
import numpy as np
import PIL
try:
import cv2
_cv2_available = True
except ImportError:
_cv2_available = False
def _resize_cv2(img, size, interpolation):
img = img.transpose((1, 2, 0))
if interpolation == PIL.Image.NEAREST:
cv_interpolation = cv2.INTER_NEAREST
elif interpolation == PIL.Image.BILINEAR:
cv_interpolation = cv2.INTER_LINEAR
elif interpolation == PIL.Image.BICUBIC:
cv_interpolation = cv2.INTER_CUBIC
    elif interpolation == PIL.Image.LANCZOS:
        cv_interpolation = cv2.INTER_LANCZOS4
    else:
        raise ValueError('Unsupported interpolation: {}'.format(interpolation))
H, W = size
img = cv2.resize(img, dsize=(W, H), interpolation=cv_interpolation)
    # If input is a grayscale image, cv2 returns a two-dimensional array.
if len(img.shape) == 2:
img = img[:, :, np.newaxis]
return img.transpose((2, 0, 1))
def _resize_pil(img, size, interpolation):
C = img.shape[0]
H, W = size
out = np.empty((C, H, W), dtype=img.dtype)
for ch, out_ch in zip(img, out):
ch = PIL.Image.fromarray(ch, mode='F')
out_ch[:] = ch.resize((W, H), resample=interpolation)
return out
def resize(img, size, interpolation=PIL.Image.BILINEAR):
"""Resize image to match the given shape.
The backend used by :func:`resize` is configured by
:obj:`chainer.global_config.cv_resize_backend`.
Two backends are supported: "cv2" and "PIL".
If this is :obj:`None`, "cv2" is used whenever "cv2" is installed,
and "PIL" is used when "cv2" is not installed.
Args:
img (~numpy.ndarray): An array to be transformed.
This is in CHW format and the type should be :obj:`numpy.float32`.
size (tuple): This is a tuple of length 2. Its elements are
ordered as (height, width).
interpolation (int): Determines sampling strategy. This is one of
:obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BILINEAR`,
:obj:`PIL.Image.BICUBIC`, :obj:`PIL.Image.LANCZOS`.
Bilinear interpolation is the default strategy.
Returns:
        ~numpy.ndarray: A resized array in CHW format.
"""
if len(img) == 0:
assert len(size) == 2
return np.empty((0,) + size, dtype=img.dtype)
if chainer.config.cv_resize_backend is None:
if _cv2_available:
return _resize_cv2(img, size, interpolation)
else:
return _resize_pil(img, size, interpolation)
elif chainer.config.cv_resize_backend == 'cv2':
if not _cv2_available:
raise ValueError('cv2 is not installed even though '
'chainer.config.cv_resize_backend == \'cv2\'')
return _resize_cv2(img, size, interpolation)
elif chainer.config.cv_resize_backend == 'PIL':
return _resize_pil(img, size, interpolation)
else:
        raise ValueError('chainer.config.cv_resize_backend must be one of: '
                         '[\'cv2\', \'PIL\']')
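# A minimal usage sketch (shapes follow the docstring above; assumes a
# chainer version that defines config.cv_resize_backend):
if __name__ == '__main__':
    img = np.random.rand(3, 32, 48).astype(np.float32)  # CHW, float32
    out = resize(img, (64, 96), interpolation=PIL.Image.BILINEAR)
    assert out.shape == (3, 64, 96)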
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from http import HttpCollector
##########################################################################
class TestHttpCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('HttpCollector', {
'req_vhost': 'www.my_server.com',
'req_url': ['http://www.my_server.com/']
})
self.collector = HttpCollector(config, None)
def test_import(self):
self.assertTrue(HttpCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('index')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'http__www_my_server_com_.size': 150,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany([publish_mock,
], metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import json
from homeassistant.components.airly.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
async def init_integration(hass, forecast=False) -> MockConfigEntry:
"""Set up the Airly integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id="55.55-122.12",
data={
"api_key": "foo",
"latitude": 55.55,
"longitude": 122.12,
"name": "Home",
},
)
with patch(
"airly._private._RequestsHandler.get",
return_value=json.loads(load_fixture("airly_valid_station.json")),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
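# A hypothetical test consuming this helper might look like:
#
#     async def test_config_entry_loaded(hass):
#         entry = await init_integration(hass)
#         assert entry.domain == DOMAIN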
|
import json
import uuid
from absl import flags
from perfkitbenchmarker import placement_group
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers import aws
from perfkitbenchmarker.providers.aws import util
FLAGS = flags.FLAGS
class AwsPlacementGroupSpec(placement_group.BasePlacementGroupSpec):
"""Object containing the information needed to create an AwsPlacementGroup.
Attributes:
zone: The AWS zone the Placement Group is in.
"""
CLOUD = aws.CLOUD
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword
arguments to construct in order to decode the named option.
"""
result = super(AwsPlacementGroupSpec, cls)._GetOptionDecoderConstructions()
result.update({
'placement_group_style': (option_decoders.EnumDecoder, {
'valid_values': placement_group.PLACEMENT_GROUP_OPTIONS,
'default': placement_group.PLACEMENT_GROUP_CLUSTER,
})
})
return result
class AwsPlacementGroup(placement_group.BasePlacementGroup):
"""Object representing an AWS Placement Group."""
CLOUD = aws.CLOUD
def __init__(self, aws_placement_group_spec):
"""Init method for AwsPlacementGroup.
Args:
aws_placement_group_spec: Object containing the
information needed to create an AwsPlacementGroup.
"""
super(AwsPlacementGroup, self).__init__(aws_placement_group_spec)
self.name = (
'perfkit-%s-%s' % (FLAGS.run_uri, str(uuid.uuid4())[-12:]))
self.region = util.GetRegionFromZone(self.zone)
self.strategy = aws_placement_group_spec.placement_group_style
def _Create(self):
"""Creates the Placement Group."""
formatted_tags = util.FormatTagSpecifications('placement-group',
util.MakeDefaultTags())
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-placement-group',
'--region=%s' % self.region,
'--group-name=%s' % self.name,
'--strategy=%s' % self.strategy,
'--tag-specifications=%s' % formatted_tags
]
vm_util.IssueCommand(create_cmd)
def _Delete(self):
"""Deletes the Placement Group."""
delete_cmd = util.AWS_PREFIX + [
'ec2',
'delete-placement-group',
'--region=%s' % self.region,
'--group-name=%s' % self.name]
# Failed deletes are ignorable (probably already deleted).
vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
def _Exists(self):
"""Returns true if the Placement Group exists."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-placement-groups',
'--region=%s' % self.region,
'--filter=Name=group-name,Values=%s' % self.name]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
placement_groups = response['PlacementGroups']
assert len(placement_groups) < 2, 'Too many placement groups.'
return bool(placement_groups)
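# For reference, _Create() above assembles a CLI call along these lines
# (values are illustrative; util.AWS_PREFIX is typically
# ['aws', '--output', 'json']):
#
#     aws --output json ec2 create-placement-group --region=us-east-1 \
#         --group-name=perfkit-run123-4f2c9ab1d0e3 --strategy=cluster \
#         --tag-specifications=...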
|
from test import unittest
from test import run_only
from mock import patch
import configobj
from diamond.handler.stats_d import StatsdHandler
from diamond.metric import Metric
def run_only_if_statsd_is_available(func):
try:
import statsd
except ImportError:
statsd = None
pred = lambda: statsd is not None
return run_only(func, pred)
class TestStatsdHandler(unittest.TestCase):
@run_only_if_statsd_is_available
@patch('statsd.StatsClient')
def test_single_gauge(self, mock_client):
config = configobj.ConfigObj()
config['host'] = 'localhost'
config['port'] = '9999'
config['batch'] = 1
metric = Metric('servers.com.example.www.cpu.total.idle',
123, raw_value=123, timestamp=1234567,
host='will-be-ignored', metric_type='GAUGE')
expected_data = ('servers.com.example.www.cpu.total.idle', 123)
handler = StatsdHandler(config)
handler.process(metric)
handler.connection.gauge.assert_called_with(*expected_data)
handler.connection.send.assert_called_with()
@run_only_if_statsd_is_available
@patch('statsd.StatsClient')
def test_single_counter(self, mock_client):
config = configobj.ConfigObj()
config['host'] = 'localhost'
config['port'] = '9999'
config['batch'] = 1
metric = Metric('servers.com.example.www.cpu.total.idle',
5, raw_value=123, timestamp=1234567,
host='will-be-ignored', metric_type='COUNTER')
expected_data = ('servers.com.example.www.cpu.total.idle', 123)
handler = StatsdHandler(config)
handler.process(metric)
handler.connection.incr.assert_called_with(*expected_data)
handler.connection.send.assert_called_with()
@run_only_if_statsd_is_available
@patch('statsd.StatsClient')
def test_multiple_counter(self, mock_client):
config = configobj.ConfigObj()
config['host'] = 'localhost'
config['port'] = '9999'
config['batch'] = 1
metric1 = Metric('servers.com.example.www.cpu.total.idle',
5, raw_value=123, timestamp=1234567,
host='will-be-ignored', metric_type='COUNTER')
metric2 = Metric('servers.com.example.www.cpu.total.idle',
7, raw_value=128, timestamp=1234567,
host='will-be-ignored', metric_type='COUNTER')
expected_data1 = ('servers.com.example.www.cpu.total.idle', 123)
expected_data2 = ('servers.com.example.www.cpu.total.idle', 5)
handler = StatsdHandler(config)
handler.process(metric1)
handler.connection.incr.assert_called_with(*expected_data1)
handler.connection.send.assert_called_with()
handler.process(metric2)
handler.connection.incr.assert_called_with(*expected_data2)
handler.connection.send.assert_called_with()
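# Note on test_multiple_counter: for COUNTER metrics the handler reports the
# delta of raw_value between consecutive samples, so after a first sample
# with raw_value=123 the next sample with raw_value=128 is sent as
# incr(<path>, 128 - 123) == incr(<path>, 5), matching expected_data2.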
|
import os
from os import path as op
import shutil
import glob
import numpy as np
import pytest
from numpy.testing import assert_equal, assert_allclose
from mne import (concatenate_raws, read_bem_surfaces, read_surface,
read_source_spaces, read_bem_solution)
from mne.bem import ConductorModel
from mne.commands import (mne_browse_raw, mne_bti2fiff, mne_clean_eog_ecg,
mne_compute_proj_ecg, mne_compute_proj_eog,
mne_coreg, mne_kit2fiff,
mne_make_scalp_surfaces, mne_maxfilter,
mne_report, mne_surf2bem, mne_watershed_bem,
mne_compare_fiff, mne_flash_bem, mne_show_fiff,
mne_show_info, mne_what, mne_setup_source_space,
mne_setup_forward_model, mne_anonymize,
mne_prepare_bem_model, mne_sys_info)
from mne.datasets import testing
from mne.io import read_raw_fif, read_info
from mne.utils import (run_tests_if_main, requires_mne,
requires_mayavi, requires_vtk, requires_freesurfer,
requires_nibabel, traits_test, ArgvSetter, modified_env,
_stamp_to_dt)
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
def check_usage(module, force_help=False):
"""Ensure we print usage."""
args = ('--help',) if force_help else ()
with ArgvSetter(args) as out:
try:
module.run()
except SystemExit:
pass
assert 'Usage: ' in out.stdout.getvalue()
@pytest.mark.slowtest
def test_browse_raw():
"""Test mne browse_raw."""
check_usage(mne_browse_raw)
with ArgvSetter(('--raw', raw_fname)):
with pytest.warns(None): # mpl show warning sometimes
mne_browse_raw.run()
def test_what():
"""Test mne browse_raw."""
check_usage(mne_browse_raw)
with ArgvSetter((raw_fname,)) as out:
mne_what.run()
assert 'raw' == out.stdout.getvalue().strip()
def test_bti2fiff():
"""Test mne bti2fiff."""
check_usage(mne_bti2fiff)
def test_compare_fiff():
"""Test mne compare_fiff."""
check_usage(mne_compare_fiff)
def test_show_fiff():
"""Test mne compare_fiff."""
check_usage(mne_show_fiff)
with ArgvSetter((raw_fname,)):
mne_show_fiff.run()
with ArgvSetter((raw_fname, '--tag=102')):
mne_show_fiff.run()
@requires_mne
def test_clean_eog_ecg(tmpdir):
"""Test mne clean_eog_ecg."""
check_usage(mne_clean_eog_ecg)
tempdir = str(tmpdir)
raw = concatenate_raws([read_raw_fif(f)
for f in [raw_fname, raw_fname, raw_fname]])
raw.info['bads'] = ['MEG 2443']
use_fname = op.join(tempdir, op.basename(raw_fname))
raw.save(use_fname)
with ArgvSetter(('-i', use_fname, '--quiet')):
mne_clean_eog_ecg.run()
for key, count in (('proj', 2), ('-eve', 3)):
fnames = glob.glob(op.join(tempdir, '*%s.fif' % key))
assert len(fnames) == count
@pytest.mark.slowtest
@pytest.mark.parametrize('fun', (mne_compute_proj_ecg, mne_compute_proj_eog))
def test_compute_proj_exg(tmpdir, fun):
"""Test mne compute_proj_ecg/eog."""
check_usage(fun)
tempdir = str(tmpdir)
use_fname = op.join(tempdir, op.basename(raw_fname))
bad_fname = op.join(tempdir, 'bads.txt')
with open(bad_fname, 'w') as fid:
fid.write('MEG 2443\n')
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-i', use_fname, '--bad=' + bad_fname,
'--rej-eeg', '150')):
with pytest.warns(None): # samples, sometimes
fun.run()
fnames = glob.glob(op.join(tempdir, '*proj.fif'))
assert len(fnames) == 1
fnames = glob.glob(op.join(tempdir, '*-eve.fif'))
assert len(fnames) == 1
def test_coreg():
"""Test mne coreg."""
assert hasattr(mne_coreg, 'run')
def test_kit2fiff():
"""Test mne kit2fiff."""
    # Can't check a real conversion here, so only verify the help output
check_usage(mne_kit2fiff, force_help=True)
@pytest.mark.slowtest # slow on Travis OSX
@requires_vtk
@testing.requires_testing_data
def test_make_scalp_surfaces(tmpdir):
"""Test mne make_scalp_surfaces."""
check_usage(mne_make_scalp_surfaces)
has = 'SUBJECTS_DIR' in os.environ
# Copy necessary files to avoid FreeSurfer call
tempdir = str(tmpdir)
surf_path = op.join(subjects_dir, 'sample', 'surf')
surf_path_new = op.join(tempdir, 'sample', 'surf')
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(surf_path_new)
subj_dir = op.join(tempdir, 'sample', 'bem')
os.mkdir(subj_dir)
shutil.copy(op.join(surf_path, 'lh.seghead'), surf_path_new)
cmd = ('-s', 'sample', '--subjects-dir', tempdir)
with modified_env(**{'_MNE_TESTING_SCALP': 'true'}):
dense_fname = op.join(subj_dir, 'sample-head-dense.fif')
medium_fname = op.join(subj_dir, 'sample-head-medium.fif')
with ArgvSetter(cmd, disable_stdout=False, disable_stderr=False):
with modified_env(FREESURFER_HOME=None):
pytest.raises(RuntimeError, mne_make_scalp_surfaces.run)
with modified_env(FREESURFER_HOME=tempdir):
mne_make_scalp_surfaces.run()
assert op.isfile(dense_fname)
assert op.isfile(medium_fname)
with pytest.raises(IOError, match='overwrite'):
mne_make_scalp_surfaces.run()
# actually check the outputs
head_py = read_bem_surfaces(dense_fname)
assert_equal(len(head_py), 1)
head_py = head_py[0]
head_c = read_bem_surfaces(op.join(subjects_dir, 'sample', 'bem',
'sample-head-dense.fif'))[0]
assert_allclose(head_py['rr'], head_c['rr'])
if not has:
assert 'SUBJECTS_DIR' not in os.environ
def test_maxfilter():
"""Test mne maxfilter."""
check_usage(mne_maxfilter)
with ArgvSetter(('-i', raw_fname, '--st', '--movecomp', '--linefreq', '60',
'--trans', raw_fname)) as out:
with pytest.warns(RuntimeWarning, match="Don't use"):
os.environ['_MNE_MAXFILTER_TEST'] = 'true'
try:
mne_maxfilter.run()
finally:
del os.environ['_MNE_MAXFILTER_TEST']
out = out.stdout.getvalue()
for check in ('maxfilter', '-trans', '-movecomp'):
assert check in out, check
@pytest.mark.slowtest
@requires_mayavi
@traits_test
@testing.requires_testing_data
def test_report(tmpdir):
"""Test mne report."""
check_usage(mne_report)
tempdir = str(tmpdir)
use_fname = op.join(tempdir, op.basename(raw_fname))
shutil.copyfile(raw_fname, use_fname)
with ArgvSetter(('-p', tempdir, '-i', use_fname, '-d', subjects_dir,
'-s', 'sample', '--no-browser', '-m', '30')):
with pytest.warns(None): # contour levels
mne_report.run()
fnames = glob.glob(op.join(tempdir, '*.html'))
assert len(fnames) == 1
def test_surf2bem():
"""Test mne surf2bem."""
check_usage(mne_surf2bem)
@pytest.mark.timeout(900) # took ~400 sec on a local test
@pytest.mark.slowtest
@pytest.mark.ultraslowtest
@requires_nibabel()
@requires_freesurfer('mri_watershed')
@testing.requires_testing_data
def test_watershed_bem(tmpdir):
"""Test mne watershed bem."""
check_usage(mne_watershed_bem)
# from T1.mgz
Mdc = np.array([[-1, 0, 0], [0, 0, -1], [0, 1, 0]])
Pxyz_c = np.array([-5.273613, 9.039085, -27.287964])
# Copy necessary files to tempdir
tempdir = str(tmpdir)
mridata_path = op.join(subjects_dir, 'sample', 'mri')
subject_path_new = op.join(tempdir, 'sample')
mridata_path_new = op.join(subject_path_new, 'mri')
os.makedirs(mridata_path_new)
new_fname = op.join(mridata_path_new, 'T1.mgz')
shutil.copyfile(op.join(mridata_path, 'T1.mgz'), new_fname)
old_mode = os.stat(new_fname).st_mode
os.chmod(new_fname, 0)
args = ('-d', tempdir, '-s', 'sample', '-o')
with pytest.raises(PermissionError, match=r'read permissions.*T1\.mgz'):
with ArgvSetter(args):
mne_watershed_bem.run()
os.chmod(new_fname, old_mode)
for s in ('outer_skin', 'outer_skull', 'inner_skull'):
assert not op.isfile(op.join(subject_path_new, 'bem', '%s.surf' % s))
with ArgvSetter(args):
mne_watershed_bem.run()
kwargs = dict(rtol=1e-5, atol=1e-5)
for s in ('outer_skin', 'outer_skull', 'inner_skull'):
rr, tris, vol_info = read_surface(op.join(subject_path_new, 'bem',
'%s.surf' % s),
read_metadata=True)
assert_equal(len(tris), 20480)
assert_equal(tris.min(), 0)
assert_equal(rr.shape[0], tris.max() + 1)
# compare the volume info to the mgz header
assert_allclose(vol_info['xras'], Mdc[0], **kwargs)
assert_allclose(vol_info['yras'], Mdc[1], **kwargs)
assert_allclose(vol_info['zras'], Mdc[2], **kwargs)
assert_allclose(vol_info['cras'], Pxyz_c, **kwargs)
@pytest.mark.timeout(120) # took ~70 sec locally
@pytest.mark.slowtest
@pytest.mark.ultraslowtest
@requires_freesurfer
@testing.requires_testing_data
def test_flash_bem(tmpdir):
"""Test mne flash_bem."""
check_usage(mne_flash_bem, force_help=True)
# Copy necessary files to tempdir
tempdir = str(tmpdir)
mridata_path = op.join(subjects_dir, 'sample', 'mri')
subject_path_new = op.join(tempdir, 'sample')
mridata_path_new = op.join(subject_path_new, 'mri')
os.makedirs(op.join(mridata_path_new, 'flash'))
os.makedirs(op.join(subject_path_new, 'bem'))
shutil.copyfile(op.join(mridata_path, 'T1.mgz'),
op.join(mridata_path_new, 'T1.mgz'))
shutil.copyfile(op.join(mridata_path, 'brain.mgz'),
op.join(mridata_path_new, 'brain.mgz'))
# Copy the available mri/flash/mef*.mgz files from the dataset
flash_path = op.join(mridata_path_new, 'flash')
for kind in (5, 30):
in_fname = op.join(mridata_path, 'flash', 'mef%02d.mgz' % kind)
shutil.copyfile(in_fname, op.join(flash_path, op.basename(in_fname)))
# Test mne flash_bem with --noconvert option
# (since there are no DICOM Flash images in dataset)
for s in ('outer_skin', 'outer_skull', 'inner_skull'):
assert not op.isfile(op.join(subject_path_new, 'bem', '%s.surf' % s))
with ArgvSetter(('-d', tempdir, '-s', 'sample', '-n'),
disable_stdout=False, disable_stderr=False):
mne_flash_bem.run()
kwargs = dict(rtol=1e-5, atol=1e-5)
for s in ('outer_skin', 'outer_skull', 'inner_skull'):
rr, tris = read_surface(op.join(subject_path_new, 'bem',
'%s.surf' % s))
assert_equal(len(tris), 5120)
assert_equal(tris.min(), 0)
assert_equal(rr.shape[0], tris.max() + 1)
# compare to the testing flash surfaces
rr_c, tris_c = read_surface(op.join(subjects_dir, 'sample', 'bem',
'%s.surf' % s))
assert_allclose(rr, rr_c, **kwargs)
assert_allclose(tris, tris_c, **kwargs)
@testing.requires_testing_data
def test_setup_source_space(tmpdir):
"""Test mne setup_source_space."""
check_usage(mne_setup_source_space, force_help=True)
# Using the sample dataset
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
use_fname = op.join(tmpdir, "sources-src.fif")
# Test command
with ArgvSetter(('--src', use_fname, '-d', subjects_dir,
'-s', 'sample', '--morph', 'sample',
'--add-dist', 'False', '--ico', '3', '--verbose')):
mne_setup_source_space.run()
src = read_source_spaces(use_fname)
assert len(src) == 2
with pytest.raises(Exception):
with ArgvSetter(('--src', use_fname, '-d', subjects_dir,
'-s', 'sample', '--ico', '3', '--oct', '3')):
assert mne_setup_source_space.run()
with pytest.raises(Exception):
with ArgvSetter(('--src', use_fname, '-d', subjects_dir,
'-s', 'sample', '--ico', '3', '--spacing', '10')):
assert mne_setup_source_space.run()
with pytest.raises(Exception):
with ArgvSetter(('--src', use_fname, '-d', subjects_dir,
'-s', 'sample', '--ico', '3', '--spacing', '10',
'--oct', '3')):
assert mne_setup_source_space.run()
@testing.requires_testing_data
def test_setup_forward_model(tmpdir):
"""Test mne setup_forward_model."""
check_usage(mne_setup_forward_model, force_help=True)
# Using the sample dataset
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
use_fname = op.join(tmpdir, "model-bem.fif")
# Test command
with ArgvSetter(('--model', use_fname, '-d', subjects_dir, '--homog',
'-s', 'sample', '--ico', '3', '--verbose')):
mne_setup_forward_model.run()
model = read_bem_surfaces(use_fname)
assert len(model) == 1
sol_fname = op.splitext(use_fname)[0] + '-sol.fif'
read_bem_solution(sol_fname)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_mne_prepare_bem_model(tmpdir):
"""Test mne setup_source_space."""
check_usage(mne_prepare_bem_model, force_help=True)
# Using the sample dataset
bem_model_fname = op.join(testing.data_path(download=False), 'subjects',
'sample', 'bem', 'sample-320-320-320-bem.fif')
bem_solution_fname = op.join(tmpdir, "bem_solution-bem-sol.fif")
# Test command
with ArgvSetter(('--bem', bem_model_fname, '--sol', bem_solution_fname,
'--verbose')):
mne_prepare_bem_model.run()
bem_solution = read_bem_solution(bem_solution_fname)
assert isinstance(bem_solution, ConductorModel)
def test_show_info():
"""Test mne show_info."""
check_usage(mne_show_info)
with ArgvSetter((raw_fname,)):
mne_show_info.run()
def test_sys_info():
"""Test mne show_info."""
check_usage(mne_sys_info, force_help=True)
with ArgvSetter((raw_fname,)):
with pytest.raises(SystemExit, match='1'):
mne_sys_info.run()
with ArgvSetter() as out:
mne_sys_info.run()
assert 'numpy' in out.stdout.getvalue()
def test_anonymize(tmpdir):
"""Test mne anonymize."""
check_usage(mne_anonymize)
out_fname = op.join(tmpdir, 'anon_test_raw.fif')
with ArgvSetter(('-f', raw_fname, '-o', out_fname)):
mne_anonymize.run()
info = read_info(out_fname)
assert(op.exists(out_fname))
assert info['meas_date'] == _stamp_to_dt((946684800, 0))
run_tests_if_main()
|
import re
import sys
import json
import os.path
import socket
from http import HTTPStatus
import attr
import pytest
from PyQt5.QtCore import pyqtSignal, QUrl
from end2end.fixtures import testprocess
class Request(testprocess.Line):
"""A parsed line from the flask log output.
Attributes:
verb/path/status: Parsed from the log output.
"""
def __init__(self, data):
super().__init__(data)
try:
parsed = json.loads(data)
except ValueError:
raise testprocess.InvalidLine(data)
assert isinstance(parsed, dict)
assert set(parsed.keys()) == {'path', 'verb', 'status'}
self.verb = parsed['verb']
path = parsed['path']
self.path = '/' if path == '/' else path.rstrip('/')
self.status = parsed['status']
self._check_status()
def _check_status(self):
"""Check if the http status is what we expected."""
path_to_statuses = {
'/favicon.ico': [HTTPStatus.OK, HTTPStatus.PARTIAL_CONTENT],
'/does-not-exist': [HTTPStatus.NOT_FOUND],
'/does-not-exist-2': [HTTPStatus.NOT_FOUND],
'/404': [HTTPStatus.NOT_FOUND],
'/redirect-later': [HTTPStatus.FOUND],
'/redirect-self': [HTTPStatus.FOUND],
'/redirect-to': [HTTPStatus.FOUND],
'/relative-redirect': [HTTPStatus.FOUND],
'/absolute-redirect': [HTTPStatus.FOUND],
'/cookies/set': [HTTPStatus.FOUND],
'/500-inline': [HTTPStatus.INTERNAL_SERVER_ERROR],
'/500': [HTTPStatus.INTERNAL_SERVER_ERROR],
}
for i in range(15):
path_to_statuses['/redirect/{}'.format(i)] = [HTTPStatus.FOUND]
for suffix in ['', '1', '2', '3', '4', '5', '6']:
key = ('/basic-auth/user{suffix}/password{suffix}'
.format(suffix=suffix))
path_to_statuses[key] = [HTTPStatus.UNAUTHORIZED, HTTPStatus.OK]
default_statuses = [HTTPStatus.OK, HTTPStatus.NOT_MODIFIED]
sanitized = QUrl('http://localhost' + self.path).path() # Remove ?foo
expected_statuses = path_to_statuses.get(sanitized, default_statuses)
if self.status not in expected_statuses:
raise AssertionError(
"{} loaded with status {} but expected {}".format(
sanitized, self.status,
' / '.join(repr(e) for e in expected_statuses)))
def __eq__(self, other):
return NotImplemented
@attr.s(frozen=True, eq=False, hash=True)
class ExpectedRequest:
"""Class to compare expected requests easily."""
verb = attr.ib()
path = attr.ib()
@classmethod
def from_request(cls, request):
"""Create an ExpectedRequest from a Request."""
return cls(request.verb, request.path)
def __eq__(self, other):
if isinstance(other, (Request, ExpectedRequest)):
return self.verb == other.verb and self.path == other.path
else:
return NotImplemented
class WebserverProcess(testprocess.Process):
"""Abstraction over a running Flask server process.
Reads the log from its stdout and parses it.
Signals:
new_request: Emitted when there's a new request received.
"""
new_request = pyqtSignal(Request)
Request = Request # So it can be used from the fixture easily.
ExpectedRequest = ExpectedRequest
KEYS = ['verb', 'path']
def __init__(self, request, script, parent=None):
super().__init__(request, parent)
self._script = script
self.port = self._random_port()
self.new_data.connect(self.new_request)
def _random_port(self) -> int:
"""Get a random free port."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('localhost', 0))
port = sock.getsockname()[1]
sock.close()
return port
def get_requests(self):
"""Get the requests to the server during this test."""
requests = self._get_data()
return [r for r in requests if r.path != '/favicon.ico']
def _parse_line(self, line):
self._log(line)
started_re = re.compile(r' \* Running on https?://127\.0\.0\.1:{}/ '
r'\(Press CTRL\+C to quit\)'.format(self.port))
if started_re.fullmatch(line):
self.ready.emit()
return None
return Request(line)
def _executable_args(self):
if hasattr(sys, 'frozen'):
executable = os.path.join(os.path.dirname(sys.executable),
self._script)
args = []
else:
executable = sys.executable
py_file = os.path.join(os.path.dirname(__file__),
self._script + '.py')
args = [py_file]
return executable, args
def _default_args(self):
return [str(self.port)]
@pytest.fixture(scope='session', autouse=True)
def server(qapp, request):
"""Fixture for an server object which ensures clean setup/teardown."""
server = WebserverProcess(request, 'webserver_sub')
server.start()
yield server
server.terminate()
@pytest.fixture(autouse=True)
def server_per_test(server, request):
"""Fixture to clean server request list after each test."""
request.node._server_log = server.captured_log
server.before_test()
yield
server.after_test()
@pytest.fixture
def ssl_server(request, qapp):
"""Fixture for a webserver with a self-signed SSL certificate.
This needs to be explicitly used in a test, and overwrites the server log
used in that test.
"""
server = WebserverProcess(request, 'webserver_sub_ssl')
request.node._server_log = server.captured_log
server.start()
yield server
server.after_test()
server.terminate()
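# ExpectedRequest is intentionally comparable with a parsed Request; a
# minimal illustration (the JSON line mimics the flask log format that
# Request parses above):
#
#     line = json.dumps({'verb': 'GET', 'path': '/data/hello.txt',
#                        'status': 200})
#     assert ExpectedRequest('GET', '/data/hello.txt') == Request(line)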
|
from pydexcom import AccountError, SessionError
from homeassistant.components.dexcom.const import DOMAIN
from homeassistant.config_entries import ENTRY_STATE_LOADED, ENTRY_STATE_NOT_LOADED
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.dexcom import CONFIG, init_integration
async def test_setup_entry_account_error(hass):
"""Test entry setup failed due to account error."""
entry = MockConfigEntry(
domain=DOMAIN,
title="test_username",
unique_id="test_username",
data=CONFIG,
options=None,
)
with patch(
"homeassistant.components.dexcom.Dexcom",
side_effect=AccountError,
):
entry.add_to_hass(hass)
result = await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert result is False
async def test_setup_entry_session_error(hass):
"""Test entry setup failed due to session error."""
entry = MockConfigEntry(
domain=DOMAIN,
title="test_username",
unique_id="test_username",
data=CONFIG,
options=None,
)
with patch(
"homeassistant.components.dexcom.Dexcom",
side_effect=SessionError,
):
entry.add_to_hass(hass)
result = await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert result is False
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
|
from datetime import timedelta
import pytest
from homeassistant.components.homekit.accessories import (
HomeAccessory,
HomeBridge,
HomeDriver,
debounce,
)
from homeassistant.components.homekit.const import (
ATTR_DISPLAY_NAME,
ATTR_INTERGRATION,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SOFTWARE_VERSION,
ATTR_VALUE,
BRIDGE_MODEL,
BRIDGE_NAME,
BRIDGE_SERIAL_NUMBER,
CHAR_FIRMWARE_REVISION,
CHAR_MANUFACTURER,
CHAR_MODEL,
CHAR_NAME,
CHAR_SERIAL_NUMBER,
CONF_LINKED_BATTERY_CHARGING_SENSOR,
CONF_LINKED_BATTERY_SENSOR,
CONF_LOW_BATTERY_THRESHOLD,
MANUFACTURER,
SERV_ACCESSORY_INFO,
)
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
ATTR_ENTITY_ID,
ATTR_SERVICE,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
__version__,
)
from homeassistant.helpers.event import TRACK_STATE_CHANGE_CALLBACKS
import homeassistant.util.dt as dt_util
from tests.async_mock import Mock, patch
from tests.common import async_fire_time_changed, async_mock_service
async def test_debounce(hass):
"""Test add_timeout decorator function."""
def demo_func(*args):
nonlocal arguments, counter
counter += 1
arguments = args
arguments = None
counter = 0
mock = Mock(hass=hass, debounce={})
debounce_demo = debounce(demo_func)
assert debounce_demo.__name__ == "demo_func"
now = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=now):
await hass.async_add_executor_job(debounce_demo, mock, "value")
async_fire_time_changed(hass, now + timedelta(seconds=3))
await hass.async_block_till_done()
assert counter == 1
assert len(arguments) == 2
with patch("homeassistant.util.dt.utcnow", return_value=now):
await hass.async_add_executor_job(debounce_demo, mock, "value")
await hass.async_add_executor_job(debounce_demo, mock, "value")
async_fire_time_changed(hass, now + timedelta(seconds=3))
await hass.async_block_till_done()
assert counter == 2
async def test_accessory_cancels_track_state_change_on_stop(hass, hk_driver):
"""Ensure homekit state changed listeners are unsubscribed on reload."""
entity_id = "sensor.accessory"
hass.states.async_set(entity_id, None)
acc = HomeAccessory(
hass, hk_driver, "Home Accessory", entity_id, 2, {"platform": "isy994"}
)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run_handler()
assert len(hass.data[TRACK_STATE_CHANGE_CALLBACKS][entity_id]) == 1
acc.async_stop()
assert entity_id not in hass.data[TRACK_STATE_CHANGE_CALLBACKS]
async def test_home_accessory(hass, hk_driver):
"""Test HomeAccessory class."""
entity_id = "sensor.accessory"
entity_id2 = "light.accessory"
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id2, STATE_UNAVAILABLE)
await hass.async_block_till_done()
acc = HomeAccessory(
hass, hk_driver, "Home Accessory", entity_id, 2, {"platform": "isy994"}
)
assert acc.hass == hass
assert acc.display_name == "Home Accessory"
assert acc.aid == 2
assert acc.available is True
assert acc.category == 1 # Category.OTHER
assert len(acc.services) == 1
serv = acc.services[0] # SERV_ACCESSORY_INFO
assert serv.display_name == SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Isy994"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == "sensor.accessory"
acc2 = HomeAccessory(hass, hk_driver, "Home Accessory", entity_id2, 3, {})
serv = acc2.services[0] # SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == f"{MANUFACTURER} Light"
assert serv.get_characteristic(CHAR_MODEL).value == "Light"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == "light.accessory"
acc3 = HomeAccessory(
hass,
hk_driver,
"Home Accessory",
entity_id2,
3,
{
ATTR_MODEL: "Awesome",
ATTR_MANUFACTURER: "Lux Brands",
ATTR_SOFTWARE_VERSION: "0.4.3",
ATTR_INTERGRATION: "luxe",
},
)
assert acc3.available is False
serv = acc3.services[0] # SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Lux Brands"
assert serv.get_characteristic(CHAR_MODEL).value == "Awesome"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == "light.accessory"
hass.states.async_set(entity_id, "on")
await hass.async_block_till_done()
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert mock_async_update_state.call_count == 1
with pytest.raises(NotImplementedError):
acc.async_update_state("new_state")
# Test model name from domain
entity_id = "test_model.demo"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "test_name", entity_id, 2, None)
serv = acc.services[0] # SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_MODEL).value == "Test Model"
async def test_battery_service(hass, hk_driver, caplog):
"""Test battery service."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 50})
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Battery Service", entity_id, 2, None)
assert acc._char_battery.value == 0
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 15})
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 15
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: "error"})
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 15
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 2
assert "ERROR" not in caplog.text
# Test charging
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(
entity_id, None, {ATTR_BATTERY_LEVEL: 10, ATTR_BATTERY_CHARGING: True}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
acc = HomeAccessory(hass, hk_driver, "Battery Service", entity_id, 2, None)
assert acc._char_battery.value == 0
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 10
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(
entity_id, None, {ATTR_BATTERY_LEVEL: 100, ATTR_BATTERY_CHARGING: False}
)
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_linked_battery_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery, 50, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery},
)
assert acc.linked_battery_sensor == linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
hass.states.async_set(linked_battery, 10, None)
await hass.async_block_till_done()
assert acc._char_battery.value == 10
assert acc._char_low_battery.value == 1
# Ignore battery change on entity if it has linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 90})
await hass.async_block_till_done()
assert acc._char_battery.value == 10
# Test none numeric state for linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(linked_battery, "error", None)
await hass.async_block_till_done()
assert acc._char_battery.value == 10
assert "ERROR" not in caplog.text
# Test charging & low battery threshold
hass.states.async_set(linked_battery, 20, {ATTR_BATTERY_CHARGING: True})
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery, CONF_LOW_BATTERY_THRESHOLD: 50},
)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 20
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 1
hass.states.async_set(linked_battery, 100, {ATTR_BATTERY_CHARGING: False})
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
hass.states.async_remove(linked_battery)
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_linked_battery_charging_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_charging_sensor."""
entity_id = "homekit.accessory"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor},
)
assert acc.linked_battery_charging_sensor == linked_battery_charging_sensor
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(linked_battery_charging_sensor, STATE_OFF, None)
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_charging.value == 0
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_remove(linked_battery_charging_sensor)
await acc.run_handler()
await hass.async_block_till_done()
assert acc._char_charging.value == 1
async def test_linked_battery_sensor_and_linked_battery_charging_sensor(
hass, hk_driver, caplog
):
"""Test battery service with linked_battery_sensor and a linked_battery_charging_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery, 50, None)
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{
CONF_LINKED_BATTERY_SENSOR: linked_battery,
CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor,
},
)
assert acc.linked_battery_sensor == linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 1
hass.states.async_set(linked_battery_charging_sensor, STATE_OFF, None)
await hass.async_block_till_done()
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
hass.states.async_remove(linked_battery_charging_sensor)
await hass.async_block_till_done()
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_missing_linked_battery_charging_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_charging_sensor that is mapping to a missing entity."""
entity_id = "homekit.accessory"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor},
)
assert acc.linked_battery_charging_sensor is None
# Make sure we don't throw if the linked_battery_charging_sensor
# is removed
hass.states.async_remove(linked_battery_charging_sensor)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run_handler()
await hass.async_block_till_done()
# Make sure we don't throw if the entity_id
# is removed
hass.states.async_remove(entity_id)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run_handler()
await hass.async_block_till_done()
async def test_missing_linked_battery_sensor(hass, hk_driver, caplog):
"""Test battery service with missing linked_battery_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
hass.states.async_set(entity_id, "open")
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery},
)
assert not acc.linked_battery_sensor
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert not acc.linked_battery_sensor
assert acc._char_battery is None
assert acc._char_low_battery is None
assert acc._char_charging is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_remove(entity_id)
await acc.run_handler()
await hass.async_block_till_done()
assert not acc.linked_battery_sensor
assert acc._char_battery is None
assert acc._char_low_battery is None
assert acc._char_charging is None
async def test_battery_appears_after_startup(hass, hk_driver, caplog):
"""Test battery level appears after homekit is started."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None, {})
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Accessory without battery", entity_id, 2, {})
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run_handler()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 15})
await hass.async_block_till_done()
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert acc._char_battery is None
async def test_call_service(hass, hk_driver, events):
"""Test call_service method."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Home Accessory", entity_id, 2, {})
call_service = async_mock_service(hass, "cover", "open_cover")
test_domain = "cover"
test_service = "open_cover"
test_value = "value"
await acc.async_call_service(
test_domain, test_service, {ATTR_ENTITY_ID: entity_id}, test_value
)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data == {
ATTR_ENTITY_ID: acc.entity_id,
ATTR_DISPLAY_NAME: acc.display_name,
ATTR_SERVICE: test_service,
ATTR_VALUE: test_value,
}
assert len(call_service) == 1
assert call_service[0].domain == test_domain
assert call_service[0].service == test_service
assert call_service[0].data == {ATTR_ENTITY_ID: entity_id}
def test_home_bridge(hk_driver):
"""Test HomeBridge class."""
bridge = HomeBridge("hass", hk_driver, BRIDGE_NAME)
assert bridge.hass == "hass"
assert bridge.display_name == BRIDGE_NAME
assert bridge.category == 2 # Category.BRIDGE
assert len(bridge.services) == 1
serv = bridge.services[0] # SERV_ACCESSORY_INFO
assert serv.display_name == SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == BRIDGE_NAME
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == __version__
assert serv.get_characteristic(CHAR_MANUFACTURER).value == MANUFACTURER
assert serv.get_characteristic(CHAR_MODEL).value == BRIDGE_MODEL
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == BRIDGE_SERIAL_NUMBER
bridge = HomeBridge("hass", hk_driver, "test_name")
assert bridge.display_name == "test_name"
assert len(bridge.services) == 1
serv = bridge.services[0] # SERV_ACCESSORY_INFO
# setup_message
bridge.setup_message()
def test_home_driver():
"""Test HomeDriver class."""
ip_address = "127.0.0.1"
port = 51826
path = ".homekit.state"
pin = b"123-45-678"
with patch("pyhap.accessory_driver.AccessoryDriver.__init__") as mock_driver:
driver = HomeDriver(
"hass", "entry_id", "name", address=ip_address, port=port, persist_file=path
)
mock_driver.assert_called_with(address=ip_address, port=port, persist_file=path)
driver.state = Mock(pincode=pin)
xhm_uri_mock = Mock(return_value="X-HM://0")
driver.accessory = Mock(xhm_uri=xhm_uri_mock)
# pair
with patch("pyhap.accessory_driver.AccessoryDriver.pair") as mock_pair, patch(
"homeassistant.components.homekit.accessories.dismiss_setup_message"
    ) as mock_dismiss_msg:
driver.pair("client_uuid", "client_public")
mock_pair.assert_called_with("client_uuid", "client_public")
        mock_dismiss_msg.assert_called_with("hass", "entry_id")
# unpair
with patch("pyhap.accessory_driver.AccessoryDriver.unpair") as mock_unpair, patch(
"homeassistant.components.homekit.accessories.show_setup_message"
) as mock_show_msg:
driver.unpair("client_uuid")
mock_unpair.assert_called_with("client_uuid")
mock_show_msg.assert_called_with("hass", "entry_id", "name", pin, "X-HM://0")
|
import logging
from aiohttp import web
import voluptuous as vol
from homeassistant import util
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.util.json import load_json, save_json
from .hue_api import (
HueAllGroupsStateView,
HueAllLightsStateView,
HueConfigView,
HueFullStateView,
HueGroupView,
HueOneLightChangeView,
HueOneLightStateView,
HueUnauthorizedUser,
HueUsernameView,
)
from .upnp import DescriptionXmlView, create_upnp_datagram_endpoint
DOMAIN = "emulated_hue"
_LOGGER = logging.getLogger(__name__)
NUMBERS_FILE = "emulated_hue_ids.json"
CONF_ADVERTISE_IP = "advertise_ip"
CONF_ADVERTISE_PORT = "advertise_port"
CONF_ENTITIES = "entities"
CONF_ENTITY_HIDDEN = "hidden"
CONF_ENTITY_NAME = "name"
CONF_EXPOSE_BY_DEFAULT = "expose_by_default"
CONF_EXPOSED_DOMAINS = "exposed_domains"
CONF_HOST_IP = "host_ip"
CONF_LIGHTS_ALL_DIMMABLE = "lights_all_dimmable"
CONF_LISTEN_PORT = "listen_port"
CONF_OFF_MAPS_TO_ON_DOMAINS = "off_maps_to_on_domains"
CONF_TYPE = "type"
CONF_UPNP_BIND_MULTICAST = "upnp_bind_multicast"
TYPE_ALEXA = "alexa"
TYPE_GOOGLE = "google_home"
DEFAULT_LIGHTS_ALL_DIMMABLE = False
DEFAULT_LISTEN_PORT = 8300
DEFAULT_UPNP_BIND_MULTICAST = True
DEFAULT_OFF_MAPS_TO_ON_DOMAINS = ["script", "scene"]
DEFAULT_EXPOSE_BY_DEFAULT = True
DEFAULT_EXPOSED_DOMAINS = [
"switch",
"light",
"group",
"input_boolean",
"media_player",
"fan",
]
DEFAULT_TYPE = TYPE_GOOGLE
CONFIG_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ENTITY_NAME): cv.string,
vol.Optional(CONF_ENTITY_HIDDEN): cv.boolean,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST_IP): cv.string,
vol.Optional(CONF_LISTEN_PORT, default=DEFAULT_LISTEN_PORT): cv.port,
vol.Optional(CONF_ADVERTISE_IP): cv.string,
vol.Optional(CONF_ADVERTISE_PORT): cv.port,
vol.Optional(CONF_UPNP_BIND_MULTICAST): cv.boolean,
vol.Optional(CONF_OFF_MAPS_TO_ON_DOMAINS): cv.ensure_list,
vol.Optional(CONF_EXPOSE_BY_DEFAULT): cv.boolean,
vol.Optional(CONF_EXPOSED_DOMAINS): cv.ensure_list,
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE): vol.Any(
TYPE_ALEXA, TYPE_GOOGLE
),
vol.Optional(CONF_ENTITIES): vol.Schema(
{cv.entity_id: CONFIG_ENTITY_SCHEMA}
),
vol.Optional(
CONF_LIGHTS_ALL_DIMMABLE, default=DEFAULT_LIGHTS_ALL_DIMMABLE
): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
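# Illustrative only: a minimal configuration mapping that CONFIG_SCHEMA
# accepts (hypothetical values, not taken from a real installation).
_EXAMPLE_CONFIG = {
    DOMAIN: {
        "listen_port": 8300,
        "expose_by_default": True,
        "exposed_domains": ["light", "switch"],
        "entities": {"light.kitchen": {"name": "Kitchen", "hidden": False}},
    }
}
# CONFIG_SCHEMA(_EXAMPLE_CONFIG) would return the validated configuration.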
ATTR_EMULATED_HUE_NAME = "emulated_hue_name"
async def async_setup(hass, yaml_config):
"""Activate the emulated_hue component."""
config = Config(hass, yaml_config.get(DOMAIN, {}))
app = web.Application()
app["hass"] = hass
# We misunderstood the startup signal. You're not allowed to change
# anything during startup. Temp workaround.
# pylint: disable=protected-access
app._on_startup.freeze()
await app.startup()
runner = None
site = None
DescriptionXmlView(config).register(app, app.router)
HueUsernameView().register(app, app.router)
HueConfigView(config).register(app, app.router)
HueUnauthorizedUser().register(app, app.router)
HueAllLightsStateView(config).register(app, app.router)
HueOneLightStateView(config).register(app, app.router)
HueOneLightChangeView(config).register(app, app.router)
HueAllGroupsStateView(config).register(app, app.router)
HueGroupView(config).register(app, app.router)
HueFullStateView(config).register(app, app.router)
listen = create_upnp_datagram_endpoint(
config.host_ip_addr,
config.upnp_bind_multicast,
config.advertise_ip,
config.advertise_port or config.listen_port,
)
protocol = None
async def stop_emulated_hue_bridge(event):
"""Stop the emulated hue bridge."""
nonlocal protocol
nonlocal site
nonlocal runner
if protocol:
protocol.close()
if site:
await site.stop()
if runner:
await runner.cleanup()
async def start_emulated_hue_bridge(event):
"""Start the emulated hue bridge."""
nonlocal protocol
nonlocal site
nonlocal runner
_, protocol = await listen
runner = web.AppRunner(app)
await runner.setup()
site = web.TCPSite(runner, config.host_ip_addr, config.listen_port)
try:
await site.start()
except OSError as error:
_LOGGER.error(
"Failed to create HTTP server at port %d: %s", config.listen_port, error
)
if protocol:
protocol.close()
else:
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, stop_emulated_hue_bridge
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_emulated_hue_bridge)
return True
class Config:
"""Hold configuration variables for the emulated hue bridge."""
def __init__(self, hass, conf):
"""Initialize the instance."""
self.hass = hass
self.type = conf.get(CONF_TYPE)
self.numbers = None
self.cached_states = {}
self._exposed_cache = {}
if self.type == TYPE_ALEXA:
_LOGGER.warning(
"Emulated Hue running in legacy mode because type has been "
"specified. More info at https://goo.gl/M6tgz8"
)
# Get the IP address that will be passed to the Echo during discovery
self.host_ip_addr = conf.get(CONF_HOST_IP)
if self.host_ip_addr is None:
self.host_ip_addr = util.get_local_ip()
_LOGGER.info(
"Listen IP address not specified, auto-detected address is %s",
self.host_ip_addr,
)
# Get the port that the Hue bridge will listen on
self.listen_port = conf.get(CONF_LISTEN_PORT)
if not isinstance(self.listen_port, int):
self.listen_port = DEFAULT_LISTEN_PORT
_LOGGER.info(
"Listen port not specified, defaulting to %s", self.listen_port
)
# Get whether or not UPNP binds to multicast address (239.255.255.250)
# or to the unicast address (host_ip_addr)
self.upnp_bind_multicast = conf.get(
CONF_UPNP_BIND_MULTICAST, DEFAULT_UPNP_BIND_MULTICAST
)
# Get domains that cause both "on" and "off" commands to map to "on"
# This is primarily useful for things like scenes or scripts, which
# don't really have a concept of being off
self.off_maps_to_on_domains = conf.get(CONF_OFF_MAPS_TO_ON_DOMAINS)
if not isinstance(self.off_maps_to_on_domains, list):
self.off_maps_to_on_domains = DEFAULT_OFF_MAPS_TO_ON_DOMAINS
# Get whether or not entities should be exposed by default, or if only
# explicitly marked ones will be exposed
self.expose_by_default = conf.get(
CONF_EXPOSE_BY_DEFAULT, DEFAULT_EXPOSE_BY_DEFAULT
)
# Get domains that are exposed by default when expose_by_default is
# True
self.exposed_domains = set(
conf.get(CONF_EXPOSED_DOMAINS, DEFAULT_EXPOSED_DOMAINS)
)
# Calculated effective advertised IP and port for network isolation
self.advertise_ip = conf.get(CONF_ADVERTISE_IP) or self.host_ip_addr
self.advertise_port = conf.get(CONF_ADVERTISE_PORT) or self.listen_port
self.entities = conf.get(CONF_ENTITIES, {})
self._entities_with_hidden_attr_in_config = {}
for entity_id in self.entities:
hidden_value = self.entities[entity_id].get(CONF_ENTITY_HIDDEN)
if hidden_value is not None:
self._entities_with_hidden_attr_in_config[entity_id] = hidden_value
# Get whether all non-dimmable lights should be reported as dimmable
# for compatibility with older installations.
self.lights_all_dimmable = conf.get(CONF_LIGHTS_ALL_DIMMABLE)
def entity_id_to_number(self, entity_id):
"""Get a unique number for the entity id."""
if self.type == TYPE_ALEXA:
return entity_id
if self.numbers is None:
self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
# Google Home
for number, ent_id in self.numbers.items():
if entity_id == ent_id:
return number
number = "1"
if self.numbers:
number = str(max(int(k) for k in self.numbers) + 1)
self.numbers[number] = entity_id
save_json(self.hass.config.path(NUMBERS_FILE), self.numbers)
return number
def number_to_entity_id(self, number):
"""Convert unique number to entity id."""
if self.type == TYPE_ALEXA:
return number
if self.numbers is None:
self.numbers = _load_json(self.hass.config.path(NUMBERS_FILE))
# Google Home
assert isinstance(number, str)
return self.numbers.get(number)
def get_entity_name(self, entity):
"""Get the name of an entity."""
if (
entity.entity_id in self.entities
and CONF_ENTITY_NAME in self.entities[entity.entity_id]
):
return self.entities[entity.entity_id][CONF_ENTITY_NAME]
return entity.attributes.get(ATTR_EMULATED_HUE_NAME, entity.name)
def is_entity_exposed(self, entity):
"""Cache determine if an entity should be exposed on the emulated bridge."""
entity_id = entity.entity_id
if entity_id not in self._exposed_cache:
self._exposed_cache[entity_id] = self._is_entity_exposed(entity)
return self._exposed_cache[entity_id]
def filter_exposed_entities(self, states):
"""Filter a list of all states down to exposed entities."""
exposed = []
for entity in states:
entity_id = entity.entity_id
if entity_id not in self._exposed_cache:
self._exposed_cache[entity_id] = self._is_entity_exposed(entity)
if self._exposed_cache[entity_id]:
exposed.append(entity)
return exposed
def _is_entity_exposed(self, entity):
"""Determine if an entity should be exposed on the emulated bridge.
Async friendly.
"""
if entity.attributes.get("view") is not None:
# Ignore entities that are views
return False
if entity.entity_id in self._entities_with_hidden_attr_in_config:
return not self._entities_with_hidden_attr_in_config[entity.entity_id]
if not self.expose_by_default:
return False
# Expose an entity if the entity's domain is exposed by default and
# the configuration doesn't explicitly exclude it from being
# exposed, or if the entity is explicitly exposed
if entity.domain in self.exposed_domains:
return True
return False
def _load_json(filename):
"""Load JSON, handling invalid syntax."""
try:
return load_json(filename)
except HomeAssistantError:
pass
return {}
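# Numbering sketch (hypothetical entity ids): starting from an empty
# NUMBERS_FILE, entity_id_to_number("light.kitchen") persists and returns "1",
# the next entity gets "2", and number_to_entity_id("1") maps back to
# "light.kitchen". Under TYPE_ALEXA both methods pass values through unchanged.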
|
import logging
from meteofrance.client import MeteoFranceClient
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE
from homeassistant.core import callback
from .const import CONF_CITY, FORECAST_MODE, FORECAST_MODE_DAILY
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
class MeteoFranceFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Meteo-France config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Init MeteoFranceFlowHandler."""
self.places = []
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return MeteoFranceOptionsFlowHandler(config_entry)
@callback
def _show_setup_form(self, user_input=None, errors=None):
"""Show the setup form to the user."""
if user_input is None:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{vol.Required(CONF_CITY, default=user_input.get(CONF_CITY, "")): str}
),
errors=errors or {},
)
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is None:
return self._show_setup_form(user_input, errors)
city = user_input[CONF_CITY] # Might be a city name or a postal code
latitude = user_input.get(CONF_LATITUDE)
longitude = user_input.get(CONF_LONGITUDE)
if not latitude:
client = MeteoFranceClient()
self.places = await self.hass.async_add_executor_job(
client.search_places, city
)
_LOGGER.debug("Places search result: %s", self.places)
if not self.places:
errors[CONF_CITY] = "empty"
return self._show_setup_form(user_input, errors)
return await self.async_step_cities()
# Check if already configured
await self.async_set_unique_id(f"{latitude}, {longitude}")
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=city,
data={CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude},
)
async def async_step_import(self, user_input):
"""Import a config entry."""
return await self.async_step_user(user_input)
async def async_step_cities(self, user_input=None):
"""Step where the user choose the city from the API search results."""
if not user_input:
if len(self.places) > 1 and self.source != SOURCE_IMPORT:
places_for_form = {}
for place in self.places:
places_for_form[_build_place_key(place)] = f"{place}"
return self.async_show_form(
step_id="cities",
data_schema=vol.Schema(
{
vol.Required(CONF_CITY): vol.All(
vol.Coerce(str), vol.In(places_for_form)
)
}
),
)
user_input = {CONF_CITY: _build_place_key(self.places[0])}
city_infos = user_input[CONF_CITY].split(";")
return await self.async_step_user(
{
CONF_CITY: city_infos[0],
CONF_LATITUDE: city_infos[1],
CONF_LONGITUDE: city_infos[2],
}
)
class MeteoFranceOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
data_schema = vol.Schema(
{
vol.Optional(
CONF_MODE,
default=self.config_entry.options.get(
CONF_MODE, FORECAST_MODE_DAILY
),
): vol.In(FORECAST_MODE)
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
def _build_place_key(place) -> str:
return f"{place};{place.latitude};{place.longitude}"
|
from unittest.mock import MagicMock
import pytest
from homeassistant import config_entries
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import discovery
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed, mock_coro
# One might consider mocking these services, but it's easy enough to just
# use what is already available.
SERVICE = "yamaha"
SERVICE_COMPONENT = "media_player"
SERVICE_NO_PLATFORM = "hass_ios"
SERVICE_NO_PLATFORM_COMPONENT = "ios"
SERVICE_INFO = {"key": "value"} # Can be anything
UNKNOWN_SERVICE = "this_service_will_never_be_supported"
BASE_CONFIG = {discovery.DOMAIN: {"ignore": [], "enable": []}}
IGNORE_CONFIG = {discovery.DOMAIN: {"ignore": [SERVICE_NO_PLATFORM]}}
@pytest.fixture(autouse=True)
def netdisco_mock():
"""Mock netdisco."""
with patch.dict("sys.modules", {"netdisco.discovery": MagicMock()}):
yield
async def mock_discovery(hass, discoveries, config=BASE_CONFIG):
"""Mock discoveries."""
with patch("homeassistant.components.zeroconf.async_get_instance"), patch(
"homeassistant.components.zeroconf.async_setup", return_value=True
):
assert await async_setup_component(hass, "discovery", config)
await hass.async_block_till_done()
await hass.async_start()
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
with patch.object(discovery, "_discover", discoveries), patch(
"homeassistant.components.discovery.async_discover", return_value=mock_coro()
) as mock_discover, patch(
"homeassistant.components.discovery.async_load_platform",
return_value=mock_coro(),
) as mock_platform:
async_fire_time_changed(hass, utcnow())
        # Work around an issue where our loop.call_soon callback does not get caught
await hass.async_block_till_done()
await hass.async_block_till_done()
return mock_discover, mock_platform
async def test_unknown_service(hass):
"""Test that unknown service is ignored."""
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [("this_service_will_never_be_supported", {"info": "some"})]
mock_discover, mock_platform = await mock_discovery(hass, discover)
assert not mock_discover.called
assert not mock_platform.called
async def test_load_platform(hass):
"""Test load a platform."""
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [(SERVICE, SERVICE_INFO)]
mock_discover, mock_platform = await mock_discovery(hass, discover)
assert not mock_discover.called
assert mock_platform.called
mock_platform.assert_called_with(
hass, SERVICE_COMPONENT, SERVICE, SERVICE_INFO, BASE_CONFIG
)
async def test_load_component(hass):
"""Test load a component."""
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [(SERVICE_NO_PLATFORM, SERVICE_INFO)]
mock_discover, mock_platform = await mock_discovery(hass, discover)
assert mock_discover.called
assert not mock_platform.called
mock_discover.assert_called_with(
hass,
SERVICE_NO_PLATFORM,
SERVICE_INFO,
SERVICE_NO_PLATFORM_COMPONENT,
BASE_CONFIG,
)
async def test_ignore_service(hass):
"""Test ignore service."""
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [(SERVICE_NO_PLATFORM, SERVICE_INFO)]
mock_discover, mock_platform = await mock_discovery(hass, discover, IGNORE_CONFIG)
assert not mock_discover.called
assert not mock_platform.called
async def test_discover_duplicates(hass):
"""Test load a component."""
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [
(SERVICE_NO_PLATFORM, SERVICE_INFO),
(SERVICE_NO_PLATFORM, SERVICE_INFO),
]
mock_discover, mock_platform = await mock_discovery(hass, discover)
assert mock_discover.called
assert mock_discover.call_count == 1
assert not mock_platform.called
mock_discover.assert_called_with(
hass,
SERVICE_NO_PLATFORM,
SERVICE_INFO,
SERVICE_NO_PLATFORM_COMPONENT,
BASE_CONFIG,
)
async def test_discover_config_flow(hass):
"""Test discovery triggering a config flow."""
discovery_info = {"hello": "world"}
def discover(netdisco, zeroconf_instance):
"""Fake discovery."""
return [("mock-service", discovery_info)]
with patch.dict(
discovery.CONFIG_ENTRY_HANDLERS, {"mock-service": "mock-component"}
), patch("homeassistant.data_entry_flow.FlowManager.async_init") as m_init:
await mock_discovery(hass, discover)
assert len(m_init.mock_calls) == 1
args, kwargs = m_init.mock_calls[0][1:]
assert args == ("mock-component",)
assert kwargs["context"]["source"] == config_entries.SOURCE_DISCOVERY
assert kwargs["data"] == discovery_info
|
import itertools
import re
import warnings
import numpy as np
import pandas as pd
import scipy.spatial
from sklearn.metrics.pairwise import cosine_similarity
from scattertext import ParsedCorpus
from scattertext.representations.Word2VecFromParsedCorpus import Word2VecDefault
class CategoryEmbeddingsResolver:
def __init__(self, corpus, term_acceptance_re=re.compile('[a-z]{3,}')):
self.corpus_ = corpus
self.category_embeddings_ = {}
self.category_word2vec_model_ = {}
self.term_acceptance_re = term_acceptance_re
def _verify_category(self, category):
if category not in self.corpus_.get_categories():
raise Exception("Category %s is not in corpus." % category)
if category in self.category_embeddings_:
raise Exception("You have already set embeddings by running set_embeddings or set_embeddings_model.")
def embed_category(self, category, model=None):
        '''
        :param category: str, name of the category to embed
        :param model: gensim word2vec.Word2Vec model, optional. If None, a
         default model is trained.
        :return: CategoryEmbeddingsResolver
        '''
self._verify_category(category)
if self.term_acceptance_re is not None:
acceptable_terms = set([t for t in self.corpus_.get_terms() if self.term_acceptance_re.match(t)])
else:
acceptable_terms = set(self.corpus_.get_terms())
trained_model = CategorySpecificWord2VecFromParsedCorpus(self.corpus_, category, model).train()
self.category_word2vec_model_[category] = trained_model
word2dwe = {word: trained_model[word] for word in trained_model.wv.vocab.keys()}
self.category_embeddings_[category] = word2dwe
return self
def embed_all_categories(self):
for category in self.corpus_.get_categories():
self.embed_category(category)
return self
class CategorySpecificCorpusAdapterForGensim(object):
@staticmethod
def get_sentences(corpus, category):
'''
Parameters
----------
corpus, ParsedCorpus
category, str
Returns
-------
iter: [sentence1word1, ...], [sentence2word1, ...]
'''
#assert isinstance(corpus, ParsedCorpus)
        return itertools.chain(*[[[t.lower_ for t in sent if not t.is_punct]
                                  for sent in doc.sents]
                                 for doc_category, doc
                                 in zip(corpus.get_category_names_by_row(), corpus.get_parsed_docs())
                                 if category == doc_category])
class CategorySpecificWord2VecFromParsedCorpus(Word2VecDefault):
def __init__(self, corpus, category, word2vec_model=None):
'''
Parameters
----------
corpus: ParsedCorpus
from which to build word2vec model
category, str
word2vec_model: word2vec.Word2Vec
Gensim instance to be used to train word2vec model
'''
try:
from gensim.models import word2vec
assert word2vec_model is None or isinstance(word2vec_model, word2vec.Word2Vec)
        except (ImportError, AssertionError):
            warnings.warn("You should really install gensim, but we're going to duck-type your model and hope it works")
#print(type(corpus))
#assert isinstance(corpus, ParsedCorpus)
self.corpus = corpus
self.category = category
self.model = self._get_word2vec_model(word2vec_model)
def train(self, epochs=2000, training_iterations=5):
'''
Parameters
----------
epochs : int
Number of epochs to train for. Default is 2000.
        training_iterations : int
            Number of times to repeat the training process. Default is 5.
Returns
-------
A trained word2vec model.
'''
self._scan_and_build_vocab()
for _ in range(training_iterations):
            self.model.train(CategorySpecificCorpusAdapterForGensim.get_sentences(self.corpus, self.category),
total_examples=self.model.corpus_count,
epochs=epochs)
return self.model
def _get_word2vec_model(self, word2vec_model):
return (self._default_word2vec_model()
if word2vec_model is None
else word2vec_model)
def _scan_and_build_vocab(self):
        try:
            self.model.scan_vocab(CategorySpecificCorpusAdapterForGensim.get_sentences(self.corpus, self.category))
        except AttributeError:
            # duck-typed or newer gensim models may not expose scan_vocab
            pass
        self.model.build_vocab(CategorySpecificCorpusAdapterForGensim.get_sentences(self.corpus, self.category))
class EmbeddingAligner(object):
def __init__(self, category_embedding_resolver, category1, category2, prefix1=None, prefix2=None):
'''
:param category_embedding_resolver: CategoryEmbeddingsResolver
:param category1: str
:param category2: str
:param prefix1: str
:param prefix2: str
'''
#assert issubclass(type(category_embedding_resolver), CategoryEmbeddingsResolver)
self.category_embedding_resolver = category_embedding_resolver
valid_categories = category_embedding_resolver.corpus_.get_categories()
assert category1 in valid_categories
assert category2 in valid_categories
self.category1 = category1
self.category2 = category2
cat1_dwe_dict = category_embedding_resolver.category_embeddings_[category1]
cat2_dwe_dict = category_embedding_resolver.category_embeddings_[category2]
self.terms = np.array(list((set(cat1_dwe_dict.keys()) & set(cat2_dwe_dict.keys()))))
self.cat1_dwe_ar = np.stack([cat1_dwe_dict[word] for word in self.terms])
self.cat2_dwe_ar = np.stack([cat2_dwe_dict[word] for word in self.terms])
        # Align the two embedding spaces with Procrustes analysis, then take
        # cosine similarities over the stacked, normalized embeddings.
        self.cat1_dwe_ar_norm, self.cat2_dwe_ar_norm, self.disparity = \
            scipy.spatial.procrustes(self.cat1_dwe_ar, self.cat2_dwe_ar)
        self.pairwise_sim = cosine_similarity(np.vstack([self.cat1_dwe_ar_norm,
                                                         self.cat2_dwe_ar_norm]))
        self.pairwise_sim_sort = np.argsort(-self.pairwise_sim, axis=1)
def distinct_prefix(x, y):
for i, (xc, yc) in enumerate(zip(x, y)):
if xc != yc:
return (x[:i + 1], y[:i + 1])
return x, y
myprefix1, myprefix2 = distinct_prefix(category1, category2)
self.prefix1 = myprefix1 if prefix1 is None else prefix1
self.prefix2 = myprefix2 if prefix2 is None else prefix2
self.labeled_terms = np.array([self.prefix1 + '_' + w for w in self.terms]
+ [self.prefix2 + '_' + w for w in self.terms])
def get_terms(self):
return self.terms
def project_separate(self, projector=None):
if projector is None:
from umap import UMAP
projector = UMAP(n_components=2, metric='cosine')
both_category_embeddings = np.vstack([self.cat1_dwe_ar_norm,
self.cat2_dwe_ar_norm])
projected_ar = projector.fit_transform(both_category_embeddings)
df = pd.DataFrame(projected_ar, columns=['x', 'y'], index=self.labeled_terms)
df['category'] = [self.category1] * len(self.terms) + [self.category2] * len(self.terms)
return df
def get_report_df(self, n_terms=5):
        counterpart_idx = np.hstack([np.arange(len(self.terms)) + len(self.terms),
                                     np.arange(len(self.terms))])
        idx = np.arange(len(self.terms))
        similarity_df = pd.DataFrame({
            'cosine_similarity': self.pairwise_sim[idx, counterpart_idx[idx]],
            'rank_' + self.prefix1: np.where(
                self.pairwise_sim_sort[idx] == counterpart_idx[idx][:, None]
            )[1],
            'rank_' + self.prefix2: np.where(
                self.pairwise_sim_sort[counterpart_idx[idx]] == idx[:, None]
            )[1],
            'context_' + self.prefix1: pd.DataFrame(
                self.labeled_terms[self.pairwise_sim_sort[idx, 1:1 + n_terms]]
            ).apply(', '.join, axis=1).values,
            'context_' + self.prefix2: pd.DataFrame(
                self.labeled_terms[self.pairwise_sim_sort[counterpart_idx[idx], 1:1 + n_terms]]
            ).apply(', '.join, axis=1).values,
},
index=self.terms
)
return pd.merge(
similarity_df
            .assign(min_rank=lambda x: np.max(x[['rank_' + self.prefix1, 'rank_' + self.prefix2]], axis=1))
.sort_values(by='min_rank', ascending=False),
self.category_embedding_resolver.corpus_
.get_term_freq_df(),
left_index=True,
right_index=True
)
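# Hypothetical end-to-end sketch (corpus and category names assumed, not from
# the source):
#   resolver = CategoryEmbeddingsResolver(corpus).embed_all_categories()
#   aligner = EmbeddingAligner(resolver, 'democrat', 'republican')
#   report = aligner.get_report_df(n_terms=5)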
|
import asyncio
import pytest
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.thread import ThreadWithException
async def test_thread_with_exception_invalid(hass):
"""Test throwing an invalid thread exception."""
finish_event = asyncio.Event()
def _do_nothing(*_):
run_callback_threadsafe(hass.loop, finish_event.set)
test_thread = ThreadWithException(target=_do_nothing)
test_thread.start()
await asyncio.wait_for(finish_event.wait(), timeout=0.1)
with pytest.raises(TypeError):
test_thread.raise_exc(_EmptyClass())
test_thread.join()
async def test_thread_not_started(hass):
"""Test throwing when the thread is not started."""
test_thread = ThreadWithException(target=lambda *_: None)
with pytest.raises(AssertionError):
test_thread.raise_exc(TimeoutError)
async def test_thread_fails_raise(hass):
"""Test throwing after already ended."""
finish_event = asyncio.Event()
def _do_nothing(*_):
run_callback_threadsafe(hass.loop, finish_event.set)
test_thread = ThreadWithException(target=_do_nothing)
test_thread.start()
await asyncio.wait_for(finish_event.wait(), timeout=0.1)
test_thread.join()
with pytest.raises(SystemError):
test_thread.raise_exc(ValueError)
class _EmptyClass:
"""An empty class."""
|
import mock
from paasta_tools.cli.cmds import metastatus
from paasta_tools.utils import SystemPaastaConfig
@mock.patch("paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True)
def test_report_cluster_status(mock_load_system_paasta_config, capfd):
cluster = "fake_cluster"
fake_system_paasta_config = SystemPaastaConfig(
{
"dashboard_links": {
"fake_cluster": {"URL": "http://paasta-fake_cluster.yelp:5050"}
}
},
"fake_directory",
)
mock_load_system_paasta_config.return_value = fake_system_paasta_config
with mock.patch(
"paasta_tools.cli.cmds.metastatus.paasta_metastatus_on_api_endpoint",
autospec=True,
) as mock_paasta_metastatus_on_api_endpoint:
mock_paasta_metastatus_on_api_endpoint.return_value = (
mock.sentinel.return_value,
"mock_status",
)
return_code = metastatus.print_cluster_status(
cluster, fake_system_paasta_config, [], verbose=0
)
mock_paasta_metastatus_on_api_endpoint.assert_called_once_with(
cluster=cluster,
system_paasta_config=fake_system_paasta_config,
groupings=[],
verbose=0,
autoscaling_info=False,
use_mesos_cache=False,
)
actual, _ = capfd.readouterr()
assert "Cluster: %s" % cluster in actual
assert "mock_status" in actual
assert return_code == mock.sentinel.return_value
def test_figure_out_clusters_to_inspect_respects_the_user():
fake_args = mock.Mock()
fake_args.clusters = "a,b,c"
fake_all_clusters = ["a", "b", "c", "d"]
assert ["a", "b", "c"] == metastatus.figure_out_clusters_to_inspect(
fake_args, fake_all_clusters
)
def test_get_cluster_dashboards():
with mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{
"dashboard_links": {
"fake_cluster": {"URL": "http://paasta-fake_cluster.yelp:5050"}
}
},
"fake_directory",
)
output_text = metastatus.get_cluster_dashboards("fake_cluster")
assert "http://paasta-fake_cluster.yelp:5050" in output_text
assert "URL: " in output_text
def test_get_cluster_dashboards_for_sharded_frameworks():
with mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{
"dashboard_links": {
"fake_cluster": {
"URL": [
"http://paasta-fake_cluster.yelp:5050",
"http://paasta-fake_cluster1.yelp:5050",
]
}
}
},
"fake_directory",
)
output_text = metastatus.get_cluster_dashboards("fake_cluster")
assert "http://paasta-fake_cluster.yelp:5050" in output_text
assert "http://paasta-fake_cluster1.yelp:5050" in output_text
assert "URL: " in output_text
def test_get_cluster_no_dashboards():
with mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{}, "fake_directory"
)
output_text = metastatus.get_cluster_dashboards("fake_cluster")
assert "No dashboards configured" in output_text
def test_get_cluster_dashboards_unknown_cluster():
with mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config:
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{
"dashboard_links": {
"another_fake_cluster": {
"URL": "http://paasta-fake_cluster.yelp:5050"
}
}
},
"fake_directory",
)
output_text = metastatus.get_cluster_dashboards("fake_cluster")
assert "No dashboards configured for fake_cluster" in output_text
def test_paasta_metastatus_returns_zero_all_clusters_ok():
args = mock.Mock(
soa_dir=mock.sentinel.soa_dir, clusters="cluster1,cluster2,cluster3"
)
with mock.patch(
"paasta_tools.cli.cmds.metastatus.list_clusters", autospec=True
) as mock_list_clusters, mock.patch(
"paasta_tools.cli.cmds.metastatus.print_cluster_status", autospec=True
) as mock_print_cluster_status, mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
):
mock_list_clusters.return_value = ["cluster1", "cluster2", "cluster3"]
mock_print_cluster_status.side_effect = [0, 0, 0]
return_code = metastatus.paasta_metastatus(args)
assert return_code == 0
assert mock_print_cluster_status.call_count == 3
def test_paasta_metastatus_returns_one_on_error():
args = mock.Mock(
soa_dir=mock.sentinel.soa_dir, clusters="cluster1,cluster2,cluster3"
)
with mock.patch(
"paasta_tools.cli.cmds.metastatus.list_clusters", autospec=True
) as mock_list_clusters, mock.patch(
"paasta_tools.cli.cmds.metastatus.print_cluster_status", autospec=True
) as mock_print_cluster_status, mock.patch(
"paasta_tools.cli.cmds.metastatus.load_system_paasta_config", autospec=True
):
mock_list_clusters.return_value = ["cluster1", "cluster2", "cluster3"]
mock_print_cluster_status.side_effect = [0, 0, 255]
return_code = metastatus.paasta_metastatus(args)
assert return_code == 1
assert mock_print_cluster_status.call_count == 3
|
import os
import re
import sys
# When perfkitbenchmarker is run with multiple processes, the output
# from each thread is interleaved in the log. This program takes a log
# file with interleaved messages and separates them back into their
# own files.
class LogDeInterlace(object):
def __init__(self, root_file_name):
self.root_file_name = root_file_name
self.map_index_to_stream = {}
def _GetRootComponents(self, fullpath):
path = os.path.dirname(fullpath)
(filename, extension) = os.path.splitext(os.path.basename(fullpath))
return (path, filename, extension)
def _CreateStreamForIndex(self, index):
(path, filename, extension) = self._GetRootComponents(self.root_file_name)
filename = os.path.join(path,
filename + '-' + str(index) + extension)
if os.path.exists(filename):
      print('Warning: file %s already exists. Log will be lost' % filename)
      return None
    print('Creating %s' % filename)
file_object = open(filename, 'w')
return file_object
def GetStreamForIndex(self, index):
if index not in self.map_index_to_stream:
self.map_index_to_stream[index] = self._CreateStreamForIndex(index)
return self.map_index_to_stream[index]
def __enter__(self):
return self
def __exit__(self, types, value, traceback):
    for file_object in self.map_index_to_stream.values():
if file_object is not None:
file_object.close()
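# Filename sketch: with root file "pkb.log", thread index 3 is written to
# "pkb-3.log" in the same directory; index 0 collects output that precedes the
# first thread marker or follows the "Benchmark run statuses:" line.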
def main(argv):
if len(argv) != 2 or argv[1] == '--help':
    print('usage: SeparateLogFileRuns <filename>')
    print('')
    print('Takes a pkb.log which was created from a single invocation of '
          'perfkitbenchmarker running multiple benchmarks. The output '
          'from each thread is written out to its own stream.')
sys.exit(1)
input_file = argv[1]
  # The threads are numbered starting at 1, so use 0 for the beginning
  # and ending stream output.
sentinel_stream = 0
with LogDeInterlace(input_file) as logs:
with open(input_file) as f:
current_stream = logs.GetStreamForIndex(sentinel_stream)
for line in f:
# matches lines like:
# 2018-02-13 22:30:41,701 6538b6ae MainThread pgbench(1/9) ...
stream_match = re.match(r'^\d\d\d\d-\d\d-\d\d .*?Thread'
r'.*?\((\d*)\/\d*\)', line)
# matches lines like (one line):
# 2018-02-14 17:59:57,297 6538b6ae MainThread pkb.py:856 INFO
# Benchmark run statuses:
end_match = re.match(r'^\d\d\d\d-\d\d-\d\d.*'
r'Benchmark run statuses:', line)
if stream_match:
stream_index = int(stream_match.group(1))
current_stream = logs.GetStreamForIndex(stream_index)
elif end_match:
current_stream = logs.GetStreamForIndex(sentinel_stream)
if current_stream is not None:
current_stream.write(line)
if __name__ == '__main__':
main(sys.argv)
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import t2t_benchmark
from perfkitbenchmarker.sample import Sample
class Tensor2TensorBenchmarkTestCase(unittest.TestCase,
test_util.SamplesTestMixin):
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testT2TTpuOutput(self):
self.maxDiff = None
path = os.path.join(
os.path.dirname(__file__), '..', 'data', 't2t_tpu_output.txt')
with open(path) as fp:
t2t_contents = fp.read()
samples = t2t_benchmark._MakeSamplesFromOutput({
'use_tpu': True
}, t2t_contents)
golden = [
Sample(
metric='Global Steps Per Second',
value=1.85777,
unit='global_steps/sec',
metadata={
'use_tpu': True,
'index': 0
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=5.06989,
unit='global_steps/sec',
metadata={
'use_tpu': True,
'index': 1
},
timestamp=0),
Sample(
metric='Examples Per Second',
value=118.897,
unit='examples/sec',
metadata={
'use_tpu': True,
'index': 0
},
timestamp=0),
Sample(
metric='Examples Per Second',
value=324.473,
unit='examples/sec',
metadata={
'use_tpu': True,
'index': 1
},
timestamp=0),
Sample(
metric='Eval Loss',
value=3.9047337,
unit='',
metadata={
'use_tpu': True,
'step': 1000
},
timestamp=0),
Sample(
metric='Accuracy',
value=32.064167,
unit='%',
metadata={
'use_tpu': True,
'step': 1000
},
timestamp=0),
Sample(
metric='Accuracy Per Sequence',
value=0.0,
unit='%',
metadata={
'use_tpu': True,
'step': 1000
},
timestamp=0),
Sample(
metric='Negative Log Perplexity',
value=-4.501835,
unit='perplexity',
metadata={
'use_tpu': True,
'step': 1000
},
timestamp=0),
Sample(
metric='Top 5 Accuracy',
value=50.96436,
unit='%',
metadata={
'use_tpu': True,
'step': 1000
},
timestamp=0),
Sample(
metric='Eval Loss',
value=3.7047337,
unit='',
metadata={
'use_tpu': True,
'step': 1200
},
timestamp=0),
Sample(
metric='Accuracy',
value=33.064167,
unit='%',
metadata={
'use_tpu': True,
'step': 1200
},
timestamp=0),
Sample(
metric='Accuracy Per Sequence',
value=0.0,
unit='%',
metadata={
'use_tpu': True,
'step': 1200
},
timestamp=0),
Sample(
metric='Negative Log Perplexity',
value=-4.101835,
unit='perplexity',
metadata={
'use_tpu': True,
'step': 1200
},
timestamp=0),
Sample(
metric='Top 5 Accuracy',
value=55.96436,
unit='%',
metadata={
'use_tpu': True,
'step': 1200
},
timestamp=0)
]
self.assertEqual(samples, golden)
@mock.patch('time.time', mock.MagicMock(return_value=0))
def testT2TGpuOutput(self):
self.maxDiff = None
path = os.path.join(
os.path.dirname(__file__), '..', 'data', 't2t_gpu_output.txt')
with open(path) as fp:
t2t_contents = fp.read()
samples = t2t_benchmark._MakeSamplesFromOutput({
'use_tpu': False
}, t2t_contents)
golden = [
Sample(
metric='Global Steps Per Second',
value=3.04983,
unit='global_steps/sec',
metadata={
'index': 0,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.12771,
unit='global_steps/sec',
metadata={
'index': 1,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.11027,
unit='global_steps/sec',
metadata={
'index': 2,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.10924,
unit='global_steps/sec',
metadata={
'index': 3,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.12186,
unit='global_steps/sec',
metadata={
'index': 4,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.08434,
unit='global_steps/sec',
metadata={
'index': 5,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.10174,
unit='global_steps/sec',
metadata={
'index': 6,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.11809,
unit='global_steps/sec',
metadata={
'index': 7,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Global Steps Per Second',
value=4.10496,
unit='global_steps/sec',
metadata={
'index': 8,
'use_tpu': False
},
timestamp=0),
Sample(
metric='Eval Loss',
value=7.2263174,
unit='',
metadata={
'use_tpu': False,
'step': 1000
},
timestamp=0),
Sample(
metric='Accuracy',
value=13.972055999999998,
unit='%',
metadata={
'use_tpu': False,
'step': 1000
},
timestamp=0),
Sample(
metric='Accuracy Per Sequence',
value=0.0,
unit='%',
metadata={
'use_tpu': False,
'step': 1000
},
timestamp=0),
Sample(
metric='Negative Log Perplexity',
value=-7.2263174,
unit='perplexity',
metadata={
'use_tpu': False,
'step': 1000
},
timestamp=0),
Sample(
metric='Top 5 Accuracy',
value=24.800399000000002,
unit='%',
metadata={
'use_tpu': False,
'step': 1000
},
timestamp=0)
]
self.assertEqual(samples, golden)
if __name__ == '__main__':
unittest.main()
|
import math
import sys
import textwrap
import requests
import qutebrowser.config.websettings
def version(ua):
"""Comparable version of a user agent."""
return tuple(int(v) for v in ua.upstream_browser_version.split('.')[:2])
def wrap(ini, sub, string):
return textwrap.wrap(string, width=80, initial_indent=ini, subsequent_indent=sub)
response = requests.get('https://raw.githubusercontent.com/Kikobeats/top-user-agents/master/index.json')
if response.status_code != 200:
print('Unable to fetch the user agent index', file=sys.stderr)
sys.exit(1)
ua_checks = {
'Win10': lambda ua: ua.os_info.startswith('Windows NT'),
'macOS': lambda ua: ua.os_info.startswith('Macintosh'),
'Linux': lambda ua: ua.os_info.startswith('X11'),
}
ua_strings = {}
ua_versions = {}
ua_names = {}
for ua_string in reversed(response.json()):
# reversed to prefer more common versions
# Filter out browsers that are not Chrome-based
parts = ua_string.split()
if not any(part.startswith("Chrome/") for part in parts):
continue
if any(part.startswith("OPR/") or part.startswith("Edg/") for part in parts):
continue
user_agent = qutebrowser.config.websettings.UserAgent.parse(ua_string)
# check which os_string conditions are met and select the most recent version
for key, check in ua_checks.items():
if check(user_agent):
v = version(user_agent)
if v >= ua_versions.get(key, (-math.inf,)):
ua_versions[key] = v
ua_strings[key] = ua_string
ua_names[key] = f'Chrome {v[0]} {key}'
for key, ua_string in ua_strings.items():
quoted_ua_string = f'"{ua_string}"'
for line in wrap(" - - ", " ", quoted_ua_string):
print(line)
for line in wrap(" - ", " ", ua_names[key]):
print(line)
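# Output sketch (hypothetical user agent and version): each entry is printed
# as indented YAML-style list items, e.g.
#   - - "Mozilla/5.0 (Windows NT 10.0; Win64; x64) ... Chrome/96.0.0.0 ..."
#     - Chrome 96 Win10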
|
import pytest
import numpy as np
from elephas.utils import functional_utils
pytest.mark.usefixtures("spark_context")
def test_add_params():
p1 = [np.ones((5, 5)) for _ in range(10)]
p2 = [np.ones((5, 5)) for _ in range(10)]
res = functional_utils.add_params(p1, p2)
assert len(res) == 10
for i in range(5):
for j in range(5):
assert res[0][i, j] == 2
def test_subtract_params():
p1 = [np.ones((5, 5)) for _ in range(10)]
p2 = [np.ones((5, 5)) for _ in range(10)]
res = functional_utils.subtract_params(p1, p2)
assert len(res) == 10
for i in range(5):
for j in range(5):
assert res[0][i, j] == 0
def test_get_neutral():
x = [np.ones((3, 4))]
res = functional_utils.get_neutral(x)
assert res[0].shape == x[0].shape
assert res[0][0, 0] == 0
def test_divide_by():
x = [np.ones((3, 4))]
res = functional_utils.divide_by(x, num_workers=10)
assert res[0].shape == x[0].shape
assert res[0][0, 0] == 0.1
|
import configparser
import logging
import os
logger = logging.getLogger(__name__)
def get_conf(path, filename, default_conf: configparser.ConfigParser) \
-> configparser.ConfigParser:
conf = configparser.ConfigParser()
conf.read_dict(default_conf)
conffn = os.path.join(path, filename)
try:
with open(conffn) as cf:
conf.read_file(cf)
except (OSError, IOError):
pass
    logger.debug('configuration resulting from merging default and %s: %s',
                 filename, {section: dict(conf[section]) for section in conf})
return conf
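# Illustrative call (hypothetical paths and sections): merge packaged defaults
# with an optional per-directory override, keeping the defaults when the
# override file is missing or unreadable.
#   defaults = configparser.ConfigParser()
#   defaults.read_dict({'core': {'verbose': '0'}})
#   conf = get_conf('/etc/myapp', 'myapp.ini', defaults)
#   verbose = conf.getint('core', 'verbose')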
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
result = tf.add(2, 3)
self.assertEqual(5, result.numpy())
def test_conv2d(self):
input = tf.random.normal([1,2,2,1])
filter = tf.random.normal([1,1,1,1])
result = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
self.assertEqual(4, len(result.shape))
def test_tf_keras(self):
x_train = np.random.random((100, 28, 28))
y_train = np.random.randint(10, size=(100, 1))
x_test = np.random.random((20, 28, 28))
y_test = np.random.randint(10, size=(20, 1))
model = tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Dense(10, activation='softmax')
])
model.compile(
optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(x_train, y_train, epochs=1)
model.evaluate(x_test, y_test)
def test_lstm(self):
x_train = np.random.random((100, 28, 28))
y_train = np.random.randint(10, size=(100, 1))
x_test = np.random.random((20, 28, 28))
y_test = np.random.randint(10, size=(20, 1))
model = tf.keras.Sequential([
tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64, input_shape=(28, 28))),
tf.keras.layers.Dense(64, activation='relu'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
model.compile(
loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(x_train, y_train, epochs=1)
model.evaluate(x_test, y_test)
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
result = tf.matmul(m1, m2)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result.numpy())
@gpu_test
def test_is_built_with_cuda(self):
self.assertTrue(tf.test.is_built_with_cuda())
@gpu_test
def test_is_gpu_available(self):
self.assertTrue(tf.test.is_gpu_available(cuda_only=True))
|
from .util import async_init_integration
async def test_automation_scenes(hass):
"""Test creation automation scenes."""
await async_init_integration(hass)
state = hass.states.get("scene.away_short")
expected_attributes = {
"attribution": "Data provided by mynexia.com",
"description": "When IFTTT activates the automation Upstairs "
"West Wing will permanently hold the heat to 63.0 "
"and cool to 80.0 AND Downstairs East Wing will "
"permanently hold the heat to 63.0 and cool to "
"79.0 AND Downstairs West Wing will permanently "
"hold the heat to 63.0 and cool to 79.0 AND "
"Upstairs West Wing will permanently hold the "
"heat to 63.0 and cool to 81.0 AND Upstairs West "
"Wing will change Fan Mode to Auto AND Downstairs "
"East Wing will change Fan Mode to Auto AND "
"Downstairs West Wing will change Fan Mode to "
"Auto AND Activate the mode named 'Away Short' "
"AND Master Suite will permanently hold the heat "
"to 63.0 and cool to 79.0 AND Master Suite will "
"change Fan Mode to Auto",
"friendly_name": "Away Short",
"icon": "mdi:script-text-outline",
}
    # Only test for a subset of attributes in case
    # HA changes the implementation and new attributes appear
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
state = hass.states.get("scene.power_outage")
expected_attributes = {
"attribution": "Data provided by mynexia.com",
"description": "When IFTTT activates the automation Upstairs "
"West Wing will permanently hold the heat to 55.0 "
"and cool to 90.0 AND Downstairs East Wing will "
"permanently hold the heat to 55.0 and cool to "
"90.0 AND Downstairs West Wing will permanently "
"hold the heat to 55.0 and cool to 90.0 AND "
"Activate the mode named 'Power Outage'",
"friendly_name": "Power Outage",
"icon": "mdi:script-text-outline",
}
    # Only test for a subset of attributes in case
    # HA changes the implementation and new attributes appear
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
state = hass.states.get("scene.power_restored")
expected_attributes = {
"attribution": "Data provided by mynexia.com",
"description": "When IFTTT activates the automation Upstairs "
"West Wing will Run Schedule AND Downstairs East "
"Wing will Run Schedule AND Downstairs West Wing "
"will Run Schedule AND Activate the mode named "
"'Home'",
"friendly_name": "Power Restored",
"icon": "mdi:script-text-outline",
}
    # Only test for a subset of attributes in case
    # HA changes the implementation and new attributes appear
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
|
class HashedSeq(list):
"""Hashed Sequence.
Type used for hash() to make sure the hash is not generated
multiple times.
"""
__slots__ = 'hashvalue'
def __init__(self, *seq):
self[:] = seq
self.hashvalue = hash(seq)
def __hash__(self):
return self.hashvalue
def eqhash(o):
"""Call ``obj.__eqhash__``."""
try:
return o.__eqhash__()
except AttributeError:
return hash(o)
class EqualityDict(dict):
"""Dict using the eq operator for keying."""
def __getitem__(self, key):
h = eqhash(key)
if h not in self:
return self.__missing__(key)
return dict.__getitem__(self, h)
def __setitem__(self, key, value):
return dict.__setitem__(self, eqhash(key), value)
def __delitem__(self, key):
return dict.__delitem__(self, eqhash(key))
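# Minimal usage sketch (assumed behaviour, not from the source): keys that
# hash alike via eqhash() share a single slot.
if __name__ == '__main__':
    d = EqualityDict()
    d[HashedSeq(1, 2, 3)] = 'first'
    d[HashedSeq(1, 2, 3)] = 'second'  # same hashvalue -> same slot, overwritten
    assert d[HashedSeq(1, 2, 3)] == 'second'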
|
from unittest import mock
import pytest
from vcr.request import Request
from vcr.serialize import deserialize, serialize
from vcr.serializers import yamlserializer, jsonserializer, compat
def test_deserialize_old_yaml_cassette():
with open("tests/fixtures/migration/old_cassette.yaml", "r") as f:
with pytest.raises(ValueError):
deserialize(f.read(), yamlserializer)
def test_deserialize_old_json_cassette():
with open("tests/fixtures/migration/old_cassette.json", "r") as f:
with pytest.raises(ValueError):
deserialize(f.read(), jsonserializer)
def test_deserialize_new_yaml_cassette():
with open("tests/fixtures/migration/new_cassette.yaml", "r") as f:
deserialize(f.read(), yamlserializer)
def test_deserialize_new_json_cassette():
with open("tests/fixtures/migration/new_cassette.json", "r") as f:
deserialize(f.read(), jsonserializer)
REQBODY_TEMPLATE = """\
interactions:
- request:
body: {req_body}
headers:
Content-Type: [application/x-www-form-urlencoded]
Host: [httpbin.org]
method: POST
uri: http://httpbin.org/post
response:
body: {{string: ""}}
headers:
content-length: ['0']
content-type: [application/json]
status: {{code: 200, message: OK}}
"""
# A cassette generated under Python 2 stores the request body as a string,
# but the same cassette generated under Python 3 stores it as "!!binary".
# Make sure we accept both forms, regardless of whether we're running under
# Python 2 or 3.
@pytest.mark.parametrize(
"req_body, expect",
[
# Cassette written under Python 2 (pure ASCII body)
("x=5&y=2", b"x=5&y=2"),
# Cassette written under Python 3 (pure ASCII body)
("!!binary |\n eD01Jnk9Mg==", b"x=5&y=2"),
# Request body has non-ASCII chars (x=föo&y=2), encoded in UTF-8.
('!!python/str "x=f\\xF6o&y=2"', b"x=f\xc3\xb6o&y=2"),
("!!binary |\n eD1mw7ZvJnk9Mg==", b"x=f\xc3\xb6o&y=2"),
# Same request body, this time encoded in UTF-16. In this case, we
# write the same YAML file under both Python 2 and 3, so there's only
# one test case here.
(
"!!binary |\n //54AD0AZgD2AG8AJgB5AD0AMgA=",
b"\xff\xfex\x00=\x00f\x00\xf6\x00o\x00&\x00y\x00=\x002\x00",
),
# Same again, this time encoded in ISO-8859-1.
("!!binary |\n eD1m9m8meT0y", b"x=f\xf6o&y=2"),
],
)
def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
cfile = tmpdir.join("test_cassette.yaml")
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
with open(str(cfile)) as f:
(requests, responses) = deserialize(f.read(), yamlserializer)
assert requests[0].body == expect
@mock.patch.object(
jsonserializer.json,
"dumps",
side_effect=UnicodeDecodeError("utf-8", b"unicode error in serialization", 0, 10, "blew up"),
)
def test_serialize_constructs_UnicodeDecodeError(mock_dumps):
with pytest.raises(UnicodeDecodeError):
jsonserializer.serialize({})
def test_serialize_empty_request():
request = Request(method="POST", uri="http://localhost/", body="", headers={})
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
def test_serialize_json_request():
request = Request(method="POST", uri="http://localhost/", body="{'hello': 'world'}", headers={})
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
def test_serialize_binary_request():
msg = "Does this HTTP interaction contain binary data?"
request = Request(method="POST", uri="http://localhost/", body=b"\x8c", headers={})
try:
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
except (UnicodeDecodeError, TypeError) as exc:
assert msg in str(exc)
def test_deserialize_no_body_string():
data = {"body": {"string": None}}
output = compat.convert_to_bytes(data)
assert data == output
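# A minimal sketch of the body conversion the last test exercises (hedged:
# `convert_to_bytes` comes from vcr.serializers.compat as imported above, and
# the field layout is an assumption based only on the tests in this module):
#
#   data = {"body": {"string": None}}
#   compat.convert_to_bytes(data)  # -> {"body": {"string": None}}, unchanged
#
# A None body is passed through untouched, so cassettes recorded without a
# request or response body deserialize cleanly.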
|
from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType
from . import STATE_CLEANING, STATE_ERROR, STATE_RETURNING
@callback
def async_describe_on_off_states(
hass: HomeAssistantType, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
registry.on_off_states(
{STATE_CLEANING, STATE_ON, STATE_RETURNING, STATE_ERROR}, STATE_OFF
)
|
from datetime import timedelta
import logging
from aiohttp import ClientConnectorError
import async_timeout
from pygti.exceptions import InvalidAuth
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_PROBLEM,
BinarySensorEntity,
)
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import ATTRIBUTION, CONF_STATION, DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the binary_sensor platform."""
hub = hass.data[DOMAIN][entry.entry_id]
station_name = entry.data[CONF_STATION]["name"]
station = entry.data[CONF_STATION]
def get_elevator_entities_from_station_information(
station_name, station_information
):
"""Convert station information into a list of elevators."""
elevators = {}
if station_information is None:
return {}
for partial_station in station_information.get("partialStations", []):
for elevator in partial_station.get("elevators", []):
state = elevator.get("state") != "READY"
available = elevator.get("state") != "UNKNOWN"
label = elevator.get("label")
description = elevator.get("description")
if label is not None:
name = f"Elevator {label} at {station_name}"
else:
name = f"Unknown elevator at {station_name}"
if description is not None:
name += f" ({description})"
lines = elevator.get("lines")
idx = f"{station_name}-{label}-{lines}"
elevators[idx] = {
"state": state,
"name": name,
"available": available,
"attributes": {
"cabin_width": elevator.get("cabinWidth"),
"cabin_length": elevator.get("cabinLength"),
"door_width": elevator.get("doorWidth"),
"elevator_type": elevator.get("elevatorType"),
"button_type": elevator.get("buttonType"),
"cause": elevator.get("cause"),
"lines": lines,
ATTR_ATTRIBUTION: ATTRIBUTION,
},
}
return elevators
async def async_update_data():
"""Fetch data from API endpoint.
        This is the place to pre-process the data into lookup tables
        so entities can quickly look up their data.
"""
payload = {"station": station}
try:
async with async_timeout.timeout(10):
return get_elevator_entities_from_station_information(
station_name, await hub.gti.stationInformation(payload)
)
except InvalidAuth as err:
raise UpdateFailed(f"Authentication failed: {err}") from err
except ClientConnectorError as err:
raise UpdateFailed(f"Network not available: {err}") from err
except Exception as err: # pylint: disable=broad-except
raise UpdateFailed(f"Error occurred while fetching data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
# Name of the data. For logging purposes.
name="hvv_departures.binary_sensor",
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(hours=1),
)
# Fetch initial data so we have data when entities subscribe
await coordinator.async_refresh()
async_add_entities(
HvvDepartureBinarySensor(coordinator, idx, entry)
for (idx, ent) in coordinator.data.items()
)
class HvvDepartureBinarySensor(CoordinatorEntity, BinarySensorEntity):
"""HVVDepartureBinarySensor class."""
def __init__(self, coordinator, idx, config_entry):
"""Initialize."""
super().__init__(coordinator)
self.coordinator = coordinator
self.idx = idx
self.config_entry = config_entry
@property
def is_on(self):
"""Return entity state."""
return self.coordinator.data[self.idx]["state"]
@property
def should_poll(self):
"""No need to poll. Coordinator notifies entity of updates."""
return False
@property
def available(self):
"""Return if entity is available."""
return (
self.coordinator.last_update_success
and self.coordinator.data[self.idx]["available"]
)
@property
def device_info(self):
"""Return the device info for this sensor."""
return {
"identifiers": {
(
DOMAIN,
self.config_entry.entry_id,
self.config_entry.data[CONF_STATION]["id"],
self.config_entry.data[CONF_STATION]["type"],
)
},
"name": f"Departures at {self.config_entry.data[CONF_STATION]['name']}",
"manufacturer": MANUFACTURER,
}
@property
def name(self):
"""Return the name of the sensor."""
return self.coordinator.data[self.idx]["name"]
@property
def unique_id(self):
"""Return a unique ID to use for this sensor."""
return self.idx
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_PROBLEM
@property
def device_state_attributes(self):
"""Return the state attributes."""
if not (
self.coordinator.last_update_success
and self.coordinator.data[self.idx]["available"]
):
return None
return {
k: v
for k, v in self.coordinator.data[self.idx]["attributes"].items()
if v is not None
}
async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
async def async_update(self):
"""Update the entity.
Only used by the generic entity update service.
"""
await self.coordinator.async_request_refresh()
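# For reference, a hedged sketch of the shape `coordinator.data` holds after
# `get_elevator_entities_from_station_information` runs (field values are
# illustrative, not taken from a real GTI response):
#
#   {
#       "Hauptbahnhof-A-['U1']": {          # f"{station_name}-{label}-{lines}"
#           "state": False,        # True when the elevator is not READY
#           "name": "Elevator A at Hauptbahnhof",
#           "available": True,     # False when the elevator state is UNKNOWN
#           "attributes": {"cabin_width": 110, "lines": ["U1"], ...},
#       },
#   }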
|
from unittest import mock
import pytest
import voluptuous as vol
import homeassistant.components.statsd as statsd
from homeassistant.const import EVENT_STATE_CHANGED, STATE_OFF, STATE_ON
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
@pytest.fixture
def mock_client():
"""Pytest fixture for statsd library."""
with patch("statsd.StatsClient") as mock_client:
yield mock_client.return_value
def test_invalid_config():
"""Test configuration with defaults."""
config = {"statsd": {"host1": "host1"}}
with pytest.raises(vol.Invalid):
statsd.CONFIG_SCHEMA(None)
with pytest.raises(vol.Invalid):
statsd.CONFIG_SCHEMA(config)
async def test_statsd_setup_full(hass):
"""Test setup with all data."""
config = {"statsd": {"host": "host", "port": 123, "rate": 1, "prefix": "foo"}}
hass.bus.listen = MagicMock()
with patch("statsd.StatsClient") as mock_init:
assert await async_setup_component(hass, statsd.DOMAIN, config)
assert mock_init.call_count == 1
assert mock_init.call_args == mock.call(host="host", port=123, prefix="foo")
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
async def test_statsd_setup_defaults(hass):
"""Test setup with defaults."""
config = {"statsd": {"host": "host"}}
config["statsd"][statsd.CONF_PORT] = statsd.DEFAULT_PORT
config["statsd"][statsd.CONF_PREFIX] = statsd.DEFAULT_PREFIX
hass.bus.listen = MagicMock()
with patch("statsd.StatsClient") as mock_init:
assert await async_setup_component(hass, statsd.DOMAIN, config)
assert mock_init.call_count == 1
assert mock_init.call_args == mock.call(host="host", port=8125, prefix="hass")
assert hass.bus.listen.called
async def test_event_listener_defaults(hass, mock_client):
"""Test event listener."""
config = {"statsd": {"host": "host", "value_mapping": {"custom": 3}}}
config["statsd"][statsd.CONF_RATE] = statsd.DEFAULT_RATE
hass.bus.listen = MagicMock()
await async_setup_component(hass, statsd.DOMAIN, config)
assert hass.bus.listen.called
handler_method = hass.bus.listen.call_args_list[0][0][1]
valid = {"1": 1, "1.0": 1.0, "custom": 3, STATE_ON: 1, STATE_OFF: 0}
for in_, out in valid.items():
state = MagicMock(state=in_, attributes={"attribute key": 3.2})
handler_method(MagicMock(data={"new_state": state}))
mock_client.gauge.assert_has_calls(
[mock.call(state.entity_id, out, statsd.DEFAULT_RATE)]
)
mock_client.gauge.reset_mock()
assert mock_client.incr.call_count == 1
assert mock_client.incr.call_args == mock.call(
state.entity_id, rate=statsd.DEFAULT_RATE
)
mock_client.incr.reset_mock()
for invalid in ("foo", "", object):
handler_method(
MagicMock(data={"new_state": ha.State("domain.test", invalid, {})})
)
assert not mock_client.gauge.called
assert mock_client.incr.called
async def test_event_listener_attr_details(hass, mock_client):
"""Test event listener."""
config = {"statsd": {"host": "host", "log_attributes": True}}
config["statsd"][statsd.CONF_RATE] = statsd.DEFAULT_RATE
hass.bus.listen = MagicMock()
await async_setup_component(hass, statsd.DOMAIN, config)
assert hass.bus.listen.called
handler_method = hass.bus.listen.call_args_list[0][0][1]
valid = {"1": 1, "1.0": 1.0, STATE_ON: 1, STATE_OFF: 0}
for in_, out in valid.items():
state = MagicMock(state=in_, attributes={"attribute key": 3.2})
handler_method(MagicMock(data={"new_state": state}))
mock_client.gauge.assert_has_calls(
[
mock.call("%s.state" % state.entity_id, out, statsd.DEFAULT_RATE),
mock.call(
"%s.attribute_key" % state.entity_id, 3.2, statsd.DEFAULT_RATE
),
]
)
mock_client.gauge.reset_mock()
assert mock_client.incr.call_count == 1
assert mock_client.incr.call_args == mock.call(
state.entity_id, rate=statsd.DEFAULT_RATE
)
mock_client.incr.reset_mock()
for invalid in ("foo", "", object):
handler_method(
MagicMock(data={"new_state": ha.State("domain.test", invalid, {})})
)
assert not mock_client.gauge.called
assert mock_client.incr.called
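# The configurations exercised above correspond to a hedged configuration.yaml
# sketch like the following (keys taken from the tests in this module; values
# are illustrative):
#
#   statsd:
#     host: localhost
#     port: 8125
#     prefix: hass
#     rate: 1
#     log_attributes: true
#     value_mapping:
#       custom: 3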
|
from homeassistant.components.water_heater import (
SUPPORT_AWAY_MODE,
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
SUPPORT_FLAGS_HEATER = (
SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE | SUPPORT_AWAY_MODE
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo water_heater devices."""
async_add_entities(
[
DemoWaterHeater("Demo Water Heater", 119, TEMP_FAHRENHEIT, False, "eco"),
DemoWaterHeater("Demo Water Heater Celsius", 45, TEMP_CELSIUS, True, "eco"),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoWaterHeater(WaterHeaterEntity):
"""Representation of a demo water_heater device."""
def __init__(
self, name, target_temperature, unit_of_measurement, away, current_operation
):
"""Initialize the water_heater device."""
self._name = name
self._support_flags = SUPPORT_FLAGS_HEATER
if target_temperature is not None:
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE
if away is not None:
self._support_flags = self._support_flags | SUPPORT_AWAY_MODE
if current_operation is not None:
self._support_flags = self._support_flags | SUPPORT_OPERATION_MODE
self._target_temperature = target_temperature
self._unit_of_measurement = unit_of_measurement
self._away = away
self._current_operation = current_operation
self._operation_list = [
"eco",
"electric",
"performance",
"high_demand",
"heat_pump",
"gas",
"off",
]
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the water_heater device."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def current_operation(self):
"""Return current operation ie. heat, cool, idle."""
return self._current_operation
@property
def operation_list(self):
"""Return the list of available operation modes."""
return self._operation_list
@property
def is_away_mode_on(self):
"""Return if away mode is on."""
return self._away
def set_temperature(self, **kwargs):
"""Set new target temperatures."""
self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
self.schedule_update_ha_state()
def set_operation_mode(self, operation_mode):
"""Set new operation mode."""
self._current_operation = operation_mode
self.schedule_update_ha_state()
def turn_away_mode_on(self):
"""Turn away mode on."""
self._away = True
self.schedule_update_ha_state()
def turn_away_mode_off(self):
"""Turn away mode off."""
self._away = False
self.schedule_update_ha_state()
|
from datetime import timedelta
import logging
from travispy import TravisPy
from travispy.errors import TravisError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_MONITORED_CONDITIONS,
CONF_SCAN_INTERVAL,
TIME_SECONDS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Information provided by https://travis-ci.org/"
CONF_BRANCH = "branch"
CONF_REPOSITORY = "repository"
DEFAULT_BRANCH_NAME = "master"
SCAN_INTERVAL = timedelta(seconds=30)
# sensor_type [ description, unit, icon ]
SENSOR_TYPES = {
"last_build_id": ["Last Build ID", "", "mdi:card-account-details"],
"last_build_duration": ["Last Build Duration", TIME_SECONDS, "mdi:timelapse"],
"last_build_finished_at": ["Last Build Finished At", "", "mdi:timetable"],
"last_build_started_at": ["Last Build Started At", "", "mdi:timetable"],
"last_build_state": ["Last Build State", "", "mdi:github"],
"state": ["State", "", "mdi:github"],
}
NOTIFICATION_ID = "travisci"
NOTIFICATION_TITLE = "Travis CI Sensor Setup"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Required(CONF_BRANCH, default=DEFAULT_BRANCH_NAME): cv.string,
vol.Optional(CONF_REPOSITORY, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Travis CI sensor."""
token = config.get(CONF_API_KEY)
repositories = config.get(CONF_REPOSITORY)
branch = config.get(CONF_BRANCH)
try:
travis = TravisPy.github_auth(token)
user = travis.user()
except TravisError as ex:
_LOGGER.error("Unable to connect to Travis CI service: %s", str(ex))
hass.components.persistent_notification.create(
"Error: {}<br />"
"You will need to restart hass after fixing."
"".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
sensors = []
    # If no specific repository was selected, show all repositories
    # associated with the user.
if not repositories:
all_repos = travis.repos(member=user.login)
repositories = [repo.slug for repo in all_repos]
for repo in repositories:
if "/" not in repo:
repo = f"{user.login}/{repo}"
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
sensors.append(TravisCISensor(travis, repo, user, branch, sensor_type))
add_entities(sensors, True)
return True
class TravisCISensor(Entity):
"""Representation of a Travis CI sensor."""
def __init__(self, data, repo_name, user, branch, sensor_type):
"""Initialize the sensor."""
self._build = None
self._sensor_type = sensor_type
self._data = data
self._repo_name = repo_name
self._user = user
self._branch = branch
self._state = None
self._name = "{} {}".format(self._repo_name, SENSOR_TYPES[self._sensor_type][0])
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return SENSOR_TYPES[self._sensor_type][1]
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
if self._build and self._state is not None:
if self._user and self._sensor_type == "state":
attrs["Owner Name"] = self._user.name
attrs["Owner Email"] = self._user.email
else:
attrs["Committer Name"] = self._build.commit.committer_name
attrs["Committer Email"] = self._build.commit.committer_email
attrs["Commit Branch"] = self._build.commit.branch
attrs["Committed Date"] = self._build.commit.committed_at
attrs["Commit SHA"] = self._build.commit.sha
return attrs
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return SENSOR_TYPES[self._sensor_type][2]
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Updating sensor %s", self._name)
repo = self._data.repo(self._repo_name)
self._build = self._data.build(repo.last_build_id)
if self._build:
if self._sensor_type == "state":
branch_stats = self._data.branch(self._branch, self._repo_name)
self._state = branch_stats.state
else:
param = self._sensor_type.replace("last_build_", "")
self._state = getattr(self._build, param)
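# A hedged configuration.yaml sketch for this platform, using only the keys
# declared in PLATFORM_SCHEMA above (values are placeholders, and the platform
# name "travisci" is an assumption about how this module is registered):
#
#   sensor:
#     - platform: travisci
#       api_key: YOUR_TRAVIS_CI_TOKEN
#       branch: master
#       repository:
#         - my_org/my_repo
#       monitored_conditions:
#         - last_build_state
#         - state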
|
from homeassistant.components import binary_sensor
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.helpers.event as evt
from .const import DATA_REMOVE_DISCOVER_COMPONENT, DOMAIN as TASMOTA_DOMAIN
from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW
from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Tasmota binary sensor dynamically through discovery."""
@callback
def async_discover(tasmota_entity, discovery_hash):
"""Discover and add a Tasmota binary sensor."""
async_add_entities(
[
TasmotaBinarySensor(
tasmota_entity=tasmota_entity, discovery_hash=discovery_hash
)
]
)
hass.data[
DATA_REMOVE_DISCOVER_COMPONENT.format(binary_sensor.DOMAIN)
] = async_dispatcher_connect(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format(binary_sensor.DOMAIN, TASMOTA_DOMAIN),
async_discover,
)
class TasmotaBinarySensor(
TasmotaAvailability,
TasmotaDiscoveryUpdate,
BinarySensorEntity,
):
"""Representation a Tasmota binary sensor."""
def __init__(self, **kwds):
"""Initialize the Tasmota binary sensor."""
self._delay_listener = None
self._state = None
super().__init__(
discovery_update=self.discovery_update,
**kwds,
)
@callback
def off_delay_listener(self, now):
"""Switch device off after a delay."""
self._delay_listener = None
self._state = False
self.async_write_ha_state()
@callback
def state_updated(self, state, **kwargs):
"""Handle state updates."""
self._state = state
if self._delay_listener is not None:
self._delay_listener()
self._delay_listener = None
off_delay = self._tasmota_entity.off_delay
if self._state and off_delay is not None:
self._delay_listener = evt.async_call_later(
self.hass, off_delay, self.off_delay_listener
)
self.async_write_ha_state()
@property
def force_update(self):
"""Force update."""
return True
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
|
import sys
import threading
from .shcommon import _SYS_STDIN, _SYS_STDOUT, _SYS_STDERR
from .shthreads import ShBaseThread
class ShStdinWrapper(object):
def __getattribute__(self, item):
thread = threading.currentThread()
if isinstance(thread, ShBaseThread):
return getattr(thread.state.sys_stdin, item)
else:
return getattr(_SYS_STDIN, item)
class ShStdoutWrapper(object):
def __getattribute__(self, item):
thread = threading.currentThread()
if isinstance(thread, ShBaseThread):
return getattr(thread.state.sys_stdout, item)
else:
return getattr(_SYS_STDOUT, item)
class ShStderrWrapper(object):
def __getattribute__(self, item):
thread = threading.currentThread()
if isinstance(thread, ShBaseThread):
return getattr(thread.state.sys_stderr, item)
else:
return getattr(_SYS_STDERR, item)
stdinWrapper = ShStdinWrapper()
stdoutWrapper = ShStdoutWrapper()
stderrWrapper = ShStderrWrapper()
def enable():
sys.stdin = stdinWrapper
sys.stdout = stdoutWrapper
sys.stderr = stderrWrapper
def disable():
sys.stdin = _SYS_STDIN
sys.stdout = _SYS_STDOUT
sys.stderr = _SYS_STDERR
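# Usage sketch (hedged): installing and removing the wrappers around the
# interpreter-wide streams.
#
#   enable()          # sys.stdin/stdout/stderr now dispatch per shell thread
#   print('hello')    # routed to the current ShBaseThread's stdout, or to the
#                     # saved _SYS_STDOUT when called outside a shell thread
#   disable()         # restore the original streams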
|
import re
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import docker
flags.DEFINE_string('cloudsuite_data_caching_memcached_flags',
'-t 1 -m 2048 -n 550',
'Flags to be given to memcached.')
flags.DEFINE_integer('cloudsuite_data_caching_rps',
18000,
'Number of requests per second.')
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'cloudsuite_data_caching'
BENCHMARK_CONFIG = """
cloudsuite_data_caching:
description: >
Runs Cloudsuite3.0 Data Caching benchmark.
vm_groups:
server:
vm_spec: *default_single_core
client:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Install docker. Pull the required images from DockerHub. Create datasets.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
server_vm = benchmark_spec.vm_groups['server'][0]
client_vm = benchmark_spec.vm_groups['client'][0]
# Make sure docker is installed on all VMs.
for vm in (server_vm, client_vm):
if not docker.IsInstalled(vm):
vm.Install('docker')
# Prepare and start the server VM.
server_vm.Install('cloudsuite/data-caching:server')
server_vm.RemoteCommand("echo '%s dc-client' | sudo tee -a /etc/hosts >"
" /dev/null" % client_vm.internal_ip)
server_vm.RemoteCommand('sudo docker run --name dc-server --net host -d '
'cloudsuite/data-caching:server %s' %
FLAGS.cloudsuite_data_caching_memcached_flags)
# Prepare the client.
client_vm.Install('cloudsuite/data-caching:client')
client_vm.RemoteCommand("echo '%s dc-server' | sudo tee -a /etc/hosts >"
" /dev/null" % server_vm.internal_ip)
def _ParseOutput(output_str):
  """Extract samples from the last four lines of the client output."""
  numbers = [float(f) for f in re.findall(r"([-+]?\d*\.\d+|\d+)",
                                          " ".join(output_str.splitlines(True)[-4:]))]
results = []
results.append(sample.Sample("Requests per second",
numbers[1], "req/s"))
results.append(sample.Sample("Average latency",
numbers[7], "ms"))
results.append(sample.Sample("90th percentile latency",
numbers[8], "ms"))
results.append(sample.Sample("95th percentile latency",
numbers[9], "ms"))
results.append(sample.Sample("99th percentile latency",
numbers[10], "ms"))
req_rems = numbers[15:-1]
results.append(sample.Sample("Average outstanding requests per requester",
sum(req_rems) / len(req_rems), "reqs"))
return results
def Run(benchmark_spec):
"""Run the data-caching benchmark.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
client_vm = benchmark_spec.vm_groups['client'][0]
benchmark_cmd = ('sudo docker run --rm --name dc-client --net host'
' cloudsuite/data-caching:client -rps %d' %
FLAGS.cloudsuite_data_caching_rps)
stdout, _ = client_vm.RemoteCommand(benchmark_cmd, should_log=True)
return _ParseOutput(stdout)
def Cleanup(benchmark_spec):
"""Stop and remove docker containers. Remove images.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
server_vm = benchmark_spec.vm_groups['server'][0]
client_vm = benchmark_spec.vm_groups['client'][0]
server_vm.RemoteCommand('sudo docker stop dc-server')
server_vm.RemoteCommand('sudo docker rm dc-server')
client_vm.RemoteCommand('sudo docker stop dc-client')
client_vm.RemoteCommand('sudo docker rm dc-client')
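# A hedged invocation sketch, assuming the standard PerfKit Benchmarker CLI
# entry point (./pkb.py) and using the flags defined at the top of this module:
#
#   ./pkb.py --benchmarks=cloudsuite_data_caching \
#            --cloudsuite_data_caching_rps=18000 \
#            --cloudsuite_data_caching_memcached_flags='-t 1 -m 2048 -n 550'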
|
import copy
import logging
import multiprocessing
import string
from pkg_resources import resource_string
from yandextank.common.util import AddressWizard
from ...stepper import StepperWrapper
from ...stepper.util import parse_duration
logger = logging.getLogger(__name__)
class PhantomConfig:
""" config file generator """
OPTION_PHOUT = "phout_file"
def __init__(self, core, cfg, stat_log):
self.core = core
self.cfg = cfg
self.streams = []
# common
self.timeout = 11000
self.answ_log = None
self.answ_log_level = None
self._phout_file = None
self.stat_log = stat_log
self.phantom_log = None
self.phantom_start_time = None
self.phantom_modules_path = None
self.threads = None
self.additional_libs = None
self.enum_ammo = False
self._config_file = None
@staticmethod
def get_available_options():
opts = [
"threads",
"phantom_modules_path",
"additional_libs",
"writelog",
"enum_ammo",
"timeout",
]
opts += StreamConfig.get_available_options()
return opts
@property
def phout_file(self):
if not self._phout_file:
self._phout_file = self.cfg['phout_file'] or self.core.mkstemp(".log", "phout_")
return self._phout_file
def read_config(self):
""" Read phantom tool specific options """
self.threads = self.cfg["threads"] or str(int(multiprocessing.cpu_count() / 2) + 1)
self.phantom_modules_path = self.cfg["phantom_modules_path"]
self.additional_libs = ' '.join(self.cfg["additional_libs"])
self.answ_log_level = self.cfg["writelog"]
if self.answ_log_level.lower() in ['0', 'false']:
self.answ_log_level = 'none'
elif self.answ_log_level.lower() in ['1', 'true']:
self.answ_log_level = 'all'
self.timeout = parse_duration(self.cfg["timeout"])
if self.timeout > 120000:
logger.warning(
"You've set timeout over 2 minutes."
" Are you a functional tester?")
self.answ_log = self.core.mkstemp(".log", "answ_")
self.core.add_artifact_file(self.answ_log)
self.core.add_artifact_file(self.phout_file)
self.core.add_artifact_file(self.stat_log)
self.phantom_log = self.core.mkstemp(".log", "phantom_")
self.core.add_artifact_file(self.phantom_log)
main_stream = StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, self.cfg, True)
self.streams.append(main_stream)
for section in self.multi():
self.streams.append(
StreamConfig(
self.core,
len(self.streams), self.phout_file, self.answ_log,
self.answ_log_level, self.timeout, section))
for stream in self.streams:
stream.read_config()
if any(stream.ssl for stream in self.streams):
self.additional_libs += ' ssl io_benchmark_method_stream_transport_ssl'
def multi(self):
return (dict(self.cfg, **section) for section in self.cfg['multi'])
@property
def config_file(self):
if self._config_file is None:
self._config_file = self.compose_config()
return self._config_file
def compose_config(self):
""" Generate phantom tool run config """
streams_config = ''
stat_benchmarks = ''
for stream in self.streams:
streams_config += stream.compose_config()
if not stream.is_main:
stat_benchmarks += " " + "benchmark_io%s" % stream.sequence_no
kwargs = {}
kwargs['threads'] = self.threads
kwargs['phantom_log'] = self.phantom_log
kwargs['stat_log'] = self.stat_log
kwargs['benchmarks_block'] = streams_config
kwargs['stat_benchmarks'] = stat_benchmarks
kwargs['additional_libs'] = self.additional_libs
kwargs['phantom_modules_path'] = self.phantom_modules_path
filename = self.core.mkstemp(".conf", "phantom_")
self.core.add_artifact_file(filename)
logger.debug("Generating phantom config: %s", filename)
template_str = resource_string(__name__, "config/phantom.conf.tpl").decode('utf8')
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
with open(filename, 'w') as conffile:
conffile.write(config)
return filename
def set_timeout(self, timeout):
""" pass timeout to all streams """
for stream in self.streams:
stream.timeout = timeout
def get_info(self):
""" get merged info about phantom conf """
result = copy.copy(self.streams[0])
result.stat_log = self.stat_log
result.steps = []
result.ammo_file = ''
result.rps_schedule = None
result.ammo_count = 0
result.duration = 0
result.instances = 0
result.loadscheme = []
result.loop_count = 0
for stream in self.streams:
sec_no = 0
logger.debug("Steps: %s", stream.stepper_wrapper.steps)
for item in stream.stepper_wrapper.steps:
for x in range(0, int(item[1])):
if len(result.steps) > sec_no:
result.steps[sec_no][0] += item[0]
else:
result.steps.append([item[0], 1])
sec_no += 1
if result.rps_schedule:
result.rps_schedule = []
else:
result.rps_schedule = stream.stepper_wrapper.loadscheme
if result.loadscheme:
result.loadscheme = ''
else:
# FIXME: add formatted load scheme for server:
# <step_size,step_type,first_rps,last_rps,original_step_params>
# as a string
result.loadscheme = ''
if result.loop_count:
result.loop_count = '0'
else:
result.loop_count = stream.stepper_wrapper.loop_count
result.ammo_file += '{} '.format(stream.stepper_wrapper.ammo_file)
result.ammo_count += stream.stepper_wrapper.ammo_count
result.duration = max(
result.duration, stream.stepper_wrapper.duration)
result.instances += stream.instances
if not result.ammo_count:
raise ValueError("Total ammo count cannot be zero")
return result
class StreamConfig:
""" each test stream's config """
OPTION_INSTANCES_LIMIT = 'instances'
def __init__(self, core, sequence, phout, answ, answ_level, timeout, cfg, is_main=False):
self.core = core
self.cfg = cfg
self.address_wizard = AddressWizard()
self.sequence_no = sequence
self.stepper_wrapper = StepperWrapper(core, cfg)
self.phout_file = phout
self.answ_log = answ
self.answ_log_level = answ_level
self.timeout = timeout
self.is_main = is_main
# per benchmark
self.instances = self.get_option('instances')
self.ipv6 = None
self.ssl = None
self.address = None
self.port = None
self.tank_type = None
self.stpd = None
self.gatling = None
self.phantom_http_line = None
self.phantom_http_field_num = None
self.phantom_http_field = None
self.phantom_http_entity = None
self.resolved_ip = None
self.method_prefix = None
self.source_log_prefix = None
self.method_options = None
self.client_cipher_suites = None
self.client_certificate = None
self.client_key = None
    def get_option(self, option, default=None):
        """ get option wrapper """
        return self.cfg.get(option, default)
@staticmethod
def get_available_options():
opts = [
"ssl", "tank_type", 'gatling_ip', "method_prefix",
"source_log_prefix"
]
opts += [
"phantom_http_line", "phantom_http_field_num", "phantom_http_field",
"phantom_http_entity"
]
opts += ['address', "port", StreamConfig.OPTION_INSTANCES_LIMIT]
opts += StepperWrapper.get_available_options()
opts += ["connection_test"]
return opts
def read_config(self):
""" reads config """
# multi-options
self.ssl = self.get_option("ssl")
self.tank_type = self.get_option("tank_type")
# TODO: refactor. Maybe we should decide how to interact with
# StepperWrapper here.
# self.instances = self.get_option('instances')
self.gatling = ' '.join(self.get_option('gatling_ip').split("\n"))
self.method_prefix = self.get_option("method_prefix")
self.method_options = self.get_option("method_options")
self.source_log_prefix = self.get_option("source_log_prefix")
self.phantom_http_line = self.get_option("phantom_http_line")
self.phantom_http_field_num = self.get_option("phantom_http_field_num")
self.phantom_http_field = self.get_option("phantom_http_field")
self.phantom_http_entity = self.get_option("phantom_http_entity")
self.address = self.get_option('address')
do_test_connect = self.get_option("connection_test")
explicit_port = self.get_option('port', '')
self.ipv6, self.resolved_ip, self.port, self.address = self.address_wizard.resolve(
self.address, do_test_connect, explicit_port)
logger.info(
"Resolved %s into %s:%s", self.address, self.resolved_ip, self.port)
self.client_cipher_suites = self.get_option("client_cipher_suites", "")
self.client_certificate = self.get_option("client_certificate", "")
self.client_key = self.get_option("client_key", "")
self.stepper_wrapper.read_config()
def compose_config(self):
""" compose benchmark block """
# step file
self.stepper_wrapper.prepare_stepper()
self.stpd = self.stepper_wrapper.stpd
if self.stepper_wrapper.instances:
self.instances = self.stepper_wrapper.instances
if not self.stpd:
raise RuntimeError("Cannot proceed with no STPD file")
kwargs = {}
kwargs['sequence_no'] = self.sequence_no
if self.ssl:
_auth_section = ''
_ciphers = ''
ssl_template = "transport_t ssl_transport = transport_ssl_t {\n" \
" timeout = 1s\n" \
" %s\n" \
" %s}\n" \
" transport = ssl_transport"
if self.client_certificate or self.client_key:
_auth_section = 'auth_t def_auth = auth_t { key = "%s" cert = "%s"} auth = def_auth' \
% (self.client_key, self.client_certificate)
if self.client_cipher_suites:
_ciphers = 'ciphers = "%s"' % self.client_cipher_suites
kwargs['ssl_transport'] = ssl_template % (_auth_section, _ciphers)
else:
kwargs['ssl_transport'] = ""
        kwargs['method_stream'] = self.method_prefix + (
            "_ipv6_t" if self.ipv6 else "_ipv4_t")
kwargs['phout'] = self.phout_file
kwargs['answ_log'] = self.answ_log
kwargs['answ_log_level'] = self.answ_log_level
kwargs['comment_answ'] = "# " if self.answ_log_level == 'none' else ''
kwargs['stpd'] = self.stpd
kwargs['source_log_prefix'] = self.source_log_prefix
kwargs['method_options'] = self.method_options
if self.tank_type:
            kwargs['proto'] = (
                "proto=http_proto%s" % self.sequence_no
                if self.tank_type == 'http' else "proto=none_proto")
kwargs['comment_proto'] = ""
else:
kwargs['proto'] = ""
kwargs['comment_proto'] = "#"
if self.gatling:
kwargs['bind'] = 'bind={ ' + self.gatling + ' }'
else:
kwargs['bind'] = ''
kwargs['ip'] = self.resolved_ip
kwargs['port'] = self.port
kwargs['timeout'] = self.timeout
kwargs['instances'] = self.instances
tune = ''
if self.phantom_http_entity:
tune += "entity = " + self.phantom_http_entity + "\n"
if self.phantom_http_field:
tune += "field = " + self.phantom_http_field + "\n"
if self.phantom_http_field_num:
tune += "field_num = {}\n".format(self.phantom_http_field_num)
if self.phantom_http_line:
tune += "line = " + self.phantom_http_line + "\n"
if tune:
kwargs['reply_limits'] = 'reply_limits = {\n' + tune + "}"
else:
kwargs['reply_limits'] = ''
if self.is_main:
fname = 'phantom_benchmark_main.tpl'
else:
fname = 'phantom_benchmark_additional.tpl'
template_str = resource_string(
__name__, "config/" + fname).decode('utf8')
tpl = string.Template(template_str)
config = tpl.substitute(kwargs)
return config
# ========================================================================
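# A hedged sketch of the cfg mapping that PhantomConfig/StreamConfig read
# above (key names come from read_config() and get_available_options();
# values are illustrative only):
#
#   cfg = {
#       'phout_file': '',       # empty -> a temp phout_*.log is created
#       'threads': '',          # empty -> cpu_count() / 2 + 1
#       'writelog': '0',        # '0'/'false' -> 'none', '1'/'true' -> 'all'
#       'timeout': '11s',       # parsed via parse_duration()
#       'multi': [],            # extra benchmark sections, merged over cfg
#       'address': 'example.org:443',
#       'ssl': True,
#       ...
#   }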
|
import keras
from matchzoo.contrib.layers import MatchingTensorLayer
from matchzoo.contrib.layers import SpatialGRU
from matchzoo.engine import hyper_spaces
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
class MatchSRNN(BaseModel):
"""
Match-SRNN Model.
Examples:
>>> model = MatchSRNN()
>>> model.params['channels'] = 4
>>> model.params['units'] = 10
>>> model.params['dropout_rate'] = 0.0
>>> model.params['direction'] = 'lt'
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_embedding=True)
params.add(Param(name='channels', value=4,
desc="Number of word interaction tensor channels"))
params.add(Param(name='units', value=10,
desc="Number of SpatialGRU units"))
params.add(Param(name='direction', value='lt',
desc="Direction of SpatialGRU scanning"))
params.add(Param(
name='dropout_rate', value=0.0,
hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
q=0.01),
desc="The dropout rate."
))
return params
def build(self):
"""
Build model structure.
Match-SRNN: Modeling the Recursive Matching Structure
with Spatial RNN
"""
# Scalar dimensions referenced here:
# B = batch size (number of sequences)
# D = embedding size
# L = `input_left` sequence length
# R = `input_right` sequence length
# C = number of channels
# Left input and right input.
# query = [B, L]
# doc = [B, R]
query, doc = self._make_inputs()
# Process left and right input.
# embed_query = [B, L, D]
# embed_doc = [B, R, D]
embedding = self._make_embedding_layer()
embed_query = embedding(query)
embed_doc = embedding(doc)
# Get matching tensor
# matching_tensor = [B, C, L, R]
matching_tensor_layer = MatchingTensorLayer(
channels=self._params['channels'])
matching_tensor = matching_tensor_layer([embed_query, embed_doc])
# Apply spatial GRU to the word level interaction tensor
# h_ij = [B, U]
spatial_gru = SpatialGRU(
units=self._params['units'],
direction=self._params['direction'])
h_ij = spatial_gru(matching_tensor)
# Apply Dropout
x = keras.layers.Dropout(
rate=self._params['dropout_rate'])(h_ij)
# Make output layer
x_out = self._make_output_layer()(x)
self._backend = keras.Model(inputs=[query, doc], outputs=x_out)
|
from ... import event
from .._widget import Widget, create_element
class ProgressBar(Widget):
""" A widget to show progress.
The ``node`` of this widget is a
`<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_
containing a few HTML elements for rendering.
"""
DEFAULT_MIN_SIZE = 40, 16
CSS = """
.flx-ProgressBar {
border: 1px solid #ddd;
border-radius: 6px;
background: #eee;
}
.flx-ProgressBar > .progress-bar {
/* Use flexbox to vertically align label text */
display: -webkit-flex;
display: -ms-flexbox;
display: -ms-flex;
display: -moz-flex;
display: flex;
-webkit-flex-flow: column;
-ms-flex-flow: column;
-moz-flex-flow: column;
flex-flow: column;
-webkit-justify-content: center;
-ms-justify-content: center;
-moz-justify-content: center;
justify-content: center;
white-space: nowrap;
align-self: stretch;
position: absolute; /* need this on Chrome when in a VBox */
background: #8be;
text-align: center;
/*transition: width 0.2s ease; behaves silly on Chrome */
}
"""
value = event.FloatProp(0, settable=True, doc="""
The progress value.
""")
min = event.FloatProp(0, settable=True, doc="""
The minimum progress value.
""")
max = event.FloatProp(1, settable=True, doc="""
The maximum progress value.
""")
text = event.StringProp('', settable=True, doc="""
        The label to display on the progress bar. Occurrences of
"{percent}" are replaced with the current percentage, and
"{value}" with the current value.
""")
@event.action
def set_value(self, value):
value = max(self.min, value)
value = min(self.max, value)
self._mutate_value(value)
@event.reaction('min', 'max')
def __keep_value_constrained(self, *events):
self.set_value(self.value)
def _render_dom(self):
global Math
value = self.value
mi, ma = self.min, self.max
perc = 100 * (value - mi) / (ma - mi)
label = self.text
label = label.replace('{value}', str(value))
label = label.replace('{percent}', Math.round(perc) + '%')
attr = {'style__width': perc+'%',
'style__height': '100%',
'className': 'progress-bar',
}
return [create_element('div', attr, label)]
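# Usage sketch (hedged; assumes the usual flexx pattern of composing widgets
# inside init() and launching via flexx.app):
#
#   from flexx import app, ui
#
#   class Example(ui.Widget):
#       def init(self):
#           self.bar = ui.ProgressBar(min=0, max=100, value=25,
#                                     text='{value}/100 ({percent})')
#
#   app.launch(Example)   # renders "25/100 (25%)" on the bar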
|
import json
import unittest
import pytest
import requests_mock
import voluptuous as vol
from homeassistant.components import vultr as base_vultr
from homeassistant.components.vultr import (
ATTR_ALLOWED_BANDWIDTH,
ATTR_AUTO_BACKUPS,
ATTR_COST_PER_MONTH,
ATTR_CREATED_AT,
ATTR_IPV4_ADDRESS,
ATTR_SUBSCRIPTION_ID,
CONF_SUBSCRIPTION,
binary_sensor as vultr,
)
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from tests.async_mock import patch
from tests.common import get_test_home_assistant, load_fixture
from tests.components.vultr.test_init import VALID_CONFIG
class TestVultrBinarySensorSetup(unittest.TestCase):
"""Test the Vultr binary sensor platform."""
DEVICES = []
def add_entities(self, devices, action):
"""Mock add devices."""
for device in devices:
self.DEVICES.append(device)
def setUp(self):
"""Init values for this testcase class."""
self.hass = get_test_home_assistant()
self.configs = [
{CONF_SUBSCRIPTION: "576965", CONF_NAME: "A Server"},
{CONF_SUBSCRIPTION: "123456", CONF_NAME: "Failed Server"},
{CONF_SUBSCRIPTION: "555555", CONF_NAME: vultr.DEFAULT_NAME},
]
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop our started services."""
self.hass.stop()
@requests_mock.Mocker()
def test_binary_sensor(self, mock):
"""Test successful instance."""
mock.get(
"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567",
text=load_fixture("vultr_account_info.json"),
)
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
):
# Setup hub
base_vultr.setup(self.hass, VALID_CONFIG)
# Setup each of our test configs
for config in self.configs:
vultr.setup_platform(self.hass, config, self.add_entities, None)
assert len(self.DEVICES) == 3
for device in self.DEVICES:
# Test pre data retrieval
if device.subscription == "555555":
assert "Vultr {}" == device.name
device.update()
device_attrs = device.device_state_attributes
if device.subscription == "555555":
assert "Vultr Another Server" == device.name
if device.name == "A Server":
assert device.is_on is True
assert "power" == device.device_class
assert "on" == device.state
assert "mdi:server" == device.icon
assert "1000" == device_attrs[ATTR_ALLOWED_BANDWIDTH]
assert "yes" == device_attrs[ATTR_AUTO_BACKUPS]
assert "123.123.123.123" == device_attrs[ATTR_IPV4_ADDRESS]
assert "10.05" == device_attrs[ATTR_COST_PER_MONTH]
assert "2013-12-19 14:45:41" == device_attrs[ATTR_CREATED_AT]
assert "576965" == device_attrs[ATTR_SUBSCRIPTION_ID]
elif device.name == "Failed Server":
assert device.is_on is False
assert "off" == device.state
assert "mdi:server-off" == device.icon
assert "1000" == device_attrs[ATTR_ALLOWED_BANDWIDTH]
assert "no" == device_attrs[ATTR_AUTO_BACKUPS]
assert "192.168.100.50" == device_attrs[ATTR_IPV4_ADDRESS]
assert "73.25" == device_attrs[ATTR_COST_PER_MONTH]
assert "2014-10-13 14:45:41" == device_attrs[ATTR_CREATED_AT]
assert "123456" == device_attrs[ATTR_SUBSCRIPTION_ID]
def test_invalid_sensor_config(self):
"""Test config type failures."""
with pytest.raises(vol.Invalid): # No subs
vultr.PLATFORM_SCHEMA({CONF_PLATFORM: base_vultr.DOMAIN})
@requests_mock.Mocker()
def test_invalid_sensors(self, mock):
"""Test the VultrBinarySensor fails."""
mock.get(
"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567",
text=load_fixture("vultr_account_info.json"),
)
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
):
# Setup hub
base_vultr.setup(self.hass, VALID_CONFIG)
bad_conf = {} # No subscription
no_subs_setup = vultr.setup_platform(
self.hass, bad_conf, self.add_entities, None
)
assert not no_subs_setup
bad_conf = {
CONF_NAME: "Missing Server",
CONF_SUBSCRIPTION: "555555",
} # Sub not associated with API key (not in server_list)
wrong_subs_setup = vultr.setup_platform(
self.hass, bad_conf, self.add_entities, None
)
assert not wrong_subs_setup
|
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.utils import timezone
from weblate.accounts.views import mail_admins_contact
from weblate.billing.forms import HostingForm
from weblate.billing.models import Billing, Invoice, Plan
from weblate.utils.views import show_form_errors
HOSTING_TEMPLATE = """
%(name)s <%(email)s> wants to host %(project)s
Project: %(project)s
Website: %(url)s
Message:
%(message)s
Please review at https://hosted.weblate.org%(billing_url)s
"""
@login_required
def download_invoice(request, pk):
"""Download invoice PDF."""
invoice = get_object_or_404(Invoice, pk=pk)
if not invoice.ref:
raise Http404("No reference!")
if not request.user.has_perm("billing.view", invoice.billing):
raise PermissionDenied()
if not invoice.filename_valid:
raise Http404(f"File {invoice.filename} does not exist!")
with open(invoice.full_filename, "rb") as handle:
data = handle.read()
response = HttpResponse(data, content_type="application/pdf")
response["Content-Disposition"] = f"attachment; filename={invoice.filename}"
response["Content-Length"] = len(data)
return response
def handle_post(request, billing):
if "extend" in request.POST and request.user.is_superuser:
billing.state = Billing.STATE_TRIAL
billing.expiry = timezone.now() + timedelta(days=14)
billing.removal = None
billing.save(update_fields=["expiry", "removal", "state"])
elif "recurring" in request.POST:
if "recurring" in billing.payment:
del billing.payment["recurring"]
billing.save()
elif "terminate" in request.POST:
billing.state = Billing.STATE_TERMINATED
billing.save()
elif billing.valid_libre:
if "approve" in request.POST and request.user.is_superuser:
billing.state = Billing.STATE_ACTIVE
billing.plan = Plan.objects.get(slug="libre")
billing.removal = None
billing.save(update_fields=["state", "plan", "removal"])
elif "request" in request.POST:
form = HostingForm(request.POST)
if form.is_valid():
project = billing.projects.get()
billing.payment["libre_request"] = True
billing.save(update_fields=["payment"])
mail_admins_contact(
request,
"Hosting request for %(billing)s",
HOSTING_TEMPLATE,
{
"billing": billing,
"name": request.user.full_name,
"email": request.user.email,
"project": project,
"url": project.web,
"message": form.cleaned_data["message"],
"billing_url": billing.get_absolute_url(),
},
request.user.email,
settings.ADMINS_HOSTING,
)
else:
show_form_errors(request, form)
@login_required
def overview(request):
billings = Billing.objects.for_user(request.user).prefetch_related(
"plan", "projects", "invoice_set"
)
if not request.user.is_superuser and len(billings) == 1:
return redirect(billings[0])
return render(
request,
"billing/overview.html",
{
"billings": billings,
"active_billing_count": billings.filter(
state__in=(Billing.STATE_ACTIVE, Billing.STATE_TRIAL)
).count(),
},
)
@login_required
def detail(request, pk):
billing = get_object_or_404(Billing, pk=pk)
if not request.user.has_perm("billing.view", billing):
raise PermissionDenied()
if request.method == "POST":
handle_post(request, billing)
return redirect(billing)
return render(
request,
"billing/detail.html",
{"billing": billing, "hosting_form": HostingForm()},
)
|
from __future__ import print_function
import sys
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import rospkg
import rosunit.junitxml as junitxml
def create_summary(result, packages):
buff = StringIO()
buff.write('-' * 80 + '\n')
buff.write('\033[1m[AGGREGATED TEST RESULTS SUMMARY]\033[0m\n\n')
errors_failures = [r for r in result.test_case_results if r.errors or r.failures]
if errors_failures:
buff.write('ERRORS/FAILURES:\n')
for tc_result in errors_failures:
buff.write(tc_result.description)
buff.write('PACKAGES: \n%s\n\n' % '\n'.join([' * %s' % p for p in packages]))
buff.write('\nSUMMARY\n')
if (result.num_errors + result.num_failures) == 0:
buff.write('\033[32m * RESULT: SUCCESS\033[0m\n')
else:
buff.write('\033[1;31m * RESULT: FAIL\033[0m\n')
# TODO: still some issues with the numbers adding up if tests fail to launch
# number of errors from the inner tests, plus add in count for tests
# that didn't run properly ('result' object).
buff.write(' * TESTS: %s\n' % result.num_tests)
if result.num_errors:
buff.write('\033[1;31m * ERRORS: %s\033[0m\n' % result.num_errors)
else:
buff.write(' * ERRORS: 0\n')
if result.num_failures:
buff.write('\033[1;31m * FAILURES: %s\033[0m\n' % result.num_failures)
else:
buff.write(' * FAILURES: 0\n')
return buff.getvalue()
def main():
from optparse import OptionParser
parser = OptionParser(usage='usage: summarize_results.py [options] package')
parser.add_option('--nodeps',
dest='no_deps', default=False,
action='store_true',
help="don't compute test results for the specified package only")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error('Only one package may be specified')
package = args[0]
r = rospkg.RosPack()
if options.no_deps:
packages = [package]
else:
packages = [package] + r.get_depends_on(package, implicit=True)
packages = [p for p in packages if p]
result = junitxml.read_all(packages)
print(create_summary(result, packages))
if result.num_errors or result.num_failures:
sys.exit(1)
if __name__ == '__main__':
main()
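# Typical invocation (hedged sketch; matches the usage string above):
#
#   python summarize_results.py --nodeps my_package
#
# Without --nodeps the summary also aggregates results from every package
# that depends on `my_package`.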
|
from collections import Counter
from scattertext.emojis.EmojiExtractor import extract_emoji
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class FeatsFromSpacyDocOnlyEmoji(FeatsFromSpacyDoc):
'''
Strips away everything but emoji tokens from spaCy
'''
def get_feats(self, doc):
'''
Parameters
----------
        doc : spacy.tokens.Doc
Returns
-------
Counter emoji -> count
'''
return Counter(extract_emoji(str(doc)))
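# Usage sketch (hedged; assumes a spaCy model such as en_core_web_sm is
# installed):
#
#   import spacy
#   nlp = spacy.load('en_core_web_sm')
#   doc = nlp('I love this 😀 😀 🎉')
#   FeatsFromSpacyDocOnlyEmoji().get_feats(doc)
#   # -> Counter({'😀': 2, '🎉': 1})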
|
import pytest
from homeassistant.components.light import Profiles
from tests.async_mock import AsyncMock, patch
@pytest.fixture(autouse=True)
def mock_light_profiles():
"""Mock loading of profiles."""
data = {}
def mock_profiles_class(hass):
profiles = Profiles(hass)
profiles.data = data
profiles.async_initialize = AsyncMock()
return profiles
with patch(
"homeassistant.components.light.Profiles",
SCHEMA=Profiles.SCHEMA,
side_effect=mock_profiles_class,
):
yield data
|
from collections import OrderedDict
import logging
from typing import Any, Dict, Optional
from urllib.parse import urlparse
from huawei_lte_api.AuthorizedConnection import AuthorizedConnection
from huawei_lte_api.Client import Client
from huawei_lte_api.Connection import Connection
from huawei_lte_api.exceptions import (
LoginErrorPasswordWrongException,
LoginErrorUsernamePasswordOverrunException,
LoginErrorUsernamePasswordWrongException,
LoginErrorUsernameWrongException,
ResponseErrorException,
)
from requests.exceptions import Timeout
from url_normalize import url_normalize
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_RECIPIENT,
CONF_URL,
CONF_USERNAME,
)
from homeassistant.core import callback
from .const import CONNECTION_TIMEOUT, DEFAULT_DEVICE_NAME, DEFAULT_NOTIFY_SERVICE_NAME
# see https://github.com/PyCQA/pylint/issues/3202 about the DOMAIN's pylint issue
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
class ConfigFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle Huawei LTE config flow."""
VERSION = 2
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(
config_entry: config_entries.ConfigEntry,
) -> "OptionsFlowHandler":
"""Get options flow."""
return OptionsFlowHandler(config_entry)
async def _async_show_user_form(
self,
user_input: Optional[Dict[str, Any]] = None,
errors: Optional[Dict[str, str]] = None,
) -> Dict[str, Any]:
if user_input is None:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
OrderedDict(
(
(
vol.Required(
CONF_URL,
default=user_input.get(
CONF_URL,
# https://github.com/PyCQA/pylint/issues/3167
self.context.get( # pylint: disable=no-member
CONF_URL, ""
),
),
),
str,
),
(
vol.Optional(
CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")
),
str,
),
(
vol.Optional(
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
),
str,
),
)
)
),
errors=errors or {},
)
async def async_step_import(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle import initiated config flow."""
return await self.async_step_user(user_input)
def _already_configured(self, user_input: Dict[str, Any]) -> bool:
"""See if we already have a router matching user input configured."""
existing_urls = {
url_normalize(entry.data[CONF_URL], default_scheme="http")
for entry in self._async_current_entries()
}
return user_input[CONF_URL] in existing_urls
async def async_step_user(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle user initiated config flow."""
if user_input is None:
return await self._async_show_user_form()
errors = {}
# Normalize URL
user_input[CONF_URL] = url_normalize(
user_input[CONF_URL], default_scheme="http"
)
if "://" not in user_input[CONF_URL]:
errors[CONF_URL] = "invalid_url"
return await self._async_show_user_form(
user_input=user_input, errors=errors
)
if self._already_configured(user_input):
return self.async_abort(reason="already_configured")
conn: Optional[Connection] = None
def logout() -> None:
if isinstance(conn, AuthorizedConnection):
try:
conn.user.logout()
except Exception: # pylint: disable=broad-except
_LOGGER.debug("Could not logout", exc_info=True)
def try_connect(user_input: Dict[str, Any]) -> Connection:
"""Try connecting with given credentials."""
username = user_input.get(CONF_USERNAME)
password = user_input.get(CONF_PASSWORD)
conn: Connection
if username or password:
conn = AuthorizedConnection(
user_input[CONF_URL],
username=username,
password=password,
timeout=CONNECTION_TIMEOUT,
)
else:
try:
conn = AuthorizedConnection(
user_input[CONF_URL],
username="",
password="",
timeout=CONNECTION_TIMEOUT,
)
user_input[CONF_USERNAME] = ""
user_input[CONF_PASSWORD] = ""
except ResponseErrorException:
_LOGGER.debug(
"Could not login with empty credentials, proceeding unauthenticated",
exc_info=True,
)
conn = Connection(user_input[CONF_URL], timeout=CONNECTION_TIMEOUT)
del user_input[CONF_USERNAME]
del user_input[CONF_PASSWORD]
return conn
def get_router_title(conn: Connection) -> str:
"""Get title for router."""
title = None
client = Client(conn)
try:
info = client.device.basic_information()
except Exception: # pylint: disable=broad-except
_LOGGER.debug("Could not get device.basic_information", exc_info=True)
else:
title = info.get("devicename")
if not title:
try:
info = client.device.information()
except Exception: # pylint: disable=broad-except
_LOGGER.debug("Could not get device.information", exc_info=True)
else:
title = info.get("DeviceName")
return title or DEFAULT_DEVICE_NAME
assert self.hass is not None
try:
conn = await self.hass.async_add_executor_job(try_connect, user_input)
except LoginErrorUsernameWrongException:
errors[CONF_USERNAME] = "incorrect_username"
except LoginErrorPasswordWrongException:
errors[CONF_PASSWORD] = "incorrect_password"
except LoginErrorUsernamePasswordWrongException:
errors[CONF_USERNAME] = "invalid_auth"
except LoginErrorUsernamePasswordOverrunException:
errors["base"] = "login_attempts_exceeded"
except ResponseErrorException:
_LOGGER.warning("Response error", exc_info=True)
errors["base"] = "response_error"
except Timeout:
_LOGGER.warning("Connection timeout", exc_info=True)
errors[CONF_URL] = "connection_timeout"
except Exception: # pylint: disable=broad-except
_LOGGER.warning("Unknown error connecting to device", exc_info=True)
errors[CONF_URL] = "unknown"
if errors:
await self.hass.async_add_executor_job(logout)
return await self._async_show_user_form(
user_input=user_input, errors=errors
)
# pylint: disable=no-member
title = self.context.get("title_placeholders", {}).get(
CONF_NAME
) or await self.hass.async_add_executor_job(get_router_title, conn)
await self.hass.async_add_executor_job(logout)
return self.async_create_entry(title=title, data=user_input)
async def async_step_ssdp( # type: ignore # mypy says signature incompatible with supertype, but it's the same?
self, discovery_info: Dict[str, Any]
) -> Dict[str, Any]:
"""Handle SSDP initiated config flow."""
await self.async_set_unique_id(discovery_info[ssdp.ATTR_UPNP_UDN])
self._abort_if_unique_id_configured()
        # Attempt to distinguish this from other, non-LTE Huawei router
        # devices; at least some of the devices we are interested in have a
        # "Mobile Wi-Fi" friendlyName.
if "mobile" not in discovery_info.get(ssdp.ATTR_UPNP_FRIENDLY_NAME, "").lower():
return self.async_abort(reason="not_huawei_lte")
# https://github.com/PyCQA/pylint/issues/3167
url = self.context[CONF_URL] = url_normalize( # pylint: disable=no-member
discovery_info.get(
ssdp.ATTR_UPNP_PRESENTATION_URL,
f"http://{urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION]).hostname}/",
)
)
if any(
url == flow["context"].get(CONF_URL) for flow in self._async_in_progress()
):
return self.async_abort(reason="already_in_progress")
user_input = {CONF_URL: url}
if self._already_configured(user_input):
return self.async_abort(reason="already_configured")
# pylint: disable=no-member
self.context["title_placeholders"] = {
CONF_NAME: discovery_info.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)
}
return await self._async_show_user_form(user_input)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Huawei LTE options flow."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle options flow."""
# Recipients are persisted as a list, but handled as comma separated string in UI
if user_input is not None:
# Preserve existing options, for example *_from_yaml markers
data = {**self.config_entry.options, **user_input}
if not isinstance(data[CONF_RECIPIENT], list):
data[CONF_RECIPIENT] = [
x.strip() for x in data[CONF_RECIPIENT].split(",")
]
return self.async_create_entry(title="", data=data)
data_schema = vol.Schema(
{
vol.Optional(
CONF_NAME,
default=self.config_entry.options.get(
CONF_NAME, DEFAULT_NOTIFY_SERVICE_NAME
),
): str,
vol.Optional(
CONF_RECIPIENT,
default=", ".join(
self.config_entry.options.get(CONF_RECIPIENT, [])
),
): str,
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
|
from datetime import timedelta
import logging
from typing import List
import boto3
import requests
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_TTL, CONF_ZONE, HTTP_OK
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
CONF_ACCESS_KEY_ID = "aws_access_key_id"
CONF_SECRET_ACCESS_KEY = "aws_secret_access_key"
CONF_RECORDS = "records"
DOMAIN = "route53"
INTERVAL = timedelta(minutes=60)
DEFAULT_TTL = 300
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_ACCESS_KEY_ID): cv.string,
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_RECORDS): vol.All(cv.ensure_list, [cv.string]),
vol.Required(CONF_SECRET_ACCESS_KEY): cv.string,
vol.Required(CONF_ZONE): cv.string,
vol.Optional(CONF_TTL, default=DEFAULT_TTL): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Route53 component."""
domain = config[DOMAIN][CONF_DOMAIN]
records = config[DOMAIN][CONF_RECORDS]
zone = config[DOMAIN][CONF_ZONE]
aws_access_key_id = config[DOMAIN][CONF_ACCESS_KEY_ID]
aws_secret_access_key = config[DOMAIN][CONF_SECRET_ACCESS_KEY]
ttl = config[DOMAIN][CONF_TTL]
def update_records_interval(now):
"""Set up recurring update."""
_update_route53(
aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
)
def update_records_service(now):
"""Set up service for manual trigger."""
_update_route53(
aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
)
track_time_interval(hass, update_records_interval, INTERVAL)
hass.services.register(DOMAIN, "update_records", update_records_service)
return True
def _get_fqdn(record, domain):
if record == ".":
return domain
return f"{record}.{domain}"
def _update_route53(
aws_access_key_id: str,
aws_secret_access_key: str,
zone: str,
domain: str,
records: List[str],
ttl: int,
):
_LOGGER.debug("Starting update for zone %s", zone)
client = boto3.client(
DOMAIN,
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
)
# Get the IP Address and build an array of changes
try:
ipaddress = requests.get("https://api.ipify.org/", timeout=5).text
except requests.RequestException:
_LOGGER.warning("Unable to reach the ipify service")
return
changes = []
for record in records:
_LOGGER.debug("Processing record: %s", record)
changes.append(
{
"Action": "UPSERT",
"ResourceRecordSet": {
"Name": _get_fqdn(record, domain),
"Type": "A",
"TTL": ttl,
"ResourceRecords": [{"Value": ipaddress}],
},
}
)
_LOGGER.debug("Submitting the following changes to Route53")
_LOGGER.debug(changes)
response = client.change_resource_record_sets(
HostedZoneId=zone, ChangeBatch={"Changes": changes}
)
_LOGGER.debug("Response is %s", response)
if response["ResponseMetadata"]["HTTPStatusCode"] != HTTP_OK:
_LOGGER.warning(response)
|
from roomba import Roomba
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import callback
from . import CannotConnect, async_connect_or_timeout, async_disconnect_or_timeout
from .const import (
CONF_BLID,
CONF_CONTINUOUS,
CONF_DELAY,
CONF_NAME,
DEFAULT_CONTINUOUS,
DEFAULT_DELAY,
ROOMBA_SESSION,
)
from .const import DOMAIN # pylint:disable=unused-import
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_BLID): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_CONTINUOUS, default=DEFAULT_CONTINUOUS): bool,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): int,
}
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
roomba = Roomba(
address=data[CONF_HOST],
blid=data[CONF_BLID],
password=data[CONF_PASSWORD],
continuous=data[CONF_CONTINUOUS],
delay=data[CONF_DELAY],
)
info = await async_connect_or_timeout(hass, roomba)
return {
ROOMBA_SESSION: info[ROOMBA_SESSION],
CONF_NAME: info[CONF_NAME],
CONF_HOST: data[CONF_HOST],
}
class RoombaConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Roomba configuration flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_import(self, import_info):
"""Set the config entry up from yaml."""
return await self.async_step_user(import_info)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_BLID])
self._abort_if_unique_id_configured()
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors = {"base": "cannot_connect"}
if "base" not in errors:
await async_disconnect_or_timeout(self.hass, info[ROOMBA_SESSION])
return self.async_create_entry(title=info[CONF_NAME], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_CONTINUOUS,
default=self.config_entry.options.get(
CONF_CONTINUOUS, DEFAULT_CONTINUOUS
),
): bool,
vol.Optional(
CONF_DELAY,
default=self.config_entry.options.get(
CONF_DELAY, DEFAULT_DELAY
),
): int,
}
),
)
|
import gettext
from io import StringIO
from itertools import chain
from unittest import SkipTest
from django.conf import settings
from django.core.management import call_command
from django.test import TestCase
from django.urls import reverse
from django.utils.translation import activate
from weblate_language_data.languages import LANGUAGES
from weblate_language_data.plurals import EXTRAPLURALS
from weblate.lang import data
from weblate.lang.models import Language, Plural, get_plural_type
from weblate.trans.tests.test_models import BaseTestCase
from weblate.trans.tests.test_views import FixtureTestCase
TEST_LANGUAGES = (
("cs_CZ", "cs", "ltr", "(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2", "Czech", False),
("cs (2)", "cs", "ltr", "(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2", "Czech", False),
("cscz", "cs", "ltr", "(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2", "Czech", False),
("czech", "cs", "ltr", "(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2", "Czech", False),
(
"cs_CZ@hantec",
"cs_CZ@hantec",
"ltr",
"(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2",
"Czech (cs_CZ@hantec)",
True,
),
("de-DE", "de", "ltr", "n != 1", "German", False),
("de_AT", "de_AT", "ltr", "n != 1", "German (Austria)", False),
("de_CZ", "de_CZ", "ltr", "n != 1", "German (de_CZ)", True),
("portuguese_portugal", "pt_PT", "ltr", "n > 1", "Portuguese (Portugal)", False),
("pt-rBR", "pt_BR", "ltr", "n > 1", "Portuguese (Brazil)", False),
("ptbr", "pt_BR", "ltr", "n > 1", "Portuguese (Brazil)", False),
(
"sr+latn",
"sr_Latn",
"ltr",
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && "
"(n%100<10 || n%100>=20) ? 1 : 2",
"Serbian (latin)",
False,
),
(
"sr_RS@latin",
"sr_Latn",
"ltr",
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && "
"(n%100<10 || n%100>=20) ? 1 : 2",
"Serbian (latin)",
False,
),
(
"sr-RS@latin",
"sr_Latn",
"ltr",
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && "
"(n%100<10 || n%100>=20) ? 1 : 2",
"Serbian (latin)",
False,
),
(
"sr_RS_latin",
"sr_Latn",
"ltr",
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && "
"(n%100<10 || n%100>=20) ? 1 : 2",
"Serbian (latin)",
False,
),
(
"en_CA_MyVariant",
"en_CA@myvariant",
"ltr",
"n != 1",
"English (Canada) (en_CA@myvariant)",
True,
),
("en_CZ", "en_CZ", "ltr", "n != 1", "English (en_CZ)", True),
("zh_CN", "zh_Hans", "ltr", "0", "Chinese (Simplified)", False),
("zh-CN", "zh_Hans", "ltr", "0", "Chinese (Simplified)", False),
("zh_HANT", "zh_Hant", "ltr", "0", "Chinese (Traditional)", False),
("zh-HANT", "zh_Hant", "ltr", "0", "Chinese (Traditional)", False),
("zh-CN@test", "zh_CN@test", "ltr", "0", "Chinese (Simplified) (zh_CN@test)", True),
("zh-rCN", "zh_Hans", "ltr", "0", "Chinese (Simplified)", False),
("zh_rCN", "zh_Hans", "ltr", "0", "Chinese (Simplified)", False),
("zh_HK", "zh_Hant_HK", "ltr", "0", "Chinese (Traditional, Hong Kong)", False),
(
"zh_Hant-rHK",
"zh_Hant_HK",
"ltr",
"0",
"Chinese (Traditional, Hong Kong)",
False,
),
(
"ar",
"ar",
"rtl",
"n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 "
": n%100>=11 ? 4 : 5",
"Arabic",
False,
),
(
"ar_AA",
"ar",
"rtl",
"n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 "
": n%100>=11 ? 4 : 5",
"Arabic",
False,
),
(
"ar_XX",
"ar_XX",
"rtl",
"n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 "
": n%100>=11 ? 4 : 5",
"Arabic (ar_XX)",
True,
),
("xx", "xx", "ltr", "n != 1", "xx (generated) (xx)", True),
("nb_NO", "nb_NO", "ltr", "n != 1", "Norwegian Bokmål", False),
("nb-NO", "nb_NO", "ltr", "n != 1", "Norwegian Bokmål", False),
("nb", "nb_NO", "ltr", "n != 1", "Norwegian Bokmål", False),
("nono", "nb_NO", "ltr", "n != 1", "Norwegian Bokmål", False),
(
"b+zh+Hant+HK",
"zh_Hant_HK",
"ltr",
"0",
"Chinese (Traditional, Hong Kong)",
False,
),
(
"plPL",
"pl",
"ltr",
"n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2",
"Polish",
False,
),
)
class TestSequenceMeta(type):
def __new__(mcs, name, bases, dict): # noqa: N804
def gen_test(original, expected, direction, plural, name, create):
def test(self):
self.run_create(original, expected, direction, plural, name, create)
return test
for params in TEST_LANGUAGES:
test_name = "test_create_%s" % params[0].replace("@", "___").replace(
"+", "_"
).replace("-", "__")
if test_name in dict:
raise ValueError(f"Duplicate test: {params[0]}, mapped to {test_name}")
dict[test_name] = gen_test(*params)
return type.__new__(mcs, name, bases, dict)
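# The metaclass above expands each TEST_LANGUAGES entry into its own test
# method, e.g. "sr_RS@latin" becomes test_create_sr_RS___latin and
# "b+zh+Hant+HK" becomes test_create_b_zh_Hant_HK.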
class LanguagesTest(BaseTestCase, metaclass=TestSequenceMeta):
def setUp(self):
# Ensure we're using English
activate("en")
def run_create(self, original, expected, direction, plural, name, create):
"""Test that auto create correctly handles languages."""
# Lookup language
lang = Language.objects.auto_get_or_create(original, create=False)
self.assertEqual(
create,
not bool(lang.pk),
f"Failed to assert creation for {original}: {create}",
)
# Create language
lang = Language.objects.auto_get_or_create(original)
# Check language code
self.assertEqual(
lang.code, expected, f"Invalid code for {original}: {lang.code}"
)
# Check direction
self.assertEqual(lang.direction, direction, f"Invalid direction for {original}")
# Check plurals
plural_obj = lang.plural_set.get(source=Plural.SOURCE_DEFAULT)
self.assertEqual(
plural_obj.formula,
plural,
f"Invalid plural for {original} "
f"(expected {plural}, got {plural_obj.formula})",
)
# Check whether html contains both language code and direction
self.assertIn(direction, lang.get_html())
self.assertIn(expected, lang.get_html())
# Check name
self.assertEqual(str(lang), name)
def test_private_use(self, code="de-x-a123", expected="de-x-a123"):
lang = Language.objects.auto_get_or_create(code, create=False)
self.assertEqual(lang.code, expected)
Language.objects.create(name="Test", code=code)
lang = Language.objects.auto_get_or_create(code, create=False)
self.assertEqual(lang.code, code)
def test_private_country(self):
self.test_private_use("en-US-x-twain", "en_US-x-twain")
def test_private_fuzzy_get(self):
Language.objects.auto_get_or_create("cs_FOO")
self.run_create(
"czech", "cs", "ltr", "(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2", "Czech", False
)
def test_chinese_fuzzy_get(self):
"""Test handling of manually created zh_CN language."""
language = Language.objects.create(code="zh_CN", name="Chinese")
language.plural_set.create(
number=0,
formula="0",
source=Plural.SOURCE_DEFAULT,
)
self.run_create("zh-rCN", "zh_CN", "ltr", "0", "Chinese (zh_CN)", False)
def test_case_sensitive_fuzzy_get(self):
"""Test handling of manually created zh-TW, zh-TW and zh_TW languages."""
if settings.DATABASES["default"]["ENGINE"] == "django.db.backends.mysql":
raise SkipTest("Not supported on MySQL")
language = Language.objects.create(code="zh_TW", name="Chinese (Taiwan)")
language.plural_set.create(
number=0,
formula="0",
source=Plural.SOURCE_DEFAULT,
)
self.run_create("zh_TW", "zh_TW", "ltr", "0", "Chinese (Taiwan) (zh_TW)", False)
language = Language.objects.create(code="zh-TW", name="Chinese Taiwan")
language.plural_set.create(
number=0,
formula="0",
source=Plural.SOURCE_DEFAULT,
)
self.run_create("zh-TW", "zh-TW", "ltr", "0", "Chinese Taiwan (zh-TW)", False)
language = Language.objects.create(code="zh-tw", name="Traditional Chinese")
language.plural_set.create(
number=0,
formula="0",
source=Plural.SOURCE_DEFAULT,
)
self.run_create(
"zh-tw", "zh-tw", "ltr", "0", "Traditional Chinese (zh-tw)", False
)
class CommandTest(BaseTestCase):
"""Test for management commands."""
def test_setuplang(self):
call_command("setuplang")
self.assertTrue(Language.objects.exists())
def test_setuplang_noupdate(self):
call_command("setuplang", update=False)
self.assertTrue(Language.objects.exists())
def check_list(self, **kwargs):
output = StringIO()
call_command("list_languages", "cs", stdout=output, **kwargs)
self.assertIn("Czech", output.getvalue())
def test_list_languages(self):
self.check_list()
def test_list_languages_lower(self):
self.check_list(lower=True)
def test_move_language(self):
Language.objects.auto_create("cs_CZ")
call_command("move_language", "cs_CZ", "cs")
class VerifyPluralsTest(TestCase):
"""In database plural form verification."""
@staticmethod
def all_data():
return chain(LANGUAGES, EXTRAPLURALS)
def test_valid(self):
"""Validate that we can name all plural formulas."""
for code, _name, _nplurals, plural_formula in self.all_data():
self.assertNotEqual(
get_plural_type(code.replace("_", "-").split("-")[0], plural_formula),
data.PLURAL_UNKNOWN,
f"Can not guess plural type for {code} ({plural_formula})",
)
def test_formula(self):
"""Validate that all formulas can be parsed by gettext."""
# Verify we get an error on invalid syntax
with self.assertRaises((SyntaxError, ValueError)):
gettext.c2py("n==0 ? 1 2")
for code, _name, nplurals, plural_formula in self.all_data():
# Validate plurals can be parsed
plural = gettext.c2py(plural_formula)
# Get maximal plural
calculated = max(plural(x) for x in range(200)) + 1
# Check it matches ours
self.assertEqual(
calculated,
nplurals,
"Invalid nplurals for {}: {} ({}, {})".format(
code, calculated, nplurals, plural_formula
),
)
class LanguagesViewTest(FixtureTestCase):
def test_languages(self):
response = self.client.get(reverse("languages"))
self.assertContains(response, "Czech")
def test_language(self):
response = self.client.get(reverse("show_language", kwargs={"lang": "cs"}))
self.assertContains(response, "Czech")
self.assertContains(response, "test/test")
def test_language_br(self):
response = self.client.get(reverse("show_language", kwargs={"lang": "br"}))
self.assertContains(response, "Breton")
# Example is listed
self.assertContains(response, "1000000")
def test_project_language(self):
response = self.client.get(
reverse("project-language", kwargs={"lang": "cs", "project": "test"})
)
self.assertContains(response, "Czech")
self.assertContains(response, "/projects/test/test/cs/")
def test_language_redirect(self):
response = self.client.get(reverse("show_language", kwargs={"lang": "cs_CZ"}))
self.assertRedirects(response, reverse("show_language", kwargs={"lang": "cs"}))
def test_language_nonexisting(self):
response = self.client.get(
reverse("show_language", kwargs={"lang": "nonexisting"})
)
self.assertEqual(response.status_code, 404)
def test_add(self):
response = self.client.get(reverse("create-language"))
self.assertEqual(response.status_code, 302)
self.user.is_superuser = True
self.user.save()
response = self.client.get(reverse("create-language"))
self.assertEqual(response.status_code, 200)
response = self.client.post(reverse("create-language"), {"code": "x"})
self.assertEqual(response.status_code, 200)
response = self.client.post(
reverse("create-language"),
{
"code": "xx",
"name": "XX",
"direction": "ltr",
"number": "2",
"formula": "n != 1",
},
)
self.assertRedirects(response, reverse("show_language", kwargs={"lang": "xx"}))
def test_delete(self):
response = self.client.post(reverse("show_language", kwargs={"lang": "br"}))
self.assertEqual(response.status_code, 200)
self.user.is_superuser = True
self.user.save()
response = self.client.post(reverse("show_language", kwargs={"lang": "cs"}))
self.assertEqual(response.status_code, 200)
response = self.client.post(reverse("show_language", kwargs={"lang": "br"}))
self.assertRedirects(response, reverse("languages"))
def test_edit(self):
language = Language.objects.get(code="cs")
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("edit-language", kwargs={"pk": language.pk}),
{"code": "xx", "name": "XX", "direction": "ltr"},
)
self.assertRedirects(response, reverse("show_language", kwargs={"lang": "xx"}))
def test_edit_plural(self):
language = Language.objects.get(code="cs")
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("edit-plural", kwargs={"pk": language.plural.pk}),
{"number": "2", "formula": "n != 1"},
)
self.assertRedirects(
response, reverse("show_language", kwargs={"lang": "cs"}) + "#information"
)
class PluralsCompareTest(TestCase):
def test_match(self):
plural = Plural.objects.get(language__code="cs", source=Plural.SOURCE_DEFAULT)
self.assertTrue(plural.same_plural(plural.number, plural.formula))
def test_formula(self):
plural = Plural.objects.get(language__code="pt", source=Plural.SOURCE_DEFAULT)
self.assertFalse(plural.same_plural(2, "(n != 1)"))
def test_different_formula(self):
plural = Plural.objects.get(language__code="pt", source=Plural.SOURCE_DEFAULT)
self.assertTrue(plural.same_plural(2, "(n > 1)"))
def test_different_count(self):
plural = Plural.objects.get(language__code="lt", source=Plural.SOURCE_DEFAULT)
self.assertFalse(
plural.same_plural(
4,
"(n%10==1 ? 0 : n%10==1 && n%100!=11 ?"
" 1 : n %10>=2 && (n%100<10 || n%100>=20) ? 2 : 3)",
)
)
def test_invalid(self):
plural = Plural.objects.get(language__code="lt", source=Plural.SOURCE_DEFAULT)
self.assertFalse(plural.same_plural(1, "bogus"))
class PluralTest(BaseTestCase):
def test_examples(self):
plural = Plural(number=2, formula="n!=1")
self.assertEqual(
plural.examples,
{0: ["1"], 1: ["0", "2", "3", "4", "5", "6", "7", "8", "9", "10"]},
)
def test_plurals(self):
"""Test whether plural form is correctly calculated."""
plural = Plural.objects.get(language__code="cs")
self.assertEqual(
plural.plural_form,
"nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;",
)
def test_plural_names(self):
plural = Plural.objects.get(language__code="cs")
self.assertEqual(plural.get_plural_name(0), "One")
self.assertEqual(plural.get_plural_name(1), "Few")
self.assertEqual(plural.get_plural_name(2), "Other")
def test_plural_names_invalid(self):
plural = Plural.objects.get(language__code="cs")
plural.type = -1
self.assertEqual(plural.get_plural_name(0), "Singular")
self.assertEqual(plural.get_plural_name(1), "Plural")
self.assertEqual(plural.get_plural_name(2), "Plural form 2")
def test_plural_labels(self):
plural = Plural.objects.get(language__code="cs")
label = plural.get_plural_label(0)
self.assertIn("One", label)
self.assertIn("1", label)
label = plural.get_plural_label(1)
self.assertIn("Few", label)
self.assertIn("2, 3, 4", label)
label = plural.get_plural_label(2)
self.assertIn("Other", label)
self.assertIn("5, 6, 7", label)
def test_plural_type(self):
language = Language.objects.get(code="cs")
plural = Plural.objects.create(
language=language,
number=3,
formula=(
"(n%10==1 && n%100!=11 ? 0 : "
"n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)"
),
source=Plural.SOURCE_GETTEXT,
)
self.assertEqual(plural.type, data.PLURAL_ONE_FEW_OTHER)
def test_definitions(self):
"""Verify consistency of plural definitions."""
plurals = [x[1] for x in data.PLURAL_MAPPINGS]
choices = [x[0] for x in Plural.PLURAL_CHOICES]
for plural in plurals:
self.assertIn(plural, choices)
self.assertIn(plural, data.PLURAL_NAMES)
|
import tempfile
import unittest
import mock
from kalliope.core.Models import Singleton
from kalliope.core.Models.settings.Player import Player
from kalliope.core.Models.settings.Stt import Stt
from kalliope.core.Models.settings.Trigger import Trigger
from kalliope.core.Models.settings.Tts import Tts
from kalliope.neurons.settings import Settings
class TestSettings(unittest.TestCase):
def setUp(self):
self.neuron_settings = Settings()
def tearDown(self):
# Cleaning for settings singleton
Singleton._instances = {}
def test_is_parameters_ok(self):
# TODO this code relies on the current settings.yml file, should create a full mock instead
# tts
self.neuron_settings.default_tts = "pico2wave"
self.assertTrue(self.neuron_settings._is_parameters_ok())
self.neuron_settings.text_to_speech = [{"pico2wave": {"language": "fr-FR"}}]
self.assertTrue(self.neuron_settings._is_parameters_ok())
# stt
self.neuron_settings.default_stt = "google"
self.assertTrue(self.neuron_settings._is_parameters_ok())
self.neuron_settings.speech_to_text = [{"google": {"language": "fr-FR"}}]
self.assertTrue(self.neuron_settings._is_parameters_ok())
# player
self.neuron_settings.default_player = "mplayer"
self.assertTrue(self.neuron_settings._is_parameters_ok())
self.neuron_settings.players = [{"mplayer": {}}]
# trigger
self.neuron_settings.default_trigger = "snowboy"
self.assertTrue(self.neuron_settings._is_parameters_ok())
# hooks
self.neuron_settings.hooks = {"blabla": ["coucou", "test"]}
self.assertTrue(self.neuron_settings._is_parameters_ok())
self.neuron_settings.hooks = {"blabla": "string"}
self.assertTrue(self.neuron_settings._is_parameters_ok())
# variables
tmpfile = tempfile.NamedTemporaryFile()
self.neuron_settings.var_files = [tmpfile.name]
self.assertTrue(self.neuron_settings._is_parameters_ok())
# deaf
self.neuron_settings.deaf = 60
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.deaf = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.deaf = 0
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.deaf = True
self.assertTrue(self.neuron_settings._is_parameters_ok())
# mute
self.neuron_settings.mute = 60
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.mute = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.mute = 0
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.mute = True
self.assertTrue(self.neuron_settings._is_parameters_ok())
# recognizer_multiplier
self.neuron_settings.recognizer_multiplier = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.recognizer_multiplier = 60
self.assertTrue(self.neuron_settings._is_parameters_ok())
# recognizer_energy_ratio
self.neuron_settings.recognizer_energy_ratio = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.recognizer_energy_ratio = 60
self.assertTrue(self.neuron_settings._is_parameters_ok())
# recognizer_recording_timeout
self.neuron_settings.recognizer_recording_timeout = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.recognizer_recording_timeout = 60
self.assertTrue(self.neuron_settings._is_parameters_ok())
# recognizer_recording_timeout_with_silence
self.neuron_settings.recognizer_recording_timeout_with_silence = "randomString"
self.assertFalse(self.neuron_settings._is_parameters_ok())
self.neuron_settings.recognizer_recording_timeout_with_silence = 60
self.assertTrue(self.neuron_settings._is_parameters_ok())
def test_set_settings(self):
# tts
self.neuron_settings.default_tts = "randomtts"
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_default_tts") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(self.neuron_settings.default_tts)
self.neuron_settings.text_to_speech = [{"randomTTS": {"language": "fr-FR"}}]
        tts = Tts(name="randomTTS", parameters={"language": "fr-FR"})
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_ttss") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(tts)
# stt
self.neuron_settings.default_stt = "randomstt"
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_default_stt") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(self.neuron_settings.default_stt)
self.neuron_settings.speech_to_text = [{"randomStt": {"language": "fr-FR"}}]
stt = Stt(name="randomStt", parameters={"language": "fr-FR"})
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_stts") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(stt)
# players
self.neuron_settings.default_player = "randomPlayer"
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_default_player") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(self.neuron_settings.default_player)
self.neuron_settings.players = [{"randomPlayer": {}}]
player = Player(name="randomPlayer", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_players") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(player)
# triggers
self.neuron_settings.default_trigger = "randomTrigger"
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_default_trigger") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(self.neuron_settings.default_trigger)
self.neuron_settings.triggers = [{"randomTrigger": {}}]
trigger = Trigger(name="randomTrigger", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_trigger") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(trigger)
# Hooks
self.neuron_settings.hooks = {"randomHook": "randomSynapse"}
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_hooks") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(self.neuron_settings.hooks)
# Variables
with tempfile.NamedTemporaryFile() as tmpfile:
tmpfile.write("coucou: 'hello'".encode()) # encode to get the binary format
tmpfile.flush() # To refresh the file with the data
self.neuron_settings.var_files = [tmpfile.name]
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_variables") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with({'coucou': 'hello'})
self.neuron_settings.var_files = [] # reset var_file
# Deaf
self.neuron_settings.deaf = True
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_deaf_status") as mock_setting_editor:
with mock.patch("kalliope.core.SignalLauncher.SignalLauncher.get_order_instance"):
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once()
# Mute
self.neuron_settings.mute = True
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_mute_status") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(True)
# set_recognizer_multiplier
self.neuron_settings.recognizer_multiplier = 50
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_recognizer_multiplier") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(50)
# set_recognizer_energy_ratio
self.neuron_settings.recognizer_energy_ratio = 50
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_recognizer_energy_ratio") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(50)
# set_recognizer_recording_timeout
self.neuron_settings.recognizer_recording_timeout = 50
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_recognizer_recording_timeout") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(50)
# set_recognizer_recording_timeout_with_silence
self.neuron_settings.recognizer_recording_timeout_with_silence = 50
with mock.patch("kalliope.core.ConfigurationManager.SettingEditor.set_recognizer_recording_timeout_with_silence") as mock_setting_editor:
self.neuron_settings._set_settings()
mock_setting_editor.assert_called_once_with(50)
|
import asyncio
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_DEVICE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from .const import DOMAIN, SMS_GATEWAY
from .gateway import create_sms_gateway
PLATFORMS = ["sensor"]
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.isdevice})},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Configure Gammu state machine."""
hass.data.setdefault(DOMAIN, {})
sms_config = config.get(DOMAIN, {})
if not sms_config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=sms_config,
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Configure Gammu state machine."""
device = entry.data[CONF_DEVICE]
config = {"Device": device, "Connection": "at"}
gateway = await create_sms_gateway(config, hass)
if not gateway:
return False
hass.data[DOMAIN][SMS_GATEWAY] = gateway
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
gateway = hass.data[DOMAIN].pop(SMS_GATEWAY)
await gateway.terminate_async()
return unload_ok
|
from pytest import fixture
from . import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION
from tests.async_mock import MagicMock, patch
@fixture
def nzbget_api(hass):
"""Mock NZBGetApi for easier testing."""
with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api:
instance = mock_api.return_value
instance.history = MagicMock(return_value=list(MOCK_HISTORY))
instance.pausedownload = MagicMock(return_value=True)
instance.resumedownload = MagicMock(return_value=True)
instance.status = MagicMock(return_value=MOCK_STATUS.copy())
instance.version = MagicMock(return_value=MOCK_VERSION)
yield mock_api
|
from unittest.mock import Mock
import numpy as np
import pandas as pd
import pytest
import pytz
from qstrader.signals.sma import SMASignal
@pytest.mark.parametrize(
'start_dt,lookbacks,prices,expected',
[
(
pd.Timestamp('2019-01-01 14:30:00', tz=pytz.utc),
[6, 12],
[
99.34, 101.87, 98.32, 92.98, 103.87,
104.51, 97.62, 95.22, 96.09, 100.34,
105.14, 107.49, 90.23, 89.43, 87.68
],
[96.71833333333333, 97.55]
)
]
)
def test_sma_signal(start_dt, lookbacks, prices, expected):
"""
Checks that the SMA signal correctly calculates the
simple moving average for various lookbacks.
"""
universe = Mock()
universe.get_assets.return_value = ['EQ:SPY']
sma = SMASignal(start_dt, universe, lookbacks)
for price_idx in range(len(prices)):
sma.append('EQ:SPY', prices[price_idx])
for i, lookback in enumerate(lookbacks):
assert np.isclose(sma('EQ:SPY', lookback), expected[i])
|
import string
from .unit import Unit
class PuncRemoval(Unit):
"""Process unit for remove punctuations."""
def transform(self, input_: list) -> list:
"""
        Remove punctuation from a list of tokens.
        :param input_: list of tokens.
        :return: tokens without punctuation.
"""
table = str.maketrans({key: None for key in string.punctuation})
return [item.translate(table) for item in input_]
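# Minimal usage sketch (the tokens below are illustrative):
#
#   unit = PuncRemoval()
#   unit.transform(["hello,", "world!"])  # -> ["hello", "world"]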
|
import logging
import re
import telnetlib
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
_DEVICES_REGEX = re.compile(
r"(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2})))\s"
r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s+"
r"(?P<status>([^\s]+))\s+"
r"(?P<type>([^\s]+))\s+"
r"(?P<intf>([^\s]+))\s+"
r"(?P<hwintf>([^\s]+))\s+"
r"(?P<host>([^\s]+))"
)
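# Hypothetical "hostmgr list" line the regex above is meant to match (all
# field values are illustrative, not captured from a real router):
#   00:0c:29:aa:bb:cc 192.168.1.10   C    DHCP   LocalNetwork  ethport1  my-laptop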
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a THOMSON scanner."""
scanner = ThomsonDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class ThomsonDeviceScanner(DeviceScanner):
"""This class queries a router running THOMSON firmware."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.last_results = {}
# Test the router is accessible.
data = self.get_thomson_data()
self.success_init = data is not None
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [client["mac"] for client in self.last_results]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
if not self.last_results:
return None
for client in self.last_results:
if client["mac"] == device:
return client["host"]
return None
def _update_info(self):
"""Ensure the information from the THOMSON router is up to date.
        Return a boolean indicating whether the scan succeeded.
"""
if not self.success_init:
return False
_LOGGER.info("Checking ARP")
data = self.get_thomson_data()
if not data:
return False
# Flag C stands for CONNECTED
active_clients = [
client for client in data.values() if client["status"].find("C") != -1
]
self.last_results = active_clients
return True
def get_thomson_data(self):
"""Retrieve data from THOMSON and return parsed result."""
try:
telnet = telnetlib.Telnet(self.host)
telnet.read_until(b"Username : ")
telnet.write((self.username + "\r\n").encode("ascii"))
telnet.read_until(b"Password : ")
telnet.write((self.password + "\r\n").encode("ascii"))
telnet.read_until(b"=>")
telnet.write(b"hostmgr list\r\n")
devices_result = telnet.read_until(b"=>").split(b"\r\n")
telnet.write(b"exit\r\n")
except EOFError:
_LOGGER.exception("Unexpected response from router")
return
except ConnectionRefusedError:
_LOGGER.exception("Connection refused by router. Telnet enabled?")
return
devices = {}
for device in devices_result:
match = _DEVICES_REGEX.search(device.decode("utf-8"))
if match:
devices[match.group("ip")] = {
"ip": match.group("ip"),
"mac": match.group("mac").upper(),
"host": match.group("host"),
"status": match.group("status"),
}
return devices
|
from typing import Any
from homeassistant.components import litejet
from homeassistant.components.scene import Scene
ATTR_NUMBER = "number"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up scenes for the LiteJet platform."""
litejet_ = hass.data["litejet_system"]
devices = []
for i in litejet_.scenes():
name = litejet_.get_scene_name(i)
if not litejet.is_ignored(hass, name):
devices.append(LiteJetScene(litejet_, i, name))
add_entities(devices)
class LiteJetScene(Scene):
"""Representation of a single LiteJet scene."""
def __init__(self, lj, i, name):
"""Initialize the scene."""
self._lj = lj
self._index = i
self._name = name
@property
def name(self):
"""Return the name of the scene."""
return self._name
@property
def device_state_attributes(self):
"""Return the device-specific state attributes."""
return {ATTR_NUMBER: self._index}
def activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
self._lj.activate_scene(self._index)
|
from __future__ import print_function
import argparse
import logging
import pymongo
from .utils import do_db_auth, setup_logging
from ..arctic import Arctic, VERSION_STORE, LIBRARY_TYPES, \
ArcticLibraryBinding
from ..hooks import get_mongodb_uri
logger = logging.getLogger(__name__)
def main():
usage = """Initializes a named library in a user's database. Note that it will enable sharding on the underlying
collection if it can. To do this you must have admin credentials in arctic:
Example:
arctic_init_library --host=hostname --library=arctic_jblackburn.my_library
"""
setup_logging()
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
parser.add_argument("--library", help="The name of the library. e.g. 'arctic_jblackburn.lib'")
parser.add_argument("--type", default=VERSION_STORE, choices=sorted(LIBRARY_TYPES.keys()),
help="The type of the library, as defined in "
"arctic.py. Default: %s" % VERSION_STORE)
parser.add_argument("--quota", default=10, help="Quota for the library in GB. A quota of 0 is unlimited."
"Default: 10")
parser.add_argument(
"--hashed",
action="store_true",
default=False,
help="Use hashed based sharding. Useful where SYMBOLs share a common prefix (e.g. Bloomberg BBGXXXX symbols) "
"Default: False")
opts = parser.parse_args()
if not opts.library or '.' not in opts.library:
parser.error('Must specify the full path of the library e.g. user.library!')
db_name, _ = ArcticLibraryBinding._parse_db_lib(opts.library)
print("Initializing: %s on mongo %s" % (opts.library, opts.host))
c = pymongo.MongoClient(get_mongodb_uri(opts.host))
if not do_db_auth(opts.host, c, db_name):
logger.error('Authentication Failed. Exiting.')
return
store = Arctic(c)
store.initialize_library("%s" % opts.library, opts.type, hashed=opts.hashed)
logger.info("Library %s created" % opts.library)
logger.info("Setting quota to %sG" % opts.quota)
store.set_quota(opts.library, int(opts.quota) * 1024 * 1024 * 1024)
if __name__ == '__main__':
main()
|
from datetime import timedelta
import logging
from typing import Optional
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_DISTANCE = "distance"
ATTR_SOURCE = "source"
DOMAIN = "geo_location"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=60)
async def async_setup(hass, config):
"""Set up the Geolocation component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class GeolocationEvent(Entity):
"""This represents an external event with an associated geolocation."""
@property
def state(self):
"""Return the state of the sensor."""
if self.distance is not None:
return round(self.distance, 1)
return None
@property
def source(self) -> str:
"""Return source value of this external event."""
raise NotImplementedError
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return None
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return None
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return None
@property
def state_attributes(self):
"""Return the state attributes of this external event."""
data = {}
if self.latitude is not None:
data[ATTR_LATITUDE] = round(self.latitude, 5)
if self.longitude is not None:
data[ATTR_LONGITUDE] = round(self.longitude, 5)
if self.source is not None:
data[ATTR_SOURCE] = self.source
return data
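# A minimal, hypothetical subclass sketch (names and values are illustrative;
# real platforms also handle entity registration and updates):
#
#   class DemoEvent(GeolocationEvent):
#       @property
#       def source(self):
#           return "demo"
#
#       @property
#       def distance(self):
#           return 1.5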
|
import traceback
class SchedulerBase:
"""Base class with common functionality for schedulers
Derived classes must implement get_current_task.
"""
def __init__(self):
self.tasks = []
self.callbacks = []
def __repr__(self):
return "%s" % self.tasks
def connect(self, signal, action):
assert signal == "runnable"
if action not in self.callbacks:
self.callbacks.append(action)
def add_task(self, task, atfront=False):
"""Add a task to the scheduler's task list
The task may be a function, generator or scheduler, and is
deemed to have finished when it returns a false value or raises
StopIteration.
"""
self.remove_task(task)
if atfront:
self.tasks.insert(0, task)
else:
self.tasks.append(task)
for callback in self.callbacks:
callback(self)
def remove_task(self, task):
"""Remove a single task from the scheduler"""
try:
self.tasks.remove(task)
except ValueError:
pass
def remove_all_tasks(self):
"""Remove all tasks from the scheduler"""
self.tasks = []
def add_scheduler(self, sched):
"""Adds a subscheduler as a child task of this scheduler"""
sched.connect("runnable", lambda t: self.add_task(t))
def remove_scheduler(self, sched):
"""Remove a sub-scheduler from this scheduler"""
self.remove_task(sched)
try:
self.callbacks.remove(sched)
except ValueError:
pass
def get_current_task(self):
"""Overridden function returning the next task to run"""
raise NotImplementedError
def __call__(self):
"""Run an iteration of the current task"""
if len(self.tasks):
r = self.iteration()
if r:
return r
return self.tasks_pending()
def complete_tasks(self):
"""Run all of the scheduler's current tasks to completion"""
while self.tasks_pending():
self.iteration()
def tasks_pending(self):
return len(self.tasks) != 0
def iteration(self):
"""Perform one iteration of the current task"""
try:
task = self.get_current_task()
except StopIteration:
return 0
try:
if hasattr(task, "__iter__"):
ret = next(task)
else:
ret = task()
except StopIteration:
pass
except Exception:
traceback.print_exc()
else:
if ret:
return ret
self.tasks.remove(task)
return 0
class LifoScheduler(SchedulerBase):
"""Scheduler calling most recently added tasks first"""
def get_current_task(self):
try:
return self.tasks[-1]
except IndexError:
raise StopIteration
class FifoScheduler(SchedulerBase):
"""Scheduler calling tasks in the order they were added"""
def get_current_task(self):
try:
return self.tasks[0]
except IndexError:
raise StopIteration
if __name__ == "__main__":
import time
import random
m = LifoScheduler()
def timetask(t):
while time.time() - t < 1:
print("***")
time.sleep(0.1)
print("!!!")
def sayhello(x):
for i in range(random.randint(2, 8)):
print("hello", x)
time.sleep(0.1)
yield 1
print("end", x)
s = FifoScheduler()
m.add_task(s)
s.add_task(sayhello(10))
s.add_task(sayhello(20))
s.add_task(sayhello(30))
while s.tasks_pending():
s.iteration()
time.sleep(2)
print("***")
|
import datetime
import logging
import os
import posixpath
import subprocess
import tarfile
from perfkitbenchmarker.providers.aws.util import AWS_PATH
def ArchiveRun(run_temp_directory, target_bucket,
prefix='',
gsutil_path='gsutil',
aws_path=AWS_PATH):
"""Archive a run directory to GCS or S3.
Args:
run_temp_directory: str. directory to archive.
target_bucket: str. Either a gs:// or s3:// path to an extant bucket.
prefix: str. prefix for the file.
gsutil_path: str. Path to the gsutil tool.
aws_path: str. Path to the aws command line tool.
Raises:
ValueError: when directory or target_bucket does not exist.
subprocess.CalledProcessError: subprocess call failed.
"""
if not os.path.isdir(run_temp_directory):
raise ValueError('{0} is not a directory.'.format(run_temp_directory))
tar_file_name = '{}{}.tar.gz'.format(
prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
prefix_len = 5
prefixes = {
's3://': [aws_path, 's3', 'cp'],
'gs://': [gsutil_path, 'cp']
}
assert all(len(key) == prefix_len for key in prefixes), prefixes
try:
cmd = (prefixes[target_bucket[:prefix_len]] +
['-', posixpath.join(target_bucket, tar_file_name)])
except KeyError:
raise ValueError('Unsupported bucket name: {0}'.format(target_bucket))
logging.info('Streaming %s to %s\n%s', run_temp_directory, tar_file_name,
' '.join(cmd))
p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
with p.stdin:
with tarfile.open(mode='w:gz', fileobj=p.stdin) as tar:
tar.add(run_temp_directory, os.path.basename(run_temp_directory))
status = p.wait()
if status:
raise subprocess.CalledProcessError(status, cmd)
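# Example invocation (directory and bucket names are illustrative):
#   ArchiveRun('/tmp/perfkit_run_abc', 'gs://my-archive-bucket', prefix='run_')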
|
from datetime import timedelta
from greeclimate.device import HorizontalSwing, VerticalSwing
from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError
import pytest
from homeassistant.components.climate.const import (
ATTR_FAN_MODE,
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
ATTR_SWING_MODE,
DOMAIN,
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
PRESET_SLEEP,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
SWING_BOTH,
SWING_HORIZONTAL,
SWING_OFF,
SWING_VERTICAL,
)
from homeassistant.components.gree.climate import (
FAN_MODES_REVERSE,
HVAC_MODES_REVERSE,
SUPPORTED_FEATURES,
)
from homeassistant.components.gree.const import (
DOMAIN as GREE_DOMAIN,
FAN_MEDIUM_HIGH,
FAN_MEDIUM_LOW,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .common import build_device_mock
from tests.async_mock import DEFAULT as DEFAULT_MOCK, AsyncMock, patch
from tests.common import MockConfigEntry, async_fire_time_changed
ENTITY_ID = f"{DOMAIN}.fake_device_1"
@pytest.fixture
def mock_now():
"""Fixture for dtutil.now."""
return dt_util.utcnow()
async def async_setup_gree(hass):
"""Set up the gree platform."""
MockConfigEntry(domain=GREE_DOMAIN).add_to_hass(hass)
await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {"climate": {}}})
await hass.async_block_till_done()
async def test_discovery_called_once(hass, discovery, device):
"""Test discovery is only ever called once."""
await async_setup_gree(hass)
assert discovery.call_count == 1
await async_setup_gree(hass)
assert discovery.call_count == 1
async def test_discovery_setup(hass, discovery, device):
"""Test setup of platform."""
MockDevice1 = build_device_mock(
name="fake-device-1", ipAddress="1.1.1.1", mac="aabbcc112233"
)
MockDevice2 = build_device_mock(
name="fake-device-2", ipAddress="2.2.2.2", mac="bbccdd223344"
)
discovery.return_value = [MockDevice1.device_info, MockDevice2.device_info]
device.side_effect = [MockDevice1, MockDevice2]
await async_setup_gree(hass)
await hass.async_block_till_done()
assert discovery.call_count == 1
assert len(hass.states.async_all(DOMAIN)) == 2
async def test_discovery_setup_connection_error(hass, discovery, device):
"""Test gree integration is setup."""
MockDevice1 = build_device_mock(name="fake-device-1")
MockDevice1.bind = AsyncMock(side_effect=DeviceNotBoundError)
MockDevice2 = build_device_mock(name="fake-device-2")
MockDevice2.bind = AsyncMock(side_effect=DeviceNotBoundError)
device.side_effect = [MockDevice1, MockDevice2]
await async_setup_gree(hass)
await hass.async_block_till_done()
assert discovery.call_count == 1
assert not hass.states.async_all(DOMAIN)
async def test_update_connection_failure(hass, discovery, device, mock_now):
"""Testing update hvac connection failure exception."""
device().update_state.side_effect = [
DEFAULT_MOCK,
DeviceTimeoutError,
DeviceTimeoutError,
]
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
# First update to make the device available
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
next_update = mock_now + timedelta(minutes=10)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
next_update = mock_now + timedelta(minutes=15)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
# Then two more update failures to make the device unavailable
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state == STATE_UNAVAILABLE
async def test_update_connection_failure_recovery(hass, discovery, device, mock_now):
"""Testing update hvac connection failure recovery."""
device().update_state.side_effect = [DeviceTimeoutError, DEFAULT_MOCK]
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state == STATE_UNAVAILABLE
next_update = mock_now + timedelta(minutes=10)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
async def test_update_unhandled_exception(hass, discovery, device, mock_now):
"""Testing update hvac connection unhandled response exception."""
device().update_state.side_effect = [DEFAULT_MOCK, Exception]
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
next_update = mock_now + timedelta(minutes=10)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state == STATE_UNAVAILABLE
async def test_send_command_device_timeout(hass, discovery, device, mock_now):
"""Test for sending power on command to the device with a device timeout."""
await async_setup_gree(hass)
# First update to make the device available
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
device().update_state.side_effect = DeviceTimeoutError
device().push_state_update.side_effect = DeviceTimeoutError
# Second update to make an initial error (device is still available)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
# Second attempt should make the device unavailable
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_AUTO},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == STATE_UNAVAILABLE
async def test_send_command_device_unknown_error(hass, discovery, device, mock_now):
"""Test for sending power on command to the device with a device timeout."""
device().update_state.side_effect = [DEFAULT_MOCK, Exception]
device().push_state_update.side_effect = Exception
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
# First update to make the device available
state = hass.states.get(ENTITY_ID)
assert state.name == "fake-device-1"
assert state.state != STATE_UNAVAILABLE
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_AUTO},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == STATE_UNAVAILABLE
async def test_send_power_on(hass, discovery, device, mock_now):
"""Test for sending power on command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_AUTO},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == HVAC_MODE_AUTO
async def test_send_power_on_device_timeout(hass, discovery, device, mock_now):
"""Test for sending power on command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_AUTO},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == HVAC_MODE_AUTO
async def test_send_target_temperature(hass, discovery, device, mock_now):
"""Test for sending target temperature command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 25.1},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_TEMPERATURE) == 25
async def test_send_target_temperature_device_timeout(
hass, discovery, device, mock_now
):
"""Test for sending target temperature command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 25.1},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_TEMPERATURE) == 25
async def test_update_target_temperature(hass, discovery, device, mock_now):
"""Test for updating target temperature from the device."""
device().target_temperature = 32
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_TEMPERATURE) == 32
@pytest.mark.parametrize(
"preset", (PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE)
)
async def test_send_preset_mode(hass, discovery, device, mock_now, preset):
"""Test for sending preset mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_PRESET_MODE) == preset
async def test_send_invalid_preset_mode(hass, discovery, device, mock_now):
"""Test for sending preset mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
with pytest.raises(ValueError):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: "invalid"},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_PRESET_MODE) != "invalid"
@pytest.mark.parametrize(
"preset", (PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE)
)
async def test_send_preset_mode_device_timeout(
hass, discovery, device, mock_now, preset
):
"""Test for sending preset mode command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_PRESET_MODE) == preset
@pytest.mark.parametrize(
"preset", (PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE)
)
async def test_update_preset_mode(hass, discovery, device, mock_now, preset):
"""Test for updating preset mode from the device."""
device().steady_heat = preset == PRESET_AWAY
device().power_save = preset == PRESET_ECO
device().sleep = preset == PRESET_SLEEP
device().turbo = preset == PRESET_BOOST
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_PRESET_MODE) == preset
@pytest.mark.parametrize(
"hvac_mode",
(
HVAC_MODE_OFF,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
),
)
async def test_send_hvac_mode(hass, discovery, device, mock_now, hvac_mode):
"""Test for sending hvac mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == hvac_mode
@pytest.mark.parametrize(
"hvac_mode",
(HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_DRY, HVAC_MODE_FAN_ONLY, HVAC_MODE_HEAT),
)
async def test_send_hvac_mode_device_timeout(
hass, discovery, device, mock_now, hvac_mode
):
"""Test for sending hvac mode command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == hvac_mode
@pytest.mark.parametrize(
"hvac_mode",
(
HVAC_MODE_OFF,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
),
)
async def test_update_hvac_mode(hass, discovery, device, mock_now, hvac_mode):
"""Test for updating hvac mode from the device."""
device().power = hvac_mode != HVAC_MODE_OFF
device().mode = HVAC_MODES_REVERSE.get(hvac_mode)
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.state == hvac_mode
@pytest.mark.parametrize(
"fan_mode",
(FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH),
)
async def test_send_fan_mode(hass, discovery, device, mock_now, fan_mode):
"""Test for sending fan mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_FAN_MODE) == fan_mode
async def test_send_invalid_fan_mode(hass, discovery, device, mock_now):
"""Test for sending fan mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
with pytest.raises(ValueError):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "invalid"},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_FAN_MODE) != "invalid"
@pytest.mark.parametrize(
"fan_mode",
(FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH),
)
async def test_send_fan_mode_device_timeout(
hass, discovery, device, mock_now, fan_mode
):
"""Test for sending fan mode command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_FAN_MODE) == fan_mode
@pytest.mark.parametrize(
"fan_mode",
(FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH),
)
async def test_update_fan_mode(hass, discovery, device, mock_now, fan_mode):
"""Test for updating fan mode from the device."""
device().fan_speed = FAN_MODES_REVERSE.get(fan_mode)
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_FAN_MODE) == fan_mode
@pytest.mark.parametrize(
"swing_mode", (SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL)
)
async def test_send_swing_mode(hass, discovery, device, mock_now, swing_mode):
"""Test for sending swing mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_SWING_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_SWING_MODE) == swing_mode
async def test_send_invalid_swing_mode(hass, discovery, device, mock_now):
"""Test for sending swing mode command to the device."""
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
with pytest.raises(ValueError):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_SWING_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: "invalid"},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_SWING_MODE) != "invalid"
@pytest.mark.parametrize(
"swing_mode", (SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL)
)
async def test_send_swing_mode_device_timeout(
hass, discovery, device, mock_now, swing_mode
):
"""Test for sending swing mode command to the device with a device timeout."""
device().push_state_update.side_effect = DeviceTimeoutError
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_SWING_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode},
blocking=True,
)
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_SWING_MODE) == swing_mode
@pytest.mark.parametrize(
"swing_mode", (SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL)
)
async def test_update_swing_mode(hass, discovery, device, mock_now, swing_mode):
"""Test for updating swing mode from the device."""
device().horizontal_swing = (
HorizontalSwing.FullSwing
if swing_mode in (SWING_BOTH, SWING_HORIZONTAL)
else HorizontalSwing.Default
)
device().vertical_swing = (
VerticalSwing.FullSwing
if swing_mode in (SWING_BOTH, SWING_VERTICAL)
else VerticalSwing.Default
)
await async_setup_gree(hass)
next_update = mock_now + timedelta(minutes=5)
with patch("homeassistant.util.dt.utcnow", return_value=next_update):
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state is not None
assert state.attributes.get(ATTR_SWING_MODE) == swing_mode
async def test_name(hass, discovery, device):
"""Test for name property."""
await async_setup_gree(hass)
state = hass.states.get(ENTITY_ID)
assert state.attributes[ATTR_FRIENDLY_NAME] == "fake-device-1"
async def test_supported_features_with_turnon(hass, discovery, device):
"""Test for supported_features property."""
await async_setup_gree(hass)
state = hass.states.get(ENTITY_ID)
assert state.attributes[ATTR_SUPPORTED_FEATURES] == SUPPORTED_FEATURES
|
from typing import Any
from homeassistant.components.scene import Scene
from . import DOMAIN as TAHOMA_DOMAIN
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tahoma scenes."""
if discovery_info is None:
return
controller = hass.data[TAHOMA_DOMAIN]["controller"]
scenes = [
TahomaScene(scene, controller) for scene in hass.data[TAHOMA_DOMAIN]["scenes"]
]
add_entities(scenes, True)
class TahomaScene(Scene):
"""Representation of a Tahoma scene entity."""
def __init__(self, tahoma_scene, controller):
"""Initialize the scene."""
self.tahoma_scene = tahoma_scene
self.controller = controller
self._name = self.tahoma_scene.name
def activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
self.controller.launch_action_group(self.tahoma_scene.oid)
@property
def name(self):
"""Return the name of the scene."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes of the scene."""
return {"tahoma_scene_oid": self.tahoma_scene.oid}
|
from datetime import timedelta
import logging
import os
import greenwavereality as greenwave
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_VERSION = "version"
SUPPORTED_FEATURES = SUPPORT_BRIGHTNESS
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOST): cv.string, vol.Required(CONF_VERSION): cv.positive_int}
)
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Greenwave Reality Platform."""
host = config.get(CONF_HOST)
tokenfile = hass.config.path(".greenwave")
if config.get(CONF_VERSION) == 3:
if os.path.exists(tokenfile):
with open(tokenfile) as tokenfile:
token = tokenfile.read()
else:
try:
token = greenwave.grab_token(host, "hass", "homeassistant")
except PermissionError:
_LOGGER.error("The Gateway Is Not In Sync Mode")
raise
with open(tokenfile, "w+") as tokenfile:
tokenfile.write(token)
else:
token = None
bulbs = greenwave.grab_bulbs(host, token)
add_entities(
GreenwaveLight(device, host, token, GatewayData(host, token))
for device in bulbs.values()
)
class GreenwaveLight(LightEntity):
"""Representation of an Greenwave Reality Light."""
def __init__(self, light, host, token, gatewaydata):
"""Initialize a Greenwave Reality Light."""
self._did = int(light["did"])
self._name = light["name"]
self._state = int(light["state"])
self._brightness = greenwave.hass_brightness(light)
self._host = host
self._online = greenwave.check_online(light)
self._token = token
self._gatewaydata = gatewaydata
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORTED_FEATURES
@property
def available(self):
"""Return True if entity is available."""
return self._online
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def is_on(self):
"""Return true if light is on."""
return self._state
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
temp_brightness = int((kwargs.get(ATTR_BRIGHTNESS, 255) / 255) * 100)
greenwave.set_brightness(self._host, self._did, temp_brightness, self._token)
greenwave.turn_on(self._host, self._did, self._token)
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
greenwave.turn_off(self._host, self._did, self._token)
def update(self):
"""Fetch new state data for this light."""
self._gatewaydata.update()
bulbs = self._gatewaydata.greenwave
self._state = int(bulbs[self._did]["state"])
self._brightness = greenwave.hass_brightness(bulbs[self._did])
self._online = greenwave.check_online(bulbs[self._did])
self._name = bulbs[self._did]["name"]
class GatewayData:
"""Handle Gateway data and limit updates."""
def __init__(self, host, token):
"""Initialize the data object."""
self._host = host
self._token = token
self._greenwave = greenwave.grab_bulbs(host, token)
@property
def greenwave(self):
"""Return Gateway API object."""
return self._greenwave
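    # Throttle caps gateway polling at MIN_TIME_BETWEEN_UPDATES; a throttled
    # call returns None and leaves the cached bulb data unchanged.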
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from the gateway."""
self._greenwave = greenwave.grab_bulbs(self._host, self._token)
return self._greenwave
|
from django.test import SimpleTestCase
from django.utils.translation import override
from weblate.utils.render import render_template
class RenderTest(SimpleTestCase):
def test_float(self):
self.assertEqual(render_template("{{ number }}", number=1.1), "1.1")
def test_float_cs(self):
with override("cs"):
self.test_float()
def test_replace(self):
self.assertEqual(
render_template('{% replace "a-string-with-dashes" "-" " " %}'),
"a string with dashes",
)
def test_dirname(self):
self.assertEqual(
render_template("{{ value|dirname }}", value="weblate/test.po"), "weblate"
)
def test_stripext(self):
self.assertEqual(
render_template("{{ value|stripext }}", value="weblate/test.po"),
"weblate/test",
)
def test_parentdir(self):
self.assertEqual(
render_template("{{ value|parentdir }}", value="weblate/test.po"), "test.po"
)
def test_parentdir_chain(self):
self.assertEqual(
render_template(
"{{ value|parentdir|parentdir }}", value="foo/bar/weblate/test.po"
),
"weblate/test.po",
)
|
import hangups.user
import hangups.hangouts_pb2
USER_ID = hangups.user.UserID(1, 1)
def test_default_type_detection_empty_0():
# missing names
user = hangups.user.User(
USER_ID,
full_name='',
first_name='',
photo_url='',
emails=[],
is_self=False,
)
assert user.full_name == hangups.user.DEFAULT_NAME
assert user.first_name == hangups.user.DEFAULT_NAME
assert user.name_type == hangups.user.NameType.DEFAULT
def test_default_type_detection_empty_1():
# missing names
user = hangups.user.User(
USER_ID,
full_name=None,
first_name=None,
photo_url='',
emails=[],
is_self=False,
)
assert user.full_name == hangups.user.DEFAULT_NAME
assert user.first_name == hangups.user.DEFAULT_NAME
assert user.name_type == hangups.user.NameType.DEFAULT
def test_default_type_detection_from_conv_part_data():
# default user in 201904
conv_part_data = hangups.hangouts_pb2.ConversationParticipantData(
id=hangups.hangouts_pb2.ParticipantId(
chat_id='1',
gaia_id='1'
),
fallback_name='unknown',
invitation_status=hangups.hangouts_pb2.INVITATION_STATUS_ACCEPTED,
participant_type=hangups.hangouts_pb2.PARTICIPANT_TYPE_GAIA,
new_invitation_status=hangups.hangouts_pb2.INVITATION_STATUS_ACCEPTED,
)
user = hangups.user.User.from_conv_part_data(
conv_part_data=conv_part_data,
self_user_id=USER_ID
)
assert user.full_name == hangups.user.DEFAULT_NAME
assert user.first_name == hangups.user.DEFAULT_NAME
assert user.name_type == hangups.user.NameType.DEFAULT
def test_real_type():
# regular name
user = hangups.user.User(
USER_ID,
full_name='Joe Doe',
first_name='Joe',
photo_url='',
emails=[],
is_self=False,
)
assert user.full_name == 'Joe Doe'
assert user.first_name == 'Joe'
assert user.name_type == hangups.user.NameType.REAL
def test_real_type_from_conv_part_data():
conv_part_data = hangups.hangouts_pb2.ConversationParticipantData(
id=hangups.hangouts_pb2.ParticipantId(
chat_id='1',
gaia_id='1'
),
fallback_name='Joe Doe',
invitation_status=hangups.hangouts_pb2.INVITATION_STATUS_ACCEPTED,
participant_type=hangups.hangouts_pb2.PARTICIPANT_TYPE_GAIA,
new_invitation_status=hangups.hangouts_pb2.INVITATION_STATUS_ACCEPTED,
)
user = hangups.user.User.from_conv_part_data(
conv_part_data=conv_part_data,
self_user_id=USER_ID
)
assert user.full_name == 'Joe Doe'
assert user.first_name == 'Joe'
assert user.name_type == hangups.user.NameType.REAL
|
import argparse
from binascii import a2b_hex, b2a_hex
from autobahn import xbr
from twisted.internet.task import react
from twisted.internet.defer import inlineCallbacks
@inlineCallbacks
def main(reactor, gateway, adr):
sbc = xbr.SimpleBlockchain(gateway)
yield sbc.start()
print('status for address 0x{}:'.format(b2a_hex(adr).decode()))
# get ETH and XBR account balances for address
balances = yield sbc.get_balances(adr)
print('balances: {}'.format(balances))
# get XBR network membership status for address
member_status = yield sbc.get_member_status(adr)
print('member status: {}'.format(member_status))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gateway',
dest='gateway',
type=str,
default=None,
help='Ethereum HTTP gateway URL or None for auto-select.')
parser.add_argument('--adr',
dest='adr',
type=str,
default=None,
help='Ethereum address to lookup.')
args = parser.parse_args()
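    # args.adr is expected as "0x"-prefixed hex; strip the prefix before decoding.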
react(main, (args.gateway, a2b_hex(args.adr[2:],)))
|
import logging
from homeassistant.helpers.entity import ToggleEntity
from . import DATA_CLIMATE, DATA_LEAF, LeafEntity
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Nissan Leaf switch platform setup."""
if discovery_info is None:
return
devices = []
for vin, datastore in hass.data[DATA_LEAF].items():
_LOGGER.debug("Adding switch for vin=%s", vin)
devices.append(LeafClimateSwitch(datastore))
add_devices(devices, True)
class LeafClimateSwitch(LeafEntity, ToggleEntity):
"""Nissan Leaf Climate Control switch."""
@property
def name(self):
"""Switch name."""
return f"{self.car.leaf.nickname} Climate Control"
def log_registration(self):
"""Log registration."""
_LOGGER.debug(
"Registered LeafClimateSwitch integration with Home Assistant for VIN %s",
self.car.leaf.vin,
)
@property
def device_state_attributes(self):
"""Return climate control attributes."""
attrs = super().device_state_attributes
attrs["updated_on"] = self.car.last_climate_response
return attrs
@property
def is_on(self):
"""Return true if climate control is on."""
return self.car.data[DATA_CLIMATE]
async def async_turn_on(self, **kwargs):
"""Turn on climate control."""
if await self.car.async_set_climate(True):
self.car.data[DATA_CLIMATE] = True
async def async_turn_off(self, **kwargs):
"""Turn off climate control."""
if await self.car.async_set_climate(False):
self.car.data[DATA_CLIMATE] = False
|
import random
import re
import string
import pem
import base64
import sqlalchemy
from cryptography import x509
from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa, ec, padding
from cryptography.hazmat.primitives.serialization import load_pem_private_key, Encoding, pkcs7
from flask_restful.reqparse import RequestParser
from sqlalchemy import and_, func
from lemur.constants import CERTIFICATE_KEY_TYPES
from lemur.exceptions import InvalidConfiguration
paginated_parser = RequestParser()
paginated_parser.add_argument("count", type=int, default=10, location="args")
paginated_parser.add_argument("page", type=int, default=1, location="args")
paginated_parser.add_argument("sortDir", type=str, dest="sort_dir", location="args")
paginated_parser.add_argument("sortBy", type=str, dest="sort_by", location="args")
paginated_parser.add_argument("filter", type=str, location="args")
paginated_parser.add_argument("owner", type=str, location="args")
def base64encode(string):
# Performs Base64 encoding of string to string using the base64.b64encode() function
# which encodes bytes to bytes.
return base64.b64encode(string.encode()).decode()
def get_psuedo_random_string():
"""
Create a random and strongish challenge.
"""
challenge = "".join(random.choice(string.ascii_uppercase) for x in range(6)) # noqa
challenge += "".join(random.choice("~!@#$%^&*()_+") for x in range(6)) # noqa
challenge += "".join(random.choice(string.ascii_lowercase) for x in range(6))
challenge += "".join(random.choice(string.digits) for x in range(6)) # noqa
return challenge
def parse_certificate(body):
"""
Helper function that parses a PEM certificate.
:param body:
:return:
"""
assert isinstance(body, str)
return x509.load_pem_x509_certificate(body.encode("utf-8"), default_backend())
def parse_private_key(private_key):
"""
Parses a PEM-format private key (RSA, DSA, ECDSA or any other supported algorithm).
Raises ValueError for an invalid string. Raises AssertionError when passed value is not str-type.
:param private_key: String containing PEM private key
"""
assert isinstance(private_key, str)
return load_pem_private_key(
private_key.encode("utf8"), password=None, backend=default_backend()
)
def get_key_type_from_certificate(body):
"""
    Helper function to determine the key type by parsing a given PEM certificate.
:param body: PEM string
:return: Key type string
"""
parsed_cert = parse_certificate(body)
if isinstance(parsed_cert.public_key(), rsa.RSAPublicKey):
return "RSA{key_size}".format(
key_size=parsed_cert.public_key().key_size
)
elif isinstance(parsed_cert.public_key(), ec.EllipticCurvePublicKey):
return get_key_type_from_ec_curve(parsed_cert.public_key().curve.name)
def split_pem(data):
"""
Split a string of several PEM payloads to a list of strings.
:param data: String
:return: List of strings
"""
return re.split("\n(?=-----BEGIN )", data)
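# Illustrative example: a PEM bundle with two certificates splits into two
# strings, each starting at its own "-----BEGIN " marker:
#   split_pem("-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n"
#             "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----")
#   == ["-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
#       "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"]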
def parse_cert_chain(pem_chain):
"""
Helper function to split and parse a series of PEM certificates.
:param pem_chain: string
:return: List of parsed certificates
"""
if pem_chain is None:
return []
return [parse_certificate(cert) for cert in split_pem(pem_chain) if cert]
def parse_csr(csr):
"""
Helper function that parses a CSR.
:param csr:
:return:
"""
assert isinstance(csr, str)
return x509.load_pem_x509_csr(csr.encode("utf-8"), default_backend())
def get_authority_key(body):
"""Returns the authority key for a given certificate in hex format"""
parsed_cert = parse_certificate(body)
authority_key = parsed_cert.extensions.get_extension_for_class(
x509.AuthorityKeyIdentifier
).value.key_identifier
return authority_key.hex()
def get_key_type_from_ec_curve(curve_name):
"""
Give an EC curve name, return the matching key_type.
:param: curve_name
:return: key_type
"""
_CURVE_TYPES = {
ec.SECP192R1().name: "ECCPRIME192V1",
ec.SECP256R1().name: "ECCPRIME256V1",
ec.SECP224R1().name: "ECCSECP224R1",
ec.SECP384R1().name: "ECCSECP384R1",
ec.SECP521R1().name: "ECCSECP521R1",
ec.SECP256K1().name: "ECCSECP256K1",
ec.SECT163K1().name: "ECCSECT163K1",
ec.SECT233K1().name: "ECCSECT233K1",
ec.SECT283K1().name: "ECCSECT283K1",
ec.SECT409K1().name: "ECCSECT409K1",
ec.SECT571K1().name: "ECCSECT571K1",
ec.SECT163R2().name: "ECCSECT163R2",
ec.SECT233R1().name: "ECCSECT233R1",
ec.SECT283R1().name: "ECCSECT283R1",
ec.SECT409R1().name: "ECCSECT409R1",
ec.SECT571R1().name: "ECCSECT571R2",
}
if curve_name in _CURVE_TYPES.keys():
return _CURVE_TYPES[curve_name]
else:
return None
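# Illustrative example:
#   get_key_type_from_ec_curve(ec.SECP256R1().name)  # -> "ECCPRIME256V1"
#   get_key_type_from_ec_curve("not-a-curve")        # -> None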
def generate_private_key(key_type):
"""
Generates a new private key based on key_type.
    Valid key types: 'RSA2048', 'RSA4096', 'ECCPRIME192V1', 'ECCPRIME256V1', 'ECCSECP192R1',
'ECCSECP224R1', 'ECCSECP256R1', 'ECCSECP384R1', 'ECCSECP521R1', 'ECCSECP256K1',
'ECCSECT163K1', 'ECCSECT233K1', 'ECCSECT283K1', 'ECCSECT409K1', 'ECCSECT571K1',
'ECCSECT163R2', 'ECCSECT233R1', 'ECCSECT283R1', 'ECCSECT409R1', 'ECCSECT571R2'
:param key_type:
:return:
"""
_CURVE_TYPES = {
"ECCPRIME192V1": ec.SECP192R1(), # duplicate
"ECCPRIME256V1": ec.SECP256R1(), # duplicate
"ECCSECP192R1": ec.SECP192R1(), # duplicate
"ECCSECP224R1": ec.SECP224R1(),
"ECCSECP256R1": ec.SECP256R1(), # duplicate
"ECCSECP384R1": ec.SECP384R1(),
"ECCSECP521R1": ec.SECP521R1(),
"ECCSECP256K1": ec.SECP256K1(),
"ECCSECT163K1": ec.SECT163K1(),
"ECCSECT233K1": ec.SECT233K1(),
"ECCSECT283K1": ec.SECT283K1(),
"ECCSECT409K1": ec.SECT409K1(),
"ECCSECT571K1": ec.SECT571K1(),
"ECCSECT163R2": ec.SECT163R2(),
"ECCSECT233R1": ec.SECT233R1(),
"ECCSECT283R1": ec.SECT283R1(),
"ECCSECT409R1": ec.SECT409R1(),
"ECCSECT571R2": ec.SECT571R1(),
}
if key_type not in CERTIFICATE_KEY_TYPES:
raise Exception(
"Invalid key type: {key_type}. Supported key types: {choices}".format(
key_type=key_type, choices=",".join(CERTIFICATE_KEY_TYPES)
)
)
if "RSA" in key_type:
key_size = int(key_type[3:])
return rsa.generate_private_key(
public_exponent=65537, key_size=key_size, backend=default_backend()
)
elif "ECC" in key_type:
return ec.generate_private_key(
curve=_CURVE_TYPES[key_type], backend=default_backend()
)
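# Usage sketch (illustrative; assumes both names are listed in
# CERTIFICATE_KEY_TYPES):
#   rsa_key = generate_private_key("RSA2048")        # 2048-bit RSA key
#   ec_key = generate_private_key("ECCPRIME256V1")   # NIST P-256 key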
def check_cert_signature(cert, issuer_public_key):
"""
Check a certificate's signature against an issuer public key.
Before EC validation, make sure we support the algorithm, otherwise raise UnsupportedAlgorithm
On success, returns None; on failure, raises UnsupportedAlgorithm or InvalidSignature.
"""
if isinstance(issuer_public_key, rsa.RSAPublicKey):
# RSA requires padding, just to make life difficult for us poor developers :(
if cert.signature_algorithm_oid == x509.SignatureAlgorithmOID.RSASSA_PSS:
# In 2005, IETF devised a more secure padding scheme to replace PKCS #1 v1.5. To make sure that
# nobody can easily support or use it, they mandated lots of complicated parameters, unlike any
# other X.509 signature scheme.
# https://tools.ietf.org/html/rfc4056
raise UnsupportedAlgorithm("RSASSA-PSS not supported")
else:
padder = padding.PKCS1v15()
issuer_public_key.verify(
cert.signature,
cert.tbs_certificate_bytes,
padder,
cert.signature_hash_algorithm,
)
elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance(
ec.ECDSA(cert.signature_hash_algorithm), ec.ECDSA
):
issuer_public_key.verify(
cert.signature,
cert.tbs_certificate_bytes,
ec.ECDSA(cert.signature_hash_algorithm),
)
else:
raise UnsupportedAlgorithm(
"Unsupported Algorithm '{var}'.".format(
var=cert.signature_algorithm_oid._name
)
)
def is_selfsigned(cert):
"""
Returns True if the certificate is self-signed.
Returns False for failed verification or unsupported signing algorithm.
"""
try:
check_cert_signature(cert, cert.public_key())
# If verification was successful, it's self-signed.
return True
    except (InvalidSignature, UnsupportedAlgorithm):
return False
def is_weekend(date):
"""
Determines if a given date is on a weekend.
:param date:
:return:
"""
    # weekday() runs 0 (Monday) through 6 (Sunday), so 5 and 6 are the weekend.
    return date.weekday() >= 5
def validate_conf(app, required_vars):
"""
Ensures that the given fields are set in the applications conf.
:param app:
:param required_vars: list
"""
for var in required_vars:
if var not in app.config:
raise InvalidConfiguration(
"Required variable '{var}' is not set in Lemur's conf.".format(var=var)
)
# https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WindowedRangeQuery
def column_windows(session, column, windowsize):
"""Return a series of WHERE clauses against
a given column that break it into windows.
Result is an iterable of tuples, consisting of
((start, end), whereclause), where (start, end) are the ids.
Requires a database that supports window functions,
i.e. Postgresql, SQL Server, Oracle.
Enhance this yourself ! Add a "where" argument
so that windows of just a subset of rows can
be computed.
"""
def int_for_range(start_id, end_id):
if end_id:
return and_(column >= start_id, column < end_id)
else:
return column >= start_id
q = session.query(
column, func.row_number().over(order_by=column).label("rownum")
).from_self(column)
if windowsize > 1:
q = q.filter(sqlalchemy.text("rownum %% %d=1" % windowsize))
intervals = [id for id, in q]
while intervals:
start = intervals.pop(0)
if intervals:
end = intervals[0]
else:
end = None
yield int_for_range(start, end)
def windowed_query(q, column, windowsize):
""""Break a Query into windows on a given column."""
for whereclause in column_windows(q.session, column, windowsize):
for row in q.filter(whereclause).order_by(column):
yield row
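# Usage sketch (illustrative; assumes a SQLAlchemy ``session`` and a mapped
# ``Certificate`` model with an integer primary key):
#   q = session.query(Certificate)
#   for cert in windowed_query(q, Certificate.id, 1000):
#       process(cert)  # visits every row, 1000 ids per window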
def truthiness(s):
"""If input string resembles something truthy then return True, else False."""
return s.lower() in ("true", "yes", "on", "t", "1")
def find_matching_certificates_by_hash(cert, matching_certs):
"""Given a Cryptography-formatted certificate cert, and Lemur-formatted certificates (matching_certs),
determine if any of the certificate hashes match and return the matches."""
matching = []
for c in matching_certs:
if parse_certificate(c.body).fingerprint(hashes.SHA256()) == cert.fingerprint(
hashes.SHA256()
):
matching.append(c)
return matching
def convert_pkcs7_bytes_to_pem(certs_pkcs7):
"""
    Given a PKCS7 certificate bundle in PEM encoding (bytes), convert it into a list of PEM-encoded certificates
:raises ValueError or ValidationError
:param certs_pkcs7:
:return: list of certs in PEM format
"""
certificates = pkcs7.load_pem_pkcs7_certificates(certs_pkcs7)
certificates_pem = []
for cert in certificates:
certificates_pem.append(pem.parse(cert.public_bytes(encoding=Encoding.PEM))[0])
return certificates_pem
|
from unittest import mock
import pytest
import homeassistant.components.zha.core.registries as registries
MANUFACTURER = "mock manufacturer"
MODEL = "mock model"
@pytest.fixture
def zha_device():
"""Return a mock of ZHA device."""
dev = mock.MagicMock()
dev.manufacturer = MANUFACTURER
dev.model = MODEL
return dev
@pytest.fixture
def channels(channel):
"""Return a mock of channels."""
return [channel("level", 8), channel("on_off", 6)]
@pytest.mark.parametrize(
"rule, matched",
[
(registries.MatchRule(), False),
(registries.MatchRule(channel_names={"level"}), True),
(registries.MatchRule(channel_names={"level", "no match"}), False),
(registries.MatchRule(channel_names={"on_off"}), True),
(registries.MatchRule(channel_names={"on_off", "no match"}), False),
(registries.MatchRule(channel_names={"on_off", "level"}), True),
(registries.MatchRule(channel_names={"on_off", "level", "no match"}), False),
# test generic_id matching
(registries.MatchRule(generic_ids={"channel_0x0006"}), True),
(registries.MatchRule(generic_ids={"channel_0x0008"}), True),
(registries.MatchRule(generic_ids={"channel_0x0006", "channel_0x0008"}), True),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008", "channel_0x0009"}
),
False,
),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008"},
channel_names={"on_off", "level"},
),
True,
),
# manufacturer matching
(registries.MatchRule(manufacturers="no match"), False),
(registries.MatchRule(manufacturers=MANUFACTURER), True),
(
registries.MatchRule(manufacturers="no match", aux_channels="aux_channel"),
False,
),
(
registries.MatchRule(
manufacturers=MANUFACTURER, aux_channels="aux_channel"
),
True,
),
(registries.MatchRule(models=MODEL), True),
(registries.MatchRule(models="no match"), False),
(registries.MatchRule(models=MODEL, aux_channels="aux_channel"), True),
(registries.MatchRule(models="no match", aux_channels="aux_channel"), False),
# match everything
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008"},
channel_names={"on_off", "level"},
manufacturers=MANUFACTURER,
models=MODEL,
),
True,
),
(
registries.MatchRule(
channel_names="on_off", manufacturers={"random manuf", MANUFACTURER}
),
True,
),
(
registries.MatchRule(
channel_names="on_off", manufacturers={"random manuf", "Another manuf"}
),
False,
),
(
registries.MatchRule(
channel_names="on_off", manufacturers=lambda x: x == MANUFACTURER
),
True,
),
(
registries.MatchRule(
channel_names="on_off", manufacturers=lambda x: x != MANUFACTURER
),
False,
),
(
registries.MatchRule(
channel_names="on_off", models={"random model", MODEL}
),
True,
),
(
registries.MatchRule(
channel_names="on_off", models={"random model", "Another model"}
),
False,
),
(
registries.MatchRule(channel_names="on_off", models=lambda x: x == MODEL),
True,
),
(
registries.MatchRule(channel_names="on_off", models=lambda x: x != MODEL),
False,
),
],
)
def test_registry_matching(rule, matched, channels):
"""Test strict rule matching."""
assert rule.strict_matched(MANUFACTURER, MODEL, channels) is matched
@pytest.mark.parametrize(
"rule, matched",
[
(registries.MatchRule(), False),
(registries.MatchRule(channel_names={"level"}), True),
(registries.MatchRule(channel_names={"level", "no match"}), False),
(registries.MatchRule(channel_names={"on_off"}), True),
(registries.MatchRule(channel_names={"on_off", "no match"}), False),
(registries.MatchRule(channel_names={"on_off", "level"}), True),
(registries.MatchRule(channel_names={"on_off", "level", "no match"}), False),
(
registries.MatchRule(channel_names={"on_off", "level"}, models="no match"),
True,
),
(
registries.MatchRule(
channel_names={"on_off", "level"},
models="no match",
manufacturers="no match",
),
True,
),
(
registries.MatchRule(
channel_names={"on_off", "level"},
models="no match",
manufacturers=MANUFACTURER,
),
True,
),
# test generic_id matching
(registries.MatchRule(generic_ids={"channel_0x0006"}), True),
(registries.MatchRule(generic_ids={"channel_0x0008"}), True),
(registries.MatchRule(generic_ids={"channel_0x0006", "channel_0x0008"}), True),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008", "channel_0x0009"}
),
False,
),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008", "channel_0x0009"},
models="mo match",
),
False,
),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008", "channel_0x0009"},
models=MODEL,
),
True,
),
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008"},
channel_names={"on_off", "level"},
),
True,
),
# manufacturer matching
(registries.MatchRule(manufacturers="no match"), False),
(registries.MatchRule(manufacturers=MANUFACTURER), True),
(registries.MatchRule(models=MODEL), True),
(registries.MatchRule(models="no match"), False),
# match everything
(
registries.MatchRule(
generic_ids={"channel_0x0006", "channel_0x0008"},
channel_names={"on_off", "level"},
manufacturers=MANUFACTURER,
models=MODEL,
),
True,
),
],
)
def test_registry_loose_matching(rule, matched, channels):
"""Test loose rule matching."""
assert rule.loose_matched(MANUFACTURER, MODEL, channels) is matched
def test_match_rule_claim_channels_color(channel):
"""Test channel claiming."""
ch_color = channel("color", 0x300)
ch_level = channel("level", 8)
ch_onoff = channel("on_off", 6)
rule = registries.MatchRule(channel_names="on_off", aux_channels={"color", "level"})
claimed = rule.claim_channels([ch_color, ch_level, ch_onoff])
assert {"color", "level", "on_off"} == {ch.name for ch in claimed}
@pytest.mark.parametrize(
"rule, match",
[
(registries.MatchRule(channel_names={"level"}), {"level"}),
(registries.MatchRule(channel_names={"level", "no match"}), {"level"}),
(registries.MatchRule(channel_names={"on_off"}), {"on_off"}),
(registries.MatchRule(generic_ids="channel_0x0000"), {"basic"}),
(
registries.MatchRule(channel_names="level", generic_ids="channel_0x0000"),
{"basic", "level"},
),
(registries.MatchRule(channel_names={"level", "power"}), {"level", "power"}),
(
registries.MatchRule(
channel_names={"level", "on_off"}, aux_channels={"basic", "power"}
),
{"basic", "level", "on_off", "power"},
),
(registries.MatchRule(channel_names={"color"}), set()),
],
)
def test_match_rule_claim_channels(rule, match, channel, channels):
"""Test channel claiming."""
ch_basic = channel("basic", 0)
channels.append(ch_basic)
ch_power = channel("power", 1)
channels.append(ch_power)
claimed = rule.claim_channels(channels)
assert match == {ch.name for ch in claimed}
@pytest.fixture
def entity_registry():
"""Registry fixture."""
return registries.ZHAEntityRegistry()
@pytest.mark.parametrize(
"manufacturer, model, match_name",
(
("random manufacturer", "random model", "OnOff"),
("random manufacturer", MODEL, "OnOffModel"),
(MANUFACTURER, "random model", "OnOffManufacturer"),
(MANUFACTURER, MODEL, "OnOffModelManufacturer"),
(MANUFACTURER, "some model", "OnOffMultimodel"),
),
)
def test_weighted_match(channel, entity_registry, manufacturer, model, match_name):
"""Test weightedd match."""
s = mock.sentinel
@entity_registry.strict_match(
s.component,
channel_names="on_off",
models={MODEL, "another model", "some model"},
)
class OnOffMultimodel:
pass
@entity_registry.strict_match(s.component, channel_names="on_off")
class OnOff:
pass
@entity_registry.strict_match(
s.component, channel_names="on_off", manufacturers=MANUFACTURER
)
class OnOffManufacturer:
pass
@entity_registry.strict_match(s.component, channel_names="on_off", models=MODEL)
class OnOffModel:
pass
@entity_registry.strict_match(
s.component, channel_names="on_off", models=MODEL, manufacturers=MANUFACTURER
)
class OnOffModelManufacturer:
pass
ch_on_off = channel("on_off", 6)
ch_level = channel("level", 8)
match, claimed = entity_registry.get_entity(
s.component, manufacturer, model, [ch_on_off, ch_level]
)
assert match.__name__ == match_name
assert claimed == [ch_on_off]
|
from homeassistant.components.switch import DEVICE_CLASS_OUTLET, SwitchEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import DOMAIN, GeniusZone
ATTR_DURATION = "duration"
GH_ON_OFF_ZONE = "on / off"
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Set up the Genius Hub switch entities."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
async_add_entities(
[
GeniusSwitch(broker, z)
for z in broker.client.zone_objs
if z.data["type"] == GH_ON_OFF_ZONE
]
)
class GeniusSwitch(GeniusZone, SwitchEntity):
"""Representation of a Genius Hub switch."""
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_OUTLET
@property
def is_on(self) -> bool:
"""Return the current state of the on/off zone.
The zone is considered 'on' if & only if it is override/on (e.g. timer/on is 'off').
"""
return self._zone.data["mode"] == "override" and self._zone.data["setpoint"]
async def async_turn_off(self, **kwargs) -> None:
"""Send the zone to Timer mode.
The zone is deemed 'off' in this mode, although the plugs may actually be on.
"""
await self._zone.set_mode("timer")
async def async_turn_on(self, **kwargs) -> None:
"""Set the zone to override/on ({'setpoint': true}) for x seconds."""
await self._zone.set_override(1, kwargs.get(ATTR_DURATION, 3600))
|
from django.test import SimpleTestCase
from weblate.checks.tests.test_checks import MockUnit
from weblate.checks.utils import highlight_string
class HighlightTestCase(SimpleTestCase):
def test_simple(self):
self.assertEqual(
highlight_string(
"simple {format} string", MockUnit(flags="python-brace-format")
),
[(7, 15, "{format}")],
)
def test_multi(self):
self.assertEqual(
highlight_string(
"simple {format} %d string",
MockUnit(flags="python-brace-format, python-format"),
),
[(7, 15, "{format}"), (16, 18, "%d")],
)
def test_overlap(self):
self.assertEqual(
highlight_string(
'nested <a href="{format}">string</a>',
MockUnit(flags="python-brace-format"),
),
[(7, 26, '<a href="{format}">'), (32, 36, "</a>")],
)
|
from functools import partial
import logging
from pyflume import FlumeAuth, FlumeDeviceList
from requests.exceptions import RequestException
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_PASSWORD,
CONF_USERNAME,
)
from .const import BASE_TOKEN_FILENAME
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
# If flume ever implements a login page for oauth
# we can use the oauth2 support built into Home Assistant.
#
# Currently they only implement the token endpoint
#
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(CONF_CLIENT_ID): str,
vol.Required(CONF_CLIENT_SECRET): str,
}
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
username = data[CONF_USERNAME]
password = data[CONF_PASSWORD]
client_id = data[CONF_CLIENT_ID]
client_secret = data[CONF_CLIENT_SECRET]
flume_token_full_path = hass.config.path(f"{BASE_TOKEN_FILENAME}-{username}")
try:
flume_auth = await hass.async_add_executor_job(
partial(
FlumeAuth,
username,
password,
client_id,
client_secret,
flume_token_file=flume_token_full_path,
)
)
flume_devices = await hass.async_add_executor_job(FlumeDeviceList, flume_auth)
except RequestException as err:
raise CannotConnect from err
except Exception as err:
raise InvalidAuth from err
if not flume_devices or not flume_devices.device_list:
raise CannotConnect
# Return info that you want to store in the config entry.
return {"title": username}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for flume."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
try:
info = await validate_input(self.hass, user_input)
return self.async_create_entry(title=info["title"], data=user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_import(self, user_input):
"""Handle import."""
return await self.async_step_user(user_input)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
import logging
import os
from threading import Thread
from kalliope import Utils
from kalliope.trigger.snowboy import snowboydecoder
from cffi import FFI as _FFI
class SnowboyModelNotFound(Exception):
pass
class MissingParameterException(Exception):
pass
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Snowboy(Thread):
def __init__(self, **kwargs):
super(Snowboy, self).__init__()
self._ignore_stderr()
# pause listening boolean
self.interrupted = False
self.kill_received = False
# get the sensitivity if set by the user
self.sensitivity = kwargs.get('sensitivity', 0.5)
self.apply_frontend = kwargs.get('apply_frontend', False)
# callback function to call when hotword caught
self.callback = kwargs.get('callback', None)
if self.callback is None:
raise MissingParameterException("callback function is required with snowboy")
# get the keywords to load
self.keywords = kwargs.get('keywords', None)
self.pmdl_file = kwargs.get('pmdl_file', None) # We notify the user that the pmdl_file parameter has been changed
if self.pmdl_file:
raise MissingParameterException('"pmdl_file" parameter is deprecated, please update your snowboy settings. \n Visit https://kalliope-project.github.io/kalliope/settings/triggers/snowboy/ for more information.')
if self.keywords is None:
raise MissingParameterException("At least one keyword is required with snowboy")
keyword_files = list()
sensitivities = list()
for keyword in self.keywords:
if self.check_if_path_is_valid(keyword['file_path']):
keyword_files.append(keyword['file_path'])
try:
if not isinstance(keyword['sensitivity'], list):
sensitivities.append(keyword['sensitivity'])
else:
for sensitivity in keyword['sensitivity']:
sensitivities.append(sensitivity)
except KeyError:
sensitivities.append(0.5)
self.detector = snowboydecoder.HotwordDetector(keyword_files,
sensitivity=sensitivities,
detected_callback=self.callback,
interrupt_check=self.interrupt_callback,
apply_frontend=self.apply_frontend)
def interrupt_callback(self):
"""
This function will be passed to snowboy to stop the main thread
:return:
"""
return self.interrupted
def run(self):
"""
Start the snowboy thread and wait for a Kalliope trigger word
:return:
"""
# start snowboy loop forever
self.detector.daemon = True
self.detector.start()
self.detector.join()
def pause(self):
"""
pause the Snowboy main thread
"""
logger.debug("Pausing snowboy process")
self.detector.pause()
def unpause(self):
"""
unpause the Snowboy main thread
"""
logger.debug("Unpausing snowboy process")
self.detector.unpause()
def stop(self):
"""
Kill the snowboy process
:return:
"""
logger.debug("Killing snowboy process")
self.interrupted = True
self.detector.terminate()
    def check_if_path_is_valid(self, keyword_file):
        try:
            keyword_path = Utils.get_real_file_path(keyword_file)
            # get_real_file_path returns None when the file cannot be resolved,
            # which makes os.path.isfile raise a TypeError below.
            if not os.path.isfile(keyword_path):
                raise SnowboyModelNotFound("The keyword at %s does not exist" % keyword_file)
        except TypeError:
            raise SnowboyModelNotFound("The keyword at %s does not exist" % keyword_file)
        return True
@staticmethod
def _ignore_stderr():
"""
Try to forward PortAudio messages from stderr to /dev/null.
"""
ffi = _FFI()
ffi.cdef("""
/* from stdio.h */
extern FILE* fopen(const char* path, const char* mode);
extern int fclose(FILE* fp);
extern FILE* stderr; /* GNU C library */
extern FILE* __stderrp; /* Mac OS X */
""")
stdio = ffi.dlopen(None)
devnull = stdio.fopen(os.devnull.encode(), b'w')
try:
stdio.stderr = devnull
except KeyError:
try:
stdio.__stderrp = devnull
except KeyError:
stdio.fclose(devnull)
|
from __future__ import print_function
try:
enumerate = enumerate
except NameError:
def enumerate(iterable):
"""emulates the python2.3 enumerate() function"""
i = 0
for val in iterable:
yield i, val
i += 1
def toto(value):
for k, v in value:
print(v.get('yo'))
|
from io import open
import subprocess
from flask import current_app
from lemur.utils import mktempfile, mktemppath
from lemur.plugins.bases import ExportPlugin
from lemur.plugins import lemur_csr as csr
def run_process(command):
"""
    Runs a given command with Popen and wraps some
error handling around it.
:param command:
:return:
"""
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
current_app.logger.debug(command)
stdout, stderr = p.communicate()
if p.returncode != 0:
current_app.logger.debug(" ".join(command))
current_app.logger.error(stderr)
raise Exception(stderr)
def create_csr(cert, chain, csr_tmp, key):
"""
Creates a csr from key and cert file.
:param cert:
:param chain:
:param csr_tmp:
:param key:
"""
assert isinstance(cert, str)
assert isinstance(chain, str)
assert isinstance(key, str)
with mktempfile() as key_tmp:
with open(key_tmp, "w") as f:
f.write(key)
with mktempfile() as cert_tmp:
with open(cert_tmp, "w") as f:
if chain:
f.writelines([cert.strip() + "\n", chain.strip() + "\n"])
else:
f.writelines([cert.strip() + "\n"])
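            # Re-derive a CSR from the existing certificate and private key,
            # then have "openssl req" read it from stdin and write csr_tmp.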
output = subprocess.check_output(
["openssl", "x509", "-x509toreq", "-in", cert_tmp, "-signkey", key_tmp]
)
subprocess.run(["openssl", "req", "-out", csr_tmp], input=output)
class CSRExportPlugin(ExportPlugin):
title = "CSR"
slug = "openssl-csr"
description = "Exports a CSR"
version = csr.VERSION
author = "jchuong"
author_url = "https://github.com/jchuong"
def export(self, body, chain, key, options, **kwargs):
"""
Creates CSR from certificate
:param key:
:param chain:
:param body:
:param options:
:param kwargs:
"""
with mktemppath() as output_tmp:
if not key:
raise Exception("Private Key required by CSR")
create_csr(body, chain, output_tmp, key)
extension = "csr"
with open(output_tmp, "rb") as f:
raw = f.read()
# passphrase is None
return extension, None, raw
|
from datetime import timedelta
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.gdacs import CONF_CATEGORIES, DOMAIN
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
)
from tests.async_mock import patch
@pytest.fixture(name="gdacs_setup", autouse=True)
def gdacs_setup_fixture():
"""Mock gdacs entry setup."""
with patch("homeassistant.components.gdacs.async_setup_entry", return_value=True):
yield
async def test_duplicate_error(hass, config_entry):
"""Test that errors are shown when duplicates are added."""
conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25}
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import(hass):
"""Test that the import step works."""
conf = {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_SCAN_INTERVAL: timedelta(minutes=4),
CONF_CATEGORIES: ["Drought", "Earthquake"],
}
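    # The timedelta scan interval is normalized to seconds (240.0) in the
    # stored entry data asserted below.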
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "-41.2, 174.7"
assert result["data"] == {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_SCAN_INTERVAL: 240.0,
CONF_CATEGORIES: ["Drought", "Earthquake"],
}
async def test_step_user(hass):
"""Test that the user step works."""
hass.config.latitude = -41.2
hass.config.longitude = 174.7
conf = {CONF_RADIUS: 25}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "-41.2, 174.7"
assert result["data"] == {
CONF_LATITUDE: -41.2,
CONF_LONGITUDE: 174.7,
CONF_RADIUS: 25,
CONF_SCAN_INTERVAL: 300.0,
CONF_CATEGORIES: [],
}
|
from asyncio import Queue
from datetime import datetime
from typing import Any, Generator, Optional
from pytest import fixture
from .consts import (
DUMMY_AUTO_OFF_SET,
DUMMY_DEVICE_ID,
DUMMY_DEVICE_NAME,
DUMMY_DEVICE_STATE,
DUMMY_ELECTRIC_CURRENT,
DUMMY_IP_ADDRESS,
DUMMY_MAC_ADDRESS,
DUMMY_PHONE_ID,
DUMMY_POWER_CONSUMPTION,
DUMMY_REMAINING_TIME,
)
from tests.async_mock import AsyncMock, patch
@patch("aioswitcher.devices.SwitcherV2Device")
class MockSwitcherV2Device:
"""Class for mocking the aioswitcher.devices.SwitcherV2Device object."""
def __init__(self) -> None:
"""Initialize the object."""
self._last_state_change = datetime.now()
@property
def device_id(self) -> str:
"""Return the device id."""
return DUMMY_DEVICE_ID
@property
def ip_addr(self) -> str:
"""Return the ip address."""
return DUMMY_IP_ADDRESS
@property
def mac_addr(self) -> str:
"""Return the mac address."""
return DUMMY_MAC_ADDRESS
@property
def name(self) -> str:
"""Return the device name."""
return DUMMY_DEVICE_NAME
@property
def state(self) -> str:
"""Return the device state."""
return DUMMY_DEVICE_STATE
@property
def remaining_time(self) -> Optional[str]:
"""Return the time left to auto-off."""
return DUMMY_REMAINING_TIME
@property
def auto_off_set(self) -> str:
"""Return the auto-off configuration value."""
return DUMMY_AUTO_OFF_SET
@property
def power_consumption(self) -> int:
"""Return the power consumption in watts."""
return DUMMY_POWER_CONSUMPTION
@property
def electric_current(self) -> float:
"""Return the power consumption in amps."""
return DUMMY_ELECTRIC_CURRENT
@property
def phone_id(self) -> str:
"""Return the phone id."""
return DUMMY_PHONE_ID
@property
def last_data_update(self) -> datetime:
"""Return the timestamp of the last update."""
return datetime.now()
@property
def last_state_change(self) -> datetime:
"""Return the timestamp of the state change."""
return self._last_state_change
@fixture(name="mock_bridge")
def mock_bridge_fixture() -> Generator[None, Any, None]:
"""Fixture for mocking aioswitcher.bridge.SwitcherV2Bridge."""
queue = Queue()
async def mock_queue():
"""Mock asyncio's Queue."""
await queue.put(MockSwitcherV2Device())
return await queue.get()
mock_bridge = AsyncMock()
patchers = [
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.start",
new=mock_bridge,
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.stop",
new=mock_bridge,
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.queue",
get=mock_queue,
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.running",
return_value=True,
),
]
for patcher in patchers:
patcher.start()
yield
for patcher in patchers:
patcher.stop()
@fixture(name="mock_failed_bridge")
def mock_failed_bridge_fixture() -> Generator[None, Any, None]:
"""Fixture for mocking aioswitcher.bridge.SwitcherV2Bridge."""
async def mock_queue():
"""Mock asyncio's Queue."""
raise RuntimeError
patchers = [
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.start",
return_value=None,
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.stop",
return_value=None,
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Bridge.queue",
get=mock_queue,
),
]
for patcher in patchers:
patcher.start()
yield
for patcher in patchers:
patcher.stop()
@fixture(name="mock_api")
def mock_api_fixture() -> Generator[AsyncMock, Any, None]:
"""Fixture for mocking aioswitcher.api.SwitcherV2Api."""
mock_api = AsyncMock()
patchers = [
patch(
"homeassistant.components.switcher_kis.SwitcherV2Api.connect", new=mock_api
),
patch(
"homeassistant.components.switcher_kis.SwitcherV2Api.disconnect",
new=mock_api,
),
]
for patcher in patchers:
patcher.start()
yield
for patcher in patchers:
patcher.stop()
|
def AptInstall(vm):
"""Installs the nvidia-docker package on the VM."""
vm.Install('docker')
vm.RemoteCommand('curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey '
'| sudo apt-key add -')
vm.RemoteCommand('curl -s -L https://nvidia.github.io/nvidia-docker/'
'$(. /etc/os-release;echo $ID$VERSION_ID)'
'/nvidia-docker.list | sudo tee '
'/etc/apt/sources.list.d/nvidia-docker.list')
vm.RemoteCommand('sudo apt-get update')
vm.InstallPackages('nvidia-docker2')
# Reload the Docker daemon configuration
vm.RemoteCommand('sudo pkill -SIGHUP dockerd')
def YumInstall(vm):
"""Installs the nvidia-docker package on the VM."""
vm.Install('docker')
vm.RemoteCommand('curl -s -L https://nvidia.github.io/'
'nvidia-container-runtime/'
'$(. /etc/os-release;echo $ID$VERSION_ID)/'
'nvidia-container-runtime.repo | sudo tee /etc/yum.repos.d/'
'nvidia-container-runtime.repo')
vm.InstallPackages('nvidia-container-runtime-hook')
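# Usage sketch (an assumption about the surrounding framework, not confirmed by
# this file): PerfKitBenchmarker dispatches vm.Install('<package>') to the
# AptInstall/YumInstall function matching the VM's package manager, so a
# benchmark would typically just request the package by module name:
#
#     vm.Install('nvidia_docker')  # picks AptInstall or YumInstall per OS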
|
def apply_application_controller_patch(zha_gateway):
"""Apply patches to ZHA objects."""
# Patch handle_message until zigpy can provide an event here
def handle_message(sender, profile, cluster, src_ep, dst_ep, message):
"""Handle message from a device."""
if (
sender.ieee in zha_gateway.devices
and not zha_gateway.devices[sender.ieee].available
):
zha_gateway.async_device_became_available(
sender, profile, cluster, src_ep, dst_ep, message
)
return sender.handle_message(profile, cluster, src_ep, dst_ep, message)
zha_gateway.application_controller.handle_message = handle_message
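# Usage sketch (hypothetical test setup): apply the patch right after the ZHA
# gateway is created, so availability is tracked from the first message on:
#
#     zha_gateway = get_zha_gateway(hass)  # hypothetical accessor
#     apply_application_controller_patch(zha_gateway)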
|
import asyncio
from datetime import timedelta
import logging
from requests import HTTPError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
from homeassistant.util import Throttle
from . import api, config_flow
from .const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=1)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["binary_sensor", "sensor", "switch"]
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up Home Connect component."""
hass.data[DOMAIN] = {}
if DOMAIN not in config:
return True
config_flow.OAuth2FlowHandler.async_register_implementation(
hass,
config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
OAUTH2_AUTHORIZE,
OAUTH2_TOKEN,
),
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Home Connect from a config entry."""
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
hc_api = api.ConfigEntryAuth(hass, entry, implementation)
hass.data[DOMAIN][entry.entry_id] = hc_api
await update_all_devices(hass, entry)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
@Throttle(SCAN_INTERVAL)
async def update_all_devices(hass, entry):
"""Update all the devices."""
data = hass.data[DOMAIN]
hc_api = data[entry.entry_id]
try:
await hass.async_add_executor_job(hc_api.get_devices)
for device_dict in hc_api.devices:
await hass.async_add_executor_job(device_dict["device"].initialize)
except HTTPError as err:
_LOGGER.warning("Cannot update devices: %s", err.response.status_code)
|
from collections import Counter
import logging
from typing import Callable, List, Tuple
from homeassistant import const as ha_const
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import HomeAssistantType
from . import const as zha_const, registries as zha_regs, typing as zha_typing
from .. import (  # noqa: F401 pylint: disable=unused-import
binary_sensor,
climate,
cover,
device_tracker,
fan,
light,
lock,
sensor,
switch,
)
from .channels import base
_LOGGER = logging.getLogger(__name__)
@callback
async def async_add_entities(
_async_add_entities: Callable,
entities: List[
Tuple[
zha_typing.ZhaEntityType,
Tuple[str, zha_typing.ZhaDeviceType, List[zha_typing.ChannelType]],
]
],
) -> None:
"""Add entities helper."""
if not entities:
return
to_add = [ent_cls(*args) for ent_cls, args in entities]
_async_add_entities(to_add, update_before_add=True)
entities.clear()
class ProbeEndpoint:
"""All discovered channels and entities of an endpoint."""
def __init__(self):
"""Initialize instance."""
self._device_configs = {}
@callback
def discover_entities(self, channel_pool: zha_typing.ChannelPoolType) -> None:
"""Process an endpoint on a zigpy device."""
self.discover_by_device_type(channel_pool)
self.discover_by_cluster_id(channel_pool)
@callback
def discover_by_device_type(self, channel_pool: zha_typing.ChannelPoolType) -> None:
"""Process an endpoint on a zigpy device."""
unique_id = channel_pool.unique_id
component = self._device_configs.get(unique_id, {}).get(ha_const.CONF_TYPE)
if component is None:
ep_profile_id = channel_pool.endpoint.profile_id
ep_device_type = channel_pool.endpoint.device_type
component = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type)
if component and component in zha_const.COMPONENTS:
channels = channel_pool.unclaimed_channels()
entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
component, channel_pool.manufacturer, channel_pool.model, channels
)
if entity_class is None:
return
channel_pool.claim_channels(claimed)
channel_pool.async_new_entity(component, entity_class, unique_id, claimed)
@callback
def discover_by_cluster_id(self, channel_pool: zha_typing.ChannelPoolType) -> None:
"""Process an endpoint on a zigpy device."""
items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items()
single_input_clusters = {
cluster_class: match
for cluster_class, match in items
if not isinstance(cluster_class, int)
}
remaining_channels = channel_pool.unclaimed_channels()
for channel in remaining_channels:
if channel.cluster.cluster_id in zha_regs.CHANNEL_ONLY_CLUSTERS:
channel_pool.claim_channels([channel])
continue
component = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get(
channel.cluster.cluster_id
)
if component is None:
for cluster_class, match in single_input_clusters.items():
if isinstance(channel.cluster, cluster_class):
component = match
break
self.probe_single_cluster(component, channel, channel_pool)
        # until we can get rid of registries
self.handle_on_off_output_cluster_exception(channel_pool)
@staticmethod
def probe_single_cluster(
component: str,
channel: zha_typing.ChannelType,
ep_channels: zha_typing.ChannelPoolType,
) -> None:
"""Probe specified cluster for specific component."""
if component is None or component not in zha_const.COMPONENTS:
return
channel_list = [channel]
unique_id = f"{ep_channels.unique_id}-{channel.cluster.cluster_id}"
entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity(
component, ep_channels.manufacturer, ep_channels.model, channel_list
)
if entity_class is None:
return
ep_channels.claim_channels(claimed)
ep_channels.async_new_entity(component, entity_class, unique_id, claimed)
def handle_on_off_output_cluster_exception(
self, ep_channels: zha_typing.ChannelPoolType
) -> None:
"""Process output clusters of the endpoint."""
profile_id = ep_channels.endpoint.profile_id
device_type = ep_channels.endpoint.device_type
if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []):
return
for cluster_id, cluster in ep_channels.endpoint.out_clusters.items():
component = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get(
cluster.cluster_id
)
if component is None:
continue
channel_class = zha_regs.ZIGBEE_CHANNEL_REGISTRY.get(
cluster_id, base.ZigbeeChannel
)
channel = channel_class(cluster, ep_channels)
self.probe_single_cluster(component, channel, ep_channels)
def initialize(self, hass: HomeAssistantType) -> None:
"""Update device overrides config."""
zha_config = hass.data[zha_const.DATA_ZHA].get(zha_const.DATA_ZHA_CONFIG, {})
overrides = zha_config.get(zha_const.CONF_DEVICE_CONFIG)
if overrides:
self._device_configs.update(overrides)
class GroupProbe:
"""Determine the appropriate component for a group."""
def __init__(self):
"""Initialize instance."""
self._hass = None
self._unsubs = []
def initialize(self, hass: HomeAssistantType) -> None:
"""Initialize the group probe."""
self._hass = hass
self._unsubs.append(
async_dispatcher_connect(
hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group
)
)
def cleanup(self):
"""Clean up on when zha shuts down."""
for unsub in self._unsubs[:]:
unsub()
self._unsubs.remove(unsub)
def _reprobe_group(self, group_id: int) -> None:
"""Reprobe a group for entities after its members change."""
zha_gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
zha_group = zha_gateway.groups.get(group_id)
if zha_group is None:
return
self.discover_group_entities(zha_group)
@callback
def discover_group_entities(self, group: zha_typing.ZhaGroupType) -> None:
"""Process a group and create any entities that are needed."""
# only create a group entity if there are 2 or more members in a group
if len(group.members) < 2:
_LOGGER.debug(
"Group: %s:0x%04x has less than 2 members - skipping entity discovery",
group.name,
group.group_id,
)
return
entity_domains = GroupProbe.determine_entity_domains(self._hass, group)
if not entity_domains:
return
zha_gateway = self._hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
for domain in entity_domains:
entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain)
if entity_class is None:
continue
self._hass.data[zha_const.DATA_ZHA][domain].append(
(
entity_class,
(
group.get_domain_entity_ids(domain),
f"{domain}_zha_group_0x{group.group_id:04x}",
group.group_id,
zha_gateway.coordinator_zha_device,
),
)
)
async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES)
@staticmethod
def determine_entity_domains(
hass: HomeAssistantType, group: zha_typing.ZhaGroupType
) -> List[str]:
"""Determine the entity domains for this group."""
entity_domains: List[str] = []
zha_gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
all_domain_occurrences = []
for member in group.members:
if member.device.is_coordinator:
continue
entities = async_entries_for_device(
zha_gateway.ha_entity_registry, member.device.device_id
)
all_domain_occurrences.extend(
[
entity.domain
for entity in entities
if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS
]
)
if not all_domain_occurrences:
return entity_domains
        # keep only domains that occur for 2 or more entities in the group
        counts = Counter(all_domain_occurrences)
        entity_domains = [domain for domain, count in counts.items() if count >= 2]
_LOGGER.debug(
"The entity domains are: %s for group: %s:0x%04x",
entity_domains,
group.name,
group.group_id,
)
return entity_domains
PROBE = ProbeEndpoint()
GROUP_PROBE = GroupProbe()
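# Usage sketch (hypothetical wiring, mirroring how the singletons above are
# intended to be used): initialize once at gateway startup, then probe each
# endpoint's channel pool as devices join the network:
#
#     PROBE.initialize(hass)
#     GROUP_PROBE.initialize(hass)
#     PROBE.discover_entities(channel_pool)  # once per endpoint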
|
from box.box import Box
class ConfigBox(Box):
"""
Modified box object to add object transforms.
    Allows for built-in transforms like:
cns = ConfigBox(my_bool='yes', my_int='5', my_list='5,4,3,3,2')
cns.bool('my_bool') # True
cns.int('my_int') # 5
cns.list('my_list', mod=lambda x: int(x)) # [5, 4, 3, 3, 2]
"""
_protected_keys = dir(Box) + ["bool", "int", "float", "list", "getboolean", "getfloat", "getint"]
def __getattr__(self, item):
"""
Config file keys are stored in lower case, be a little more
loosey goosey
"""
try:
return super().__getattr__(item)
except AttributeError:
return super().__getattr__(item.lower())
def __dir__(self):
return super().__dir__() + ["bool", "int", "float", "list", "getboolean", "getfloat", "getint"]
def bool(self, item, default=None):
"""
Return value of key as a boolean
:param item: key of value to transform
:param default: value to return if item does not exist
:return: approximated bool of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
if isinstance(item, (bool, int)):
return bool(item)
if isinstance(item, str) and item.lower() in ("n", "no", "false", "f", "0"):
return False
        return bool(item)
def int(self, item, default=None):
"""
Return value of key as an int
:param item: key of value to transform
:param default: value to return if item does not exist
:return: int of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
return int(item)
def float(self, item, default=None):
"""
Return value of key as a float
:param item: key of value to transform
:param default: value to return if item does not exist
:return: float of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
return float(item)
def list(self, item, default=None, spliter=",", strip=True, mod=None):
"""
Return value of key as a list
:param item: key of value to transform
:param mod: function to map against list
:param default: value to return if item does not exist
        :param spliter: character to split the string on
        :param strip: strip surrounding brackets and whitespace from each item
:return: list of items
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
if strip:
item = item.lstrip("[").rstrip("]")
out = [x.strip() if strip else x for x in item.split(spliter)]
if mod:
return list(map(mod, out))
return out
# loose configparser compatibility
def getboolean(self, item, default=None):
return self.bool(item, default)
def getint(self, item, default=None):
return self.int(item, default)
def getfloat(self, item, default=None):
return self.float(item, default)
def __repr__(self):
return "<ConfigBox: {0}>".format(str(self.to_dict()))
def copy(self):
return ConfigBox(super().copy())
def __copy__(self):
return ConfigBox(super().copy())
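# Minimal usage sketch of the transforms above (assumes the python-box package
# providing box.box.Box is installed):
if __name__ == "__main__":
    cfg = ConfigBox(debug="no", workers="4", hosts="a, b, c")
    print(cfg.getboolean("debug"))  # False -- "no" is in the falsy string set
    print(cfg.getint("workers"))    # 4
    print(cfg.list("hosts"))        # ['a', 'b', 'c']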
|
__docformat__ = "restructuredtext en"
import sys
from stat import S_IWRITE
import codecs
from six import string_types
BULLET = '*'
SUBBULLET = '-'
INDENT = ' ' * 4
class NoEntry(Exception):
"""raised when we are unable to find an entry"""
class EntryNotFound(Exception):
"""raised when we are unable to find a given entry"""
class Version(tuple):
"""simple class to handle soft version number has a tuple while
correctly printing it as X.Y.Z
"""
def __new__(cls, versionstr):
if isinstance(versionstr, string_types):
versionstr = versionstr.strip(' :') # XXX (syt) duh?
parsed = cls.parse(versionstr)
else:
parsed = versionstr
return tuple.__new__(cls, parsed)
@classmethod
def parse(cls, versionstr):
versionstr = versionstr.strip(' :')
try:
return [int(i) for i in versionstr.split('.')]
except ValueError as ex:
raise ValueError("invalid literal for version '%s' (%s)" %
(versionstr, ex))
def __str__(self):
return '.'.join([str(i) for i in self])
# upstream change log #########################################################
class ChangeLogEntry(object):
"""a change log entry, i.e. a set of messages associated to a version and
its release date
"""
version_class = Version
def __init__(self, date=None, version=None, **kwargs):
self.__dict__.update(kwargs)
if version:
self.version = self.version_class(version)
else:
self.version = None
self.date = date
self.messages = []
def add_message(self, msg):
"""add a new message"""
self.messages.append(([msg], []))
def complete_latest_message(self, msg_suite):
"""complete the latest added message
"""
if not self.messages:
            raise ValueError('unable to complete last message as '
                             'there is no previous message')
if self.messages[-1][1]: # sub messages
self.messages[-1][1][-1].append(msg_suite)
else: # message
self.messages[-1][0].append(msg_suite)
def add_sub_message(self, sub_msg, key=None):
if not self.messages:
            raise ValueError('unable to add a sub message as '
                             'there is no previous message')
if key is None:
self.messages[-1][1].append([sub_msg])
else:
raise NotImplementedError('sub message to specific key '
'are not implemented yet')
def write(self, stream=sys.stdout):
"""write the entry to file """
stream.write(u'%s -- %s\n' % (self.date or '', self.version or ''))
for msg, sub_msgs in self.messages:
stream.write(u'%s%s %s\n' % (INDENT, BULLET, msg[0]))
stream.write(u''.join(msg[1:]))
if sub_msgs:
stream.write(u'\n')
for sub_msg in sub_msgs:
stream.write(u'%s%s %s\n' %
(INDENT * 2, SUBBULLET, sub_msg[0]))
stream.write(u''.join(sub_msg[1:]))
stream.write(u'\n')
stream.write(u'\n\n')
class ChangeLog(object):
"""object representation of a whole ChangeLog file"""
entry_class = ChangeLogEntry
def __init__(self, changelog_file, title=u''):
self.file = changelog_file
assert isinstance(title, type(u'')), 'title must be a unicode object'
self.title = title
self.additional_content = u''
self.entries = []
self.load()
def __repr__(self):
return '<ChangeLog %s at %s (%s entries)>' % (self.file, id(self),
len(self.entries))
def add_entry(self, entry):
"""add a new entry to the change log"""
self.entries.append(entry)
def get_entry(self, version='', create=None):
""" return a given changelog entry
if version is omitted, return the current entry
"""
if not self.entries:
if version or not create:
raise NoEntry()
self.entries.append(self.entry_class())
if not version:
if self.entries[0].version and create is not None:
self.entries.insert(0, self.entry_class())
return self.entries[0]
        version = self.entry_class.version_class(version)
for entry in self.entries:
if entry.version == version:
return entry
raise EntryNotFound()
def add(self, msg, create=None):
"""add a new message to the latest opened entry"""
entry = self.get_entry(create=create)
entry.add_message(msg)
def load(self):
""" read a logilab's ChangeLog from file """
try:
stream = codecs.open(self.file, encoding='utf-8')
except IOError:
return
last = None
expect_sub = False
for line in stream:
sline = line.strip()
words = sline.split()
# if new entry
if len(words) == 1 and words[0] == '--':
expect_sub = False
last = self.entry_class()
self.add_entry(last)
# if old entry
elif len(words) == 3 and words[1] == '--':
expect_sub = False
last = self.entry_class(words[0], words[2])
self.add_entry(last)
# if title
elif sline and last is None:
self.title = '%s%s' % (self.title, line)
            # if new message
elif sline and sline[0] == BULLET:
expect_sub = False
last.add_message(sline[1:].strip())
            # if new sub message
elif expect_sub and sline and sline[0] == SUBBULLET:
last.add_sub_message(sline[1:].strip())
# if new line for current entry
elif sline and last.messages:
last.complete_latest_message(line)
else:
expect_sub = True
self.additional_content += line
stream.close()
def format_title(self):
return u'%s\n\n' % self.title.strip()
def save(self):
"""write back change log"""
        # fileutils isn't importable in appengine, so import locally
from logilab.common.fileutils import ensure_fs_mode
ensure_fs_mode(self.file, S_IWRITE)
self.write(codecs.open(self.file, 'w', encoding='utf-8'))
def write(self, stream=sys.stdout):
"""write changelog to stream"""
stream.write(self.format_title())
for entry in self.entries:
entry.write(stream)
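# Minimal usage sketch of the entry format produced by ChangeLogEntry.write()
# (runnable as long as six is installed, as imported above):
if __name__ == '__main__':
    entry = ChangeLogEntry(date='2020-01-01', version='1.2.0')
    entry.add_message('first message')
    entry.add_sub_message('detail of the first message')
    entry.write(sys.stdout)  # "2020-01-01 -- 1.2.0" plus bulleted messages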
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, Camera
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import DOMAIN as SKYBELL_DOMAIN, SkybellDevice
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=90)
IMAGE_AVATAR = "avatar"
IMAGE_ACTIVITY = "activity"
CONF_ACTIVITY_NAME = "activity_name"
CONF_AVATAR_NAME = "avatar_name"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=[IMAGE_AVATAR]): vol.All(
cv.ensure_list, [vol.In([IMAGE_AVATAR, IMAGE_ACTIVITY])]
),
vol.Optional(CONF_ACTIVITY_NAME): cv.string,
vol.Optional(CONF_AVATAR_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform for a Skybell device."""
cond = config[CONF_MONITORED_CONDITIONS]
names = {}
names[IMAGE_ACTIVITY] = config.get(CONF_ACTIVITY_NAME)
names[IMAGE_AVATAR] = config.get(CONF_AVATAR_NAME)
skybell = hass.data.get(SKYBELL_DOMAIN)
sensors = []
for device in skybell.get_devices():
for camera_type in cond:
sensors.append(SkybellCamera(device, camera_type, names.get(camera_type)))
add_entities(sensors, True)
class SkybellCamera(SkybellDevice, Camera):
"""A camera implementation for Skybell devices."""
def __init__(self, device, camera_type, name=None):
"""Initialize a camera for a Skybell device."""
self._type = camera_type
SkybellDevice.__init__(self, device)
Camera.__init__(self)
if name is not None:
self._name = f"{self._device.name} {name}"
else:
self._name = self._device.name
self._url = None
self._response = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def image_url(self):
"""Get the camera image url based on type."""
if self._type == IMAGE_ACTIVITY:
return self._device.activity_image
return self._device.image
def camera_image(self):
"""Get the latest camera image."""
super().update()
if self._url != self.image_url:
self._url = self.image_url
try:
self._response = requests.get(self._url, stream=True, timeout=10)
except requests.HTTPError as err:
_LOGGER.warning("Failed to get camera image: %s", err)
self._response = None
if not self._response:
return None
return self._response.content
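# Example configuration.yaml entry matching PLATFORM_SCHEMA above (the platform
# key "skybell" is an assumption based on the integration domain):
#
#   camera:
#     - platform: skybell
#       monitored_conditions:
#         - avatar
#         - activity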
|
import numpy as np
from matchzoo.engine.base_metric import BaseMetric, sort_and_couple
from .discounted_cumulative_gain import DiscountedCumulativeGain
class NormalizedDiscountedCumulativeGain(BaseMetric):
"""Normalized discounted cumulative gain metric."""
ALIAS = ['normalized_discounted_cumulative_gain', 'ndcg']
def __init__(self, k: int = 1, threshold: float = 0.):
"""
:class:`NormalizedDiscountedCumulativeGain` constructor.
:param k: Number of results to consider
:param threshold: the label threshold of relevance degree.
"""
self._k = k
self._threshold = threshold
def __repr__(self) -> str:
""":return: Formated string representation of the metric."""
return f"{self.ALIAS[0]}@{self._k}({self._threshold})"
def __call__(self, y_true: np.array, y_pred: np.array) -> float:
"""
Calculate normalized discounted cumulative gain (ndcg).
Relevance is positive real values or binary values.
Example:
>>> y_true = [0, 1, 2, 0]
>>> y_pred = [0.4, 0.2, 0.5, 0.7]
>>> ndcg = NormalizedDiscountedCumulativeGain
>>> ndcg(k=1)(y_true, y_pred)
0.0
>>> round(ndcg(k=2)(y_true, y_pred), 2)
0.52
>>> round(ndcg(k=3)(y_true, y_pred), 2)
0.52
>>> type(ndcg()(y_true, y_pred))
<class 'float'>
        :param y_true: The ground truth label of each document.
:param y_pred: The predicted scores of each document.
:return: Normalized discounted cumulative gain.
"""
dcg_metric = DiscountedCumulativeGain(k=self._k,
threshold=self._threshold)
idcg_val = dcg_metric(y_true, y_true)
dcg_val = dcg_metric(y_true, y_pred)
        return dcg_val / idcg_val if idcg_val != 0 else 0.0
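# Usage sketch mirroring the doctest above (the relative import means this only
# runs inside the matchzoo package context):
#
#     metric = NormalizedDiscountedCumulativeGain(k=2)
#     round(metric([0, 1, 2, 0], [0.4, 0.2, 0.5, 0.7]), 2)  # 0.52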
|
import numpy as np
def crop_bbox(
bbox, y_slice=None, x_slice=None,
allow_outside_center=True, return_param=False):
"""Translate bounding boxes to fit within the cropped area of an image.
This method is mainly used together with image cropping.
This method translates the coordinates of bounding boxes like
:func:`~chainercv.transforms.translate_bbox`. In addition,
this function truncates the bounding boxes to fit within the cropped area.
If a bounding box does not overlap with the cropped area,
this bounding box will be removed.
Args:
bbox (~numpy.ndarray): See the table below.
y_slice (slice): The slice of y axis.
x_slice (slice): The slice of x axis.
allow_outside_center (bool): If this argument is :obj:`False`,
bounding boxes whose centers are outside of the cropped area
are removed. The default value is :obj:`True`.
        return_param (bool): If :obj:`True`, this function also returns
            the indices of kept and truncated bounding boxes.
.. csv-table::
:header: name, shape, dtype, format
:obj:`bbox`, ":math:`(R, 4)`", :obj:`float32`, \
":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
Returns:
~numpy.ndarray or (~numpy.ndarray, dict):
If :obj:`return_param = False`, returns an array :obj:`bbox`.
If :obj:`return_param = True`,
returns a tuple whose elements are :obj:`bbox, param`.
:obj:`param` is a dictionary of intermediate parameters whose
contents are listed below with key, value-type and the description
of the value.
* **index** (*numpy.ndarray*): An array holding indices of used \
bounding boxes.
        * **truncated_index** (*numpy.ndarray*): An array holding indices of \
truncated bounding boxes, with respect to **returned** \
:obj:`bbox`, rather than original :obj:`bbox`.
"""
t, b = _slice_to_bounds(y_slice)
l, r = _slice_to_bounds(x_slice)
crop_bb = np.array((t, l, b, r))
if allow_outside_center:
mask = np.ones(bbox.shape[0], dtype=bool)
else:
center = (bbox[:, :2] + bbox[:, 2:]) / 2
mask = np.logical_and(crop_bb[:2] <= center, center < crop_bb[2:]) \
.all(axis=1)
original_bbox, bbox = bbox, bbox.copy()
bbox[:, :2] = np.maximum(bbox[:, :2], crop_bb[:2])
bbox[:, 2:] = np.minimum(bbox[:, 2:], crop_bb[2:])
truncated_mask = np.any(original_bbox != bbox, axis=1)
bbox[:, :2] -= crop_bb[:2]
bbox[:, 2:] -= crop_bb[:2]
mask = np.logical_and(mask, (bbox[:, :2] < bbox[:, 2:]).all(axis=1))
bbox = bbox[mask]
truncated_mask = truncated_mask[mask]
if return_param:
index = np.flatnonzero(mask)
truncated_index = np.flatnonzero(truncated_mask)
return bbox, {
'index': index,
'truncated_index': truncated_index,
}
else:
return bbox
def _slice_to_bounds(slice_):
if slice_ is None:
return 0, np.inf
if slice_.start is None:
l = 0
else:
l = slice_.start
if slice_.stop is None:
u = np.inf
else:
u = slice_.stop
return l, u
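# Minimal runnable sketch: crop to rows/cols 10..100 and observe how boxes are
# clipped, shifted by the crop offset, and indexed (values are illustrative):
if __name__ == '__main__':
    bbox = np.array([[0., 0., 30., 30.], [50., 50., 90., 90.]], dtype=np.float32)
    cropped, param = crop_bbox(
        bbox, y_slice=slice(10, 100), x_slice=slice(10, 100), return_param=True)
    print(cropped)                   # [[ 0.  0. 20. 20.] [40. 40. 80. 80.]]
    print(param['index'])            # [0 1] -- both boxes kept
    print(param['truncated_index'])  # [0] -- the first box was clipped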
|
from django.http import HttpRequest
from django.test import TestCase
from django.test.utils import modify_settings, override_settings
from django.urls import reverse
from weblate.accounts.tests.test_registration import REGISTRATION_DATA
from weblate.trans.tests.test_views import RegistrationTestMixin
from weblate.trans.tests.utils import create_test_user
class LegalTest(TestCase, RegistrationTestMixin):
def test_index(self):
response = self.client.get(reverse("legal:index"))
self.assertContains(response, "Legal Terms Overview")
def test_terms(self):
response = self.client.get(reverse("legal:terms"))
self.assertContains(response, "Terms of Service")
def test_cookies(self):
response = self.client.get(reverse("legal:cookies"))
self.assertContains(response, "Cookies Policy")
def test_security(self):
response = self.client.get(reverse("legal:security"))
self.assertContains(response, "Security Policy")
def test_contracts(self):
response = self.client.get(reverse("legal:contracts"))
self.assertContains(response, "Subcontractors")
@modify_settings(
SOCIAL_AUTH_PIPELINE={"append": "weblate.legal.pipeline.tos_confirm"}
)
@override_settings(REGISTRATION_OPEN=True, REGISTRATION_CAPTCHA=False)
def test_confirm(self):
"""TOS confirmation on social auth."""
response = self.client.post(reverse("register"), REGISTRATION_DATA, follow=True)
# Check we did succeed
self.assertContains(response, "Thank you for registering.")
# Follow link
url = self.assert_registration_mailbox()
response = self.client.get(url, follow=True)
self.assertTrue(
response.redirect_chain[-1][0].startswith(reverse("legal:confirm"))
)
# Extract next URL
url = response.context["form"].initial["next"]
# Try invalid form (not checked)
response = self.client.post(reverse("legal:confirm"), {"next": url})
self.assertContains(response, "This field is required")
# Actually confirm the TOS
response = self.client.post(
reverse("legal:confirm"), {"next": url, "confirm": 1}, follow=True
)
self.assertContains(response, "Your profile")
@modify_settings(
MIDDLEWARE={"append": "weblate.legal.middleware.RequireTOSMiddleware"}
)
def test_middleware(self):
user = create_test_user()
# Unauthenticated
response = self.client.get(reverse("home"), follow=True)
self.assertContains(response, "Browse all 0 projects")
# Login
self.client.login(username="testuser", password="testpassword")
        # Check that the homepage redirects
response = self.client.get(reverse("home"), follow=True)
self.assertTrue(
response.redirect_chain[-1][0].startswith(reverse("legal:confirm"))
)
# Check that contact works even without TOS
response = self.client.get(reverse("contact"), follow=True)
self.assertContains(response, "You can contact maintainers")
# Confirm current TOS
request = HttpRequest()
request.META["REMOTE_ADDR"] = "127.0.0.1"
user.agreement.make_current(request)
# Homepage now should work
response = self.client.get(reverse("home"), follow=True)
self.assertContains(response, "Suggested translations")
|
import asyncio
import logging
from homematicip.aio.auth import AsyncAuth
from homematicip.aio.home import AsyncHome
from homematicip.base.base_connection import HmipConnectionError
from homematicip.base.enums import EventType
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import HomeAssistantType
from .const import COMPONENTS, HMIPC_AUTHTOKEN, HMIPC_HAPID, HMIPC_NAME, HMIPC_PIN
from .errors import HmipcConnectionError
_LOGGER = logging.getLogger(__name__)
class HomematicipAuth:
"""Manages HomematicIP client registration."""
def __init__(self, hass, config) -> None:
"""Initialize HomematicIP Cloud client registration."""
self.hass = hass
self.config = config
self.auth = None
async def async_setup(self) -> bool:
"""Connect to HomematicIP for registration."""
try:
self.auth = await self.get_auth(
self.hass, self.config.get(HMIPC_HAPID), self.config.get(HMIPC_PIN)
)
return self.auth is not None
except HmipcConnectionError:
return False
async def async_checkbutton(self) -> bool:
"""Check blue butten has been pressed."""
try:
return await self.auth.isRequestAcknowledged()
except HmipConnectionError:
return False
async def async_register(self):
"""Register client at HomematicIP."""
try:
authtoken = await self.auth.requestAuthToken()
await self.auth.confirmAuthToken(authtoken)
return authtoken
except HmipConnectionError:
return False
async def get_auth(self, hass: HomeAssistantType, hapid, pin):
"""Create a HomematicIP access point object."""
auth = AsyncAuth(hass.loop, async_get_clientsession(hass))
try:
await auth.init(hapid)
if pin:
auth.pin = pin
await auth.connectionRequest("HomeAssistant")
except HmipConnectionError:
return None
return auth
class HomematicipHAP:
"""Manages HomematicIP HTTP and WebSocket connection."""
def __init__(self, hass: HomeAssistantType, config_entry: ConfigEntry) -> None:
"""Initialize HomematicIP Cloud connection."""
self.hass = hass
self.config_entry = config_entry
self.home = None
self._ws_close_requested = False
self._retry_task = None
self._tries = 0
self._accesspoint_connected = True
self.hmip_device_by_entity_id = {}
self.reset_connection_listener = None
async def async_setup(self, tries: int = 0) -> bool:
"""Initialize connection."""
try:
self.home = await self.get_hap(
self.hass,
self.config_entry.data.get(HMIPC_HAPID),
self.config_entry.data.get(HMIPC_AUTHTOKEN),
self.config_entry.data.get(HMIPC_NAME),
)
except HmipcConnectionError as err:
raise ConfigEntryNotReady from err
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error connecting with HomematicIP Cloud: %s", err)
return False
_LOGGER.info(
"Connected to HomematicIP with HAP %s", self.config_entry.unique_id
)
for component in COMPONENTS:
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, component
)
)
return True
@callback
def async_update(self, *args, **kwargs) -> None:
"""Async update the home device.
Triggered when the HMIP HOME_CHANGED event has fired.
There are several occasions for this event to happen.
        1. We want to check whether the access point
is still connected. If not, entity state changes cannot
be forwarded to hass. So if access point is disconnected all devices
are set to unavailable.
2. We need to update home including devices and groups after a reconnect.
3. We need to update home without devices and groups in all other cases.
"""
if not self.home.connected:
_LOGGER.error("HMIP access point has lost connection with the cloud")
self._accesspoint_connected = False
self.set_all_to_unavailable()
elif not self._accesspoint_connected:
# Now the HOME_CHANGED event has fired indicating the access
# point has reconnected to the cloud again.
# Explicitly getting an update as entity states might have
            # changed during access point disconnect.
job = self.hass.async_create_task(self.get_state())
job.add_done_callback(self.get_state_finished)
self._accesspoint_connected = True
@callback
def async_create_entity(self, *args, **kwargs) -> None:
"""Create an entity or a group."""
is_device = EventType(kwargs["event_type"]) == EventType.DEVICE_ADDED
self.hass.async_create_task(self.async_create_entity_lazy(is_device))
async def async_create_entity_lazy(self, is_device=True) -> None:
"""Delay entity creation to allow the user to enter a device name."""
if is_device:
await asyncio.sleep(30)
await self.hass.config_entries.async_reload(self.config_entry.entry_id)
async def get_state(self) -> None:
"""Update HMIP state and tell Home Assistant."""
await self.home.get_current_state()
self.update_all()
def get_state_finished(self, future) -> None:
"""Execute when get_state coroutine has finished."""
try:
future.result()
except HmipConnectionError:
            # Somehow the connection could not recover. Disable events so the
            # reconnect loop takes over.
_LOGGER.error("Updating state after HMIP access point reconnect failed")
self.hass.async_create_task(self.home.disable_events())
def set_all_to_unavailable(self) -> None:
"""Set all devices to unavailable and tell Home Assistant."""
for device in self.home.devices:
device.unreach = True
self.update_all()
def update_all(self) -> None:
"""Signal all devices to update their state."""
for device in self.home.devices:
device.fire_update_event()
async def async_connect(self) -> None:
"""Start WebSocket connection."""
tries = 0
while True:
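            # Exponential backoff: 1, 2, 4, ... seconds, capped at 2 ** 8 = 256.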
retry_delay = 2 ** min(tries, 8)
try:
await self.home.get_current_state()
hmip_events = await self.home.enable_events()
tries = 0
await hmip_events
except HmipConnectionError:
_LOGGER.error(
"Error connecting to HomematicIP with HAP %s. "
"Retrying in %d seconds",
self.config_entry.unique_id,
retry_delay,
)
if self._ws_close_requested:
break
self._ws_close_requested = False
tries += 1
try:
self._retry_task = self.hass.async_create_task(
asyncio.sleep(retry_delay)
)
await self._retry_task
except asyncio.CancelledError:
break
async def async_reset(self) -> bool:
"""Close the websocket connection."""
self._ws_close_requested = True
if self._retry_task is not None:
self._retry_task.cancel()
await self.home.disable_events()
_LOGGER.info("Closed connection to HomematicIP cloud server")
for component in COMPONENTS:
await self.hass.config_entries.async_forward_entry_unload(
self.config_entry, component
)
self.hmip_device_by_entity_id = {}
return True
@callback
def shutdown(self, event) -> None:
"""Wrap the call to async_reset.
Used as an argument to EventBus.async_listen_once.
"""
self.hass.async_create_task(self.async_reset())
_LOGGER.debug(
"Reset connection to access point id %s", self.config_entry.unique_id
)
async def get_hap(
self, hass: HomeAssistantType, hapid: str, authtoken: str, name: str
) -> AsyncHome:
"""Create a HomematicIP access point object."""
home = AsyncHome(hass.loop, async_get_clientsession(hass))
home.name = name
home.label = "Access Point"
home.modelType = "HmIP-HAP"
home.set_auth_token(authtoken)
try:
await home.init(hapid)
await home.get_current_state()
except HmipConnectionError as err:
raise HmipcConnectionError from err
home.on_update(self.async_update)
home.on_create(self.async_create_entity)
hass.loop.create_task(self.async_connect())
return home
|