from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
import re
from diamond.collector import Collector
from jolokia import JolokiaCollector
##########################################################################
class TestJolokiaCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('JolokiaCollector', {})
self.collector = JolokiaCollector(config, None)
def test_import(self):
self.assertTrue(JolokiaCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
def se(url, timeout=0):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = self.get_metrics()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_real_data_with_rewrite(self, publish_mock):
def se(url, timeout=0):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
rewrite = [
(re.compile('memoryUsage'), 'memUsed'),
            (re.compile(r'.*\.init'), ''),
]
self.collector.rewrite.extend(rewrite)
self.collector.collect()
patch_urlopen.stop()
rewritemetrics = self.get_metrics_rewrite_test()
self.assertPublishedMany(publish_mock, rewritemetrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_and_basic_auth(self, publish_mock):
self.collector.config["username"] = "user"
self.collector.config["password"] = "password"
self.test_should_work_with_real_data()
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('stats_blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_should_skip_when_mbean_request_fails(self, publish_mock):
def se(url, timeout=0):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing_with_bad_mbean')
elif url == ('http://localhost:8778/jolokia/?ignoreErrors=true'
'&p=read/xxx.bad.package:*'):
return self.getFixture('stats_error')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = self.get_metrics()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
def test_should_escape_jolokia_domains(self):
domain_with_slash = self.collector._escape_domain('some/domain')
domain_with_bang = self.collector._escape_domain('some!domain')
domain_with_quote = self.collector._escape_domain('some"domain')
self.assertEqual(domain_with_slash, 'some%21/domain')
self.assertEqual(domain_with_bang, 'some%21%21domain')
self.assertEqual(domain_with_quote, 'some%21%22domain')
def test_canonical_names_setting_not_set(self):
config = get_collector_config('JolokiaCollector', {})
logger_mock = Mock()
patch_logger = patch('logging.getLogger', Mock(
return_value=logger_mock))
patch_logger.start()
JolokiaCollector(config, None)
patch_logger.stop()
logger_mock.error.assert_not_called()
@patch('jolokia.JolokiaCollector._create_request')
@patch('urllib2.urlopen')
def test_should_handle_canonical_names_setting_True(self, urlopen_mock,
create_request_mock):
config = get_collector_config('JolokiaCollector', {})
config['collectors']['JolokiaCollector']['use_canonical_names'] = 'True'
config['collectors']['JolokiaCollector']['domains'] = ['foo']
request = Mock()
request.read.return_value = "{status: 400}"
urlopen_mock.return_value = request
collector = JolokiaCollector(config, None)
collector.collect()
self.assertIn('canonicalNaming=true',
create_request_mock.call_args[0][0])
self.assertIs(collector.config['use_canonical_names'], True)
@patch('jolokia.JolokiaCollector._create_request')
@patch('urllib2.urlopen')
def test_should_handle_canonical_names_setting_False(self, urlopen_mock,
create_request_mock):
config = get_collector_config('JolokiaCollector', {})
config['collectors']['JolokiaCollector']['use_canonical_names'] = \
'False'
config['collectors']['JolokiaCollector']['domains'] = ['foo']
request = Mock()
request.read.return_value = "{status: 400}"
urlopen_mock.return_value = request
collector = JolokiaCollector(config, None)
collector.collect()
self.assertIn('canonicalNaming=false',
create_request_mock.call_args[0][0])
self.assertIs(collector.config['use_canonical_names'], False)
def test_should_handle_invalid_canonical_names_setting_values(self):
config = get_collector_config('JolokiaCollector', {})
config['collectors']['JolokiaCollector']['use_canonical_names'] = 'foo'
logger_mock = Mock()
patch_logger = patch('logging.getLogger', Mock(
return_value=logger_mock))
patch_logger.start()
collector = JolokiaCollector(config, None)
patch_logger.stop()
logger_mock.error.assert_called_once_with(
'Unexpected value "%s" for "use_canonical_names" setting. '
'Expected "True" or "False". Using default value.', 'foo')
self.assertEqual(collector.config['use_canonical_names'],
collector.get_default_config()['use_canonical_names'])
def get_metrics(self):
prefix = 'java.lang.name_ParNew.type_GarbageCollector.LastGcInfo'
return {
prefix + '.startTime': 14259063,
prefix + '.id': 219,
prefix + '.duration': 2,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.max': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.committed': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.init': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.used': 25165824,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.max': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.committed': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.init': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.used': 5146840,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.max': 85983232,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.committed': 23920640,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.init': 21757952,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.used': 23796992,
prefix + '.memoryUsageBeforeGc.Code_Cache.max': 50331648,
prefix + '.memoryUsageBeforeGc.Code_Cache.committed': 2686976,
prefix + '.memoryUsageBeforeGc.Code_Cache.init': 2555904,
prefix + '.memoryUsageBeforeGc.Code_Cache.used': 2600768,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.max': 3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.committed':
3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.init': 3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.used': 414088
}
def get_metrics_rewrite_test(self):
prefix = 'java.lang.name_ParNew.type_GarbageCollector.LastGcInfo'
return {
prefix + '.startTime': 14259063,
prefix + '.id': 219,
prefix + '.duration': 2,
prefix + '.memUsedBeforeGc.Par_Eden_Space.max': 25165824,
prefix + '.memUsedBeforeGc.Par_Eden_Space.committed': 25165824,
prefix + '.memUsedBeforeGc.Par_Eden_Space.used': 25165824,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.max': 73400320,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.committed': 73400320,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.used': 5146840,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.max': 85983232,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.committed': 23920640,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.used': 23796992,
prefix + '.memUsedBeforeGc.Code_Cache.max': 50331648,
prefix + '.memUsedBeforeGc.Code_Cache.committed': 2686976,
prefix + '.memUsedBeforeGc.Code_Cache.used': 2600768,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.max': 3145728,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.committed': 3145728,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.used': 414088
}
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from homeassistant.components import cloud
from homeassistant.components.cloud import const
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock, patch
async def mock_cloud(hass, config=None):
"""Mock cloud."""
assert await async_setup_component(hass, cloud.DOMAIN, {"cloud": config or {}})
cloud_inst = hass.data["cloud"]
with patch("hass_nabucasa.Cloud.run_executor", AsyncMock(return_value=None)):
await cloud_inst.start()
def mock_cloud_prefs(hass, prefs=None):
"""Fixture for cloud component."""
prefs_to_set = {
const.PREF_ENABLE_ALEXA: True,
const.PREF_ENABLE_GOOGLE: True,
const.PREF_GOOGLE_SECURE_DEVICES_PIN: None,
}
    prefs_to_set.update(prefs or {})
hass.data[cloud.DOMAIN].client._prefs._prefs = prefs_to_set
return hass.data[cloud.DOMAIN].client._prefs
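# Hedged usage sketch (not part of the original helpers): how a test might
# combine mock_cloud and mock_cloud_prefs, assuming the standard ``hass``
# fixture from Home Assistant's test suite is available.
async def test_cloud_prefs_override_example(hass):
    """Example only: disable Alexa in the mocked cloud preferences."""
    await mock_cloud(hass)
    prefs = mock_cloud_prefs(hass, {const.PREF_ENABLE_ALEXA: False})
    assert prefs._prefs[const.PREF_ENABLE_ALEXA] is False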
|
from typing import NewType, TYPE_CHECKING
from redbot.core.commands import BadArgument, Context, Converter
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import inline
_ = Translator("Cleanup", __file__)
class RawMessageIds(Converter):
async def convert(self, ctx: Context, argument: str) -> int:
if argument.isnumeric() and len(argument) >= 17:
return int(argument)
raise BadArgument(_("{} doesn't look like a valid message ID.").format(argument))
PositiveInt = NewType("PositiveInt", int)
if TYPE_CHECKING:
positive_int = PositiveInt
else:
def positive_int(arg: str) -> int:
try:
ret = int(arg)
except ValueError:
raise BadArgument(_("{arg} is not an integer.").format(arg=inline(arg)))
if ret <= 0:
raise BadArgument(_("{arg} is not a positive integer.").format(arg=inline(arg)))
return ret
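# Hedged usage sketch (illustration only): at runtime positive_int is a plain
# callable (the NewType alias is used only for type checking), so it can be
# exercised directly. RawMessageIds.convert is a coroutine and additionally
# needs a Context, so it is normally used as a command parameter annotation.
def _example_positive_int_usage() -> None:
    assert positive_int("5") == 5
    try:
        positive_int("-3")
    except BadArgument:
        pass  # rejected, as expected for non-positive input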
|
import importlib
import logging
import types
from typing import Any, Dict, List, Optional
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import Credentials, User, UserMeta
_LOGGER = logging.getLogger(__name__)
DATA_REQS = "auth_prov_reqs_processed"
AUTH_PROVIDERS = Registry()
AUTH_PROVIDER_SCHEMA = vol.Schema(
{
vol.Required(CONF_TYPE): str,
vol.Optional(CONF_NAME): str,
# Specify ID if you have two auth providers for same type.
vol.Optional(CONF_ID): str,
},
extra=vol.ALLOW_EXTRA,
)
class AuthProvider:
"""Provider of user authentication."""
DEFAULT_TITLE = "Unnamed auth provider"
def __init__(
self, hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
) -> None:
"""Initialize an auth provider."""
self.hass = hass
self.store = store
self.config = config
@property
def id(self) -> Optional[str]:
"""Return id of the auth provider.
Optional, can be None.
"""
return self.config.get(CONF_ID)
@property
def type(self) -> str:
"""Return type of the provider."""
return self.config[CONF_TYPE] # type: ignore
@property
def name(self) -> str:
"""Return the name of the auth provider."""
return self.config.get(CONF_NAME, self.DEFAULT_TITLE)
@property
def support_mfa(self) -> bool:
"""Return whether multi-factor auth supported by the auth provider."""
return True
async def async_credentials(self) -> List[Credentials]:
"""Return all credentials of this provider."""
users = await self.store.async_get_users()
return [
credentials
for user in users
for credentials in user.credentials
if (
credentials.auth_provider_type == self.type
and credentials.auth_provider_id == self.id
)
]
@callback
def async_create_credentials(self, data: Dict[str, str]) -> Credentials:
"""Create credentials."""
return Credentials(
auth_provider_type=self.type, auth_provider_id=self.id, data=data
)
# Implement by extending class
async def async_login_flow(self, context: Optional[Dict]) -> "LoginFlow":
"""Return the data flow for logging in with auth provider.
Auth provider should extend LoginFlow and return an instance.
"""
raise NotImplementedError
async def async_get_or_create_credentials(
self, flow_result: Dict[str, str]
) -> Credentials:
"""Get credentials based on the flow result."""
raise NotImplementedError
async def async_user_meta_for_credentials(
self, credentials: Credentials
) -> UserMeta:
"""Return extra user metadata for credentials.
Will be used to populate info when creating a new user.
"""
raise NotImplementedError
async def async_initialize(self) -> None:
"""Initialize the auth provider."""
async def auth_provider_from_config(
hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
) -> AuthProvider:
"""Initialize an auth provider from a config."""
provider_name = config[CONF_TYPE]
module = await load_auth_provider_module(hass, provider_name)
try:
config = module.CONFIG_SCHEMA(config) # type: ignore
except vol.Invalid as err:
_LOGGER.error(
"Invalid configuration for auth provider %s: %s",
provider_name,
humanize_error(config, err),
)
raise
return AUTH_PROVIDERS[provider_name](hass, store, config) # type: ignore
async def load_auth_provider_module(
hass: HomeAssistant, provider: str
) -> types.ModuleType:
"""Load an auth provider."""
try:
module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
except ImportError as err:
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
raise HomeAssistantError(
f"Unable to load auth provider {provider}: {err}"
) from err
if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
return module
processed = hass.data.get(DATA_REQS)
if processed is None:
processed = hass.data[DATA_REQS] = set()
elif provider in processed:
return module
# https://github.com/python/mypy/issues/1424
reqs = module.REQUIREMENTS # type: ignore
await requirements.async_process_requirements(
hass, f"auth provider {provider}", reqs
)
processed.add(provider)
return module
class LoginFlow(data_entry_flow.FlowHandler):
"""Handler for the login flow."""
def __init__(self, auth_provider: AuthProvider) -> None:
"""Initialize the login flow."""
self._auth_provider = auth_provider
self._auth_module_id: Optional[str] = None
self._auth_manager = auth_provider.hass.auth
self.available_mfa_modules: Dict[str, str] = {}
self.created_at = dt_util.utcnow()
self.invalid_mfa_times = 0
self.user: Optional[User] = None
async def async_step_init(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the first step of login flow.
Return self.async_show_form(step_id='init') if user_input is None.
Return await self.async_finish(flow_result) if login init step pass.
"""
raise NotImplementedError
async def async_step_select_mfa_module(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the step of select mfa module."""
errors = {}
if user_input is not None:
auth_module = user_input.get("multi_factor_auth_module")
if auth_module in self.available_mfa_modules:
self._auth_module_id = auth_module
return await self.async_step_mfa()
errors["base"] = "invalid_auth_module"
if len(self.available_mfa_modules) == 1:
self._auth_module_id = list(self.available_mfa_modules)[0]
return await self.async_step_mfa()
return self.async_show_form(
step_id="select_mfa_module",
data_schema=vol.Schema(
{"multi_factor_auth_module": vol.In(self.available_mfa_modules)}
),
errors=errors,
)
async def async_step_mfa(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the step of mfa validation."""
assert self.user
errors = {}
assert self._auth_module_id is not None
auth_module = self._auth_manager.get_auth_mfa_module(self._auth_module_id)
if auth_module is None:
# Given an invalid input to async_step_select_mfa_module
# will show invalid_auth_module error
return await self.async_step_select_mfa_module(user_input={})
if user_input is None and hasattr(
auth_module, "async_initialize_login_mfa_step"
):
try:
await auth_module.async_initialize_login_mfa_step( # type: ignore
self.user.id
)
except HomeAssistantError:
_LOGGER.exception("Error initializing MFA step")
return self.async_abort(reason="unknown_error")
if user_input is not None:
expires = self.created_at + MFA_SESSION_EXPIRATION
if dt_util.utcnow() > expires:
return self.async_abort(reason="login_expired")
result = await auth_module.async_validate(self.user.id, user_input)
if not result:
errors["base"] = "invalid_code"
self.invalid_mfa_times += 1
if self.invalid_mfa_times >= auth_module.MAX_RETRY_TIME > 0:
return self.async_abort(reason="too_many_retry")
if not errors:
return await self.async_finish(self.user)
description_placeholders: Dict[str, Optional[str]] = {
"mfa_module_name": auth_module.name,
"mfa_module_id": auth_module.id,
}
return self.async_show_form(
step_id="mfa",
data_schema=auth_module.input_schema,
description_placeholders=description_placeholders,
errors=errors,
)
async def async_finish(self, flow_result: Any) -> Dict:
"""Handle the pass of login flow."""
return self.async_create_entry(title=self._auth_provider.name, data=flow_result)
|
import logging
from rachiopy import Rachio
from requests.exceptions import ConnectTimeout
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_API_KEY, HTTP_OK
from homeassistant.core import callback
from .const import (
CONF_MANUAL_RUN_MINS,
DEFAULT_MANUAL_RUN_MINS,
KEY_ID,
KEY_STATUS,
KEY_USERNAME,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}, extra=vol.ALLOW_EXTRA)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
rachio = Rachio(data[CONF_API_KEY])
username = None
try:
data = await hass.async_add_executor_job(rachio.person.info)
_LOGGER.debug("rachio.person.getInfo: %s", data)
if int(data[0][KEY_STATUS]) != HTTP_OK:
raise InvalidAuth
rachio_id = data[1][KEY_ID]
data = await hass.async_add_executor_job(rachio.person.get, rachio_id)
_LOGGER.debug("rachio.person.get: %s", data)
if int(data[0][KEY_STATUS]) != HTTP_OK:
raise CannotConnect
username = data[1][KEY_USERNAME]
except ConnectTimeout as error:
_LOGGER.error("Could not reach the Rachio API: %s", error)
raise CannotConnect from error
# Return info that you want to store in the config entry.
return {"title": username}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Rachio."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_API_KEY])
self._abort_if_unique_id_configured()
try:
info = await validate_input(self.hass, user_input)
return self.async_create_entry(title=info["title"], data=user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_homekit(self, homekit_info):
"""Handle HomeKit discovery."""
if self._async_current_entries():
# We can see rachio on the network to tell them to configure
# it, but since the device will not give up the account it is
# bound to and there can be multiple rachio systems on a single
# account, we avoid showing the device as discovered once
# they already have one configured as they can always
# add a new one via "+"
return self.async_abort(reason="already_configured")
properties = {
key.lower(): value for (key, value) in homekit_info["properties"].items()
}
await self.async_set_unique_id(properties["id"])
return await self.async_step_user()
async def async_step_import(self, user_input):
"""Handle import."""
return await self.async_step_user(user_input)
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for Rachio."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
data_schema = vol.Schema(
{
vol.Optional(
CONF_MANUAL_RUN_MINS,
default=self.config_entry.options.get(
CONF_MANUAL_RUN_MINS, DEFAULT_MANUAL_RUN_MINS
),
): int
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
from __future__ import print_function
import argparse
import ast
import os
import sys
from stash.system.shcommon import _STASH_EXTENSION_BIN_PATH, _STASH_EXTENSION_MAN_PATH
try:
raw_input
except NameError:
# py3
raw_input = input
_stash = globals()["_stash"]
TYPE_CMD = "command"
TYPE_PAGE = "page"
TYPE_NOTFOUND = "not found"
TYPE_LISTTOPICS = "list topics"
MAIN_BINPATH = os.path.join(os.environ["STASH_ROOT"], "bin")
MAIN_PAGEPATH = os.path.join(os.environ["STASH_ROOT"], "man")
BINPATHS = [MAIN_BINPATH, _STASH_EXTENSION_BIN_PATH]
PAGEPATHS = [MAIN_PAGEPATH, _STASH_EXTENSION_MAN_PATH]
for p in BINPATHS + PAGEPATHS:
if not os.path.exists(p):
os.mkdir(p)
def all_commands():
all_cmds = []
for bp in BINPATHS:
        cmds = [
            fn[:-3]
            for fn in os.listdir(bp)
            if fn.endswith(".py") and not fn.startswith(".")
            and os.path.isfile(os.path.join(bp, fn))
        ]
all_cmds += cmds
all_cmds.sort()
return all_cmds
def get_type(search):
"""returns (type, path) for a given topic/command."""
if search == "topics":
return (TYPE_LISTTOPICS, None)
cmdpath = find_command(search)
if cmdpath is not None:
return (TYPE_CMD, cmdpath)
if "(" in search and ")" in search:
try:
pn = int(search[search.index("(") + 1:search.index(")")])
        except ValueError:
print(_stash.text_color("Invalid Pagenumber", "red"))
sys.exit(1)
search = search[:search.index("(")]
else:
pn = 1
if "." in search:
        # FIXME: '.' in search should search only matching extensions
# Example: 'man test.md' searches for 'test.md' instead of 'test'
print(_stash.text_color("Searching for pages with '.' in the name is bugged and has been disabled.", "red"))
sys.exit(1)
to_search = search
found = []
for pp in PAGEPATHS:
found += os.listdir(pp)
else:
to_search = search
found = []
for p in PAGEPATHS:
found += [(fn[:fn.index(".")] if "." in fn else fn) for fn in os.listdir(p)]
if to_search in found:
ppc = []
for pp in PAGEPATHS:
ppc += [(fn, pp) for fn in os.listdir(pp)]
ffns = [(fn, pp) if fn.startswith(to_search + ".") else None for fn, pp in ppc]
ffn = list(filter(None, ffns))
if len(ffn) == 0:
# isdir
pname = "page_" + str(pn)
for pp in PAGEPATHS:
dirpath = os.path.join(pp, to_search)
if not os.path.exists(dirpath):
continue
for fn in os.listdir(dirpath):
if fn.startswith(pname):
fp = os.path.join(dirpath, fn)
if not os.path.exists(fp):
print(_stash.text_color("Page not found!", "red"))
return (TYPE_PAGE, fp)
return (TYPE_NOTFOUND, None)
path = os.path.join(ffn[0][1], ffn[0][0])
return (TYPE_PAGE, path)
else:
return (TYPE_NOTFOUND, None)
def find_command(cmd):
for bp in BINPATHS:
if os.path.exists(bp) and cmd + ".py" in os.listdir(bp):
return os.path.join(bp, cmd + ".py")
return None
def get_docstring(filename):
try:
with open(filename) as f:
tree = ast.parse(f.read(), os.path.basename(filename))
return ast.get_docstring(tree)
    except Exception:
return "UNKNOWN"
def get_summary(filename):
docstring = get_docstring(filename)
return docstring.splitlines()[0] if docstring else ''
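# Hedged illustration: ast.get_docstring pulls the module docstring out of the
# parsed source without importing (and therefore without executing) the script:
#
#     >>> import ast
#     >>> ast.get_docstring(ast.parse('"""Opens a URL."""\nx = 1'))
#     'Opens a URL.'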
def show_page(path):
"""shows the page at path."""
if not os.path.exists(path):
print(_stash.text_color("Error: cannot find page!", "red"), )
sys.exit(1)
with open(path, "r") as fin:
content = fin.read()
if len(content.replace("\n", "")) == 0:
print(_stash.text_color("Error: help empty!", "red"))
sys.exit(1)
if path.endswith(".txt"):
show_text(content)
elif path.endswith(".url"):
if content.startswith("stash://"):
# local file
path = os.path.join(os.getenv("STASH_ROOT"), content.replace("stash://", ""))
show_page(path.replace("\n", ""))
return
print("Opening webviewer...")
_stash("webviewer -n '{u}'".format(u=content.replace("\n", "")))
elif path.endswith(".html"):
print("Opening quicklook...")
_stash("quicklook {p}".format(p=path))
else:
show_text(content)
def show_text(text):
print(_stash.text_color("=" * 20, "yellow"))
lines = text.split("\n")
while True:
if len(lines) < 100:
print("\n".join(lines))
return
else:
print("\n".join(lines[:100]))
lines = lines[100:]
prompt = _stash.text_color("(Press Return to continue)", "yellow")
raw_input(prompt)
print("\n")
def show_topics():
"""prints all available miscellaneous help topics."""
print(_stash.text_color("Miscellaneous Topics:", "yellow"))
for pp in PAGEPATHS:
if not os.path.isdir(pp):
continue
content = os.listdir(pp)
for pn in content:
if "." in pn:
name = pn[:pn.index(".")]
else:
name = pn
print(name)
def main(args):
ap = argparse.ArgumentParser(description=__doc__)
ap.add_argument("topic", nargs="?", help="the command/topic to get help for")
ns = ap.parse_args(args)
if not ns.topic:
cmds = all_commands()
if len(cmds) > 100:
if raw_input("List all {} commands?".format(len(cmds))).strip().lower() not in ("y", "yes"):
sys.exit(0)
for cmd in cmds:
print(_stash.text_bold('{:>11}: '.format(cmd)) + get_summary(find_command(cmd)))
print("Type 'man topics' to see miscellaneous help topics")
sys.exit(0)
else:
ft, path = get_type(ns.topic)
if ft == TYPE_NOTFOUND:
print(_stash.text_color("man: no help for '{}'".format(ns.topic), "red"))
sys.exit(1)
if ft == TYPE_LISTTOPICS:
show_topics()
sys.exit(0)
elif ft == TYPE_CMD:
try:
docstring = get_docstring(path)
except Exception as err:
print(_stash.text_color("man: {}: {!s}".format(type(err).__name__, err), "red"), file=sys.stderr)
sys.exit(1)
if docstring:
print("Docstring of command '{}':\n{}".format(ns.topic, docstring))
else:
print(_stash.text_color("man: command '{}' has no docstring".format(ns.topic), "red"))
sys.exit(0)
elif ft == TYPE_PAGE:
show_page(path)
sys.exit(0)
if __name__ == "__main__":
main(sys.argv[1:])
|
import numpy as np
from scipy.sparse import csc_matrix
from .open import read_tag, fiff_open
from .tree import dir_tree_find
from .write import (start_block, end_block, write_int, write_float,
write_string, write_float_matrix, write_int_matrix,
write_float_sparse, write_id)
from .tag import find_tag
from .constants import FIFF
from ..utils import warn, _check_fname
_proc_keys = ['parent_file_id', 'block_id', 'parent_block_id',
'date', 'experimenter', 'creator']
_proc_ids = [FIFF.FIFF_PARENT_FILE_ID,
FIFF.FIFF_BLOCK_ID,
FIFF.FIFF_PARENT_BLOCK_ID,
FIFF.FIFF_MEAS_DATE,
FIFF.FIFF_EXPERIMENTER,
FIFF.FIFF_CREATOR]
_proc_writers = [write_id, write_id, write_id,
write_int, write_string, write_string]
_proc_casters = [dict, dict, dict, np.array, str, str]
def _read_proc_history(fid, tree):
"""Read processing history from fiff file.
This function reads the SSS info, the CTC correction and the
    calibrations from the SSS processing logs inside of a raw file
    (cf. Maxfilter v2.2 manual (October 2010), page 21)::
104 = { 900 = proc. history
104 = { 901 = proc. record
103 = block ID
204 = date
212 = scientist
113 = creator program
104 = { 502 = SSS info
264 = SSS task
263 = SSS coord frame
265 = SSS origin
266 = SSS ins.order
267 = SSS outs.order
268 = SSS nr chnls
269 = SSS components
278 = SSS nfree
243 = HPI g limit 0.98
244 = HPI dist limit 0.005
105 = } 502 = SSS info
104 = { 504 = MaxST info
264 = SSS task
272 = SSST subspace correlation
279 = SSST buffer length
105 = }
104 = { 501 = CTC correction
103 = block ID
204 = date
113 = creator program
800 = CTC matrix
3417 = proj item chs
105 = } 501 = CTC correction
104 = { 503 = SSS finecalib.
270 = SSS cal chnls
271 = SSS cal coeff
105 = } 503 = SSS finecalib.
105 = } 901 = proc. record
105 = } 900 = proc. history
"""
proc_history = dir_tree_find(tree, FIFF.FIFFB_PROCESSING_HISTORY)
out = list()
if len(proc_history) > 0:
proc_history = proc_history[0]
proc_records = dir_tree_find(proc_history,
FIFF.FIFFB_PROCESSING_RECORD)
for proc_record in proc_records:
record = dict()
for i_ent in range(proc_record['nent']):
kind = proc_record['directory'][i_ent].kind
pos = proc_record['directory'][i_ent].pos
for key, id_, cast in zip(_proc_keys, _proc_ids,
_proc_casters):
if kind == id_:
tag = read_tag(fid, pos)
record[key] = cast(tag.data)
break
else:
warn('Unknown processing history item %s' % kind)
record['max_info'] = _read_maxfilter_record(fid, proc_record)
iass = dir_tree_find(proc_record, FIFF.FIFFB_IAS)
if len(iass) > 0:
# XXX should eventually populate this
ss = [dict() for _ in range(len(iass))]
record['ias'] = ss
if len(record['max_info']) > 0:
out.append(record)
return out
def _write_proc_history(fid, info):
"""Write processing history to file."""
if len(info['proc_history']) > 0:
start_block(fid, FIFF.FIFFB_PROCESSING_HISTORY)
for record in info['proc_history']:
start_block(fid, FIFF.FIFFB_PROCESSING_RECORD)
for key, id_, writer in zip(_proc_keys, _proc_ids, _proc_writers):
if key in record:
writer(fid, id_, record[key])
_write_maxfilter_record(fid, record['max_info'])
if 'ias' in record:
for _ in record['ias']:
start_block(fid, FIFF.FIFFB_IAS)
# XXX should eventually populate this
end_block(fid, FIFF.FIFFB_IAS)
end_block(fid, FIFF.FIFFB_PROCESSING_RECORD)
end_block(fid, FIFF.FIFFB_PROCESSING_HISTORY)
_sss_info_keys = ('job', 'frame', 'origin', 'in_order',
'out_order', 'nchan', 'components', 'nfree',
'hpi_g_limit', 'hpi_dist_limit')
_sss_info_ids = (FIFF.FIFF_SSS_JOB,
FIFF.FIFF_SSS_FRAME,
FIFF.FIFF_SSS_ORIGIN,
FIFF.FIFF_SSS_ORD_IN,
FIFF.FIFF_SSS_ORD_OUT,
FIFF.FIFF_SSS_NMAG,
FIFF.FIFF_SSS_COMPONENTS,
FIFF.FIFF_SSS_NFREE,
FIFF.FIFF_HPI_FIT_GOOD_LIMIT,
FIFF.FIFF_HPI_FIT_DIST_LIMIT)
_sss_info_writers = (write_int, write_int, write_float, write_int,
write_int, write_int, write_int, write_int,
write_float, write_float)
_sss_info_casters = (int, int, np.array, int,
int, int, np.array, int,
float, float)
_max_st_keys = ('job', 'subspcorr', 'buflen')
_max_st_ids = (FIFF.FIFF_SSS_JOB, FIFF.FIFF_SSS_ST_CORR,
FIFF.FIFF_SSS_ST_LENGTH)
_max_st_writers = (write_int, write_float, write_float)
_max_st_casters = (int, float, float)
_sss_ctc_keys = ('block_id', 'date', 'creator', 'decoupler')
_sss_ctc_ids = (FIFF.FIFF_BLOCK_ID,
FIFF.FIFF_MEAS_DATE,
FIFF.FIFF_CREATOR,
FIFF.FIFF_DECOUPLER_MATRIX)
_sss_ctc_writers = (write_id, write_int, write_string, write_float_sparse)
_sss_ctc_casters = (dict, np.array, str, csc_matrix)
_sss_cal_keys = ('cal_chans', 'cal_corrs')
_sss_cal_ids = (FIFF.FIFF_SSS_CAL_CHANS, FIFF.FIFF_SSS_CAL_CORRS)
_sss_cal_writers = (write_int_matrix, write_float_matrix)
_sss_cal_casters = (np.array, np.array)
def _read_ctc(fname):
"""Read cross-talk correction matrix."""
fname = _check_fname(fname, overwrite='read', must_exist=True)
f, tree, _ = fiff_open(fname)
with f as fid:
sss_ctc = _read_maxfilter_record(fid, tree)['sss_ctc']
bad_str = 'Invalid cross-talk FIF: %s' % fname
if len(sss_ctc) == 0:
raise ValueError(bad_str)
node = dir_tree_find(tree, FIFF.FIFFB_DATA_CORRECTION)[0]
comment = find_tag(fid, node, FIFF.FIFF_COMMENT).data
if comment != 'cross-talk compensation matrix':
raise ValueError(bad_str)
sss_ctc['creator'] = find_tag(fid, node, FIFF.FIFF_CREATOR).data
sss_ctc['date'] = find_tag(fid, node, FIFF.FIFF_MEAS_DATE).data
return sss_ctc
def _read_maxfilter_record(fid, tree):
"""Read maxfilter processing record from file."""
sss_info_block = dir_tree_find(tree, FIFF.FIFFB_SSS_INFO) # 502
sss_info = dict()
if len(sss_info_block) > 0:
sss_info_block = sss_info_block[0]
for i_ent in range(sss_info_block['nent']):
kind = sss_info_block['directory'][i_ent].kind
pos = sss_info_block['directory'][i_ent].pos
for key, id_, cast in zip(_sss_info_keys, _sss_info_ids,
_sss_info_casters):
if kind == id_:
tag = read_tag(fid, pos)
sss_info[key] = cast(tag.data)
break
max_st_block = dir_tree_find(tree, FIFF.FIFFB_SSS_ST_INFO) # 504
max_st = dict()
if len(max_st_block) > 0:
max_st_block = max_st_block[0]
for i_ent in range(max_st_block['nent']):
kind = max_st_block['directory'][i_ent].kind
pos = max_st_block['directory'][i_ent].pos
for key, id_, cast in zip(_max_st_keys, _max_st_ids,
_max_st_casters):
if kind == id_:
tag = read_tag(fid, pos)
max_st[key] = cast(tag.data)
break
sss_ctc_block = dir_tree_find(tree, FIFF.FIFFB_CHANNEL_DECOUPLER) # 501
sss_ctc = dict()
if len(sss_ctc_block) > 0:
sss_ctc_block = sss_ctc_block[0]
for i_ent in range(sss_ctc_block['nent']):
kind = sss_ctc_block['directory'][i_ent].kind
pos = sss_ctc_block['directory'][i_ent].pos
for key, id_, cast in zip(_sss_ctc_keys, _sss_ctc_ids,
_sss_ctc_casters):
if kind == id_:
tag = read_tag(fid, pos)
sss_ctc[key] = cast(tag.data)
break
else:
if kind == FIFF.FIFF_PROJ_ITEM_CH_NAME_LIST:
tag = read_tag(fid, pos)
chs = tag.data.split(':')
# XXX for some reason this list can have a bunch of junk
# in the last entry, e.g.:
# [..., u'MEG2642', u'MEG2643', u'MEG2641\x00 ... \x00']
chs[-1] = chs[-1].split('\x00')[0]
sss_ctc['proj_items_chs'] = chs
sss_cal_block = dir_tree_find(tree, FIFF.FIFFB_SSS_CAL) # 503
sss_cal = dict()
if len(sss_cal_block) > 0:
sss_cal_block = sss_cal_block[0]
for i_ent in range(sss_cal_block['nent']):
kind = sss_cal_block['directory'][i_ent].kind
pos = sss_cal_block['directory'][i_ent].pos
for key, id_, cast in zip(_sss_cal_keys, _sss_cal_ids,
_sss_cal_casters):
if kind == id_:
tag = read_tag(fid, pos)
sss_cal[key] = cast(tag.data)
break
max_info = dict(sss_info=sss_info, sss_ctc=sss_ctc,
sss_cal=sss_cal, max_st=max_st)
return max_info
def _write_maxfilter_record(fid, record):
"""Write maxfilter processing record to file."""
sss_info = record['sss_info']
if len(sss_info) > 0:
start_block(fid, FIFF.FIFFB_SSS_INFO)
for key, id_, writer in zip(_sss_info_keys, _sss_info_ids,
_sss_info_writers):
if key in sss_info:
writer(fid, id_, sss_info[key])
end_block(fid, FIFF.FIFFB_SSS_INFO)
max_st = record['max_st']
if len(max_st) > 0:
start_block(fid, FIFF.FIFFB_SSS_ST_INFO)
for key, id_, writer in zip(_max_st_keys, _max_st_ids,
_max_st_writers):
if key in max_st:
writer(fid, id_, max_st[key])
end_block(fid, FIFF.FIFFB_SSS_ST_INFO)
sss_ctc = record['sss_ctc']
if len(sss_ctc) > 0: # dict has entries
start_block(fid, FIFF.FIFFB_CHANNEL_DECOUPLER)
for key, id_, writer in zip(_sss_ctc_keys, _sss_ctc_ids,
_sss_ctc_writers):
if key in sss_ctc:
writer(fid, id_, sss_ctc[key])
if 'proj_items_chs' in sss_ctc:
write_string(fid, FIFF.FIFF_PROJ_ITEM_CH_NAME_LIST,
':'.join(sss_ctc['proj_items_chs']))
end_block(fid, FIFF.FIFFB_CHANNEL_DECOUPLER)
sss_cal = record['sss_cal']
if len(sss_cal) > 0:
start_block(fid, FIFF.FIFFB_SSS_CAL)
for key, id_, writer in zip(_sss_cal_keys, _sss_cal_ids,
_sss_cal_writers):
if key in sss_cal:
writer(fid, id_, sss_cal[key])
end_block(fid, FIFF.FIFFB_SSS_CAL)
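# Hedged usage sketch (not part of this module): the helpers above can be
# driven directly from a FIF file; in practice MNE exposes the same records
# as raw.info['proc_history'].
def _example_read_history(fname):
    f, tree, _ = fiff_open(fname)
    with f as fid:
        return _read_proc_history(fid, tree)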
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import CONN_CLASS_ASSUMED
from homeassistant.const import CONF_DEVICE
from . import dongle
from .const import DOMAIN # pylint:disable=unused-import
from .const import ERROR_INVALID_DONGLE_PATH, LOGGER
class EnOceanFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle the enOcean config flows."""
VERSION = 1
MANUAL_PATH_VALUE = "Custom path"
CONNECTION_CLASS = CONN_CLASS_ASSUMED
def __init__(self):
"""Initialize the EnOcean config flow."""
self.dongle_path = None
self.discovery_info = None
async def async_step_import(self, data=None):
"""Import a yaml configuration."""
if not await self.validate_enocean_conf(data):
LOGGER.warning(
"Cannot import yaml configuration: %s is not a valid dongle path",
data[CONF_DEVICE],
)
return self.async_abort(reason="invalid_dongle_path")
return self.create_enocean_entry(data)
async def async_step_user(self, user_input=None):
"""Handle an EnOcean config flow start."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return await self.async_step_detect()
async def async_step_detect(self, user_input=None):
"""Propose a list of detected dongles."""
errors = {}
if user_input is not None:
if user_input[CONF_DEVICE] == self.MANUAL_PATH_VALUE:
return await self.async_step_manual(None)
if await self.validate_enocean_conf(user_input):
return self.create_enocean_entry(user_input)
errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH}
bridges = await self.hass.async_add_executor_job(dongle.detect)
if len(bridges) == 0:
return await self.async_step_manual(user_input)
bridges.append(self.MANUAL_PATH_VALUE)
return self.async_show_form(
step_id="detect",
data_schema=vol.Schema({vol.Required(CONF_DEVICE): vol.In(bridges)}),
errors=errors,
)
async def async_step_manual(self, user_input=None):
"""Request manual USB dongle path."""
default_value = None
errors = {}
if user_input is not None:
if await self.validate_enocean_conf(user_input):
return self.create_enocean_entry(user_input)
default_value = user_input[CONF_DEVICE]
errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH}
return self.async_show_form(
step_id="manual",
data_schema=vol.Schema(
{vol.Required(CONF_DEVICE, default=default_value): str}
),
errors=errors,
)
async def validate_enocean_conf(self, user_input) -> bool:
"""Return True if the user_input contains a valid dongle path."""
dongle_path = user_input[CONF_DEVICE]
path_is_valid = await self.hass.async_add_executor_job(
dongle.validate_path, dongle_path
)
return path_is_valid
def create_enocean_entry(self, user_input):
"""Create an entry for the provided configuration."""
return self.async_create_entry(title="EnOcean", data=user_input)
|
from datetime import timedelta
import logging
from oru import Meter, MeterError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ENERGY_KILO_WATT_HOUR
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_METER_NUMBER = "meter_number"
SCAN_INTERVAL = timedelta(minutes=15)
SENSOR_NAME = "ORU Current Energy Usage"
SENSOR_ICON = "mdi:counter"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_METER_NUMBER): cv.string})
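# Hedged configuration sketch (the meter number below is hypothetical): the
# platform is set up from configuration.yaml, e.g.
#
#   sensor:
#     - platform: oru
#       meter_number: "123456789"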
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
meter_number = config[CONF_METER_NUMBER]
try:
meter = Meter(meter_number)
except MeterError:
_LOGGER.error("Unable to create Oru meter")
return
add_entities([CurrentEnergyUsageSensor(meter)], True)
_LOGGER.debug("Oru meter_number = %s", meter_number)
class CurrentEnergyUsageSensor(Entity):
"""Representation of the sensor."""
def __init__(self, meter):
"""Initialize the sensor."""
self._state = None
self._available = None
self.meter = meter
@property
def unique_id(self):
"""Return a unique, Home Assistant friendly identifier for this entity."""
return self.meter.meter_id
@property
def name(self):
"""Return the name of the sensor."""
return SENSOR_NAME
@property
def icon(self):
"""Return the icon of the sensor."""
return SENSOR_ICON
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return ENERGY_KILO_WATT_HOUR
def update(self):
"""Fetch new state data for the sensor."""
try:
last_read = self.meter.last_read()
self._state = last_read
self._available = True
_LOGGER.debug(
"%s = %s %s", self.name, self._state, self.unit_of_measurement
)
except MeterError as err:
self._available = False
_LOGGER.error("Unexpected oru meter error: %s", err)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.architectures import arch_ops as ops
from compare_gan.architectures import resnet_ops
from six.moves import range
import tensorflow as tf
class Generator(resnet_ops.ResNetGenerator):
"""ResNet30 generator, 30 blocks, generates images of resolution 128x128.
Trying to match the architecture defined in [1]. Difference is that there
the final resolution is 64x64, while here we have 128x128.
"""
def apply(self, z, y, is_training):
"""Build the generator network for the given inputs.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
      is_training: boolean, whether we are in train or eval mode.
Returns:
A tensor of size [batch_size] + self._image_shape with values in [0, 1].
"""
z_shape = z.get_shape().as_list()
if len(z_shape) != 2:
raise ValueError("Expected shape [batch_size, z_dim], got %s." % z_shape)
ch = 64
colors = self._image_shape[2]
# Map noise to the actual seed.
output = ops.linear(z, 4 * 4 * 8 * ch, scope="fc_noise")
# Reshape the seed to be a rank-4 Tensor.
output = tf.reshape(output, [-1, 4, 4, 8 * ch], name="fc_reshaped")
in_channels = 8 * ch
out_channels = 4 * ch
for superblock in range(6):
for i in range(5):
block = self._resnet_block(
name="B_{}_{}".format(superblock, i),
in_channels=in_channels,
out_channels=in_channels,
scale="none")
output = block(output, z=z, y=y, is_training=is_training)
# We want to upscale 5 times.
if superblock < 5:
block = self._resnet_block(
name="B_{}_up".format(superblock),
in_channels=in_channels,
out_channels=out_channels,
scale="up")
output = block(output, z=z, y=y, is_training=is_training)
in_channels /= 2
out_channels /= 2
output = ops.conv2d(
output, output_dim=colors, k_h=3, k_w=3, d_h=1, d_w=1,
name="final_conv")
output = tf.nn.sigmoid(output)
return output
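# Shape walkthrough for the generator above (derived from the loop): with
# ch = 64 the seed is reshaped to [batch, 4, 4, 512]; each of the 5 "up"
# blocks doubles the spatial resolution and halves the channel count, giving
#   4x4x512 -> 8x8x256 -> 16x16x128 -> 32x32x64 -> 64x64x32 -> 128x128x16,
# after which the final 3x3 conv maps 16 channels to `colors`.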
class Discriminator(resnet_ops.ResNetDiscriminator):
"""ResNet discriminator, 30 blocks, 128x128x3 and 128x128x1 resolution."""
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
      Tuple of 3 Tensors: the final prediction of the discriminator, the
      logits before the final output activation function, and the logits
      from the second-to-last layer.
"""
resnet_ops.validate_image_inputs(x)
colors = x.get_shape().as_list()[-1]
assert colors in [1, 3]
ch = 64
output = ops.conv2d(
x, output_dim=ch // 4, k_h=3, k_w=3, d_h=1, d_w=1,
name="color_conv")
in_channels = ch // 4
out_channels = ch // 2
for superblock in range(6):
for i in range(5):
block = self._resnet_block(
name="B_{}_{}".format(superblock, i),
in_channels=in_channels,
out_channels=in_channels,
scale="none")
output = block(output, z=None, y=y, is_training=is_training)
# We want to downscale 5 times.
if superblock < 5:
block = self._resnet_block(
name="B_{}_up".format(superblock),
in_channels=in_channels,
out_channels=out_channels,
scale="down")
output = block(output, z=None, y=y, is_training=is_training)
in_channels *= 2
out_channels *= 2
# Final part
output = tf.reshape(output, [-1, 4 * 4 * 8 * ch])
out_logit = ops.linear(output, 1, scope="disc_final_fc",
use_sn=self._spectral_norm)
out = tf.nn.sigmoid(out_logit)
return out, out_logit, output
|
import argparse
import webbrowser
import ui
from objc_util import on_main_thread
@on_main_thread
def open_webbrowser(url, modal=False):
"""opens the url in the webbrowser"""
webbrowser.open(url, modal)
def open_webview(url, modal=False):
"""opens the url in a view."""
v = ui.WebView()
v.present("fullscreen")
v.load_url(url)
if modal:
v.wait_modal()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("url", help="url to open", action="store")
parser.add_argument("-m", "--modal", help="wait until the user closed the webbrowser", action="store_true", dest="modal")
parser.add_argument(
"-n",
"--insecure",
help="prefix the url with http:// instead of https:// if no prefix is given",
action="store_const",
const="http://",
default="https://",
dest="prefix"
)
parser.add_argument("-f", "--foreground", help="Open the url in the foreground", action="store_true", dest="foreground")
ns = parser.parse_args()
url = ns.url
if "://" not in url:
url = ns.prefix + url
if not ns.foreground:
open_webbrowser(url, ns.modal)
else:
open_webview(url, ns.modal)
|
import copy
from collections import abc as collections_abc
import functools
import inspect
import os
import types
import six
from .cassette import Cassette
from .serializers import yamlserializer, jsonserializer
from .persisters.filesystem import FilesystemPersister
from .util import compose, auto_decorate
from .record_mode import RecordMode
from . import matchers
from . import filters
class VCR:
@staticmethod
def is_test_method(method_name, function):
return method_name.startswith("test") and isinstance(function, types.FunctionType)
@staticmethod
def ensure_suffix(suffix):
def ensure(path):
if not path.endswith(suffix):
return path + suffix
return path
return ensure
def __init__(
self,
path_transformer=None,
before_record_request=None,
custom_patches=(),
filter_query_parameters=(),
ignore_hosts=(),
record_mode=RecordMode.ONCE,
ignore_localhost=False,
filter_headers=(),
before_record_response=None,
filter_post_data_parameters=(),
match_on=("method", "scheme", "host", "port", "path", "query"),
before_record=None,
inject_cassette=False,
serializer="yaml",
cassette_library_dir=None,
func_path_generator=None,
decode_compressed_response=False,
):
self.serializer = serializer
self.match_on = match_on
self.cassette_library_dir = cassette_library_dir
self.serializers = {"yaml": yamlserializer, "json": jsonserializer}
self.matchers = {
"method": matchers.method,
"uri": matchers.uri,
"url": matchers.uri, # matcher for backwards compatibility
"scheme": matchers.scheme,
"host": matchers.host,
"port": matchers.port,
"path": matchers.path,
"query": matchers.query,
"headers": matchers.headers,
"raw_body": matchers.raw_body,
"body": matchers.body,
}
self.persister = FilesystemPersister
self.record_mode = record_mode
self.filter_headers = filter_headers
self.filter_query_parameters = filter_query_parameters
self.filter_post_data_parameters = filter_post_data_parameters
self.before_record_request = before_record_request or before_record
self.before_record_response = before_record_response
self.ignore_hosts = ignore_hosts
self.ignore_localhost = ignore_localhost
self.inject_cassette = inject_cassette
self.path_transformer = path_transformer
self.func_path_generator = func_path_generator
self.decode_compressed_response = decode_compressed_response
self._custom_patches = tuple(custom_patches)
def _get_serializer(self, serializer_name):
try:
serializer = self.serializers[serializer_name]
except KeyError:
raise KeyError("Serializer {} doesn't exist or isn't registered".format(serializer_name))
return serializer
def _get_matchers(self, matcher_names):
matchers = []
try:
for m in matcher_names:
matchers.append(self.matchers[m])
except KeyError:
raise KeyError("Matcher {} doesn't exist or isn't registered".format(m))
return matchers
def use_cassette(self, path=None, **kwargs):
if path is not None and not isinstance(path, str):
function = path
# Assume this is an attempt to decorate a function
return self._use_cassette(**kwargs)(function)
return self._use_cassette(path=path, **kwargs)
def _use_cassette(self, with_current_defaults=False, **kwargs):
if with_current_defaults:
config = self.get_merged_config(**kwargs)
return Cassette.use(**config)
# This is made a function that evaluates every time a cassette
# is made so that changes that are made to this VCR instance
# that occur AFTER the `use_cassette` decorator is applied
# still affect subsequent calls to the decorated function.
args_getter = functools.partial(self.get_merged_config, **kwargs)
return Cassette.use_arg_getter(args_getter)
def get_merged_config(self, **kwargs):
serializer_name = kwargs.get("serializer", self.serializer)
matcher_names = kwargs.get("match_on", self.match_on)
path_transformer = kwargs.get("path_transformer", self.path_transformer)
func_path_generator = kwargs.get("func_path_generator", self.func_path_generator)
cassette_library_dir = kwargs.get("cassette_library_dir", self.cassette_library_dir)
additional_matchers = kwargs.get("additional_matchers", ())
if cassette_library_dir:
def add_cassette_library_dir(path):
if not path.startswith(cassette_library_dir):
return os.path.join(cassette_library_dir, path)
return path
path_transformer = compose(add_cassette_library_dir, path_transformer)
elif not func_path_generator:
# If we don't have a library dir, use the functions
# location to build a full path for cassettes.
func_path_generator = self._build_path_from_func_using_module
merged_config = {
"serializer": self._get_serializer(serializer_name),
"persister": self.persister,
"match_on": self._get_matchers(tuple(matcher_names) + tuple(additional_matchers)),
"record_mode": kwargs.get("record_mode", self.record_mode),
"before_record_request": self._build_before_record_request(kwargs),
"before_record_response": self._build_before_record_response(kwargs),
"custom_patches": self._custom_patches + kwargs.get("custom_patches", ()),
"inject": kwargs.get("inject_cassette", self.inject_cassette),
"path_transformer": path_transformer,
"func_path_generator": func_path_generator,
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
}
path = kwargs.get("path")
if path:
merged_config["path"] = path
return merged_config
def _build_before_record_response(self, options):
before_record_response = options.get("before_record_response", self.before_record_response)
decode_compressed_response = options.get(
"decode_compressed_response", self.decode_compressed_response
)
filter_functions = []
if decode_compressed_response:
filter_functions.append(filters.decode_response)
if before_record_response:
if not isinstance(before_record_response, collections_abc.Iterable):
before_record_response = (before_record_response,)
filter_functions.extend(before_record_response)
def before_record_response(response):
for function in filter_functions:
if response is None:
break
response = function(response)
return response
return before_record_response
def _build_before_record_request(self, options):
filter_functions = []
filter_headers = options.get("filter_headers", self.filter_headers)
filter_query_parameters = options.get("filter_query_parameters", self.filter_query_parameters)
filter_post_data_parameters = options.get(
"filter_post_data_parameters", self.filter_post_data_parameters
)
before_record_request = options.get(
"before_record_request", options.get("before_record", self.before_record_request)
)
ignore_hosts = options.get("ignore_hosts", self.ignore_hosts)
ignore_localhost = options.get("ignore_localhost", self.ignore_localhost)
if filter_headers:
replacements = [h if isinstance(h, tuple) else (h, None) for h in filter_headers]
filter_functions.append(functools.partial(filters.replace_headers, replacements=replacements))
if filter_query_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_query_parameters]
filter_functions.append(
functools.partial(filters.replace_query_parameters, replacements=replacements)
)
if filter_post_data_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_post_data_parameters]
filter_functions.append(
functools.partial(filters.replace_post_data_parameters, replacements=replacements)
)
hosts_to_ignore = set(ignore_hosts)
if ignore_localhost:
hosts_to_ignore.update(("localhost", "0.0.0.0", "127.0.0.1"))
if hosts_to_ignore:
filter_functions.append(self._build_ignore_hosts(hosts_to_ignore))
if before_record_request:
if not isinstance(before_record_request, collections_abc.Iterable):
before_record_request = (before_record_request,)
filter_functions.extend(before_record_request)
def before_record_request(request):
request = copy.copy(request)
for function in filter_functions:
if request is None:
break
request = function(request)
return request
return before_record_request
@staticmethod
def _build_ignore_hosts(hosts_to_ignore):
def filter_ignored_hosts(request):
if hasattr(request, "host") and request.host in hosts_to_ignore:
return
return request
return filter_ignored_hosts
@staticmethod
def _build_path_from_func_using_module(function):
return os.path.join(os.path.dirname(inspect.getfile(function)), function.__name__)
def register_serializer(self, name, serializer):
self.serializers[name] = serializer
def register_matcher(self, name, matcher):
self.matchers[name] = matcher
def register_persister(self, persister):
# Singleton, no name required
self.persister = persister
def test_case(self, predicate=None):
predicate = predicate or self.is_test_method
# TODO: Remove this reference to `six` in favor of the Python3 equivalent
return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
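# Hedged usage sketch (standard vcrpy patterns; the directory and test names
# below are hypothetical):
my_vcr = VCR(
    cassette_library_dir="fixtures/cassettes",
    path_transformer=VCR.ensure_suffix(".yaml"),
    record_mode=RecordMode.ONCE,
    filter_headers=["authorization"],
)


@my_vcr.use_cassette()  # cassette path is derived from the function name
def test_example_request():
    ...


# Equivalently, as a context manager with an explicit cassette path:
# with my_vcr.use_cassette("example.yaml"):
#     ...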
|
import zigpy.zcl.clusters.closures as closures
from homeassistant.core import callback
from .. import registries
from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED
from .base import ClientChannel, ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.DoorLock.cluster_id)
class DoorLockChannel(ZigbeeChannel):
"""Door lock channel."""
_value_attribute = 0
REPORT_CONFIG = ({"attr": "lock_state", "config": REPORT_CONFIG_IMMEDIATE},)
async def async_update(self):
"""Retrieve latest state."""
result = await self.get_attribute_value("lock_state", from_cache=True)
if result is not None:
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", 0, "lock_state", result
)
@callback
def attribute_updated(self, attrid, value):
"""Handle attribute update from lock cluster."""
attr_name = self.cluster.attributes.get(attrid, [attrid])[0]
self.debug(
"Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value
)
if attrid == self._value_attribute:
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value
)
async def async_initialize(self, from_cache):
"""Initialize channel."""
await self.get_attribute_value(self._value_attribute, from_cache=from_cache)
await super().async_initialize(from_cache)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.Shade.cluster_id)
class Shade(ZigbeeChannel):
"""Shade channel."""
@registries.CLIENT_CHANNELS_REGISTRY.register(closures.WindowCovering.cluster_id)
class WindowCoveringClient(ClientChannel):
"""Window client channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(closures.WindowCovering.cluster_id)
class WindowCovering(ZigbeeChannel):
"""Window channel."""
_value_attribute = 8
REPORT_CONFIG = (
{"attr": "current_position_lift_percentage", "config": REPORT_CONFIG_IMMEDIATE},
)
async def async_update(self):
"""Retrieve latest state."""
result = await self.get_attribute_value(
"current_position_lift_percentage", from_cache=False
)
self.debug("read current position: %s", result)
if result is not None:
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",
8,
"current_position_lift_percentage",
result,
)
@callback
def attribute_updated(self, attrid, value):
"""Handle attribute update from window_covering cluster."""
attr_name = self.cluster.attributes.get(attrid, [attrid])[0]
self.debug(
"Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value
)
if attrid == self._value_attribute:
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value
)
async def async_initialize(self, from_cache):
"""Initialize channel."""
await self.get_attribute_value(self._value_attribute, from_cache=from_cache)
await super().async_initialize(from_cache)
|
from ipaddress import ip_network
import logging
import pytest
import homeassistant.components.http as http
from homeassistant.setup import async_setup_component
from homeassistant.util.ssl import server_context_intermediate, server_context_modern
from tests.async_mock import Mock, patch
@pytest.fixture
def mock_stack():
"""Mock extract stack."""
with patch(
"homeassistant.components.http.extract_stack",
return_value=[
Mock(
filename="/home/paulus/core/homeassistant/core.py",
lineno="23",
line="do_something()",
),
Mock(
filename="/home/paulus/core/homeassistant/components/hue/light.py",
lineno="23",
line="self.light.is_on",
),
Mock(
filename="/home/paulus/core/homeassistant/components/http/__init__.py",
lineno="157",
line="base_url",
),
],
):
yield
class TestView(http.HomeAssistantView):
"""Test the HTTP views."""
name = "test"
url = "/hello"
async def get(self, request):
"""Return a get request."""
return "hello"
async def test_registering_view_while_running(
hass, aiohttp_client, aiohttp_unused_port
):
"""Test that we can register a view while the server is running."""
await async_setup_component(
hass, http.DOMAIN, {http.DOMAIN: {http.CONF_SERVER_PORT: aiohttp_unused_port()}}
)
await hass.async_start()
# This raises a RuntimeError if app is frozen
hass.http.register_view(TestView)
def test_api_base_url_with_domain(mock_stack):
"""Test setting API URL with domain."""
api_config = http.ApiConfig("127.0.0.1", "example.com")
assert api_config.base_url == "http://example.com:8123"
def test_api_base_url_with_ip(mock_stack):
"""Test setting API URL with IP."""
api_config = http.ApiConfig("127.0.0.1", "1.1.1.1")
assert api_config.base_url == "http://1.1.1.1:8123"
def test_api_base_url_with_ip_and_port(mock_stack):
"""Test setting API URL with IP and port."""
api_config = http.ApiConfig("127.0.0.1", "1.1.1.1", 8124)
assert api_config.base_url == "http://1.1.1.1:8124"
def test_api_base_url_with_protocol(mock_stack):
"""Test setting API URL with protocol."""
api_config = http.ApiConfig("127.0.0.1", "https://example.com")
assert api_config.base_url == "https://example.com:8123"
def test_api_base_url_with_protocol_and_port(mock_stack):
"""Test setting API URL with protocol and port."""
api_config = http.ApiConfig("127.0.0.1", "https://example.com", 433)
assert api_config.base_url == "https://example.com:433"
def test_api_base_url_with_ssl_enable(mock_stack):
"""Test setting API URL with use_ssl enabled."""
api_config = http.ApiConfig("127.0.0.1", "example.com", use_ssl=True)
assert api_config.base_url == "https://example.com:8123"
def test_api_base_url_with_ssl_enable_and_port(mock_stack):
"""Test setting API URL with use_ssl enabled and port."""
api_config = http.ApiConfig("127.0.0.1", "1.1.1.1", use_ssl=True, port=8888)
assert api_config.base_url == "https://1.1.1.1:8888"
def test_api_base_url_with_protocol_and_ssl_enable(mock_stack):
"""Test setting API URL with specific protocol and use_ssl enabled."""
api_config = http.ApiConfig("127.0.0.1", "http://example.com", use_ssl=True)
assert api_config.base_url == "http://example.com:8123"
def test_api_base_url_removes_trailing_slash(mock_stack):
"""Test a trialing slash is removed when setting the API URL."""
api_config = http.ApiConfig("127.0.0.1", "http://example.com/")
assert api_config.base_url == "http://example.com:8123"
def test_api_local_ip(mock_stack):
"""Test a trialing slash is removed when setting the API URL."""
api_config = http.ApiConfig("127.0.0.1", "http://example.com/")
assert api_config.local_ip == "127.0.0.1"
async def test_api_no_base_url(hass, mock_stack):
"""Test setting api url."""
result = await async_setup_component(hass, "http", {"http": {}})
assert result
assert hass.config.api.base_url == "http://127.0.0.1:8123"
async def test_not_log_password(hass, aiohttp_client, caplog, legacy_auth):
"""Test access with password doesn't get logged."""
assert await async_setup_component(hass, "api", {"http": {}})
client = await aiohttp_client(hass.http.app)
logging.getLogger("aiohttp.access").setLevel(logging.INFO)
resp = await client.get("/api/", params={"api_password": "test-password"})
assert resp.status == 401
logs = caplog.text
# Ensure we don't log API passwords
assert "/api/" in logs
assert "some-pass" not in logs
async def test_proxy_config(hass):
"""Test use_x_forwarded_for must config together with trusted_proxies."""
assert (
await async_setup_component(
hass,
"http",
{
"http": {
http.CONF_USE_X_FORWARDED_FOR: True,
http.CONF_TRUSTED_PROXIES: ["127.0.0.1"],
}
},
)
is True
)
async def test_proxy_config_only_use_xff(hass):
"""Test use_x_forwarded_for must config together with trusted_proxies."""
assert (
await async_setup_component(
hass, "http", {"http": {http.CONF_USE_X_FORWARDED_FOR: True}}
)
is not True
)
async def test_proxy_config_only_trust_proxies(hass):
"""Test use_x_forwarded_for must config together with trusted_proxies."""
assert (
await async_setup_component(
hass, "http", {"http": {http.CONF_TRUSTED_PROXIES: ["127.0.0.1"]}}
)
is not True
)
async def test_ssl_profile_defaults_modern(hass):
"""Test default ssl profile."""
assert await async_setup_component(hass, "http", {}) is True
hass.http.ssl_certificate = "bla"
with patch("ssl.SSLContext.load_cert_chain"), patch(
"homeassistant.util.ssl.server_context_modern",
side_effect=server_context_modern,
) as mock_context:
await hass.async_start()
await hass.async_block_till_done()
assert len(mock_context.mock_calls) == 1
async def test_ssl_profile_change_intermediate(hass):
"""Test setting ssl profile to intermediate."""
assert (
await async_setup_component(
hass, "http", {"http": {"ssl_profile": "intermediate"}}
)
is True
)
hass.http.ssl_certificate = "bla"
with patch("ssl.SSLContext.load_cert_chain"), patch(
"homeassistant.util.ssl.server_context_intermediate",
side_effect=server_context_intermediate,
) as mock_context:
await hass.async_start()
await hass.async_block_till_done()
assert len(mock_context.mock_calls) == 1
async def test_ssl_profile_change_modern(hass):
"""Test setting ssl profile to modern."""
assert (
await async_setup_component(hass, "http", {"http": {"ssl_profile": "modern"}})
is True
)
hass.http.ssl_certificate = "bla"
with patch("ssl.SSLContext.load_cert_chain"), patch(
"homeassistant.util.ssl.server_context_modern",
side_effect=server_context_modern,
) as mock_context:
await hass.async_start()
await hass.async_block_till_done()
assert len(mock_context.mock_calls) == 1
async def test_cors_defaults(hass):
"""Test the CORS default settings."""
with patch("homeassistant.components.http.setup_cors") as mock_setup:
assert await async_setup_component(hass, "http", {})
assert len(mock_setup.mock_calls) == 1
assert mock_setup.mock_calls[0][1][1] == ["https://cast.home-assistant.io"]
async def test_storing_config(hass, aiohttp_client, aiohttp_unused_port):
"""Test that we store last working config."""
config = {
http.CONF_SERVER_PORT: aiohttp_unused_port(),
"use_x_forwarded_for": True,
"trusted_proxies": ["192.168.1.100"],
}
assert await async_setup_component(hass, http.DOMAIN, {http.DOMAIN: config})
await hass.async_start()
restored = await hass.components.http.async_get_last_config()
restored["trusted_proxies"][0] = ip_network(restored["trusted_proxies"][0])
assert restored == http.HTTP_SCHEMA(config)
async def test_use_of_base_url(hass):
"""Test detection base_url usage when called without integration context."""
await async_setup_component(hass, "http", {"http": {}})
with patch(
"homeassistant.components.http.extract_stack",
return_value=[
Mock(
filename="/home/frenck/homeassistant/core.py",
lineno="21",
line="do_something()",
),
Mock(
filename="/home/frenck/homeassistant/core.py",
lineno="42",
line="url = hass.config.api.base_url",
),
Mock(
filename="/home/frenck/example/client.py",
lineno="21",
line="something()",
),
],
), pytest.raises(RuntimeError):
hass.config.api.base_url
async def test_use_of_base_url_integration(hass, caplog):
"""Test detection base_url usage when called with integration context."""
await async_setup_component(hass, "http", {"http": {}})
with patch(
"homeassistant.components.http.extract_stack",
return_value=[
Mock(
filename="/home/frenck/homeassistant/core.py",
lineno="21",
line="do_something()",
),
Mock(
filename="/home/frenck/homeassistant/components/example/__init__.py",
lineno="42",
line="url = hass.config.api.base_url",
),
Mock(
filename="/home/frenck/example/client.py",
lineno="21",
line="something()",
),
],
):
assert hass.config.api.base_url == "http://127.0.0.1:8123"
assert (
"Detected use of deprecated `base_url` property, use `homeassistant.helpers.network.get_url` method instead. Please report issue for example using this method at homeassistant/components/example/__init__.py, line 42: url = hass.config.api.base_url"
in caplog.text
)
async def test_use_of_base_url_integration_webhook(hass, caplog):
"""Test detection base_url usage when called with integration context."""
await async_setup_component(hass, "http", {"http": {}})
with patch(
"homeassistant.components.http.extract_stack",
return_value=[
Mock(
filename="/home/frenck/homeassistant/core.py",
lineno="21",
line="do_something()",
),
Mock(
filename="/home/frenck/homeassistant/components/example/__init__.py",
lineno="42",
line="url = hass.config.api.base_url",
),
Mock(
filename="/home/frenck/homeassistant/components/webhook/__init__.py",
lineno="42",
line="return get_url(hass)",
),
Mock(
filename="/home/frenck/example/client.py",
lineno="21",
line="something()",
),
],
):
assert hass.config.api.base_url == "http://127.0.0.1:8123"
assert (
"Detected use of deprecated `base_url` property, use `homeassistant.helpers.network.get_url` method instead. Please report issue for example using this method at homeassistant/components/example/__init__.py, line 42: url = hass.config.api.base_url"
in caplog.text
)
async def test_use_of_base_url_custom_component(hass, caplog):
"""Test detection base_url usage when called with custom component context."""
await async_setup_component(hass, "http", {"http": {}})
with patch(
"homeassistant.components.http.extract_stack",
return_value=[
Mock(
filename="/home/frenck/homeassistant/core.py",
lineno="21",
line="do_something()",
),
Mock(
filename="/home/frenck/.homeassistant/custom_components/example/__init__.py",
lineno="42",
line="url = hass.config.api.base_url",
),
Mock(
filename="/home/frenck/example/client.py",
lineno="21",
line="something()",
),
],
):
assert hass.config.api.base_url == "http://127.0.0.1:8123"
assert (
"Detected use of deprecated `base_url` property, use `homeassistant.helpers.network.get_url` method instead. Please report issue to the custom component author for example using this method at custom_components/example/__init__.py, line 42: url = hass.config.api.base_url"
in caplog.text
)
|
import logging
from russound import russound
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_ZONES = "zones"
CONF_SOURCES = "sources"
SUPPORT_RUSSOUND = (
SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
ZONE_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
SOURCE_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_ZONES): vol.Schema({cv.positive_int: ZONE_SCHEMA}),
vol.Required(CONF_SOURCES): vol.All(cv.ensure_list, [SOURCE_SCHEMA]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Russound RNET platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
if host is None or port is None:
_LOGGER.error("Invalid config. Expected %s and %s", CONF_HOST, CONF_PORT)
return False
russ = russound.Russound(host, port)
russ.connect()
sources = []
for source in config[CONF_SOURCES]:
sources.append(source["name"])
if russ.is_connected():
for zone_id, extra in config[CONF_ZONES].items():
add_entities(
[RussoundRNETDevice(hass, russ, sources, zone_id, extra)], True
)
else:
_LOGGER.error("Not connected to %s:%s", host, port)
class RussoundRNETDevice(MediaPlayerEntity):
"""Representation of a Russound RNET device."""
def __init__(self, hass, russ, sources, zone_id, extra):
"""Initialise the Russound RNET device."""
self._name = extra["name"]
self._russ = russ
self._sources = sources
self._zone_id = zone_id
self._state = None
self._volume = None
self._source = None
def update(self):
"""Retrieve latest state."""
        # A single call to get_zone_info returns On/Off, volume and source,
        # which keeps traffic down and speeds up the update.
ret = self._russ.get_zone_info("1", self._zone_id, 4)
_LOGGER.debug("ret= %s", ret)
if ret is not None:
_LOGGER.debug("Updating status for zone %s", self._zone_id)
if ret[0] == 0:
self._state = STATE_OFF
else:
self._state = STATE_ON
self._volume = ret[2] * 2 / 100.0
# Returns 0 based index for source.
index = ret[1]
            # The user may not have defined every source. If a source beyond
            # the configured list is selected externally, indexing raises
            # IndexError; in that case report an unknown source (None).
try:
self._source = self._sources[index]
except IndexError:
self._source = None
else:
_LOGGER.error("Could not update status for zone %s", self._zone_id)
@property
def name(self):
"""Return the name of the zone."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_RUSSOUND
@property
def source(self):
"""Get the currently selected source."""
return self._source
@property
def volume_level(self):
"""Volume level of the media player (0..1).
Value is returned based on a range (0..100).
Therefore float divide by 100 to get to the required range.
"""
return self._volume
def set_volume_level(self, volume):
"""Set volume level. Volume has a range (0..1).
Translate this to a range of (0..100) as expected
by _russ.set_volume()
"""
self._russ.set_volume("1", self._zone_id, volume * 100)
def turn_on(self):
"""Turn the media player on."""
self._russ.set_power("1", self._zone_id, "1")
def turn_off(self):
"""Turn off media player."""
self._russ.set_power("1", self._zone_id, "0")
def mute_volume(self, mute):
"""Send mute command."""
self._russ.toggle_mute("1", self._zone_id)
def select_source(self, source):
"""Set the input source."""
if source in self._sources:
index = self._sources.index(source)
# 0 based value for source
self._russ.set_source("1", self._zone_id, index)
@property
def source_list(self):
"""Return a list of available input sources."""
return self._sources
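# Illustrative sketch (not part of the original platform): the arithmetic
# above implies the device reports volume as 0..50 (ret[2], doubled to 0..100
# in update()) while set_volume() accepts 0..100. The sample values below
# only mirror those two conversions.
def example_russound_volume_roundtrip():
    """Round-trip a sample zone volume through both conversions."""
    raw = 25                        # ret[2] from get_zone_info()
    ha_level = raw * 2 / 100.0      # update(): device scale -> 0..1
    device_value = ha_level * 100   # set_volume_level(): 0..1 -> 0..100
    assert (ha_level, device_value) == (0.5, 50.0)
    return ha_level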
|
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONF_NAME,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
PRESSURE_HPA,
TEMP_CELSIUS,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_API_HUMIDITY,
ATTR_API_PM1,
ATTR_API_PRESSURE,
ATTR_API_TEMPERATURE,
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
)
ATTRIBUTION = "Data provided by Airly"
ATTR_ICON = "icon"
ATTR_LABEL = "label"
ATTR_UNIT = "unit"
PARALLEL_UPDATES = 1
SENSOR_TYPES = {
ATTR_API_PM1: {
ATTR_DEVICE_CLASS: None,
ATTR_ICON: "mdi:blur",
ATTR_LABEL: ATTR_API_PM1,
ATTR_UNIT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
ATTR_API_HUMIDITY: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_ICON: None,
ATTR_LABEL: ATTR_API_HUMIDITY.capitalize(),
ATTR_UNIT: PERCENTAGE,
},
ATTR_API_PRESSURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_PRESSURE,
ATTR_ICON: None,
ATTR_LABEL: ATTR_API_PRESSURE.capitalize(),
ATTR_UNIT: PRESSURE_HPA,
},
ATTR_API_TEMPERATURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_ICON: None,
ATTR_LABEL: ATTR_API_TEMPERATURE.capitalize(),
ATTR_UNIT: TEMP_CELSIUS,
},
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Airly sensor entities based on a config entry."""
name = config_entry.data[CONF_NAME]
coordinator = hass.data[DOMAIN][config_entry.entry_id]
sensors = []
for sensor in SENSOR_TYPES:
sensors.append(AirlySensor(coordinator, name, sensor))
async_add_entities(sensors, False)
class AirlySensor(CoordinatorEntity):
"""Define an Airly sensor."""
def __init__(self, coordinator, name, kind):
"""Initialize."""
super().__init__(coordinator)
self._name = name
self.kind = kind
self._device_class = None
self._state = None
self._icon = None
self._unit_of_measurement = None
self._attrs = {ATTR_ATTRIBUTION: ATTRIBUTION}
@property
def name(self):
"""Return the name."""
return f"{self._name} {SENSOR_TYPES[self.kind][ATTR_LABEL]}"
@property
def state(self):
"""Return the state."""
self._state = self.coordinator.data[self.kind]
if self.kind in [ATTR_API_PM1, ATTR_API_PRESSURE]:
self._state = round(self._state)
if self.kind in [ATTR_API_TEMPERATURE, ATTR_API_HUMIDITY]:
self._state = round(self._state, 1)
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
self._icon = SENSOR_TYPES[self.kind][ATTR_ICON]
return self._icon
@property
def device_class(self):
"""Return the device_class."""
return SENSOR_TYPES[self.kind][ATTR_DEVICE_CLASS]
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{self.coordinator.latitude}-{self.coordinator.longitude}-{self.kind.lower()}"
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {
(DOMAIN, self.coordinator.latitude, self.coordinator.longitude)
},
"name": DEFAULT_NAME,
"manufacturer": MANUFACTURER,
"entry_type": "service",
}
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return SENSOR_TYPES[self.kind][ATTR_UNIT]
|
from tests.async_mock import Mock, patch
EMPTY_8_6_JPEG = b"empty_8_6"
def patch_debounce():
"""Return patch for debounce method."""
return patch(
"homeassistant.components.homekit.accessories.debounce",
lambda f: lambda *args, **kwargs: f(*args, **kwargs),
)
def mock_turbo_jpeg(
first_width=None, second_width=None, first_height=None, second_height=None
):
"""Mock a TurboJPEG instance."""
mocked_turbo_jpeg = Mock()
mocked_turbo_jpeg.decode_header.side_effect = [
(first_width, first_height, 0, 0),
(second_width, second_height, 0, 0),
]
mocked_turbo_jpeg.scale_with_quality.return_value = EMPTY_8_6_JPEG
return mocked_turbo_jpeg
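# Illustrative usage (not part of the original helpers): decode_header is
# primed with exactly two (width, height, subsample, colorspace) tuples, so
# a test can read two headers before scale_with_quality returns the stub
# JPEG bytes. The byte strings below are hypothetical sample inputs.
def example_mock_turbo_jpeg_usage():
    """Show the two-call contract of the mocked TurboJPEG instance."""
    turbo = mock_turbo_jpeg(
        first_width=16, first_height=12, second_width=8, second_height=6
    )
    assert turbo.decode_header(b"jpeg-1") == (16, 12, 0, 0)
    assert turbo.decode_header(b"jpeg-2") == (8, 6, 0, 0)
    assert turbo.scale_with_quality(b"jpeg-2") == EMPTY_8_6_JPEG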
|
from __future__ import unicode_literals
from base64 import b32encode
def b32_encode(item):
    """Base32 encode a text string, returning '' on failure."""
    try:
        return b32encode(item.encode('utf-8')).decode()
    except Exception:
        return ''
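# Example (illustrative): 'hello' is 5 bytes = 40 bits, which encodes to
# exactly eight base32 characters with no padding.
def _b32_encode_example():
    """Sanity-check the encoder on a known value."""
    assert b32_encode('hello') == 'NBSWY3DP'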
|
import logging
import telnetlib
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_TIMEOUT,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_SOURCES = "sources"
DEFAULT_NAME = "Pioneer AVR"
DEFAULT_PORT = 23 # telnet default. Some Pioneer AVRs use 8102
DEFAULT_TIMEOUT = None
DEFAULT_SOURCES = {}
SUPPORT_PIONEER = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY
)
MAX_VOLUME = 185
MAX_SOURCE_NUMBERS = 60
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.socket_timeout,
vol.Optional(CONF_SOURCES, default=DEFAULT_SOURCES): {cv.string: cv.string},
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Pioneer platform."""
pioneer = PioneerDevice(
config[CONF_NAME],
config[CONF_HOST],
config[CONF_PORT],
config[CONF_TIMEOUT],
config[CONF_SOURCES],
)
if pioneer.update():
add_entities([pioneer])
class PioneerDevice(MediaPlayerEntity):
"""Representation of a Pioneer device."""
def __init__(self, name, host, port, timeout, sources):
"""Initialize the Pioneer device."""
self._name = name
self._host = host
self._port = port
self._timeout = timeout
self._pwstate = "PWR1"
self._volume = 0
self._muted = False
self._selected_source = ""
self._source_name_to_number = sources
self._source_number_to_name = {v: k for k, v in sources.items()}
@classmethod
def telnet_request(cls, telnet, command, expected_prefix):
"""Execute `command` and return the response."""
try:
telnet.write(command.encode("ASCII") + b"\r")
except telnetlib.socket.timeout:
_LOGGER.debug("Pioneer command %s timed out", command)
return None
        # The receiver randomly sends unsolicited state-change updates, so
        # read a few lines until one matches the response we are looking for.
for _ in range(3):
result = telnet.read_until(b"\r\n", timeout=0.2).decode("ASCII").strip()
if result.startswith(expected_prefix):
return result
return None
def telnet_command(self, command):
"""Establish a telnet connection and sends command."""
try:
try:
telnet = telnetlib.Telnet(self._host, self._port, self._timeout)
except (ConnectionRefusedError, OSError):
_LOGGER.warning("Pioneer %s refused connection", self._name)
return
telnet.write(command.encode("ASCII") + b"\r")
telnet.read_very_eager() # skip response
telnet.close()
except telnetlib.socket.timeout:
_LOGGER.debug("Pioneer %s command %s timed out", self._name, command)
def update(self):
"""Get the latest details from the device."""
try:
telnet = telnetlib.Telnet(self._host, self._port, self._timeout)
except (ConnectionRefusedError, OSError):
_LOGGER.warning("Pioneer %s refused connection", self._name)
return False
pwstate = self.telnet_request(telnet, "?P", "PWR")
if pwstate:
self._pwstate = pwstate
volume_str = self.telnet_request(telnet, "?V", "VOL")
self._volume = int(volume_str[3:]) / MAX_VOLUME if volume_str else None
muted_value = self.telnet_request(telnet, "?M", "MUT")
self._muted = (muted_value == "MUT0") if muted_value else None
# Build the source name dictionaries if necessary
if not self._source_name_to_number:
for i in range(MAX_SOURCE_NUMBERS):
result = self.telnet_request(telnet, f"?RGB{str(i).zfill(2)}", "RGB")
if not result:
continue
source_name = result[6:]
source_number = str(i).zfill(2)
self._source_name_to_number[source_name] = source_number
self._source_number_to_name[source_number] = source_name
source_number = self.telnet_request(telnet, "?F", "FN")
if source_number:
self._selected_source = self._source_number_to_name.get(source_number[2:])
else:
self._selected_source = None
telnet.close()
return True
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._pwstate == "PWR2":
return STATE_OFF
if self._pwstate == "PWR1":
return STATE_OFF
if self._pwstate == "PWR0":
return STATE_ON
return None
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_PIONEER
@property
def source(self):
"""Return the current input source."""
return self._selected_source
@property
def source_list(self):
"""List of available input sources."""
return list(self._source_name_to_number)
@property
def media_title(self):
"""Title of current playing media."""
return self._selected_source
def turn_off(self):
"""Turn off media player."""
self.telnet_command("PF")
def volume_up(self):
"""Volume up media player."""
self.telnet_command("VU")
def volume_down(self):
"""Volume down media player."""
self.telnet_command("VD")
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
# 60dB max
self.telnet_command(f"{round(volume * MAX_VOLUME):03}VL")
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self.telnet_command("MO" if mute else "MF")
def turn_on(self):
"""Turn the media player on."""
self.telnet_command("PO")
def select_source(self, source):
"""Select input source."""
self.telnet_command(f"{self._source_name_to_number.get(source)}FN")
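# Illustrative sketch (not part of the original platform): the mapping between
# the receiver's raw 0..MAX_VOLUME (185) scale used by update() and Home
# Assistant's 0..1 volume_level used by set_volume_level(). The reading below
# is sample data.
def example_pioneer_volume_roundtrip():
    """Round-trip a sample "?V" reading through both conversions."""
    volume_str = "VOL121"                             # raw "?V" response
    ha_level = int(volume_str[3:]) / MAX_VOLUME       # update(): 0..185 -> 0..1
    command = f"{round(ha_level * MAX_VOLUME):03}VL"  # set_volume_level()
    assert command == "121VL"
    return ha_level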
|
from ipaddress import ip_address
from aiohttp import web
import voluptuous as vol
import voluptuous_serialize
from homeassistant import data_entry_flow
from homeassistant.components.http.ban import (
log_invalid_auth,
process_success_login,
process_wrong_login,
)
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.const import (
HTTP_BAD_REQUEST,
HTTP_METHOD_NOT_ALLOWED,
HTTP_NOT_FOUND,
)
from . import indieauth
async def async_setup(hass, store_result):
"""Component to allow users to login."""
hass.http.register_view(AuthProvidersView)
hass.http.register_view(LoginFlowIndexView(hass.auth.login_flow, store_result))
hass.http.register_view(LoginFlowResourceView(hass.auth.login_flow, store_result))
class AuthProvidersView(HomeAssistantView):
"""View to get available auth providers."""
url = "/auth/providers"
name = "api:auth:providers"
requires_auth = False
async def get(self, request):
"""Get available auth providers."""
hass = request.app["hass"]
if not hass.components.onboarding.async_is_user_onboarded():
return self.json_message(
message="Onboarding not finished",
status_code=HTTP_BAD_REQUEST,
message_code="onboarding_required",
)
return self.json(
[
{"name": provider.name, "id": provider.id, "type": provider.type}
for provider in hass.auth.auth_providers
]
)
def _prepare_result_json(result):
"""Convert result to JSON."""
if result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
data = result.copy()
data.pop("result")
data.pop("data")
return data
if result["type"] != data_entry_flow.RESULT_TYPE_FORM:
return result
data = result.copy()
schema = data["data_schema"]
if schema is None:
data["data_schema"] = []
else:
data["data_schema"] = voluptuous_serialize.convert(schema)
return data
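# Illustrative sketch (not part of the original views): what the
# voluptuous_serialize conversion in _prepare_result_json produces for a
# small login schema. The field name is hypothetical, and the exact output
# shape is an assumption about the library's conventions.
def _example_serialized_schema():
    """Serialize a minimal schema the way _prepare_result_json does."""
    schema = vol.Schema({vol.Required("username"): str})
    # Expected to yield something like:
    # [{"name": "username", "required": True, "type": "string"}]
    return voluptuous_serialize.convert(schema)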
class LoginFlowIndexView(HomeAssistantView):
"""View to create a config flow."""
url = "/auth/login_flow"
name = "api:auth:login_flow"
requires_auth = False
def __init__(self, flow_mgr, store_result):
"""Initialize the flow manager index view."""
self._flow_mgr = flow_mgr
self._store_result = store_result
async def get(self, request):
"""Do not allow index of flows in progress."""
return web.Response(status=HTTP_METHOD_NOT_ALLOWED)
@RequestDataValidator(
vol.Schema(
{
vol.Required("client_id"): str,
vol.Required("handler"): vol.Any(str, list),
vol.Required("redirect_uri"): str,
vol.Optional("type", default="authorize"): str,
}
)
)
@log_invalid_auth
async def post(self, request, data):
"""Create a new login flow."""
if not await indieauth.verify_redirect_uri(
request.app["hass"], data["client_id"], data["redirect_uri"]
):
return self.json_message(
"invalid client id or redirect uri", HTTP_BAD_REQUEST
)
if isinstance(data["handler"], list):
handler = tuple(data["handler"])
else:
handler = data["handler"]
try:
result = await self._flow_mgr.async_init(
handler,
context={
"ip_address": ip_address(request.remote),
"credential_only": data.get("type") == "link_user",
},
)
except data_entry_flow.UnknownHandler:
return self.json_message("Invalid handler specified", HTTP_NOT_FOUND)
except data_entry_flow.UnknownStep:
return self.json_message("Handler does not support init", HTTP_BAD_REQUEST)
if result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
await process_success_login(request)
result.pop("data")
result["result"] = self._store_result(data["client_id"], result["result"])
return self.json(result)
return self.json(_prepare_result_json(result))
class LoginFlowResourceView(HomeAssistantView):
"""View to interact with the flow manager."""
url = "/auth/login_flow/{flow_id}"
name = "api:auth:login_flow:resource"
requires_auth = False
def __init__(self, flow_mgr, store_result):
"""Initialize the login flow resource view."""
self._flow_mgr = flow_mgr
self._store_result = store_result
async def get(self, request):
"""Do not allow getting status of a flow in progress."""
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
@RequestDataValidator(vol.Schema({"client_id": str}, extra=vol.ALLOW_EXTRA))
@log_invalid_auth
async def post(self, request, flow_id, data):
"""Handle progressing a login flow request."""
client_id = data.pop("client_id")
if not indieauth.verify_client_id(client_id):
return self.json_message("Invalid client id", HTTP_BAD_REQUEST)
try:
            # Do not allow the IP address to change during the login flow.
for flow in self._flow_mgr.async_progress():
if flow["flow_id"] == flow_id and flow["context"][
"ip_address"
] != ip_address(request.remote):
return self.json_message("IP address changed", HTTP_BAD_REQUEST)
result = await self._flow_mgr.async_configure(flow_id, data)
except data_entry_flow.UnknownFlow:
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
except vol.Invalid:
return self.json_message("User input malformed", HTTP_BAD_REQUEST)
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
            # @log_invalid_auth does not work here since it returns HTTP 200.
            # We need to manually log failed login attempts.
if result.get("errors") is not None and result["errors"].get("base") in [
"invalid_auth",
"invalid_code",
]:
await process_wrong_login(request)
return self.json(_prepare_result_json(result))
result.pop("data")
result["result"] = self._store_result(client_id, result["result"])
return self.json(result)
async def delete(self, request, flow_id):
"""Cancel a flow in progress."""
try:
self._flow_mgr.async_abort(flow_id)
except data_entry_flow.UnknownFlow:
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
return self.json_message("Flow aborted")
|
import pytest
from homeassistant.components.climate.const import (
ATTR_AUX_HEAT,
ATTR_HUMIDITY,
ATTR_PRESET_MODE,
ATTR_SWING_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
DOMAIN,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.components.climate.reproduce_state import async_reproduce_states
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import Context, State
from tests.common import async_mock_service
ENTITY_1 = "climate.test1"
ENTITY_2 = "climate.test2"
@pytest.mark.parametrize("state", [HVAC_MODE_AUTO, HVAC_MODE_HEAT, HVAC_MODE_OFF])
async def test_with_hvac_mode(hass, state):
"""Test that state different hvac states."""
calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HVAC_MODE)
await async_reproduce_states(hass, [State(ENTITY_1, state)])
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data == {"entity_id": ENTITY_1, "hvac_mode": state}
async def test_multiple_state(hass):
"""Test that multiple states gets calls."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_SET_HVAC_MODE)
await async_reproduce_states(
hass, [State(ENTITY_1, HVAC_MODE_HEAT), State(ENTITY_2, HVAC_MODE_AUTO)]
)
await hass.async_block_till_done()
assert len(calls_1) == 2
# order is not guaranteed
assert any(
call.data == {"entity_id": ENTITY_1, "hvac_mode": HVAC_MODE_HEAT}
for call in calls_1
)
assert any(
call.data == {"entity_id": ENTITY_2, "hvac_mode": HVAC_MODE_AUTO}
for call in calls_1
)
async def test_state_with_none(hass):
"""Test that none is not a hvac state."""
calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HVAC_MODE)
await async_reproduce_states(hass, [State(ENTITY_1, None)])
await hass.async_block_till_done()
assert len(calls) == 0
async def test_state_with_context(hass):
"""Test that context is forwarded."""
calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HVAC_MODE)
context = Context()
await async_reproduce_states(
hass, [State(ENTITY_1, HVAC_MODE_HEAT)], context=context
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data == {"entity_id": ENTITY_1, "hvac_mode": HVAC_MODE_HEAT}
assert calls[0].context == context
@pytest.mark.parametrize(
"service,attribute",
[
(SERVICE_SET_AUX_HEAT, ATTR_AUX_HEAT),
(SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE),
(SERVICE_SET_SWING_MODE, ATTR_SWING_MODE),
(SERVICE_SET_HUMIDITY, ATTR_HUMIDITY),
(SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE),
(SERVICE_SET_TEMPERATURE, ATTR_TARGET_TEMP_HIGH),
(SERVICE_SET_TEMPERATURE, ATTR_TARGET_TEMP_LOW),
],
)
async def test_attribute(hass, service, attribute):
"""Test that service call is made for each attribute."""
calls_1 = async_mock_service(hass, DOMAIN, service)
value = "dummy"
await async_reproduce_states(hass, [State(ENTITY_1, None, {attribute: value})])
await hass.async_block_till_done()
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": ENTITY_1, attribute: value}
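# Illustrative sketch (not part of the original tests): reproducing one state
# that carries both an hvac mode and an attribute should issue one service
# call per aspect, the behavior the tests above exercise one piece at a time.
async def example_reproduce_mode_and_temperature(hass):
    """Reproduce heat at 21 degrees and expect two service calls."""
    mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HVAC_MODE)
    temp_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_TEMPERATURE)
    await async_reproduce_states(
        hass, [State(ENTITY_1, HVAC_MODE_HEAT, {ATTR_TEMPERATURE: 21})]
    )
    await hass.async_block_till_done()
    assert len(mode_calls) == 1
    assert len(temp_calls) == 1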
|
from datetime import timedelta
import json
import os
from unittest import mock
from aiohomekit.model import Accessories, Accessory
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from aiohomekit.testing import FakeController
from homeassistant import config_entries
from homeassistant.components.homekit_controller import config_flow
from homeassistant.components.homekit_controller.const import (
CONTROLLER,
DOMAIN,
HOMEKIT_ACCESSORY_DISPATCH,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture
class Helper:
"""Helper methods for interacting with HomeKit fakes."""
def __init__(self, hass, entity_id, pairing, accessory, config_entry):
"""Create a helper for a given accessory/entity."""
self.hass = hass
self.entity_id = entity_id
self.pairing = pairing
self.accessory = accessory
self.config_entry = config_entry
self.characteristics = {}
for service in self.accessory.services:
service_name = ServicesTypes.get_short(service.type)
for char in service.characteristics:
char_name = CharacteristicsTypes.get_short(char.type)
self.characteristics[(service_name, char_name)] = char
async def update_named_service(self, service, characteristics):
"""Update a service."""
self.pairing.testing.update_named_service(service, characteristics)
await self.hass.async_block_till_done()
async def poll_and_get_state(self):
"""Trigger a time based poll and return the current entity state."""
await time_changed(self.hass, 60)
state = self.hass.states.get(self.entity_id)
assert state is not None
return state
async def time_changed(hass, seconds):
"""Trigger time changed."""
    next_update = dt_util.utcnow() + timedelta(seconds=seconds)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
async def setup_accessories_from_file(hass, path):
"""Load an collection of accessory defs from JSON data."""
accessories_fixture = await hass.async_add_executor_job(
load_fixture, os.path.join("homekit_controller", path)
)
accessories_json = json.loads(accessories_fixture)
accessories = Accessories.from_list(accessories_json)
return accessories
async def setup_platform(hass):
"""Load the platform but with a fake Controller API."""
config = {"discovery": {}}
with mock.patch("aiohomekit.Controller") as controller:
fake_controller = controller.return_value = FakeController()
await async_setup_component(hass, DOMAIN, config)
return fake_controller
async def setup_test_accessories(hass, accessories):
"""Load a fake homekit device based on captured JSON profile."""
fake_controller = await setup_platform(hass)
pairing_id = "00:00:00:00:00:00"
accessories_obj = Accessories()
for accessory in accessories:
accessories_obj.add_accessory(accessory)
pairing = await fake_controller.add_paired_device(accessories_obj, pairing_id)
config_entry = MockConfigEntry(
version=1,
domain="homekit_controller",
entry_id="TestData",
data={"AccessoryPairingID": pairing_id},
title="test",
connection_class=config_entries.CONN_CLASS_LOCAL_PUSH,
)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry, pairing
async def device_config_changed(hass, accessories):
"""Discover new devices added to Home Assistant at runtime."""
# Update the accessories our FakePairing knows about
controller = hass.data[CONTROLLER]
pairing = controller.pairings["00:00:00:00:00:00"]
accessories_obj = Accessories()
for accessory in accessories:
accessories_obj.add_accessory(accessory)
pairing.accessories = accessories_obj
discovery_info = {
"name": "TestDevice",
"host": "127.0.0.1",
"port": 8080,
"properties": {
"md": "TestDevice",
"id": "00:00:00:00:00:00",
"c#": "2",
"sf": "0",
},
}
# Config Flow will abort and notify us if the discovery event is of
# interest - in this case c# has incremented
flow = config_flow.HomekitControllerFlowHandler()
flow.hass = hass
flow.context = {}
result = await flow.async_step_zeroconf(discovery_info)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
# Wait for services to reconfigure
await hass.async_block_till_done()
await hass.async_block_till_done()
async def setup_test_component(hass, setup_accessory, capitalize=False, suffix=None):
"""Load a fake homekit accessory based on a homekit accessory model.
If capitalize is True, property names will be in upper case.
If suffix is set, entityId will include the suffix
"""
accessory = Accessory.create_with_info(
"TestDevice", "example.com", "Test", "0001", "0.1"
)
setup_accessory(accessory)
domain = None
for service in accessory.services:
service_name = ServicesTypes.get_short(service.type)
if service_name in HOMEKIT_ACCESSORY_DISPATCH:
domain = HOMEKIT_ACCESSORY_DISPATCH[service_name]
break
assert domain, "Cannot map test homekit services to Home Assistant domain"
config_entry, pairing = await setup_test_accessories(hass, [accessory])
entity = "testdevice" if suffix is None else f"testdevice_{suffix}"
return Helper(hass, ".".join((domain, entity)), pairing, accessory, config_entry)
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.light_head_rcnn import LightHeadRCNN
from chainercv.utils import assert_is_detection_link
from chainercv.utils import generate_random_bbox
def _random_array(xp, shape):
return xp.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
class DummyExtractor(chainer.Link):
def __init__(self, feat_stride):
super(DummyExtractor, self).__init__()
self.feat_stride = feat_stride
def __call__(self, x):
_, _, H, W = x.shape
rpn_features = _random_array(
self.xp, (1, 8, H // self.feat_stride, W // self.feat_stride))
roi_features = _random_array(
self.xp, (1, 8, H // self.feat_stride, W // self.feat_stride))
return rpn_features, roi_features
class DummyHead(chainer.Chain):
def __init__(self, n_class):
super(DummyHead, self).__init__()
self.n_class = n_class
def __call__(self, x, rois, roi_indices):
n_roi = len(rois)
cls_locs = chainer.Variable(
_random_array(self.xp, (n_roi, self.n_class * 4)))
# For each bbox, the score for a selected class is
# overwhelmingly higher than the scores for the other classes.
score_idx = np.random.randint(
low=0, high=self.n_class, size=(n_roi,))
scores = self.xp.zeros((n_roi, self.n_class), dtype=np.float32)
scores[np.arange(n_roi), score_idx] = 100
scores = chainer.Variable(scores)
return cls_locs, scores
class DummyRegionProposalNetwork(chainer.Chain):
def __init__(self, n_anchor_base, n_roi):
super(DummyRegionProposalNetwork, self).__init__()
self.n_anchor_base = n_anchor_base
self.n_roi = n_roi
def __call__(self, x, img_size, scale):
B, _, H, W = x.shape
n_anchor = self.n_anchor_base * H * W
rpn_locs = _random_array(self.xp, (B, n_anchor, 4))
rpn_cls_scores = _random_array(self.xp, (B, n_anchor, 2))
rois = self.xp.asarray(generate_random_bbox(
self.n_roi, img_size, 16, min(img_size)))
roi_indices = self.xp.zeros((len(rois),), dtype=np.int32)
anchor = self.xp.asarray(generate_random_bbox(
n_anchor, img_size, 16, min(img_size)))
return (chainer.Variable(rpn_locs),
chainer.Variable(rpn_cls_scores), rois, roi_indices, anchor)
class DummyLightHeadRCNN(LightHeadRCNN):
def __init__(
self, n_anchor_base, feat_stride, n_fg_class, n_roi,
min_size, max_size, loc_normalize_mean, loc_normalize_std,
):
super(DummyLightHeadRCNN, self).__init__(
DummyExtractor(feat_stride),
DummyRegionProposalNetwork(n_anchor_base, n_roi),
DummyHead(n_fg_class + 1),
mean=np.array([[[100]], [[122.5]], [[145]]]),
min_size=min_size,
max_size=max_size,
loc_normalize_mean=loc_normalize_mean,
loc_normalize_std=loc_normalize_std,
)
class TestLightHeadRCNN(unittest.TestCase):
def setUp(self):
self.n_anchor_base = 6
self.feat_stride = 4
n_fg_class = 4
self.n_class = n_fg_class + 1
self.n_roi = 24
self.link = DummyLightHeadRCNN(
n_anchor_base=self.n_anchor_base,
feat_stride=self.feat_stride,
n_fg_class=n_fg_class,
n_roi=self.n_roi,
min_size=600,
max_size=1000,
loc_normalize_mean=(0., 0., 0., 0.),
loc_normalize_std=(0.1, 0.1, 0.2, 0.2),
)
def check_call(self):
xp = self.link.xp
x1 = chainer.Variable(_random_array(xp, (1, 3, 600, 800)))
scales = chainer.Variable(xp.array([1.], dtype=np.float32))
roi_cls_locs, roi_scores, rois, roi_indices = self.link(x1, scales)
self.assertIsInstance(roi_cls_locs, chainer.Variable)
self.assertIsInstance(roi_cls_locs.array, xp.ndarray)
self.assertEqual(roi_cls_locs.shape, (self.n_roi, self.n_class * 4))
self.assertIsInstance(roi_scores, chainer.Variable)
self.assertIsInstance(roi_scores.array, xp.ndarray)
self.assertEqual(roi_scores.shape, (self.n_roi, self.n_class))
self.assertIsInstance(rois, xp.ndarray)
self.assertEqual(rois.shape, (self.n_roi, 4))
self.assertIsInstance(roi_indices, xp.ndarray)
self.assertEqual(roi_indices.shape, (self.n_roi,))
def test_call_cpu(self):
self.check_call()
@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
def test_predict_cpu(self):
assert_is_detection_link(self.link, self.n_class - 1)
@attr.gpu
def test_predict_gpu(self):
self.link.to_gpu()
assert_is_detection_link(self.link, self.n_class - 1)
@testing.parameterize(
{'in_shape': (3, 100, 100), 'expected_shape': (3, 200, 200)},
{'in_shape': (3, 200, 50), 'expected_shape': (3, 400, 100)},
{'in_shape': (3, 400, 100), 'expected_shape': (3, 400, 100)},
{'in_shape': (3, 300, 600), 'expected_shape': (3, 200, 400)},
{'in_shape': (3, 600, 900), 'expected_shape': (3, 200, 300)}
)
class TestLightHeadRCNNPrepare(unittest.TestCase):
min_size = 200
max_size = 400
def setUp(self):
self.link = DummyLightHeadRCNN(
n_anchor_base=1,
feat_stride=16,
n_fg_class=21,
n_roi=1,
min_size=self.min_size,
max_size=self.max_size,
loc_normalize_mean=(0., 0., 0., 0.),
loc_normalize_std=(0.1, 0.1, 0.2, 0.2),
)
def check_prepare(self):
x = _random_array(np, self.in_shape)
out = self.link.prepare(x)
self.assertIsInstance(out, np.ndarray)
self.assertEqual(out.shape, self.expected_shape)
def test_prepare_cpu(self):
self.check_prepare()
@attr.gpu
def test_prepare_gpu(self):
self.link.to_gpu()
self.check_prepare()
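# Illustrative sketch (not part of the original tests): the expected shapes in
# the parameterization above are consistent with scaling every image by
# scale = min(min_size / min(H, W), max_size / max(H, W)), i.e. the short side
# is raised to min_size unless that would push the long side past max_size.
def expected_prepare_shape(in_shape, min_size=200, max_size=400):
    """Compute the expected output shape for the prepare() test cases."""
    c, h, w = in_shape
    scale = min(min_size / min(h, w), max_size / max(h, w))
    return (c, int(round(h * scale)), int(round(w * scale)))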
testing.run_module(__name__, __file__)
|
from homeassistant.components.switch import SwitchEntity
from . import DEFAULT_SCAN_INTERVAL, DOMAIN
from .const import CARD_STATE_ACTIVE, CARD_STATE_BLOCKED, DATA
SCAN_INTERVAL = DEFAULT_SCAN_INTERVAL
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the N26 switch platform."""
if discovery_info is None:
return
api_list = hass.data[DOMAIN][DATA]
switch_entities = []
for api_data in api_list:
for card in api_data.cards:
switch_entities.append(N26CardSwitch(api_data, card))
add_entities(switch_entities)
class N26CardSwitch(SwitchEntity):
"""Representation of a N26 card block/unblock switch."""
def __init__(self, api_data, card: dict):
"""Initialize the N26 card block/unblock switch."""
self._data = api_data
self._card = card
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return self._card["id"]
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return f"card_{self._card['id']}"
@property
def is_on(self):
"""Return true if switch is on."""
return self._card["status"] == CARD_STATE_ACTIVE
def turn_on(self, **kwargs):
"""Block the card."""
self._data.api.unblock_card(self._card["id"])
self._card["status"] = CARD_STATE_ACTIVE
def turn_off(self, **kwargs):
"""Unblock the card."""
self._data.api.block_card(self._card["id"])
self._card["status"] = CARD_STATE_BLOCKED
def update(self):
"""Update the switch state."""
self._data.update_cards()
self._card = self._data.card(self._card["id"], self._card)
|
from homeassistant.components.plugwise import DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def async_init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
skip_setup: bool = False,
):
"""Initialize the Smile integration."""
entry = MockConfigEntry(
domain=DOMAIN, data={"host": "1.1.1.1", "password": "test-password"}
)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import scattertext as st
import numpy as np
df = st.SampleCorpora.ConventionData2012.get_data().assign(
parse=lambda df: df.text.apply(st.whitespace_nlp_with_sentences)
).assign(party=lambda df: df['party'].apply({'democrat': 'Democratic',
'republican': 'Republican'}.get))
corpus = st.CorpusFromParsedDocuments(
df, category_col='party', parsed_col='parse'
).build().get_unigram_corpus()
category_name = 'Democratic'
not_category_name = 'Republican'
def get_log_scale_df(corpus, y_category, x_category):
term_coord_df = corpus.get_term_freq_df('')
# Log scale term counts (with a smoothing constant) as the initial coordinates
coord_columns = []
for category in [y_category, x_category]:
col_name = category + '_coord'
term_coord_df[col_name] = np.log(term_coord_df[category] + 1e-6) / np.log(2)
coord_columns.append(col_name)
# Scale these coordinates to between 0 and 1
min_offset = term_coord_df[coord_columns].min(axis=0).min()
for coord_column in coord_columns:
term_coord_df[coord_column] -= min_offset
max_offset = term_coord_df[coord_columns].max(axis=0).max()
for coord_column in coord_columns:
term_coord_df[coord_column] /= max_offset
return term_coord_df
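# Illustrative check (not part of the original demo): the joint min-max
# scaling performed by get_log_scale_df, applied to a toy array of counts.
def _min_max_scale_example():
    """Scale two columns of log counts onto [0, 1] with shared bounds."""
    coords = np.log(np.array([[1., 10.], [100., 1000.]]) + 1e-6) / np.log(2)
    coords -= coords.min()  # shared offset across both columns
    coords /= coords.max()  # shared scale keeps the axes comparable
    return coords           # all values now lie in [0, 1]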
# Get term coordinates from original corpus
term_coordinates = get_log_scale_df(corpus, category_name, not_category_name)
print(term_coordinates)
# The tooltip JS function. Note that d is the term data object, and ox and oy
# are the original x- and y-axis counts.
get_tooltip_content = ('(function(d) {return d.term + "<br/>' + not_category_name + ' Count: " ' +
'+ d.ox +"<br/>' + category_name + ' Count: " + d.oy})')
html_orig = st.produce_scattertext_explorer(
corpus,
category=category_name,
not_category_name=not_category_name,
minimum_term_frequency=0,
pmi_threshold_coefficient=0,
width_in_pixels=1000,
metadata=corpus.get_df()['speaker'],
show_diagonal=True,
original_y=term_coordinates[category_name],
original_x=term_coordinates[not_category_name],
    x_coords=term_coordinates[not_category_name + '_coord'],
    y_coords=term_coordinates[category_name + '_coord'],
max_overlapping=3,
use_global_scale=True,
get_tooltip_content=get_tooltip_content,
)
open('./demo_global_scale_log_orig.html', 'w').write(html_orig)
print('open ./demo_global_scale_log_orig.html in Chrome')
# Select terms which appear a minimum threshold in both corpora
compact_corpus = corpus.compact(st.ClassPercentageCompactor(term_count=2))
# Only take term coordinates of terms remaining in corpus
term_coordinates = term_coordinates.loc[compact_corpus.get_terms()]
print(term_coordinates)
html = st.produce_scattertext_explorer(
compact_corpus,
category=category_name,
not_category_name=not_category_name,
minimum_term_frequency=0,
pmi_threshold_coefficient=0,
width_in_pixels=1000,
metadata=corpus.get_df()['speaker'],
show_diagonal=True,
original_y=term_coordinates[category_name],
original_x=term_coordinates[not_category_name],
    x_coords=term_coordinates[not_category_name + '_coord'],
    y_coords=term_coordinates[category_name + '_coord'],
max_overlapping=3,
use_global_scale=True,
get_tooltip_content=get_tooltip_content,
)
open('./demo_global_scale_log.html', 'w').write(html)
print('open ./demo_global_scale_log.html in Chrome')
|
from homeassistant.helpers.icon import icon_for_battery_level
from . import (
CONF_HOST,
CONF_NAME,
CONF_SENSORS,
DATA_IP_WEBCAM,
ICON_MAP,
KEY_MAP,
AndroidIPCamEntity,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the IP Webcam Sensor."""
if discovery_info is None:
return
host = discovery_info[CONF_HOST]
name = discovery_info[CONF_NAME]
sensors = discovery_info[CONF_SENSORS]
ipcam = hass.data[DATA_IP_WEBCAM][host]
all_sensors = []
for sensor in sensors:
all_sensors.append(IPWebcamSensor(name, host, ipcam, sensor))
async_add_entities(all_sensors, True)
class IPWebcamSensor(AndroidIPCamEntity):
"""Representation of a IP Webcam sensor."""
def __init__(self, name, host, ipcam, sensor):
"""Initialize the sensor."""
super().__init__(host, ipcam)
self._sensor = sensor
self._mapped_name = KEY_MAP.get(self._sensor, self._sensor)
self._name = f"{name} {self._mapped_name}"
self._state = None
self._unit = None
@property
def name(self):
"""Return the name of the sensor, if any."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def state(self):
"""Return the state of the sensor."""
return self._state
async def async_update(self):
"""Retrieve latest state."""
if self._sensor in ("audio_connections", "video_connections"):
if not self._ipcam.status_data:
return
self._state = self._ipcam.status_data.get(self._sensor)
self._unit = "Connections"
else:
self._state, self._unit = self._ipcam.export_sensor(self._sensor)
@property
def icon(self):
"""Return the icon for the sensor."""
if self._sensor == "battery_level" and self._state is not None:
return icon_for_battery_level(int(self._state))
return ICON_MAP.get(self._sensor, "mdi:eye")
|
import logging
import os
from threading import Thread
from cffi import FFI as _FFI
from kalliope.core.Utils.Utils import Utils
from kalliope.core.ConfigurationManager import SettingLoader
logging.basicConfig()
logger = logging.getLogger("kalliope")
class OrderListener(Thread):
"""
This Class allows to Listen to an Incoming voice order.
.. notes:: Thread are used to calibrate the sound of the microphone input with the noise while
starting to listen the incoming order. Basically it avoids delays.
"""
def __init__(self, callback=None, stt=None, audio_file_path=None):
"""
This class is called after we catch the hotword that has woken up Kalliope.
We now wait for an order spoken out loud by the user, translate the order into a text and run the action
attached to this order from settings
:param callback: callback function to call
:type callback: Callback function
:param stt: Speech to text plugin name to load. If not provided,
:type stt: STT instance
we will load the default one set in settings
.. seealso:: STT
"""
# this is a trick to ignore ALSA output error
# see http://stackoverflow.com/questions/7088672/pyaudio-working-but-spits-out-error-messages-each-time
super(OrderListener, self).__init__()
self.stt = stt
self._ignore_stderr()
self.stt_module_name = stt
self.callback = callback
sl = SettingLoader()
self.settings = sl.settings
self.stt_instance = None
self.audio_file_path = audio_file_path
def run(self):
"""
Start thread
"""
logger.debug("[OrderListener] running ...")
self.stt_instance = self.load_stt_plugin()
    def load_stt_plugin(self):
        """Instantiate and return the configured speech-to-text plugin."""
        if self.stt is None:
            self.stt_module_name = self.settings.default_stt_name
        logger.debug("[OrderListener] stt module name: %s", self.stt_module_name)
for stt_object in self.settings.stts:
if stt_object.name == self.stt_module_name:
stt_object.parameters["callback"] = self.callback
# add the audio file path to the list of parameter if set
stt_object.parameters["audio_file_path"] = self.audio_file_path
stt_folder = None
if self.settings.resources:
stt_folder = self.settings.resources.stt_folder
return Utils.get_dynamic_class_instantiation(package_name='stt',
module_name=stt_object.name.capitalize(),
parameters=stt_object.parameters,
resources_dir=stt_folder)
@staticmethod
def _ignore_stderr():
"""
Try to forward PortAudio messages from stderr to /dev/null.
"""
ffi = _FFI()
ffi.cdef("""
/* from stdio.h */
extern FILE* fopen(const char* path, const char* mode);
extern int fclose(FILE* fp);
extern FILE* stderr; /* GNU C library */
extern FILE* __stderrp; /* Mac OS X */
""")
stdio = ffi.dlopen(None)
devnull = stdio.fopen(os.devnull.encode(), b'w')
try:
stdio.stderr = devnull
except KeyError:
try:
stdio.__stderrp = devnull
except KeyError:
stdio.fclose(devnull)
|
import unittest
import six
from mock import Mock, call
from trashcli.rm import Filter
from unit_tests.myStringIO import StringIO
class TestTrashRmCmdRun(unittest.TestCase):
def test_without_arguments(self):
from trashcli.rm import RmCmd
cmd = RmCmd(None, None, None, None, None)
cmd.stderr = StringIO()
cmd.run([None])
assert ('Usage:\n trash-rm PATTERN\n\nPlease specify PATTERN\n' ==
cmd.stderr.getvalue())
def test_without_pattern_argument(self):
from trashcli.rm import RmCmd
cmd = RmCmd(None, None, None, None, None)
cmd.stderr = StringIO()
cmd.file_reader = Mock([])
cmd.file_reader.exists = Mock([], return_value = None)
cmd.file_reader.entries_if_dir_exists = Mock([], return_value = [])
cmd.environ = {}
        cmd.getuid = lambda: '111'
cmd.list_volumes = lambda: ['/vol1']
cmd.run([None, None])
assert '' == cmd.stderr.getvalue()
class TestTrashRmCmd(unittest.TestCase):
def test_a_star_matches_all(self):
self.cmd.use_pattern('*')
self.cmd.delete_if_matches('/foo', 'info/foo')
self.cmd.delete_if_matches('/bar', 'info/bar')
six.assertCountEqual(self, [
call('info/foo'),
call('info/bar'),
], self.delete_trashinfo_and_backup_copy.mock_calls)
def test_basename_matches(self):
self.cmd.use_pattern('foo')
        self.cmd.delete_if_matches('/foo', 'info/foo')
self.cmd.delete_if_matches('/bar', 'info/bar')
six.assertCountEqual(self, [
call('info/foo'),
], self.delete_trashinfo_and_backup_copy.mock_calls)
def test_example_with_star_dot_o(self):
self.cmd.use_pattern('*.o')
        self.cmd.delete_if_matches('/foo.h', 'info/foo.h')
        self.cmd.delete_if_matches('/foo.c', 'info/foo.c')
        self.cmd.delete_if_matches('/foo.o', 'info/foo.o')
self.cmd.delete_if_matches('/bar.o', 'info/bar.o')
six.assertCountEqual(self, [
call('info/foo.o'),
call('info/bar.o'),
], self.delete_trashinfo_and_backup_copy.mock_calls)
def test_absolute_pattern(self):
self.cmd.use_pattern('/foo/bar.baz')
        self.cmd.delete_if_matches('/foo/bar.baz', '1')
        self.cmd.delete_if_matches('/foo/bar', '2')
six.assertCountEqual(self, [
call('1'),
], self.delete_trashinfo_and_backup_copy.mock_calls)
def setUp(self):
self.delete_trashinfo_and_backup_copy = Mock()
self.cmd = Filter(self.delete_trashinfo_and_backup_copy)
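# Illustrative sketch (not part of the original tests): the matching semantics
# exercised above, assuming fnmatch-style globbing. Relative patterns match
# the basename of the trashed path, while patterns starting with '/' must
# match the whole original path.
def example_matches(pattern, original_path):
    """Mirror the Filter behavior the tests above rely on."""
    import fnmatch
    import os
    if pattern.startswith('/'):
        return fnmatch.fnmatch(original_path, pattern)
    return fnmatch.fnmatch(os.path.basename(original_path), pattern)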
|
from flask import Flask
from flask.views import MethodView
from flasgger import Swagger
app = Flask(__name__)
swag = Swagger()
@app.route('/')
def index():
return 'Hello World'
class TestView(MethodView):
def get(self):
return 'Hello World'
class Meow(MethodView):
"""
    This is to ensure Swagger does not break with empty MethodViews
    (issue #76).
"""
pass
app.add_url_rule(
'/meow/<int:param>/',
view_func=Meow.as_view('meow'),
methods=['DELETE']
)
app.add_url_rule(
'/testview',
view_func=TestView.as_view('testview'),
methods=['GET']
)
if __name__ == '__main__':
swag.init_app(app)
app.run(debug=True)
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_API_KEY, CONF_DEVICE, HTTP_OK
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://llamalab.com/automate/cloud/message"
ATTR_PRIORITY = "priority"
CONF_TO = "to"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Optional(CONF_DEVICE): cv.string,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the LlamaLab Automate notification service."""
secret = config.get(CONF_API_KEY)
recipient = config.get(CONF_TO)
device = config.get(CONF_DEVICE)
return AutomateNotificationService(secret, recipient, device)
class AutomateNotificationService(BaseNotificationService):
"""Implement the notification service for LlamaLab Automate."""
def __init__(self, secret, recipient, device=None):
"""Initialize the service."""
self._secret = secret
self._recipient = recipient
self._device = device
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
# Extract params from data dict
data = dict(kwargs.get(ATTR_DATA) or {})
priority = data.get(ATTR_PRIORITY, "normal").lower()
_LOGGER.debug(
"Sending to: %s, %s, prio: %s", self._recipient, str(self._device), priority
)
data = {
"secret": self._secret,
"to": self._recipient,
"device": self._device,
"priority": priority,
"payload": message,
}
response = requests.post(_RESOURCE, json=data)
if response.status_code != HTTP_OK:
_LOGGER.error("Error sending message: %s", response)
|
from datetime import timedelta
import logging
import defusedxml.ElementTree as ET
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_ID = "id"
DEFAULT_NAME = "OhmConnect Status"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the OhmConnect sensor."""
name = config.get(CONF_NAME)
ohmid = config.get(CONF_ID)
add_entities([OhmconnectSensor(name, ohmid)], True)
class OhmconnectSensor(Entity):
"""Representation of a OhmConnect sensor."""
def __init__(self, name, ohmid):
"""Initialize the sensor."""
self._name = name
self._ohmid = ohmid
self._data = {}
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self._data.get("active") == "True":
return "Active"
return "Inactive"
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"Address": self._data.get("address"), "ID": self._ohmid}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from OhmConnect."""
try:
url = f"https://login.ohmconnect.com/verify-ohm-hour/{self._ohmid}"
response = requests.get(url, timeout=10)
root = ET.fromstring(response.text)
for child in root:
self._data[child.tag] = child.text
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to host/endpoint: %s", url)
self._data = {}
|
from homeassistant.components.fan import (
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
)
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_homeassistant_bridge_fan_setup(hass):
"""Test that a SIMPLEconnect fan can be correctly setup in HA."""
accessories = await setup_accessories_from_file(
hass, "home_assistant_bridge_fan.json"
)
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Check that the fan is correctly found and set up
fan_id = "fan.living_room_fan"
fan = entity_registry.async_get(fan_id)
assert fan.unique_id == "homekit-fan.living_room_fan-8"
fan_helper = Helper(
hass,
"fan.living_room_fan",
pairing,
accessories[0],
config_entry,
)
fan_state = await fan_helper.poll_and_get_state()
assert fan_state.attributes["friendly_name"] == "Living Room Fan"
assert fan_state.state == "off"
assert fan_state.attributes["supported_features"] == (
SUPPORT_DIRECTION | SUPPORT_SET_SPEED | SUPPORT_OSCILLATE
)
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(fan.device_id)
assert device.manufacturer == "Home Assistant"
assert device.name == "Living Room Fan"
assert device.model == "Fan"
assert device.sw_version == "0.104.0.dev0"
    bridge = device_registry.async_get(device.via_device_id)
assert bridge.manufacturer == "Home Assistant"
assert bridge.name == "Home Assistant Bridge"
assert bridge.model == "Bridge"
assert bridge.sw_version == "0.104.0.dev0"
|
from django.conf.urls import include
from django.conf.urls import url
from django.utils.translation import gettext_lazy
from zinnia.settings import TRANSLATED_URLS
def i18n_url(url, translate=TRANSLATED_URLS):
"""
    Translate a URL part if translated URLs are enabled.
"""
if translate:
return gettext_lazy(url)
return url
_ = i18n_url
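# `_` aliases i18n_url so the urlpatterns below read like ordinary
# gettext-marked strings; when TRANSLATED_URLS is enabled, each wrapped
# pattern is a lazy translation proxy resolved for the active language.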
app_name = 'zinnia'
urlpatterns = [
url(_(r'^feeds/'), include('zinnia.urls.feeds')),
url(_(r'^tags/'), include('zinnia.urls.tags')),
url(_(r'^authors/'), include('zinnia.urls.authors')),
url(_(r'^categories/'), include('zinnia.urls.categories')),
url(_(r'^search/'), include('zinnia.urls.search')),
url(_(r'^random/'), include('zinnia.urls.random')),
url(_(r'^sitemap/'), include('zinnia.urls.sitemap')),
url(_(r'^trackback/'), include('zinnia.urls.trackback')),
url(_(r'^comments/'), include('zinnia.urls.comments')),
url(r'^', include('zinnia.urls.entries')),
url(r'^', include('zinnia.urls.archives')),
url(r'^', include('zinnia.urls.shortlink')),
url(r'^', include('zinnia.urls.quick_entry')),
url(r'^', include('zinnia.urls.capabilities')),
]
|
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def base_version(self):
return self._version
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
    # epoch greater than or equal to 0. This will effectively sort the
    # LegacyVersion, which uses the de facto standard originally implemented
    # by setuptools, before all PEP 440 versions.
epoch = -1
    # This scheme is taken from pkg_resources.parse_version in setuptools,
    # prior to its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
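# e.g. (a sketch of the key produced above; trailing zeros are dropped, so
# equivalent versions get equal keys):
#   _legacy_cmpkey("1.0") == _legacy_cmpkey("1.0.0")
#   == (-1, ('00000001', '*final'))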
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
return "".join(parts)
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def base_version(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
return "".join(parts)
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
@property
def is_postrelease(self):
return bool(self._version.post)
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
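# e.g. (normalization sketch):
#   _parse_letter_version("alpha", None) -> ("a", 0)
#   _parse_letter_version("rev", "4")    -> ("post", 4)
#   _parse_letter_version(None, "1")     -> ("post", 1)  # implicit post: "1.0-1"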
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of
    # the trailing zeros removed. So we'll reverse the list, drop all the
    # now-leading zeros until we come to something non-zero, then re-reverse
    # the rest back into the correct order, and make it a tuple to use as
    # our sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
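# A minimal self-test sketch (not part of the original module): exercises
# the ordering rules encoded in _cmpkey above (dev < pre < final < local <
# post for this example) and the LegacyVersion fallback in parse().
if __name__ == "__main__":
    ordered = ["1.0.dev0", "1.0a1", "1.0rc1", "1.0", "1.0+local", "1.0.post1"]
    assert sorted(ordered, key=parse) == ordered
    assert isinstance(parse("definitely-not-pep440"), LegacyVersion)
    print("version ordering sketch OK")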
|
from functools import wraps
import logging
from pyhiveapi import Pyhiveapi
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "hive"
DATA_HIVE = "data_hive"
SERVICES = ["Heating", "HotWater", "TRV"]
SERVICE_BOOST_HOT_WATER = "boost_hot_water"
SERVICE_BOOST_HEATING = "boost_heating"
ATTR_TIME_PERIOD = "time_period"
ATTR_MODE = "on_off"
DEVICETYPES = {
"binary_sensor": "device_list_binary_sensor",
"climate": "device_list_climate",
"water_heater": "device_list_water_heater",
"light": "device_list_light",
"switch": "device_list_plug",
"sensor": "device_list_sensor",
}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
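# Both boost schemas below validate the time period as a positive timedelta
# and convert it to whole minutes via the trailing lambda; the service
# handlers in setup() pass that value on to Pyhiveapi as `minutes`.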
BOOST_HEATING_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_TIME_PERIOD): vol.All(
cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds() // 60
),
vol.Optional(ATTR_TEMPERATURE, default="25.0"): vol.Coerce(float),
}
)
BOOST_HOT_WATER_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Optional(ATTR_TIME_PERIOD, default="00:30:00"): vol.All(
cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds() // 60
),
vol.Required(ATTR_MODE): cv.string,
}
)
class HiveSession:
"""Initiate Hive Session Class."""
entity_lookup = {}
core = None
heating = None
hotwater = None
light = None
sensor = None
switch = None
weather = None
attributes = None
trv = None
def setup(hass, config):
"""Set up the Hive Component."""
def heating_boost(service):
"""Handle the service call."""
node_id = HiveSession.entity_lookup.get(service.data[ATTR_ENTITY_ID])
if not node_id:
            _LOGGER.error("Cannot boost: invalid entity id entered")
return
minutes = service.data[ATTR_TIME_PERIOD]
temperature = service.data[ATTR_TEMPERATURE]
session.heating.turn_boost_on(node_id, minutes, temperature)
def hot_water_boost(service):
"""Handle the service call."""
node_id = HiveSession.entity_lookup.get(service.data[ATTR_ENTITY_ID])
if not node_id:
            _LOGGER.error("Cannot boost: invalid entity id entered")
return
minutes = service.data[ATTR_TIME_PERIOD]
mode = service.data[ATTR_MODE]
if mode == "on":
session.hotwater.turn_boost_on(node_id, minutes)
elif mode == "off":
session.hotwater.turn_boost_off(node_id)
session = HiveSession()
session.core = Pyhiveapi()
username = config[DOMAIN][CONF_USERNAME]
password = config[DOMAIN][CONF_PASSWORD]
update_interval = config[DOMAIN][CONF_SCAN_INTERVAL]
devices = session.core.initialise_api(username, password, update_interval)
if devices is None:
_LOGGER.error("Hive API initialization failed")
return False
session.sensor = Pyhiveapi.Sensor()
session.heating = Pyhiveapi.Heating()
session.hotwater = Pyhiveapi.Hotwater()
session.light = Pyhiveapi.Light()
session.switch = Pyhiveapi.Switch()
session.weather = Pyhiveapi.Weather()
session.attributes = Pyhiveapi.Attributes()
hass.data[DATA_HIVE] = session
for ha_type in DEVICETYPES:
devicelist = devices.get(DEVICETYPES[ha_type])
if devicelist:
load_platform(hass, ha_type, DOMAIN, devicelist, config)
if ha_type == "climate":
hass.services.register(
DOMAIN,
SERVICE_BOOST_HEATING,
heating_boost,
schema=BOOST_HEATING_SCHEMA,
)
if ha_type == "water_heater":
hass.services.register(
DOMAIN,
SERVICE_BOOST_HOT_WATER,
hot_water_boost,
schema=BOOST_HOT_WATER_SCHEMA,
)
return True
def refresh_system(func):
"""Force update all entities after state change."""
@wraps(func)
def wrapper(self, *args, **kwargs):
func(self, *args, **kwargs)
dispatcher_send(self.hass, DOMAIN)
return wrapper
class HiveEntity(Entity):
"""Initiate Hive Base Class."""
def __init__(self, session, hive_device):
"""Initialize the instance."""
self.node_id = hive_device["Hive_NodeID"]
self.node_name = hive_device["Hive_NodeName"]
self.device_type = hive_device["HA_DeviceType"]
self.node_device_type = hive_device["Hive_DeviceType"]
self.session = session
self.attributes = {}
self._unique_id = f"{self.node_id}-{self.device_type}"
async def async_added_to_hass(self):
"""When entity is added to Home Assistant."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
)
if self.device_type in SERVICES:
self.session.entity_lookup[self.entity_id] = self.node_id
|
import os
import os.path
import sys
import enum
import subprocess
from xml.etree import ElementTree
import attr
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils as scriptutils
from qutebrowser.utils import utils
@attr.s
class Message:
"""A message shown by coverage.py."""
typ = attr.ib()
filename = attr.ib()
text = attr.ib()
def show(self):
"""Print this message."""
if scriptutils.ON_CI:
scriptutils.gha_error(self.text)
else:
print(self.text)
class MsgType(enum.Enum):
"""The type of a message to be output."""
insufficient_coverage = enum.auto()
perfect_file = enum.auto()
# A list of (test_file, tested_file) tuples. test_file can be None.
PERFECT_FILES = [
(None,
'qutebrowser/commands/cmdexc.py'),
('tests/unit/commands/test_argparser.py',
'qutebrowser/commands/argparser.py'),
('tests/unit/api/test_cmdutils.py',
'qutebrowser/api/cmdutils.py'),
(None,
'qutebrowser/api/apitypes.py'),
(None,
'qutebrowser/api/config.py'),
(None,
'qutebrowser/api/message.py'),
(None,
'qutebrowser/api/qtutils.py'),
(None,
'qutebrowser/qt.py'),
('tests/unit/browser/webkit/test_cache.py',
'qutebrowser/browser/webkit/cache.py'),
('tests/unit/browser/webkit/test_cookies.py',
'qutebrowser/browser/webkit/cookies.py'),
('tests/unit/browser/test_history.py',
'qutebrowser/browser/history.py'),
('tests/unit/browser/test_pdfjs.py',
'qutebrowser/browser/pdfjs.py'),
('tests/unit/browser/webkit/http/test_http.py',
'qutebrowser/browser/webkit/http.py'),
('tests/unit/browser/webkit/http/test_content_disposition.py',
'qutebrowser/browser/webkit/rfc6266.py'),
# ('tests/unit/browser/webkit/test_webkitelem.py',
# 'qutebrowser/browser/webkit/webkitelem.py'),
# ('tests/unit/browser/webkit/test_webkitelem.py',
# 'qutebrowser/browser/webelem.py'),
('tests/unit/browser/webkit/network/test_filescheme.py',
'qutebrowser/browser/webkit/network/filescheme.py'),
('tests/unit/browser/webkit/network/test_networkreply.py',
'qutebrowser/browser/webkit/network/networkreply.py'),
('tests/unit/browser/test_signalfilter.py',
'qutebrowser/browser/signalfilter.py'),
(None,
'qutebrowser/browser/webengine/certificateerror.py'),
# ('tests/unit/browser/test_tab.py',
# 'qutebrowser/browser/tab.py'),
('tests/unit/keyinput/test_basekeyparser.py',
'qutebrowser/keyinput/basekeyparser.py'),
('tests/unit/keyinput/test_keyutils.py',
'qutebrowser/keyinput/keyutils.py'),
('tests/unit/components/test_readlinecommands.py',
'qutebrowser/components/readlinecommands.py'),
('tests/unit/misc/test_autoupdate.py',
'qutebrowser/misc/autoupdate.py'),
('tests/unit/misc/test_split.py',
'qutebrowser/misc/split.py'),
('tests/unit/misc/test_msgbox.py',
'qutebrowser/misc/msgbox.py'),
('tests/unit/misc/test_checkpyver.py',
'qutebrowser/misc/checkpyver.py'),
('tests/unit/misc/test_guiprocess.py',
'qutebrowser/misc/guiprocess.py'),
('tests/unit/misc/test_editor.py',
'qutebrowser/misc/editor.py'),
('tests/unit/misc/test_cmdhistory.py',
'qutebrowser/misc/cmdhistory.py'),
('tests/unit/misc/test_ipc.py',
'qutebrowser/misc/ipc.py'),
('tests/unit/misc/test_keyhints.py',
'qutebrowser/misc/keyhintwidget.py'),
('tests/unit/misc/test_pastebin.py',
'qutebrowser/misc/pastebin.py'),
('tests/unit/misc/test_objects.py',
'qutebrowser/misc/objects.py'),
('tests/unit/misc/test_throttle.py',
'qutebrowser/misc/throttle.py'),
(None,
'qutebrowser/mainwindow/statusbar/keystring.py'),
('tests/unit/mainwindow/statusbar/test_percentage.py',
'qutebrowser/mainwindow/statusbar/percentage.py'),
('tests/unit/mainwindow/statusbar/test_progress.py',
'qutebrowser/mainwindow/statusbar/progress.py'),
('tests/unit/mainwindow/statusbar/test_tabindex.py',
'qutebrowser/mainwindow/statusbar/tabindex.py'),
('tests/unit/mainwindow/statusbar/test_textbase.py',
'qutebrowser/mainwindow/statusbar/textbase.py'),
('tests/unit/mainwindow/statusbar/test_url.py',
'qutebrowser/mainwindow/statusbar/url.py'),
('tests/unit/mainwindow/statusbar/test_backforward.py',
'qutebrowser/mainwindow/statusbar/backforward.py'),
('tests/unit/mainwindow/test_messageview.py',
'qutebrowser/mainwindow/messageview.py'),
('tests/unit/config/test_config.py',
'qutebrowser/config/config.py'),
('tests/unit/config/test_stylesheet.py',
'qutebrowser/config/stylesheet.py'),
('tests/unit/config/test_configdata.py',
'qutebrowser/config/configdata.py'),
('tests/unit/config/test_configexc.py',
'qutebrowser/config/configexc.py'),
('tests/unit/config/test_configfiles.py',
'qutebrowser/config/configfiles.py'),
('tests/unit/config/test_configtypes.py',
'qutebrowser/config/configtypes.py'),
('tests/unit/config/test_configinit.py',
'qutebrowser/config/configinit.py'),
('tests/unit/config/test_qtargs.py',
'qutebrowser/config/qtargs.py'),
('tests/unit/config/test_configcommands.py',
'qutebrowser/config/configcommands.py'),
('tests/unit/config/test_configutils.py',
'qutebrowser/config/configutils.py'),
('tests/unit/config/test_configcache.py',
'qutebrowser/config/configcache.py'),
('tests/unit/utils/test_qtutils.py',
'qutebrowser/utils/qtutils.py'),
('tests/unit/utils/test_standarddir.py',
'qutebrowser/utils/standarddir.py'),
('tests/unit/utils/test_urlutils.py',
'qutebrowser/utils/urlutils.py'),
('tests/unit/utils/usertypes',
'qutebrowser/utils/usertypes.py'),
('tests/unit/utils/test_utils.py',
'qutebrowser/utils/utils.py'),
('tests/unit/utils/test_version.py',
'qutebrowser/utils/version.py'),
('tests/unit/utils/test_debug.py',
'qutebrowser/utils/debug.py'),
('tests/unit/utils/test_jinja.py',
'qutebrowser/utils/jinja.py'),
('tests/unit/utils/test_error.py',
'qutebrowser/utils/error.py'),
('tests/unit/utils/test_javascript.py',
'qutebrowser/utils/javascript.py'),
('tests/unit/utils/test_urlmatch.py',
'qutebrowser/utils/urlmatch.py'),
(None,
'qutebrowser/completion/models/util.py'),
('tests/unit/completion/test_models.py',
'qutebrowser/completion/models/urlmodel.py'),
('tests/unit/completion/test_models.py',
'qutebrowser/completion/models/configmodel.py'),
('tests/unit/completion/test_histcategory.py',
'qutebrowser/completion/models/histcategory.py'),
('tests/unit/completion/test_listcategory.py',
'qutebrowser/completion/models/listcategory.py'),
('tests/unit/browser/webengine/test_spell.py',
'qutebrowser/browser/webengine/spell.py'),
('tests/unit/browser/webengine/test_webengine_cookies.py',
'qutebrowser/browser/webengine/cookies.py'),
('tests/unit/browser/webengine/test_darkmode.py',
'qutebrowser/browser/webengine/darkmode.py'),
]
# 100% coverage because of end2end tests, but no perfect unit tests yet.
WHITELISTED_FILES = [
'qutebrowser/browser/webkit/webkitinspector.py',
'qutebrowser/misc/debugcachestats.py',
'qutebrowser/keyinput/macros.py',
'qutebrowser/browser/webkit/webkitelem.py',
'qutebrowser/api/interceptor.py',
]
class Skipped(Exception):
"""Exception raised when skipping coverage checks."""
def __init__(self, reason):
self.reason = reason
super().__init__("Skipping coverage checks " + reason)
def _get_filename(filename):
"""Transform the absolute test filenames to relative ones."""
if os.path.isabs(filename):
basedir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..'))
common_path = os.path.commonprefix([basedir, filename])
if common_path:
filename = filename[len(common_path):].lstrip('/')
return filename
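# e.g. assuming this script lives at <repo>/scripts/dev/check_coverage.py,
# _get_filename('<repo>/qutebrowser/utils/utils.py') returns
# 'qutebrowser/utils/utils.py' (a hypothetical path for illustration).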
def check(fileobj, perfect_files):
"""Main entry point which parses/checks coverage.xml if applicable."""
if not utils.is_linux:
raise Skipped("on non-Linux system.")
if '-k' in sys.argv[1:]:
raise Skipped("because -k is given.")
if '-m' in sys.argv[1:]:
raise Skipped("because -m is given.")
if '--lf' in sys.argv[1:]:
raise Skipped("because --lf is given.")
perfect_src_files = [e[1] for e in perfect_files]
filename_args = [arg for arg in sys.argv[1:]
if arg.startswith('tests' + os.sep)]
filtered_files = [tpl[1] for tpl in perfect_files if tpl[0] in
filename_args]
if filename_args and not filtered_files:
raise Skipped("because there is nothing to check.")
tree = ElementTree.parse(fileobj)
classes = tree.getroot().findall('./packages/package/classes/class')
messages = []
for klass in classes:
filename = _get_filename(klass.attrib['filename'])
line_cov = float(klass.attrib['line-rate']) * 100
branch_cov = float(klass.attrib['branch-rate']) * 100
if filtered_files and filename not in filtered_files:
continue
assert 0 <= line_cov <= 100, line_cov
assert 0 <= branch_cov <= 100, branch_cov
assert '\\' not in filename, filename
is_bad = line_cov < 100 or branch_cov < 100
if filename in perfect_src_files and is_bad:
text = "{} has {:.2f}% line and {:.2f}% branch coverage!".format(
filename, line_cov, branch_cov)
messages.append(Message(MsgType.insufficient_coverage, filename,
text))
elif (filename not in perfect_src_files and
not is_bad and
filename not in WHITELISTED_FILES and
not filename.startswith('tests/')):
text = ("{} has 100% coverage but is not in "
"perfect_files!".format(filename))
messages.append(Message(MsgType.perfect_file, filename, text))
return messages
def main_check():
"""Check coverage after a test run."""
try:
with open('coverage.xml', encoding='utf-8') as f:
messages = check(f, PERFECT_FILES)
except Skipped as e:
print(e)
messages = []
if messages:
print()
print()
scriptutils.print_title("Coverage check failed")
for msg in messages:
msg.show()
print()
filters = ','.join(msg.filename for msg in messages)
subprocess.run([sys.executable, '-m', 'coverage', 'report',
'--show-missing', '--include', filters], check=True)
print()
print("To debug this, run 'tox -e py36-pyqt515-cov' "
"(replace Python/Qt versions based on your system) locally and check "
"htmlcov/index.html")
print("or check https://codecov.io/github/qutebrowser/qutebrowser")
print()
if 'CI' in os.environ:
print("Keeping coverage.xml on CI.")
else:
os.remove('coverage.xml')
return 1 if messages else 0
def main_check_all():
"""Check the coverage for all files individually.
This makes sure the files have 100% coverage without running unrelated
tests.
This runs pytest with the used executable, so check_coverage.py should be
called with something like ./.tox/py36/bin/python.
"""
for test_file, src_file in PERFECT_FILES:
if test_file is None:
continue
subprocess.run(
[sys.executable, '-m', 'pytest', '--cov', 'qutebrowser',
'--cov-report', 'xml', test_file], check=True)
with open('coverage.xml', encoding='utf-8') as f:
messages = check(f, [(test_file, src_file)])
os.remove('coverage.xml')
messages = [msg for msg in messages
if msg.typ == MsgType.insufficient_coverage]
if messages:
for msg in messages:
print(msg.text)
return 1
else:
print("Check ok!")
return 0
def main():
scriptutils.change_cwd()
if '--check-all' in sys.argv:
return main_check_all()
else:
return main_check()
if __name__ == '__main__':
sys.exit(main())
|
from pathlib import Path
import pytest
from numpy.testing import assert_array_almost_equal
from mne.io import read_raw_nihon, read_raw_edf
from mne.io.tests.test_raw import _test_raw_reader
from mne.utils import run_tests_if_main
from mne.datasets.testing import data_path, requires_testing_data
from mne.io.nihon.nihon import (_read_nihon_header, _read_nihon_metadata,
_read_nihon_annotations)
@requires_testing_data
def test_nihon_eeg():
"""Test reading Nihon Kohden EEG files."""
fname = Path(data_path()) / 'NihonKohden' / 'MB0400FU.EEG'
raw = read_raw_nihon(fname.as_posix(), preload=True)
assert 'RawNihon' in repr(raw)
_test_raw_reader(read_raw_nihon, fname=fname, test_scaling=False)
fname_edf = Path(data_path()) / 'NihonKohden' / 'MB0400FU.EDF'
raw_edf = read_raw_edf(fname_edf, preload=True)
assert raw._data.shape == raw_edf._data.shape
    assert raw.info['sfreq'] == raw_edf.info['sfreq']
# ch names and order are switched in the EDF
edf_ch_names = {x: x.split(' ')[1].replace('-Ref', '')
for x in raw_edf.ch_names}
raw_edf.rename_channels(edf_ch_names)
assert raw.ch_names == raw_edf.ch_names
for i, an1 in enumerate(raw.annotations):
# EDF has some weird annotations, which are not in the LOG file
an2 = raw_edf.annotations[i * 2 + 1]
assert an1['onset'] == an2['onset']
assert an1['duration'] == an2['duration']
# Also, it prepends 'Segment: ' to some annotations
t_desc = an2['description'].replace('Segment: ', '')
assert an1['description'] == t_desc
assert_array_almost_equal(raw._data, raw_edf._data)
with pytest.raises(ValueError, match='Not a valid Nihon Kohden EEG file'):
raw = read_raw_nihon(fname_edf, preload=True)
with pytest.raises(ValueError, match='Not a valid Nihon Kohden EEG file'):
raw = _read_nihon_header(fname_edf)
bad_fname = Path(data_path()) / 'eximia' / 'text_eximia.nxe'
msg = 'No PNT file exists. Metadata will be blank'
with pytest.warns(RuntimeWarning, match=msg):
meta = _read_nihon_metadata(bad_fname)
assert len(meta) == 0
msg = 'No LOG file exists. Annotations will not be read'
with pytest.warns(RuntimeWarning, match=msg):
annot = _read_nihon_annotations(bad_fname, orig_time=None)
assert annot is None
run_tests_if_main()
|
import datetime
import fnmatch
import json
import logging
import time
from copy import deepcopy
import pkg_resources
from yandextank.plugins.DataUploader.client import LPRequisites
from ...common.interfaces import MonitoringDataListener, AbstractInfoWidget, MonitoringPlugin
from ...common.util import expand_to_seconds, read_resource
from ..Autostop import Plugin as AutostopPlugin, AbstractCriterion
from ..Console import Plugin as ConsolePlugin
from ..Telegraf.collector import MonitoringCollector
from configparser import NoOptionError
logger = logging.getLogger(__name__)
class Plugin(MonitoringPlugin):
""" resource mon plugin """
SECTION = 'telegraf' # may be redefined to 'monitoring' sometimes.
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
self.jobno = None
self.default_target = None
self.default_config_path = pkg_resources.resource_filename('yandextank.plugins.Telegraf',
"config/monitoring_default_config.xml")
self.process = None
self.monitoring = MonitoringCollector(
disguise_hostnames=self.get_option('disguise_hostnames'),
kill_old=self.get_option('kill_old'))
self.die_on_fail = True
self.data_file = None
self.mon_saver = None
self._config = None
@staticmethod
def get_key():
return __file__
def start_test(self):
if self.monitoring:
self.monitoring.load_start_time = time.time()
logger.debug(
"load_start_time = %s", self.monitoring.load_start_time)
def get_available_options(self):
return [
"config",
"default_target",
"ssh_timeout",
"disguise_hostnames"
]
def __detect_configuration(self):
"""
we need to be flexible in order to determine which plugin's configuration
specified and make appropriate configs to metrics collector
:return: SECTION name or None for defaults
"""
try:
is_telegraf = self.core.get_option('telegraf', "config")
except KeyError:
is_telegraf = None
try:
is_monitoring = self.core.get_option('monitoring', "config")
except KeyError:
is_monitoring = None
if is_telegraf and is_monitoring:
raise ValueError(
'Both telegraf and monitoring configs specified. '
'Clean up your config and delete one of them')
if is_telegraf and not is_monitoring:
return 'telegraf'
if not is_telegraf and is_monitoring:
return 'monitoring'
if not is_telegraf and not is_monitoring:
# defaults target logic
try:
is_telegraf_dt = self.core.get_option('telegraf')
except NoOptionError:
is_telegraf_dt = None
try:
is_monitoring_dt = self.core.get_option('monitoring')
except BaseException:
is_monitoring_dt = None
if is_telegraf_dt and is_monitoring_dt:
raise ValueError(
'Both telegraf and monitoring default targets specified. '
'Clean up your config and delete one of them')
if is_telegraf_dt and not is_monitoring_dt:
return
if not is_telegraf_dt and is_monitoring_dt:
self.core.set_option(
"telegraf", "default_target", is_monitoring_dt)
if not is_telegraf_dt and not is_monitoring_dt:
return
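    # Config resolution order for the property below: a literal "none"
    # disables monitoring; a value starting with "<" is treated as inline
    # XML; "auto" falls back to the bundled default config; any other value
    # is read as a file path or URL via read_resource(). Non-"none" configs
    # are saved to a temp XML file and registered as an artifact.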
@property
def config(self):
"""
:rtype: str
"""
if self._config is None:
value = self.get_option('config')
if value.lower() == "none":
self.monitoring = None
self.die_on_fail = False
self._config = value
# handle http/https url or file path
else:
if value.startswith("<"):
config_contents = value
elif value.lower() == "auto":
self.die_on_fail = False
config_contents = read_resource(self.default_config_path)
else:
config_contents = read_resource(value)
self._config = self._save_config_contents(config_contents)
return self._config
def _save_config_contents(self, contents):
xmlfile = self.core.mkstemp(".xml", "monitoring_")
self.core.add_artifact_file(xmlfile)
with open(xmlfile, "w") as f:
f.write(contents)
return xmlfile
def configure(self):
self.detected_conf = self.__detect_configuration()
if self.detected_conf:
logger.info(
'Detected monitoring configuration: %s', self.detected_conf)
self.SECTION = self.detected_conf
self.default_target = self.get_option("default_target", "localhost")
if self.config.lower() == "none":
self.monitoring = None
self.die_on_fail = False
return
with open(self.config) as f:
self.core.add_artifact_to_send(LPRequisites.MONITORING, str(f.read()))
# FIXME [legacy] backward compatibility with Monitoring module
# configuration below.
self.monitoring.ssh_timeout = expand_to_seconds(
self.get_option("ssh_timeout", "5s"))
try:
autostop = self.core.get_plugin_of_type(AutostopPlugin)
autostop.add_criterion_class(MetricHigherCriterion)
autostop.add_criterion_class(MetricLowerCriterion)
except KeyError:
logger.debug(
"No autostop plugin found, not adding instances criterion")
def prepare_test(self):
if not self.config or self.config.lower() == 'none':
return
if "Phantom" in self.core.job.generator_plugin.__module__:
phantom = self.core.job.generator_plugin
info = phantom.get_info()
if info:
self.default_target = info.address
logger.debug(
"Changed monitoring target to %s", self.default_target)
self.monitoring.config = self.config
if self.default_target:
self.monitoring.default_target = self.default_target
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except Exception as ex:
logger.debug("Console not found: %s", ex)
console = None
if console:
widget = MonitoringWidget(self)
console.add_info_widget(widget)
self.monitoring.add_listener(widget)
try:
self.monitoring.prepare()
self.monitoring.start()
self.add_cleanup(self.monitoring.stop)
count = 0
while not self.monitoring.first_data_received and count < 15 * 5:
time.sleep(0.2)
self.monitoring.poll()
count += 1
except BaseException:
logger.error("Could not start monitoring", exc_info=True)
if self.die_on_fail:
raise
else:
self.monitoring = None
def add_listener(self, plugin):
return self.monitoring.add_listener(plugin)
def is_test_finished(self):
if self.monitoring:
monitoring_data = self.monitoring.poll()
logger.debug("Monitoring got %s lines", len(monitoring_data))
self.core.publish_monitoring_data(monitoring_data)
return -1
def end_test(self, retcode):
logger.info("Finishing monitoring")
if self.monitoring:
self.monitoring.stop()
for log in self.monitoring.artifact_files:
self.core.add_artifact_file(log)
self.core.publish_monitoring_data(self.monitoring.get_rest_data())
if self.mon_saver:
self.mon_saver.close()
return retcode
def post_process(self, retcode):
return retcode
class SaveMonToFile(MonitoringDataListener):
"""
Default listener - saves data to file
"""
    def __init__(self, out_file):
        MonitoringDataListener.__init__(self)
        self.store = None
        if out_file:
            self.store = open(out_file, 'w')
def monitoring_data(self, data):
self.store.write(json.dumps(data))
self.store.write('\n')
self.store.flush()
def close(self):
""" close open files """
logger.debug("Closing monitoring file")
if self.store:
self.store.close()
class MonitoringWidget(AbstractInfoWidget, MonitoringDataListener):
"""
Screen widget
"""
def __init__(self, owner):
AbstractInfoWidget.__init__(self)
self.owner = owner
self.data = {}
self.sign = {}
self.time = {}
self.max_metric_len = 0
def get_index(self):
return 50
def __handle_data_items(self, host, data):
""" store metric in data tree and calc offset signs
sign < 0 is CYAN, means metric value is lower then previous,
sign > 1 is YELLOW, means metric value is higher then prevoius,
sign == 0 is WHITE, means initial or equal metric value
"""
for metric, value in data.items():
if value == '':
self.sign[host][metric] = -1
self.data[host][metric] = value
else:
if not self.data[host].get(metric, None):
self.sign[host][metric] = 1
elif float(value) > float(self.data[host][metric]):
self.sign[host][metric] = 1
elif float(value) < float(self.data[host][metric]):
self.sign[host][metric] = -1
else:
self.sign[host][metric] = 0
self.data[host][metric] = "%.2f" % float(value)
def monitoring_data(self, block):
# block sample :
# [{'timestamp': 1480536634,
# 'data': {
# 'some.hostname.tld': {
# 'comment': '',
# 'metrics': {
# 'custom:diskio_reads': 0,
# 'Net_send': 9922,
# 'CPU_steal': 0,
# 'Net_recv': 8489
# }
# }
# },
# ...
# }]
for chunk in block:
host = next(iter(chunk['data'].keys()))
self.time[host] = chunk['timestamp']
# if initial call, we create dicts w/ data and `signs`
# `signs` used later to paint metrics w/ different colors
if not self.data.get(host, None):
self.data[host] = {}
self.sign[host] = {}
for key, value in chunk['data'][host]['metrics'].items():
self.sign[host][key] = 0
self.data[host][key] = value
else:
self.__handle_data_items(host, chunk['data'][host]['metrics'])
def render(self, screen):
if not self.owner.monitoring:
return "Monitoring is " + screen.markup.RED + "offline" + screen.markup.RESET
else:
res = "Monitoring is " + screen.markup.GREEN + \
"online" + screen.markup.RESET + ":\n"
for hostname, metrics in self.data.items():
tm_stamp = datetime.datetime.fromtimestamp(
float(self.time[hostname])).strftime('%H:%M:%S')
res += (" " + screen.markup.CYAN + "%s" + screen.markup.RESET + " at %s:\n") % (hostname, tm_stamp)
for metric, value in sorted(metrics.items()):
if self.sign[hostname][metric] > 0:
value = screen.markup.YELLOW + value + screen.markup.RESET
elif self.sign[hostname][metric] < 0:
value = screen.markup.CYAN + value + screen.markup.RESET
res += " %s%s: %s\n" % (
' ' * (self.max_metric_len - len(metric)),
metric.replace('custom:', '').replace('_', ' '), value)
return res.strip()
class AbstractMetricCriterion(AbstractCriterion, MonitoringDataListener):
""" Parent class for metric criterion """
def __init__(self, autostop, param_str):
AbstractCriterion.__init__(self)
try:
self.mon = autostop.core.get_plugin_of_type(Plugin)
if self.mon.monitoring:
self.mon.monitoring.add_listener(self)
except KeyError:
logger.warning("No monitoring module, mon autostop disabled")
self.triggered = False
self.autostop = autostop
self.host = param_str.split(',')[0].strip()
self.metric = param_str.split(',')[1].strip()
self.value_limit = float(param_str.split(',')[2])
self.seconds_limit = expand_to_seconds(param_str.split(',')[3])
self.last_second = None
self.seconds_count = 0
def monitoring_data(self, _block):
if self.triggered:
return
block = deepcopy(_block)
for chunk in block:
host = next(iter(chunk['data'].keys()))
data = chunk['data'][host]['metrics']
if not fnmatch.fnmatch(host, self.host):
continue
            # some magic: convert custom metric names back into the names
            # that were in the config
for metric_name in tuple(data.keys()):
if metric_name.startswith('custom:'):
config_metric_name = metric_name.replace('custom:', '')
data[config_metric_name] = data.pop(metric_name)
if self.metric not in data or not data[self.metric]:
data[self.metric] = 0
logger.debug(
"Compare %s %s/%s=%s to %s",
self.get_type_string(), host, self.metric, data[self.metric],
self.value_limit)
if self.comparison_fn(float(data[self.metric]), self.value_limit):
if not self.seconds_count:
self.cause_second = self.last_second
logger.debug(self.explain())
self.seconds_count += 1
if self.seconds_count >= self.seconds_limit:
logger.debug("Triggering autostop")
self.triggered = True
return
else:
self.seconds_count = 0
def notify(self, data, stat):
if self.seconds_count:
self.autostop.add_counting(self)
self.last_second = (data, stat)
return self.triggered
def comparison_fn(self, arg1, arg2):
""" comparison function """
raise NotImplementedError()
class MetricHigherCriterion(AbstractMetricCriterion):
""" trigger if metric is higher than limit """
def __init__(self, autostop, param_str):
AbstractMetricCriterion.__init__(self, autostop, param_str)
def get_rc(self):
return 31
@staticmethod
def get_type_string():
return 'metric_higher'
def explain(self):
items = (self.host, self.metric, self.value_limit, self.seconds_count)
return "%s/%s metric value is higher than %s for %s seconds" % items
def widget_explain(self):
items = (
self.host, self.metric, self.value_limit, self.seconds_count,
self.seconds_limit)
return "%s/%s > %s for %s/%ss" % items, float(
self.seconds_count) / self.seconds_limit
def comparison_fn(self, arg1, arg2):
return arg1 > arg2
class MetricLowerCriterion(AbstractMetricCriterion):
""" trigger if metric is lower than limit """
def __init__(self, autostop, param_str):
AbstractMetricCriterion.__init__(self, autostop, param_str)
def get_rc(self):
return 32
@staticmethod
def get_type_string():
return 'metric_lower'
def explain(self):
items = (self.host, self.metric, self.value_limit, self.seconds_count)
return "%s/%s metric value is lower than %s for %s seconds" % items
def widget_explain(self):
items = (
self.host, self.metric, self.value_limit, self.seconds_count,
self.seconds_limit)
return "%s/%s < %s for %s/%ss" % items, float(
self.seconds_count) / self.seconds_limit
def comparison_fn(self, arg1, arg2):
return arg1 < arg2
|
import json
import logging
from typing import Callable, Dict, Tuple
from aiohttp.web import Response, json_response
from nacl.encoding import Base64Encoder
from nacl.secret import SecretBox
from homeassistant.const import CONTENT_TYPE_JSON, HTTP_BAD_REQUEST, HTTP_OK
from homeassistant.core import Context
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_APP_DATA,
ATTR_APP_ID,
ATTR_APP_NAME,
ATTR_APP_VERSION,
ATTR_DEVICE_ID,
ATTR_DEVICE_NAME,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_OS_VERSION,
ATTR_SUPPORTS_ENCRYPTION,
CONF_SECRET,
CONF_USER_ID,
DATA_BINARY_SENSOR,
DATA_DELETED_IDS,
DATA_SENSOR,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
def setup_decrypt() -> Tuple[int, Callable]:
"""Return decryption function and length of key.
Async friendly.
"""
def decrypt(ciphertext, key):
"""Decrypt ciphertext using key."""
return SecretBox(key).decrypt(ciphertext, encoder=Base64Encoder)
return (SecretBox.KEY_SIZE, decrypt)
def setup_encrypt() -> Tuple[int, Callable]:
"""Return encryption function and length of key.
Async friendly.
"""
    def encrypt(plaintext, key):
        """Encrypt plaintext using key."""
        return SecretBox(key).encrypt(plaintext, encoder=Base64Encoder)
return (SecretBox.KEY_SIZE, encrypt)
def _decrypt_payload(key: str, ciphertext: str) -> Dict[str, str]:
"""Decrypt encrypted payload."""
try:
keylen, decrypt = setup_decrypt()
except OSError:
_LOGGER.warning("Ignoring encrypted payload because libsodium not installed")
return None
if key is None:
_LOGGER.warning("Ignoring encrypted payload because no decryption key known")
return None
key = key.encode("utf-8")
key = key[:keylen]
key = key.ljust(keylen, b"\0")
try:
message = decrypt(ciphertext, key)
message = json.loads(message.decode("utf-8"))
_LOGGER.debug("Successfully decrypted mobile_app payload")
return message
except ValueError:
_LOGGER.warning("Ignoring encrypted payload because unable to decrypt")
return None
def registration_context(registration: Dict) -> Context:
"""Generate a context from a request."""
return Context(user_id=registration[CONF_USER_ID])
def empty_okay_response(headers: Dict = None, status: int = HTTP_OK) -> Response:
"""Return a Response with empty JSON object and a 200."""
return Response(
text="{}", status=status, content_type=CONTENT_TYPE_JSON, headers=headers
)
def error_response(
code: str, message: str, status: int = HTTP_BAD_REQUEST, headers: dict = None
) -> Response:
"""Return an error Response."""
return json_response(
{"success": False, "error": {"code": code, "message": message}},
status=status,
headers=headers,
)
def supports_encryption() -> bool:
"""Test if we support encryption."""
try:
import nacl # noqa: F401 pylint: disable=unused-import, import-outside-toplevel
return True
except OSError:
return False
def safe_registration(registration: Dict) -> Dict:
"""Return a registration without sensitive values."""
# Sensitive values: webhook_id, secret, cloudhook_url
return {
ATTR_APP_DATA: registration[ATTR_APP_DATA],
ATTR_APP_ID: registration[ATTR_APP_ID],
ATTR_APP_NAME: registration[ATTR_APP_NAME],
ATTR_APP_VERSION: registration[ATTR_APP_VERSION],
ATTR_DEVICE_NAME: registration[ATTR_DEVICE_NAME],
ATTR_MANUFACTURER: registration[ATTR_MANUFACTURER],
ATTR_MODEL: registration[ATTR_MODEL],
ATTR_OS_VERSION: registration[ATTR_OS_VERSION],
ATTR_SUPPORTS_ENCRYPTION: registration[ATTR_SUPPORTS_ENCRYPTION],
}
def savable_state(hass: HomeAssistantType) -> Dict:
"""Return a clean object containing things that should be saved."""
return {
DATA_BINARY_SENSOR: hass.data[DOMAIN][DATA_BINARY_SENSOR],
DATA_DELETED_IDS: hass.data[DOMAIN][DATA_DELETED_IDS],
DATA_SENSOR: hass.data[DOMAIN][DATA_SENSOR],
}
def webhook_response(
data, *, registration: Dict, status: int = HTTP_OK, headers: Dict = None
) -> Response:
"""Return a encrypted response if registration supports it."""
data = json.dumps(data, cls=JSONEncoder)
if registration[ATTR_SUPPORTS_ENCRYPTION]:
keylen, encrypt = setup_encrypt()
key = registration[CONF_SECRET].encode("utf-8")
key = key[:keylen]
key = key.ljust(keylen, b"\0")
enc_data = encrypt(data.encode("utf-8"), key).decode("utf-8")
data = json.dumps({"encrypted": True, "encrypted_data": enc_data})
return Response(
text=data, status=status, content_type=CONTENT_TYPE_JSON, headers=headers
)
def device_info(registration: Dict) -> Dict:
"""Return the device info for this registration."""
return {
"identifiers": {(DOMAIN, registration[ATTR_DEVICE_ID])},
"manufacturer": registration[ATTR_MANUFACTURER],
"model": registration[ATTR_MODEL],
"device_name": registration[ATTR_DEVICE_NAME],
"sw_version": registration[ATTR_OS_VERSION],
}
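# Hedged round-trip sketch (not part of the original module): shows how the
# key preparation used in _decrypt_payload()/webhook_response() pairs the
# setup_encrypt()/setup_decrypt() helpers. Assumes PyNaCl is installed and
# the imports above resolve.
if __name__ == "__main__":
    keylen, encrypt = setup_encrypt()
    _, decrypt = setup_decrypt()
    key = "shared secret".encode("utf-8")[:keylen].ljust(keylen, b"\0")
    token = encrypt(b'{"type": "ping"}', key)
    assert decrypt(token, key) == b'{"type": "ping"}'
    print("mobile_app crypto sketch OK")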
|
import time
from functools import wraps
import django
import tablib
from memory_profiler import memory_usage
from import_export import resources
from import_export.instance_loaders import CachedInstanceLoader
django.setup()
from core.models import Book # isort:skip
NUM_ROWS = 250
class _BookResource(resources.ModelResource):
class Meta:
model = Book
fields = ('id', 'name', 'author_email', 'price')
use_bulk = True
batch_size = 1000
skip_diff = True
force_init_instance = True
instance_loader_class = CachedInstanceLoader
def profile(fn):
@wraps(fn)
def inner(*args, **kwargs):
fn_kwargs_str = ', '.join(f'{k}={v}' for k, v in kwargs.items())
print(f'\n{fn.__name__}({fn_kwargs_str})')
# Measure time
t = time.perf_counter()
retval = fn(*args, **kwargs)
elapsed = time.perf_counter() - t
print(f'Time {elapsed:0.4}')
# Measure memory
mem, retval = memory_usage((fn, args, kwargs), retval=True, timeout=200, interval=1e-7)
print(f'Memory {max(mem) - min(mem)}')
return retval
return inner
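# NOTE: profile() calls fn twice - once directly for the timer and once
# inside memory_usage() - so each profiled do_import() runs the import
# twice; that is why do_create() asserts NUM_ROWS * 2 rows.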
@profile
def do_import(resource, dataset):
resource.import_data(dataset)
def do_create():
rows = [('', 'Some new book', '[email protected]', '10.25')] * NUM_ROWS
dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price'])
book_resource = _BookResource()
do_import(book_resource, dataset)
assert Book.objects.count() == NUM_ROWS * 2
Book.objects.all().delete()
def do_update():
rows = [('', 'Some new book', '[email protected]', '10.25')] * NUM_ROWS
books = [Book(name=r[1], author_email=r[2], price=r[3]) for r in rows]
Book.objects.bulk_create(books)
assert NUM_ROWS == Book.objects.count()
    # updates - there must be existing rows in the DB...
    # i.e. so they can be updated
    all_books = Book.objects.all()
    rows = [(b.id, b.name, b.author_email, b.price) for b in all_books]
    # re-import the existing ids so the import performs a bulk update
dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price'])
book_resource = _BookResource()
do_import(book_resource, dataset)
assert Book.objects.count() == NUM_ROWS
Book.objects.all().delete()
def do_delete():
# Run this twice - once for duration and once for memory counts
# comment out the lines in profile() as appropriate
class _BookResource(resources.ModelResource):
def for_delete(self, row, instance):
return True
class Meta:
model = Book
fields = ('id', 'name', 'author_email', 'price')
use_bulk = True
batch_size = 1000
skip_diff = True
instance_loader_class = CachedInstanceLoader
rows = [('', 'Some new book', '[email protected]', '10.25')] * NUM_ROWS
books = [Book(name=r[1], author_email=r[2], price=r[3]) for r in rows]
Book.objects.bulk_create(books)
assert NUM_ROWS == Book.objects.count()
# deletes - there must be existing rows in the DB...
# i.e. so they can be deleted
all_books = Book.objects.all()
rows = [(b.id, b.name, b.author_email, b.price) for b in all_books]
dataset = tablib.Dataset(*rows, headers=['id', 'name', 'author_email', 'price'])
book_resource = _BookResource()
do_import(book_resource, dataset)
def main():
do_create()
#do_update()
#do_delete()
if __name__ == "__main__":
main()
|
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_equal, assert_allclose, assert_array_less)
import pytest
from mne import create_info, EpochsArray
from mne.fixes import is_regressor, is_classifier
from mne.utils import requires_sklearn, requires_version
from mne.decoding.base import (_get_inverse_funcs, LinearModel, get_coef,
cross_val_multiscore, BaseEstimator)
from mne.decoding.search_light import SlidingEstimator
from mne.decoding import (Scaler, TransformerMixin, Vectorizer,
GeneralizingEstimator)
def _make_data(n_samples=1000, n_features=5, n_targets=3):
"""Generate some testing data.
Parameters
----------
n_samples : int
The number of samples.
n_features : int
The number of features.
n_targets : int
The number of targets.
Returns
-------
X : ndarray, shape (n_samples, n_features)
The measured data.
Y : ndarray, shape (n_samples, n_targets)
The latent variables generating the data.
A : ndarray, shape (n_features, n_targets)
The forward model, mapping the latent variables (=Y) to the measured
data (=X).
"""
# Define Y latent factors
np.random.seed(0)
cov_Y = np.eye(n_targets) * 10 + np.random.rand(n_targets, n_targets)
cov_Y = (cov_Y + cov_Y.T) / 2.
mean_Y = np.random.rand(n_targets)
Y = np.random.multivariate_normal(mean_Y, cov_Y, size=n_samples)
# The Forward model
A = np.random.randn(n_features, n_targets)
X = Y.dot(A.T)
X += np.random.randn(n_samples, n_features) # add noise
X += np.random.rand(n_features) # Put an offset
return X, Y, A
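# i.e. X = Y @ A.T + noise + offset; A is the ground-truth forward model
# ("patterns") that get_coef(..., 'patterns_') is expected to recover, as
# checked in test_get_coef_multiclass below.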
@requires_sklearn
def test_get_coef():
"""Test getting linear coefficients (filters/patterns) from estimators."""
from sklearn.base import TransformerMixin, BaseEstimator
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn import svm
from sklearn.linear_model import Ridge
from sklearn.model_selection import GridSearchCV
lm_classification = LinearModel()
assert (is_classifier(lm_classification))
lm_regression = LinearModel(Ridge())
assert (is_regressor(lm_regression))
parameters = {'kernel': ['linear'], 'C': [1, 10]}
lm_gs_classification = LinearModel(
GridSearchCV(svm.SVC(), parameters, cv=2, refit=True, n_jobs=1))
assert (is_classifier(lm_gs_classification))
lm_gs_regression = LinearModel(
GridSearchCV(svm.SVR(), parameters, cv=2, refit=True, n_jobs=1))
assert (is_regressor(lm_gs_regression))
    # Define a classifier, an invertible transformer, and a non-invertible one.
class Clf(BaseEstimator):
def fit(self, X, y):
return self
class NoInv(TransformerMixin):
def fit(self, X, y):
return self
def transform(self, X):
return X
class Inv(NoInv):
def inverse_transform(self, X):
return X
X, y, A = _make_data(n_samples=1000, n_features=3, n_targets=1)
# I. Test inverse function
# Check that we retrieve the right number of inverse functions even if
# there are nested pipelines
good_estimators = [
(1, make_pipeline(Inv(), Clf())),
(2, make_pipeline(Inv(), Inv(), Clf())),
(3, make_pipeline(Inv(), make_pipeline(Inv(), Inv()), Clf())),
]
for expected_n, est in good_estimators:
est.fit(X, y)
assert (expected_n == len(_get_inverse_funcs(est)))
bad_estimators = [
Clf(), # no preprocessing
Inv(), # final estimator isn't classifier
make_pipeline(NoInv(), Clf()), # first step isn't invertible
make_pipeline(Inv(), make_pipeline(
Inv(), NoInv()), Clf()), # nested step isn't invertible
]
for est in bad_estimators:
est.fit(X, y)
invs = _get_inverse_funcs(est)
assert_equal(invs, list())
# II. Test get coef for classification/regression estimators and pipelines
rng = np.random.RandomState(0)
for clf in (lm_regression,
lm_gs_classification,
make_pipeline(StandardScaler(), lm_classification),
make_pipeline(StandardScaler(), lm_gs_regression)):
# generate some categorical/continuous data
# according to the type of estimator.
if is_classifier(clf):
n, n_features = 1000, 3
X = rng.rand(n, n_features)
y = np.arange(n) % 2
else:
X, y, A = _make_data(n_samples=1000, n_features=3, n_targets=1)
y = np.ravel(y)
clf.fit(X, y)
# Retrieve final linear model
filters = get_coef(clf, 'filters_', False)
if hasattr(clf, 'steps'):
if hasattr(clf.steps[-1][-1].model, 'best_estimator_'):
# Linear Model with GridSearchCV
coefs = clf.steps[-1][-1].model.best_estimator_.coef_
else:
# Standard Linear Model
coefs = clf.steps[-1][-1].model.coef_
else:
if hasattr(clf.model, 'best_estimator_'):
# Linear Model with GridSearchCV
coefs = clf.model.best_estimator_.coef_
else:
# Standard Linear Model
coefs = clf.model.coef_
if coefs.ndim == 2 and coefs.shape[0] == 1:
coefs = coefs[0]
assert_array_equal(filters, coefs)
patterns = get_coef(clf, 'patterns_', False)
assert (filters[0] != patterns[0])
n_chans = X.shape[1]
assert_array_equal(filters.shape, patterns.shape, [n_chans, n_chans])
# Inverse transform linear model
filters_inv = get_coef(clf, 'filters_', True)
assert (filters[0] != filters_inv[0])
patterns_inv = get_coef(clf, 'patterns_', True)
assert (patterns[0] != patterns_inv[0])
class _Noop(BaseEstimator, TransformerMixin):
def fit(self, X, y=None):
return self
def transform(self, X):
return X.copy()
inverse_transform = transform
@requires_sklearn
@pytest.mark.parametrize('inverse', (True, False))
@pytest.mark.parametrize('Scale, kwargs', [
(Scaler, dict(info=None, scalings='mean')),
(_Noop, dict()),
])
def test_get_coef_inverse_transform(inverse, Scale, kwargs):
"""Test get_coef with and without inverse_transform."""
from sklearn.linear_model import Ridge
from sklearn.pipeline import make_pipeline
lm_regression = LinearModel(Ridge())
X, y, A = _make_data(n_samples=1000, n_features=3, n_targets=1)
    # Check with the search light and a preprocessing pipeline ending in it:
# slider = SlidingEstimator(make_pipeline(StandardScaler(), lm_regression))
# XXX : line above should work but does not as only last step is
# used in get_coef ...
slider = SlidingEstimator(make_pipeline(lm_regression))
X = np.transpose([X, -X], [1, 2, 0]) # invert X across 2 time samples
clf = make_pipeline(Scale(**kwargs), slider)
clf.fit(X, y)
patterns = get_coef(clf, 'patterns_', inverse)
filters = get_coef(clf, 'filters_', inverse)
assert_array_equal(filters.shape, patterns.shape, X.shape[1:])
# the two time samples get inverted patterns
assert_equal(patterns[0, 0], -patterns[0, 1])
for t in [0, 1]:
filters_t = get_coef(
clf.named_steps['slidingestimator'].estimators_[t],
'filters_', False)
if Scale is _Noop:
assert_array_equal(filters_t, filters[:, t])
@requires_sklearn
@pytest.mark.parametrize('n_features', [1, 5])
@pytest.mark.parametrize('n_targets', [1, 3])
def test_get_coef_multiclass(n_features, n_targets):
"""Test get_coef on multiclass problems."""
# Check patterns with more than 1 regressor
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.pipeline import make_pipeline
X, Y, A = _make_data(
n_samples=30000, n_features=n_features, n_targets=n_targets)
lm = LinearModel(LinearRegression()).fit(X, Y)
assert_array_equal(lm.filters_.shape, lm.patterns_.shape)
if n_targets == 1:
want_shape = (n_features,)
else:
want_shape = (n_targets, n_features)
assert_array_equal(lm.filters_.shape, want_shape)
if n_features > 1 and n_targets > 1:
assert_array_almost_equal(A, lm.patterns_.T, decimal=2)
lm = LinearModel(Ridge(alpha=0))
clf = make_pipeline(lm)
clf.fit(X, Y)
if n_features > 1 and n_targets > 1:
assert_allclose(A, lm.patterns_.T, atol=2e-2)
coef = get_coef(clf, 'patterns_', inverse_transform=True)
assert_allclose(lm.patterns_, coef, atol=1e-5)
# With epochs, scaler, and vectorizer (typical use case)
X_epo = X.reshape(X.shape + (1,))
info = create_info(n_features, 1000., 'eeg')
lm = LinearModel(Ridge(alpha=1))
clf = make_pipeline(
Scaler(info, scalings=dict(eeg=1.)), # XXX adding this step breaks
Vectorizer(),
lm,
)
clf.fit(X_epo, Y)
if n_features > 1 and n_targets > 1:
assert_allclose(A, lm.patterns_.T, atol=2e-2)
coef = get_coef(clf, 'patterns_', inverse_transform=True)
lm_patterns_ = lm.patterns_[..., np.newaxis]
assert_allclose(lm_patterns_, coef, atol=1e-5)
# Check can pass fitting parameters
lm.fit(X, Y, sample_weight=np.ones(len(Y)))
@requires_version('sklearn', '0.22') # roc_auc_ovr_weighted
@pytest.mark.parametrize('n_classes, n_channels, n_times', [
(4, 10, 2),
(4, 3, 2),
(3, 2, 1),
(3, 1, 2),
])
def test_get_coef_multiclass_full(n_classes, n_channels, n_times):
"""Test a full example with pattern extraction."""
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import StratifiedKFold
data = np.zeros((10 * n_classes, n_channels, n_times))
# Make only the first channel informative
for ii in range(n_classes):
data[ii * 10:(ii + 1) * 10, 0] = ii
events = np.zeros((len(data), 3), int)
events[:, 0] = np.arange(len(events))
events[:, 2] = data[:, 0, 0]
info = create_info(n_channels, 1000., 'eeg')
epochs = EpochsArray(data, info, events, tmin=0)
clf = make_pipeline(
Scaler(epochs.info), Vectorizer(),
LinearModel(LogisticRegression(random_state=0, multi_class='ovr')),
)
scorer = 'roc_auc_ovr_weighted'
time_gen = GeneralizingEstimator(clf, scorer, verbose=True)
X = epochs.get_data()
y = epochs.events[:, 2]
n_splits = 3
cv = StratifiedKFold(n_splits=n_splits)
scores = cross_val_multiscore(time_gen, X, y, cv=cv, verbose=True)
want = (n_splits,)
if n_times > 1:
want += (n_times, n_times)
assert scores.shape == want
assert_array_less(0.8, scores)
clf.fit(X, y)
patterns = get_coef(clf, 'patterns_', inverse_transform=True)
assert patterns.shape == (n_classes, n_channels, n_times)
assert_allclose(patterns[:, 1:], 0., atol=1e-7) # no other channels useful
@requires_sklearn
def test_linearmodel():
"""Test LinearModel class for computing filters and patterns."""
# check categorical target fit in standard linear model
from sklearn.linear_model import LinearRegression
rng = np.random.RandomState(0)
clf = LinearModel()
n, n_features = 20, 3
X = rng.rand(n, n_features)
y = np.arange(n) % 2
clf.fit(X, y)
assert_equal(clf.filters_.shape, (n_features,))
assert_equal(clf.patterns_.shape, (n_features,))
with pytest.raises(ValueError):
wrong_X = rng.rand(n, n_features, 99)
clf.fit(wrong_X, y)
# check categorical target fit in standard linear model with GridSearchCV
from sklearn import svm
from sklearn.model_selection import GridSearchCV
parameters = {'kernel': ['linear'], 'C': [1, 10]}
clf = LinearModel(
GridSearchCV(svm.SVC(), parameters, cv=2, refit=True, n_jobs=1))
clf.fit(X, y)
assert_equal(clf.filters_.shape, (n_features,))
assert_equal(clf.patterns_.shape, (n_features,))
with pytest.raises(ValueError):
wrong_X = rng.rand(n, n_features, 99)
clf.fit(wrong_X, y)
# check continuous target fit in standard linear model with GridSearchCV
n_targets = 1
Y = rng.rand(n, n_targets)
clf = LinearModel(
GridSearchCV(svm.SVR(), parameters, cv=2, refit=True, n_jobs=1))
    clf.fit(X, Y.ravel())
assert_equal(clf.filters_.shape, (n_features, ))
assert_equal(clf.patterns_.shape, (n_features, ))
with pytest.raises(ValueError):
wrong_y = rng.rand(n, n_features, 99)
clf.fit(X, wrong_y)
# check multi-target fit in standard linear model
n_targets = 5
Y = rng.rand(n, n_targets)
clf = LinearModel(LinearRegression())
clf.fit(X, Y)
assert_equal(clf.filters_.shape, (n_targets, n_features))
assert_equal(clf.patterns_.shape, (n_targets, n_features))
with pytest.raises(ValueError):
wrong_y = rng.rand(n, n_features, 99)
clf.fit(X, wrong_y)
@requires_sklearn
def test_cross_val_multiscore():
"""Test cross_val_multiscore for computing scores on decoding over time."""
from sklearn.model_selection import KFold, StratifiedKFold, cross_val_score
from sklearn.linear_model import LogisticRegression, LinearRegression
logreg = LogisticRegression(solver='liblinear', random_state=0)
# compare to cross-val-score
X = np.random.rand(20, 3)
y = np.arange(20) % 2
cv = KFold(2, random_state=0, shuffle=True)
clf = logreg
assert_array_equal(cross_val_score(clf, X, y, cv=cv),
cross_val_multiscore(clf, X, y, cv=cv))
# Test with search light
X = np.random.rand(20, 4, 3)
y = np.arange(20) % 2
clf = SlidingEstimator(logreg, scoring='accuracy')
scores_acc = cross_val_multiscore(clf, X, y, cv=cv)
assert_array_equal(np.shape(scores_acc), [2, 3])
# check values
scores_acc_manual = list()
for train, test in cv.split(X, y):
clf.fit(X[train], y[train])
scores_acc_manual.append(clf.score(X[test], y[test]))
assert_array_equal(scores_acc, scores_acc_manual)
    # check scoring metric
    # expect an error if scoring is defined both at the cross_val_multiscore
    # level and on the search light, because the search light does not return
    # a 1-dimensional prediction.
pytest.raises(ValueError, cross_val_multiscore, clf, X, y, cv=cv,
scoring='roc_auc')
clf = SlidingEstimator(logreg, scoring='roc_auc')
scores_auc = cross_val_multiscore(clf, X, y, cv=cv, n_jobs=1)
scores_auc_manual = list()
for train, test in cv.split(X, y):
clf.fit(X[train], y[train])
scores_auc_manual.append(clf.score(X[test], y[test]))
assert_array_equal(scores_auc, scores_auc_manual)
    # indirectly test that cross_val_multiscore correctly detects the type of
    # estimator and generates a StratifiedKFold for classifiers and a KFold
    # otherwise
X = np.random.randn(1000, 3)
y = np.ones(1000, dtype=int)
y[::2] = 0
clf = logreg
reg = LinearRegression()
for cross_val in (cross_val_score, cross_val_multiscore):
manual = cross_val(clf, X, y, cv=StratifiedKFold(2))
auto = cross_val(clf, X, y, cv=2)
assert_array_equal(manual, auto)
manual = cross_val(reg, X, y, cv=KFold(2))
auto = cross_val(reg, X, y, cv=2)
assert_array_equal(manual, auto)
|
from typing import Optional
from django.core.cache import cache
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.views.decorators.cache import cache_control
from django.views.generic import TemplateView
from weblate.utils.hash import calculate_checksum
from .models import Setting
class CustomCSSView(TemplateView):
template_name = "configuration/custom.css"
cache_key = "css:custom"
@classmethod
def get_css(cls, request):
# Request level caching
if hasattr(request, "_weblate_custom_css"):
return request._weblate_custom_css
# Site level caching
css = cache.get(cls.cache_key)
if css is None:
css = render_to_string(
"configuration/custom.css",
Setting.objects.get_settings_dict(Setting.CATEGORY_UI),
).strip()
cache.set(cls.cache_key, css, 24 * 3600)
request._weblate_custom_css = css
return css
@cache_control(max_age=7200)
def get(self, request, *args, **kwargs):
return HttpResponse(content_type="text/css", content=self.get_css(request))
@classmethod
def drop_cache(cls):
cache.delete(cls.cache_key)
@classmethod
def get_hash(cls, request) -> Optional[str]:
css = cls.get_css(request)
if not css:
return None
return calculate_checksum(css)
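# Hypothetical usage sketch: when UI settings change, drop the site-level
# cache so the next request re-renders the stylesheet:
#
#     CustomCSSView.drop_cache()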
|
import asynctest
import mock
from mock import ANY
from mock import MagicMock
from mock import patch
from pytest import fixture
from pytest import raises
from slackclient import SlackClient
from paasta_tools.cli.cmds import mark_for_deployment
class FakeArgs:
deploy_group = "test_deploy_group"
service = "test_service"
git_url = "git://false.repo/services/test_services"
commit = "d670460b4b4aece5915caf5c68d12f560a9fe3e4"
soa_dir = "fake_soa_dir"
block = False
verbose = False
auto_rollback = False
verify_image = False
timeout = 10.0
auto_certify_delay = 1.0
auto_abandon_delay = 1.0
auto_rollback_delay = 1.0
authors = None
@fixture
def mock_periodically_update_slack():
# for some reason asynctest.patch doesn't work as a decorator, so I've defined this fixture.
with asynctest.patch(
"paasta_tools.cli.cmds.mark_for_deployment.MarkForDeploymentProcess.periodically_update_slack",
autospec=True,
) as periodically_update_slack:
yield periodically_update_slack
@patch("paasta_tools.cli.cmds.mark_for_deployment._log", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log_audit", autospec=True)
@patch("paasta_tools.remote_git.create_remote_refs", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_mark_for_deployment_happy(
mock_load_system_paasta_config, mock_create_remote_refs, mock__log_audit, mock__log
):
config_mock = mock.Mock()
config_mock.get_default_push_groups.return_value = None
mock_load_system_paasta_config.return_value = config_mock
actual = mark_for_deployment.mark_for_deployment(
git_url="fake_git_url",
deploy_group="fake_deploy_group",
service="fake_service",
commit="fake_commit",
)
assert actual == 0
mock_create_remote_refs.assert_called_once_with(
git_url="fake_git_url", ref_mutator=ANY, force=True
)
mock__log_audit.assert_called_once_with(
action="mark-for-deployment",
action_details={"deploy_group": "fake_deploy_group", "commit": "fake_commit"},
service="fake_service",
)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log_audit", autospec=True)
@patch("paasta_tools.remote_git.create_remote_refs", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_mark_for_deployment_sad(
mock_load_system_paasta_config, mock_create_remote_refs, mock__log_audit, mock__log
):
config_mock = mock.Mock()
config_mock.get_default_push_groups.return_value = None
mock_load_system_paasta_config.return_value = config_mock
mock_create_remote_refs.side_effect = Exception("something bad")
with patch("time.sleep", autospec=True):
actual = mark_for_deployment.mark_for_deployment(
git_url="fake_git_url",
deploy_group="fake_deploy_group",
service="fake_service",
commit="fake_commit",
)
assert actual == 1
assert mock_create_remote_refs.call_count == 3
assert not mock__log_audit.called
@patch("paasta_tools.cli.cmds.mark_for_deployment.validate_service_name", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.is_docker_image_already_in_registry",
autospec=True,
)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.get_currently_deployed_sha",
autospec=True,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment.list_deploy_groups", autospec=True)
def test_paasta_mark_for_deployment_when_verify_image_fails(
mock_list_deploy_groups,
mock_get_currently_deployed_sha,
mock_is_docker_image_already_in_registry,
mock_validate_service_name,
):
class FakeArgsRollback(FakeArgs):
verify_image = True
mock_list_deploy_groups.return_value = ["test_deploy_groups"]
mock_is_docker_image_already_in_registry.return_value = False
with raises(ValueError):
mark_for_deployment.paasta_mark_for_deployment(FakeArgsRollback)
@patch("paasta_tools.cli.cmds.mark_for_deployment.validate_service_name", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.is_docker_image_already_in_registry",
autospec=True,
)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.get_currently_deployed_sha",
autospec=True,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment.list_deploy_groups", autospec=True)
def test_paasta_mark_for_deployment_when_verify_image_succeeds(
mock_list_deploy_groups,
mock_get_currently_deployed_sha,
mock_is_docker_image_already_in_registry,
mock_validate_service_name,
):
class FakeArgsRollback(FakeArgs):
verify_image = True
mock_list_deploy_groups.return_value = ["test_deploy_groups"]
mock_is_docker_image_already_in_registry.return_value = False
with raises(ValueError):
mark_for_deployment.paasta_mark_for_deployment(FakeArgsRollback)
mock_is_docker_image_already_in_registry.assert_called_with(
"test_service", "fake_soa_dir", "d670460b4b4aece5915caf5c68d12f560a9fe3e4"
)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.MarkForDeploymentProcess.run_timeout",
new=1.0,
autospec=False,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log_audit", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.validate_service_name", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.MarkForDeploymentProcess.do_wait_for_deployment",
autospec=True,
)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.get_currently_deployed_sha",
autospec=True,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment.list_deploy_groups", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_paasta_mark_for_deployment_with_good_rollback(
mock_load_system_paasta_config,
mock_list_deploy_groups,
mock_get_currently_deployed_sha,
mock_do_wait_for_deployment,
mock_mark_for_deployment,
mock_validate_service_name,
mock_get_slack_client,
mock__log_audit,
mock_periodically_update_slack,
):
class FakeArgsRollback(FakeArgs):
auto_rollback = True
block = True
timeout = 600
mock_list_deploy_groups.return_value = ["test_deploy_groups"]
config_mock = mock.Mock()
config_mock.get_default_push_groups.return_value = None
mock_load_system_paasta_config.return_value = config_mock
mock_mark_for_deployment.return_value = 0
def do_wait_for_deployment_side_effect(self, target_commit):
if target_commit == FakeArgs.commit:
self.trigger("rollback_button_clicked")
else:
self.trigger("deploy_finished")
mock_do_wait_for_deployment.side_effect = do_wait_for_deployment_side_effect
def on_enter_rolled_back_side_effect(self):
self.trigger("abandon_button_clicked")
mock_get_currently_deployed_sha.return_value = "old-sha"
with patch(
"paasta_tools.cli.cmds.mark_for_deployment.MarkForDeploymentProcess.on_enter_rolled_back",
autospec=True,
wraps=mark_for_deployment.MarkForDeploymentProcess.on_enter_rolled_back,
side_effect=on_enter_rolled_back_side_effect,
):
assert mark_for_deployment.paasta_mark_for_deployment(FakeArgsRollback) == 1
mock_mark_for_deployment.assert_any_call(
service="test_service",
deploy_group="test_deploy_group",
commit="d670460b4b4aece5915caf5c68d12f560a9fe3e4",
git_url="git://false.repo/services/test_services",
)
mock_mark_for_deployment.assert_any_call(
service="test_service",
deploy_group="test_deploy_group",
commit="old-sha",
git_url="git://false.repo/services/test_services",
)
assert mock_mark_for_deployment.call_count == 2
mock_do_wait_for_deployment.assert_any_call(
mock.ANY, target_commit="d670460b4b4aece5915caf5c68d12f560a9fe3e4"
)
mock_do_wait_for_deployment.assert_any_call(mock.ANY, target_commit="old-sha")
assert mock_do_wait_for_deployment.call_count == 2
# in normal usage, this would also be called once per m-f-d, but we mock that out above
# so _log_audit is only called as part of handling the rollback
assert mock__log_audit.call_count == len(mock_list_deploy_groups.return_value)
@patch("paasta_tools.remote_git.create_remote_refs", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.trigger_deploys", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_mark_for_deployment_yelpy_repo(
mock_load_system_paasta_config, mock_trigger_deploys, mock_create_remote_refs
):
config_mock = mock.Mock()
config_mock.get_default_push_groups.return_value = None
mock_load_system_paasta_config.return_value = config_mock
mark_for_deployment.mark_for_deployment(
git_url="git://false.repo.yelpcorp.com/services/test_services",
deploy_group="fake_deploy_group",
service="fake_service",
commit="fake_commit",
)
mock_trigger_deploys.assert_called_once_with(service="fake_service")
@patch("paasta_tools.remote_git.create_remote_refs", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.trigger_deploys", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_mark_for_deployment_nonyelpy_repo(
mock_load_system_paasta_config, mock_trigger_deploys, mock_create_remote_refs
):
config_mock = mock.Mock()
config_mock.get_default_push_groups.return_value = None
mock_load_system_paasta_config.return_value = config_mock
mark_for_deployment.mark_for_deployment(
git_url="git://false.repo/services/test_services",
deploy_group="fake_deploy_group",
service="fake_service",
commit="fake_commit",
)
assert not mock_trigger_deploys.called
@patch("paasta_tools.cli.cmds.mark_for_deployment._log_audit", autospec=True)
@patch("paasta_tools.remote_git.get_authors", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_MarkForDeployProcess_handles_wait_for_deployment_failure(
mock_load_system_paasta_config,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_get_slack_client,
mock_get_authors,
mock__log_audit,
):
mock_get_authors.return_value = 0, "fakeuser1 fakeuser2"
mfdp = mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=True,
auto_rollback=True,
deploy_info={
"pipeline": [{"step": "test_deploy_group", "slack_notify": False}],
"slack_channels": ["#test"],
},
deploy_group="test_deploy_group",
commit="abc123432u49",
old_git_sha="abc123455",
git_url="[email protected]:services/repo",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
)
mock_mark_for_deployment.return_value = 0
mock_wait_for_deployment.side_effect = Exception()
retval = mfdp.run()
assert mock_mark_for_deployment.call_count == 1
assert mock_wait_for_deployment.call_count == 1
assert mfdp.state == "deploy_errored"
assert retval == 2
assert not mock__log_audit.called
@patch("paasta_tools.remote_git.get_authors", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_MarkForDeployProcess_handles_first_time_deploys(
mock_load_system_paasta_config,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_get_slack_client,
mock_get_authors,
mock_periodically_update_slack,
):
mock_get_authors.return_value = 0, "fakeuser1 fakeuser2"
mfdp = mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=True,
auto_rollback=True,
deploy_info=MagicMock(),
deploy_group=None,
commit="abc123432u49",
old_git_sha=None,
git_url="[email protected]:services/repo",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
)
mock_mark_for_deployment.return_value = 0
mock_wait_for_deployment.side_effect = Exception()
retval = mfdp.run()
assert mock_mark_for_deployment.call_count == 1
assert mock_wait_for_deployment.call_count == 1
assert mfdp.state == "deploy_errored"
assert retval == 2
@patch.object(mark_for_deployment, "get_authors_to_be_notified", autospec=True)
@patch.object(mark_for_deployment, "get_currently_deployed_sha", autospec=True)
@patch.object(mark_for_deployment, "get_slack_client", autospec=True)
@patch.object(mark_for_deployment, "load_system_paasta_config", autospec=True)
def test_MarkForDeployProcess_get_authors_diffs_against_prod_deploy_group(
mock_load_system_paasta_config,
mock_get_slack_client,
mock_get_currently_deployed_sha,
mock_get_authors_to_be_notified,
):
# get_authors should calculate authors since the production_deploy_group's
# current SHA, when available.
mock_get_currently_deployed_sha.return_value = "aaaaaaaa"
mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=True,
auto_rollback=False,
deploy_info={"production_deploy_group": "prod"},
deploy_group=None,
commit="abc123512",
old_git_sha="asgdser23",
git_url="[email protected]:services/repo",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
authors=["fakeuser1"],
)
mock_get_authors_to_be_notified.assert_called_once_with(
git_url="[email protected]:services/repo",
from_sha="aaaaaaaa",
to_sha="abc123512",
authors=["fakeuser1"],
)
@patch.object(mark_for_deployment, "get_authors_to_be_notified", autospec=True)
@patch.object(mark_for_deployment, "get_currently_deployed_sha", autospec=True)
@patch.object(mark_for_deployment, "get_slack_client", autospec=True)
@patch.object(mark_for_deployment, "load_system_paasta_config", autospec=True)
def test_MarkForDeployProcess_get_authors_falls_back_to_current_deploy_group(
mock_load_system_paasta_config,
mock_get_slack_client,
mock_get_currently_deployed_sha,
mock_get_authors_to_be_notified,
):
# When there's no production_deploy_group configured, get_authors should
# fall back to calculating authors using the previous SHA for this deploy
# group.
mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=True,
auto_rollback=False,
# No production_deploy_group!
deploy_info={},
deploy_group=None,
commit="abc123512",
old_git_sha="asgdser23",
git_url="[email protected]:services/repo1",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
authors="fakeuser1",
)
mock_get_authors_to_be_notified.assert_called_once_with(
git_url="[email protected]:services/repo1",
from_sha="asgdser23",
to_sha="abc123512",
authors="fakeuser1",
)
@patch("paasta_tools.remote_git.get_authors", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_MarkForDeployProcess_handles_wait_for_deployment_cancelled(
mock_load_system_paasta_config,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_get_slack_client,
mock_get_authors,
mock_periodically_update_slack,
):
mock_get_authors.return_value = 0, "fakeuser1 fakeuser2"
mfdp = mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=True,
# For this test, auto_rollback must be True so that the deploy_cancelled trigger takes us to start_rollback
# instead of deploy_errored.
auto_rollback=True,
deploy_info=MagicMock(),
deploy_group=None,
commit="abc123512",
old_git_sha="asgdser23",
git_url="[email protected]:services/repo1",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
)
mock_mark_for_deployment.return_value = 0
mock_wait_for_deployment.side_effect = KeyboardInterrupt()
retval = mfdp.run()
assert mock_mark_for_deployment.call_count == 1
assert retval == 1
assert mfdp.state == "deploy_cancelled"
@patch("paasta_tools.remote_git.get_authors", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.Thread", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch("sticht.slack.get_slack_events", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_MarkForDeployProcess_skips_wait_for_deployment_when_block_is_False(
mock_load_system_paasta_config,
mock_get_slack_events,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_get_slack_client,
mock_Thread,
mock_get_authors,
):
mock_get_authors.return_value = 0, "fakeuser1 fakeuser2"
mfdp = mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=False,
auto_rollback=False,
deploy_info=MagicMock(),
deploy_group=None,
commit="abc123456789",
old_git_sha="oldsha1234",
git_url="[email protected]:services/repo1",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
)
mock_mark_for_deployment.return_value = 0
mock_wait_for_deployment.side_effect = Exception()
retval = mfdp.run()
assert mock_mark_for_deployment.call_count == 1
assert mock_wait_for_deployment.call_count == 0
assert retval == 0
assert mfdp.state == "deploying"
@patch("paasta_tools.remote_git.get_authors", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.get_slack_client", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.load_system_paasta_config", autospec=True
)
def test_MarkForDeployProcess_goes_to_mfd_failed_when_mark_for_deployment_fails(
mock_load_system_paasta_config,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_get_slack_client,
mock_get_authors,
mock_periodically_update_slack,
):
mock_get_authors.return_value = 0, "fakeuser1 fakeuser2"
mfdp = mark_for_deployment.MarkForDeploymentProcess(
service="service",
block=False, # shouldn't matter for this test
auto_rollback=False, # shouldn't matter for this test
deploy_info=MagicMock(),
deploy_group=None,
commit="asbjkslerj",
old_git_sha="abscerwerr",
git_url="[email protected]:services/repo1",
soa_dir=None,
timeout=None,
auto_certify_delay=1,
auto_abandon_delay=1,
auto_rollback_delay=1,
)
mock_mark_for_deployment.return_value = 1
mock_wait_for_deployment.side_effect = Exception()
retval = mfdp.run()
assert mock_mark_for_deployment.call_count == 1
assert mock_wait_for_deployment.call_count == 0
assert retval == 1
assert mfdp.state == "mfd_failed"
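# Test double that records every trigger and state transition, stubs the Slack
# client with a MagicMock, collapses all timers to zero, and skips the SLO
# watcher threads so state-machine paths can be exercised quickly.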
class WrappedMarkForDeploymentProcess(mark_for_deployment.MarkForDeploymentProcess):
def __init__(self, *args, **kwargs):
self.trigger_history = []
self.state_history = []
super().__init__(*args, **kwargs)
self.orig_trigger = self.trigger
self.trigger = self._trigger
def _trigger(self, trigger_name):
self.trigger_history.append(trigger_name)
self.orig_trigger(trigger_name)
def get_slack_client(self):
fake_slack_client = mock.MagicMock(spec=SlackClient)
fake_slack_client.api_call.return_value = {
"ok": True,
"message": {"ts": 1234531337},
"channel": "FAKE CHANNEL ID",
}
return fake_slack_client
def start_timer(self, timeout, trigger, message_verb):
return super().start_timer(0, trigger, message_verb)
def after_state_change(self, *args, **kwargs):
self.state_history.append(self.state)
super().after_state_change(*args, **kwargs)
def start_slo_watcher_threads(self, service, soa_dir):
pass
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment",
return_value=0,
autospec=True,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log", autospec=True)
def test_MarkForDeployProcess_happy_path(
mock_log,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_periodically_update_slack,
):
mock_log.return_value = None
mfdp = WrappedMarkForDeploymentProcess(
service="service",
deploy_info=MagicMock(),
deploy_group="deploy_group",
commit="commit",
old_git_sha="old_git_sha",
git_url="git_url",
auto_rollback=True,
block=True,
soa_dir="soa_dir",
timeout=3600,
auto_certify_delay=None,
auto_abandon_delay=600,
auto_rollback_delay=30,
authors=None,
)
mfdp.run_timeout = 1
assert mfdp.run() == 0
assert mfdp.trigger_history == [
"start_deploy",
"mfd_succeeded",
"deploy_finished",
"auto_certify",
]
assert mfdp.state_history == ["start_deploy", "deploying", "deployed", "complete"]
@patch(
"paasta_tools.cli.cmds.mark_for_deployment.mark_for_deployment",
return_value=0,
autospec=True,
)
@patch("paasta_tools.cli.cmds.mark_for_deployment.wait_for_deployment", autospec=True)
@patch("paasta_tools.cli.cmds.mark_for_deployment._log", autospec=True)
@patch("paasta_tools.cli.cmds.wait_for_deployment._log", autospec=True)
def test_MarkForDeployProcess_happy_path_skips_complete_if_no_auto_rollback(
mock__log1,
mock__log2,
mock_wait_for_deployment,
mock_mark_for_deployment,
mock_periodically_update_slack,
):
mock__log1.return_value = None
mock__log2.return_value = None
mfdp = WrappedMarkForDeploymentProcess(
service="service",
deploy_info=MagicMock(),
deploy_group="deploy_group",
commit="commit",
old_git_sha="old_git_sha",
git_url="git_url",
auto_rollback=False,
block=True,
soa_dir="soa_dir",
timeout=3600,
auto_certify_delay=None,
auto_abandon_delay=600,
auto_rollback_delay=30,
authors=None,
)
mfdp.run_timeout = 1
assert mfdp.run() == 0
assert mfdp.trigger_history == ["start_deploy", "mfd_succeeded", "deploy_finished"]
assert mfdp.state_history == ["start_deploy", "deploying", "deployed"]
|
from pyicloud.exceptions import PyiCloudFailedLoginException
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.icloud.config_flow import (
CONF_TRUSTED_DEVICE,
CONF_VERIFICATION_CODE,
)
from homeassistant.components.icloud.const import (
CONF_GPS_ACCURACY_THRESHOLD,
CONF_MAX_INTERVAL,
CONF_WITH_FAMILY,
DEFAULT_GPS_ACCURACY_THRESHOLD,
DEFAULT_MAX_INTERVAL,
DEFAULT_WITH_FAMILY,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import MagicMock, Mock, patch
from tests.common import MockConfigEntry
USERNAME = "[email protected]"
USERNAME_2 = "[email protected]"
PASSWORD = "password"
PASSWORD_2 = "second_password"
WITH_FAMILY = True
MAX_INTERVAL = 15
GPS_ACCURACY_THRESHOLD = 250
MOCK_CONFIG = {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_WITH_FAMILY: DEFAULT_WITH_FAMILY,
CONF_MAX_INTERVAL: DEFAULT_MAX_INTERVAL,
CONF_GPS_ACCURACY_THRESHOLD: DEFAULT_GPS_ACCURACY_THRESHOLD,
}
TRUSTED_DEVICES = [
{"deviceType": "SMS", "areaCode": "", "phoneNumber": "*******58", "deviceId": "1"}
]
@pytest.fixture(name="service")
def mock_controller_service():
"""Mock a successful service."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService"
) as service_mock:
service_mock.return_value.requires_2sa = True
service_mock.return_value.trusted_devices = TRUSTED_DEVICES
service_mock.return_value.send_verification_code = Mock(return_value=True)
service_mock.return_value.validate_verification_code = Mock(return_value=True)
yield service_mock
@pytest.fixture(name="service_authenticated")
def mock_controller_service_authenticated():
"""Mock a successful service while already authenticate."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService"
) as service_mock:
service_mock.return_value.requires_2sa = False
service_mock.return_value.trusted_devices = TRUSTED_DEVICES
service_mock.return_value.send_verification_code = Mock(return_value=True)
service_mock.return_value.validate_verification_code = Mock(return_value=True)
yield service_mock
@pytest.fixture(name="service_authenticated_no_device")
def mock_controller_service_authenticated_no_device():
"""Mock a successful service while already authenticate, but without device."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService"
) as service_mock:
service_mock.return_value.requires_2sa = False
service_mock.return_value.trusted_devices = TRUSTED_DEVICES
service_mock.return_value.send_verification_code = Mock(return_value=True)
service_mock.return_value.validate_verification_code = Mock(return_value=True)
service_mock.return_value.devices = {}
yield service_mock
@pytest.fixture(name="service_send_verification_code_failed")
def mock_controller_service_send_verification_code_failed():
"""Mock a failed service during sending verification code step."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService"
) as service_mock:
service_mock.return_value.requires_2sa = True
service_mock.return_value.trusted_devices = TRUSTED_DEVICES
service_mock.return_value.send_verification_code = Mock(return_value=False)
yield service_mock
@pytest.fixture(name="service_validate_verification_code_failed")
def mock_controller_service_validate_verification_code_failed():
"""Mock a failed service during validation of verification code step."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService"
) as service_mock:
service_mock.return_value.requires_2sa = True
service_mock.return_value.trusted_devices = TRUSTED_DEVICES
service_mock.return_value.send_verification_code = Mock(return_value=True)
service_mock.return_value.validate_verification_code = Mock(return_value=False)
yield service_mock
async def test_user(hass: HomeAssistantType, service: MagicMock):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=None
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# test with required
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_TRUSTED_DEVICE
async def test_user_with_cookie(
hass: HomeAssistantType, service_authenticated: MagicMock
):
"""Test user config with presence of a cookie."""
# test with all provided
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_WITH_FAMILY: WITH_FAMILY,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == USERNAME
assert result["title"] == USERNAME
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_WITH_FAMILY] == WITH_FAMILY
assert result["data"][CONF_MAX_INTERVAL] == DEFAULT_MAX_INTERVAL
assert result["data"][CONF_GPS_ACCURACY_THRESHOLD] == DEFAULT_GPS_ACCURACY_THRESHOLD
async def test_import(hass: HomeAssistantType, service: MagicMock):
"""Test import step."""
# import with required
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "trusted_device"
# import with all
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: USERNAME_2,
CONF_PASSWORD: PASSWORD,
CONF_WITH_FAMILY: WITH_FAMILY,
CONF_MAX_INTERVAL: MAX_INTERVAL,
CONF_GPS_ACCURACY_THRESHOLD: GPS_ACCURACY_THRESHOLD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "trusted_device"
async def test_import_with_cookie(
hass: HomeAssistantType, service_authenticated: MagicMock
):
"""Test import step with presence of a cookie."""
# import with required
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == USERNAME
assert result["title"] == USERNAME
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_WITH_FAMILY] == DEFAULT_WITH_FAMILY
assert result["data"][CONF_MAX_INTERVAL] == DEFAULT_MAX_INTERVAL
assert result["data"][CONF_GPS_ACCURACY_THRESHOLD] == DEFAULT_GPS_ACCURACY_THRESHOLD
# import with all
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: USERNAME_2,
CONF_PASSWORD: PASSWORD,
CONF_WITH_FAMILY: WITH_FAMILY,
CONF_MAX_INTERVAL: MAX_INTERVAL,
CONF_GPS_ACCURACY_THRESHOLD: GPS_ACCURACY_THRESHOLD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == USERNAME_2
assert result["title"] == USERNAME_2
assert result["data"][CONF_USERNAME] == USERNAME_2
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_WITH_FAMILY] == WITH_FAMILY
assert result["data"][CONF_MAX_INTERVAL] == MAX_INTERVAL
assert result["data"][CONF_GPS_ACCURACY_THRESHOLD] == GPS_ACCURACY_THRESHOLD
async def test_two_accounts_setup(
hass: HomeAssistantType, service_authenticated: MagicMock
):
"""Test to setup two accounts."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
unique_id=USERNAME,
).add_to_hass(hass)
# import with required
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME_2, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == USERNAME_2
assert result["title"] == USERNAME_2
assert result["data"][CONF_USERNAME] == USERNAME_2
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_WITH_FAMILY] == DEFAULT_WITH_FAMILY
assert result["data"][CONF_MAX_INTERVAL] == DEFAULT_MAX_INTERVAL
assert result["data"][CONF_GPS_ACCURACY_THRESHOLD] == DEFAULT_GPS_ACCURACY_THRESHOLD
async def test_already_setup(hass: HomeAssistantType):
"""Test we abort if the account is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
unique_id=USERNAME,
).add_to_hass(hass)
# Should fail, same USERNAME (import)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
# Should fail, same USERNAME (flow)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_login_failed(hass: HomeAssistantType):
"""Test when we have errors during login."""
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService.authenticate",
side_effect=PyiCloudFailedLoginException(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_PASSWORD: "invalid_auth"}
async def test_no_device(
hass: HomeAssistantType, service_authenticated_no_device: MagicMock
):
"""Test when we have no devices."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_device"
async def test_trusted_device(hass: HomeAssistantType, service: MagicMock):
"""Test trusted_device step."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_TRUSTED_DEVICE
async def test_trusted_device_success(hass: HomeAssistantType, service: MagicMock):
"""Test trusted_device step success."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_TRUSTED_DEVICE: 0}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_VERIFICATION_CODE
async def test_send_verification_code_failed(
hass: HomeAssistantType, service_send_verification_code_failed: MagicMock
):
"""Test when we have errors during send_verification_code."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_TRUSTED_DEVICE: 0}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_TRUSTED_DEVICE
assert result["errors"] == {CONF_TRUSTED_DEVICE: "send_verification_code"}
async def test_verification_code(hass: HomeAssistantType, service: MagicMock):
"""Test verification_code step."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_TRUSTED_DEVICE: 0}
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_VERIFICATION_CODE
async def test_verification_code_success(hass: HomeAssistantType, service: MagicMock):
"""Test verification_code step success."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_TRUSTED_DEVICE: 0}
)
service.return_value.requires_2sa = False
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_VERIFICATION_CODE: "0"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == USERNAME
assert result["title"] == USERNAME
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_WITH_FAMILY] == DEFAULT_WITH_FAMILY
assert result["data"][CONF_MAX_INTERVAL] == DEFAULT_MAX_INTERVAL
assert result["data"][CONF_GPS_ACCURACY_THRESHOLD] == DEFAULT_GPS_ACCURACY_THRESHOLD
async def test_validate_verification_code_failed(
hass: HomeAssistantType, service_validate_verification_code_failed: MagicMock
):
"""Test when we have errors during validate_verification_code."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_TRUSTED_DEVICE: 0}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_VERIFICATION_CODE: "0"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == CONF_TRUSTED_DEVICE
assert result["errors"] == {"base": "validate_verification_code"}
async def test_password_update(
hass: HomeAssistantType, service_authenticated: MagicMock
):
"""Test that password reauthentication works successfully."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", unique_id=USERNAME
)
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH},
data={**MOCK_CONFIG, "unique_id": USERNAME},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_PASSWORD: PASSWORD_2}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "reauth_successful"
assert config_entry.data[CONF_PASSWORD] == PASSWORD_2
async def test_password_update_wrong_password(hass: HomeAssistantType):
"""Test that during password reauthentication wrong password returns correct error."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", unique_id=USERNAME
)
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH},
data={**MOCK_CONFIG, "unique_id": USERNAME},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
with patch(
"homeassistant.components.icloud.config_flow.PyiCloudService.authenticate",
side_effect=PyiCloudFailedLoginException(),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_PASSWORD: PASSWORD_2}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_PASSWORD: "invalid_auth"}
|
import json
import os
from tesla_powerwall import (
DeviceType,
GridStatus,
MetersAggregates,
Powerwall,
PowerwallStatus,
SiteInfo,
SiteMaster,
)
from homeassistant.components.powerwall.const import DOMAIN
from homeassistant.const import CONF_IP_ADDRESS
from tests.async_mock import MagicMock, Mock
from tests.common import load_fixture
async def _mock_powerwall_with_fixtures(hass):
"""Mock data used to build powerwall state."""
meters = await _async_load_json_fixture(hass, "meters.json")
sitemaster = await _async_load_json_fixture(hass, "sitemaster.json")
site_info = await _async_load_json_fixture(hass, "site_info.json")
status = await _async_load_json_fixture(hass, "status.json")
device_type = await _async_load_json_fixture(hass, "device_type.json")
return _mock_powerwall_return_value(
site_info=SiteInfo(site_info),
charge=47.34587394586,
sitemaster=SiteMaster(sitemaster),
meters=MetersAggregates(meters),
grid_status=GridStatus.CONNECTED,
status=PowerwallStatus(status),
device_type=DeviceType(device_type["device_type"]),
serial_numbers=["TG0123456789AB", "TG9876543210BA"],
)
def _mock_powerwall_return_value(
site_info=None,
charge=None,
sitemaster=None,
meters=None,
grid_status=None,
status=None,
device_type=None,
serial_numbers=None,
):
powerwall_mock = MagicMock(Powerwall("1.2.3.4"))
powerwall_mock.get_site_info = Mock(return_value=site_info)
powerwall_mock.get_charge = Mock(return_value=charge)
powerwall_mock.get_sitemaster = Mock(return_value=sitemaster)
powerwall_mock.get_meters = Mock(return_value=meters)
powerwall_mock.get_grid_status = Mock(return_value=grid_status)
powerwall_mock.get_status = Mock(return_value=status)
powerwall_mock.get_device_type = Mock(return_value=device_type)
powerwall_mock.get_serial_numbers = Mock(return_value=serial_numbers)
return powerwall_mock
async def _mock_powerwall_site_name(hass, site_name):
powerwall_mock = MagicMock(Powerwall("1.2.3.4"))
site_info_resp = SiteInfo(await _async_load_json_fixture(hass, "site_info.json"))
    # Override the raw response so site_info_resp.site_name returns site_name
site_info_resp.response["site_name"] = site_name
powerwall_mock.get_site_info = Mock(return_value=site_info_resp)
return powerwall_mock
def _mock_powerwall_side_effect(site_info=None):
powerwall_mock = MagicMock(Powerwall("1.2.3.4"))
powerwall_mock.get_site_info = Mock(side_effect=site_info)
return powerwall_mock
async def _async_load_json_fixture(hass, path):
fixture = await hass.async_add_executor_job(
load_fixture, os.path.join("powerwall", path)
)
return json.loads(fixture)
def _mock_get_config():
"""Return a default powerwall config."""
return {DOMAIN: {CONF_IP_ADDRESS: "1.2.3.4"}}
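# Hypothetical usage sketch (the patch target below is an assumption, not
# defined in this module):
#
#     mock_powerwall = await _mock_powerwall_with_fixtures(hass)
#     with patch(
#         "homeassistant.components.powerwall.Powerwall",
#         return_value=mock_powerwall,
#     ):
#         ...  # set up the config entry and assert on the created entities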
|
from tests.async_mock import MagicMock
def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
attribution=None,
depth=None,
magnitude=None,
mmi=None,
locality=None,
quality=None,
time=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.attribution = attribution
feed_entry.depth = depth
feed_entry.magnitude = magnitude
feed_entry.mmi = mmi
feed_entry.locality = locality
feed_entry.quality = quality
feed_entry.time = time
return feed_entry
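# Illustrative call with arbitrary test values:
#
#     entry = _generate_mock_feed_entry(
#         "1234", "Title 1", 15.5, (38.0, -3.0),
#         magnitude=5.7, depth=10.5, locality="Locality 1",
#     )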
|
import aiohttp
from homeassistant import data_entry_flow
from homeassistant.components.twentemilieu import config_flow
from homeassistant.components.twentemilieu.const import (
CONF_HOUSE_LETTER,
CONF_HOUSE_NUMBER,
CONF_POST_CODE,
DOMAIN,
)
from homeassistant.const import CONF_ID, CONTENT_TYPE_JSON
from tests.common import MockConfigEntry
FIXTURE_USER_INPUT = {
CONF_ID: "12345",
CONF_POST_CODE: "1234AB",
CONF_HOUSE_NUMBER: "1",
CONF_HOUSE_LETTER: "A",
}
async def test_show_set_form(hass):
"""Test that the setup form is served."""
flow = config_flow.TwenteMilieuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_connection_error(hass, aioclient_mock):
"""Test we show user form on Twente Milieu connection error."""
aioclient_mock.post(
"https://twentemilieuapi.ximmio.com/api/FetchAdress", exc=aiohttp.ClientError
)
flow = config_flow.TwenteMilieuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_invalid_address(hass, aioclient_mock):
"""Test we show user form on Twente Milieu invalid address error."""
aioclient_mock.post(
"https://twentemilieuapi.ximmio.com/api/FetchAdress",
json={"dataList": []},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.TwenteMilieuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "invalid_address"}
async def test_address_already_set_up(hass, aioclient_mock):
"""Test we abort if address has already been set up."""
MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT, title="12345").add_to_hass(
hass
)
aioclient_mock.post(
"https://twentemilieuapi.ximmio.com/api/FetchAdress",
json={"dataList": [{"UniqueId": "12345"}]},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.TwenteMilieuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_flow_implementation(hass, aioclient_mock):
"""Test registering an integration and finishing flow works."""
aioclient_mock.post(
"https://twentemilieuapi.ximmio.com/api/FetchAdress",
json={"dataList": [{"UniqueId": "12345"}]},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.TwenteMilieuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "12345"
assert result["data"][CONF_POST_CODE] == FIXTURE_USER_INPUT[CONF_POST_CODE]
assert result["data"][CONF_HOUSE_NUMBER] == FIXTURE_USER_INPUT[CONF_HOUSE_NUMBER]
assert result["data"][CONF_HOUSE_LETTER] == FIXTURE_USER_INPUT[CONF_HOUSE_LETTER]
|
from lemur import database
from lemur.api_keys.models import ApiKey
def get(aid):
"""
Retrieves an api key by its ID.
:param aid: The access key id to get.
:return:
"""
return database.get(ApiKey, aid)
def delete(access_key):
"""
Delete an access key. This is one way to remove a key, though you probably should just set revoked.
:param access_key:
:return:
"""
database.delete(access_key)
def revoke(aid):
"""
Revokes an api key.
:param aid:
:return:
"""
api_key = get(aid)
    setattr(api_key, "revoked", True)
return database.update(api_key)
def get_all_api_keys():
"""
Retrieves all Api Keys.
:return:
"""
return ApiKey.query.all()
def create(**kwargs):
"""
Creates a new API Key.
:param kwargs:
:return:
"""
api_key = ApiKey(**kwargs)
database.create(api_key)
return api_key
def update(api_key, **kwargs):
"""
Updates an api key.
:param api_key:
:param kwargs:
:return:
"""
for key, value in kwargs.items():
setattr(api_key, key, value)
return database.update(api_key)
def render(args):
"""
Helper to parse REST Api requests
:param args:
:return:
"""
query = database.session_query(ApiKey)
user_id = args.pop("user_id", None)
aid = args.pop("id", None)
has_permission = args.pop("has_permission", False)
requesting_user_id = args.pop("requesting_user_id")
if user_id:
query = query.filter(ApiKey.user_id == user_id)
if aid:
query = query.filter(ApiKey.id == aid)
if not has_permission:
query = query.filter(ApiKey.user_id == requesting_user_id)
return database.sort_and_page(query, ApiKey, args)
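# Illustrative call; the paging keys left in `args` are assumptions about what
# sort_and_page consumes:
#
#     render({
#         "requesting_user_id": 1,
#         "user_id": 1,
#         "has_permission": False,
#         "page": 1,
#         "count": 10,
#     })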
|
import diamond.collector
import subprocess
import os
import string
from diamond.collector import str_to_bool
class IPVSCollector(diamond.collector.Collector):
def process_config(self):
super(IPVSCollector, self).process_config()
        # --exact makes ipvsadm report exact byte counts; collect() retries
        # without the flag when the installed version does not support it.
self.statcommand = [self.config['bin'], '--list', '--stats',
'--numeric', '--exact']
self.concommand = [self.config['bin'], '--list', '--numeric']
if str_to_bool(self.config['use_sudo']):
self.statcommand.insert(0, self.config['sudo_cmd'])
self.concommand.insert(0, self.config['sudo_cmd'])
# The -n (non-interactive) option prevents sudo from
# prompting the user for a password.
self.statcommand.insert(1, '-n')
self.concommand.insert(1, '-n')
def get_default_config_help(self):
config_help = super(IPVSCollector, self).get_default_config_help()
config_help.update({
'bin': 'Path to ipvsadm binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(IPVSCollector, self).get_default_config()
config.update({
'bin': '/usr/sbin/ipvsadm',
'use_sudo': True,
'sudo_cmd': '/usr/bin/sudo',
'path': 'ipvs'
})
return config
def collect(self):
if not os.access(self.config['bin'], os.X_OK):
self.log.error("%s does not exist, or is not executable",
self.config['bin'])
return False
if ((str_to_bool(self.config['use_sudo']) and
not os.access(self.config['sudo_cmd'], os.X_OK))):
self.log.error("%s does not exist, or is not executable",
self.config['sudo_cmd'])
return False
p = subprocess.Popen(self.statcommand, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
if p.returncode == 255:
self.statcommand = filter(
lambda a: a != '--exact', self.statcommand)
p = subprocess.Popen(self.statcommand,
stdout=subprocess.PIPE).communicate()[0][:-1]
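        # `ipvsadm --list --stats --numeric` output (illustrative): three
        # header lines, then per-service TCP/UDP rows carrying the totals
        # (Conns InPkts OutPkts InBytes OutBytes in columns 2-6), each
        # followed by per-backend rows prefixed with "->".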
columns = {
'conns': 2,
'inpkts': 3,
'outpkts': 4,
'inbytes': 5,
'outbytes': 6,
}
external = ""
backend = ""
for i, line in enumerate(p.split("\n")):
if i < 3:
continue
row = line.split()
if row[0] == "TCP" or row[0] == "UDP":
external = row[0] + "_" + string.replace(row[1], ".", "_")
backend = "total"
elif row[0] == "->":
backend = string.replace(row[1], ".", "_")
else:
continue
for metric, column in columns.iteritems():
metric_name = ".".join([external, backend, metric])
                # ipvsadm scales large counters with K/M/G suffixes;
                # convert them back to plain numbers.
                value = row[column]
if value.endswith('K'):
metric_value = int(value[0:len(value) - 1]) * 1024
elif value.endswith('M'):
metric_value = (int(value[0:len(value) - 1]) * 1024 * 1024)
elif value.endswith('G'):
metric_value = (
int(value[0:len(value) - 1]) * 1024 * 1024 * 1024)
else:
metric_value = float(value)
self.publish(metric_name, metric_value)
p = subprocess.Popen(self.concommand,
stdout=subprocess.PIPE).communicate()[0][:-1]
columns = {
'active': 4,
'inactive': 5,
}
external = ""
backend = ""
total = {}
for i, line in enumerate(p.split("\n")):
if i < 3:
continue
row = line.split()
if row[0] == "TCP" or row[0] == "UDP":
if total:
for metric, value in total.iteritems():
self.publish(
".".join([external, "total", metric]), value)
for k in columns.keys():
total[k] = 0.0
external = row[0] + "_" + string.replace(row[1], ".", "_")
continue
elif row[0] == "->":
backend = string.replace(row[1], ".", "_")
else:
continue
for metric, column in columns.iteritems():
metric_name = ".".join([external, backend, metric])
                # ipvsadm scales large counters with K/M/G suffixes;
                # convert them back to plain numbers.
                value = row[column]
if value.endswith('K'):
metric_value = int(value[0:len(value) - 1]) * 1024
elif value.endswith('M'):
metric_value = int(value[0:len(value) - 1]) * 1024 * 1024
elif value.endswith('G'):
metric_value = (
int(value[0:len(value) - 1]) * 1024 * 1024 * 1024)
else:
metric_value = float(value)
total[metric] += metric_value
self.publish(metric_name, metric_value)
if total:
for metric, value in total.iteritems():
self.publish(".".join([external, "total", metric]), value)
|
import unittest
from kalliope.core.Cortex import Cortex
class TestCortex(unittest.TestCase):
def setUp(self):
# cleanup the cortex memory
Cortex.memory = dict()
Cortex.temp = dict()
def test_get_memory(self):
test_memory = {
"key1": "value1",
"key2": "value2"
}
Cortex.memory = test_memory
self.assertDictEqual(test_memory, Cortex.get_memory())
def test_save(self):
key_to_save = "key1"
value_to_save = "value1"
expected_memory = {
"key1": "value1"
}
Cortex.save(key=key_to_save, value=value_to_save)
self.assertDictEqual(expected_memory, Cortex.memory)
def test_get_from_key(self):
test_memory = {
"key1": "value1",
"key2": "value2"
}
Cortex.memory = test_memory
expected_value = "value2"
self.assertEqual(expected_value, Cortex.get_from_key("key2"))
def test_add_parameters_from_order(self):
order_parameters = {
"key1": "value1",
"key2": "value2"
}
expected_temp_dict = {
"key1": "value1",
"key2": "value2"
}
Cortex.add_parameters_from_order(order_parameters)
self.assertDictEqual(Cortex.temp, expected_temp_dict)
def test_clean_parameter_from_order(self):
Cortex.temp = {
"key1": "value1",
"key2": "value2"
}
Cortex.clean_parameter_from_order()
expected_temp_dict = dict()
        self.assertDictEqual(expected_temp_dict, Cortex.temp)
def test_save_neuron_parameter_in_memory(self):
        # test with a parameter value that is a single bracketed variable
neuron1_parameters = {
"key1": "value1",
"key2": "value2"
}
dict_val_to_save = {"my_key_in_memory": "{{key1}}"}
expected_dict = {"my_key_in_memory": "value1"}
Cortex.save_neuron_parameter_in_memory(kalliope_memory_dict=dict_val_to_save,
neuron_parameters=neuron1_parameters)
self.assertDictEqual(expected_dict, Cortex.memory)
        # test with a parameter value mixing a plain string and a bracketed variable
self.setUp() # clean
neuron1_parameters = {
"key1": "value1",
"key2": "value2"
}
dict_val_to_save = {"my_key_in_memory": "string {{key1}}"}
expected_dict = {"my_key_in_memory": "string value1"}
Cortex.save_neuron_parameter_in_memory(kalliope_memory_dict=dict_val_to_save,
neuron_parameters=neuron1_parameters)
self.assertDictEqual(expected_dict, Cortex.memory)
        # test with a plain string value; neuron parameters are not used
self.setUp() # clean
neuron1_parameters = {
"key1": "value1",
"key2": "value2"
}
dict_val_to_save = {"my_key_in_memory": "string"}
expected_dict = {"my_key_in_memory": "string"}
Cortex.save_neuron_parameter_in_memory(kalliope_memory_dict=dict_val_to_save,
neuron_parameters=neuron1_parameters)
self.assertDictEqual(expected_dict, Cortex.memory)
        # test with no kalliope_memory dict to save
self.setUp() # clean
neuron1_parameters = {
"key1": "value1",
"key2": "value2"
}
dict_val_to_save = None
Cortex.save_neuron_parameter_in_memory(kalliope_memory_dict=dict_val_to_save,
neuron_parameters=neuron1_parameters)
self.assertDictEqual(dict(), Cortex.memory)
def test_save_parameter_from_order_in_memory(self):
# Test with a value that exist in the temp memory
order_parameters = {
"key1": "value1",
"key2": "value2"
}
Cortex.temp = order_parameters
dict_val_to_save = {"my_key_in_memory": "{{key1}}"}
expected_dict = {"my_key_in_memory": "value1"}
Cortex.save_parameter_from_order_in_memory(dict_val_to_save)
self.assertDictEqual(expected_dict, Cortex.memory)
# test with a value that does not exist
order_parameters = {
"key1": "value1",
"key2": "value2"
}
Cortex.temp = order_parameters
dict_val_to_save = {"my_key_in_memory": "{{key3}}"}
self.assertFalse(Cortex.save_parameter_from_order_in_memory(dict_val_to_save))
# save a value with no brackets
dict_val_to_save = {"my_key_in_memory": "my value"}
expected_dict = {"my_key_in_memory": "my value"}
self.assertTrue(Cortex.save_parameter_from_order_in_memory(dict_val_to_save))
self.assertDictEqual(expected_dict, Cortex.memory)
if __name__ == '__main__':
unittest.main()
|
import json
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
from assertions import assert_cassette_empty, assert_is_json
tornado = pytest.importorskip("tornado")
http = pytest.importorskip("tornado.httpclient")
# whether the current version of Tornado supports the raise_error argument for
# fetch().
supports_raise_error = tornado.version_info >= (4,)
@pytest.fixture(params=["simple", "curl", "default"])
def get_client(request):
if request.param == "simple":
from tornado import simple_httpclient as simple
return lambda: simple.SimpleAsyncHTTPClient()
elif request.param == "curl":
curl = pytest.importorskip("tornado.curl_httpclient")
return lambda: curl.CurlAsyncHTTPClient()
else:
return lambda: http.AsyncHTTPClient()
def get(client, url, **kwargs):
fetch_kwargs = {}
if supports_raise_error:
fetch_kwargs["raise_error"] = kwargs.pop("raise_error", True)
return client.fetch(http.HTTPRequest(url, method="GET", **kwargs), **fetch_kwargs)
def post(client, url, data=None, **kwargs):
if data:
kwargs["body"] = json.dumps(data)
return client.fetch(http.HTTPRequest(url, method="POST", **kwargs))
@pytest.fixture(params=["https", "http"])
def scheme(request):
"""Fixture that returns both http and https."""
return request.param
@pytest.mark.gen_test
def test_status_code(get_client, scheme, tmpdir):
"""Ensure that we can read the status code"""
url = scheme + "://httpbin.org/"
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
status_code = (yield get(get_client(), url)).code
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
assert status_code == (yield get(get_client(), url)).code
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_headers(get_client, scheme, tmpdir):
"""Ensure that we can read the headers back"""
url = scheme + "://httpbin.org/"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
headers = (yield get(get_client(), url)).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
assert headers == (yield get(get_client(), url)).headers
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_body(get_client, tmpdir, scheme):
"""Ensure the responses are all identical enough"""
url = scheme + "://httpbin.org/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
content = (yield get(get_client(), url)).body
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
assert content == (yield get(get_client(), url)).body
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_effective_url(get_client, scheme, tmpdir):
"""Ensure that the effective_url is captured"""
url = scheme + "://httpbin.org/redirect-to?url=/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
effective_url = (yield get(get_client(), url)).effective_url
assert effective_url == scheme + "://httpbin.org/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
assert effective_url == (yield get(get_client(), url)).effective_url
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_auth(get_client, tmpdir, scheme):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
url = scheme + "://httpbin.org/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))) as cass:
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
assert one.body == two.body
assert one.code == two.code
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_auth_failed(get_client, tmpdir, scheme):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
url = scheme + "://httpbin.org/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
with pytest.raises(http.HTTPError) as exc_info:
yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
one = exc_info.value.response
assert exc_info.value.code == 401
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
with pytest.raises(http.HTTPError) as exc_info:
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
two = exc_info.value.response
assert exc_info.value.code == 401
assert one.body == two.body
assert one.code == two.code == 401
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_post(get_client, tmpdir, scheme):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = scheme + "://httpbin.org/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req1 = (yield post(get_client(), url, data)).body
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
req2 = (yield post(get_client(), url, data)).body
assert req1 == req2
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_redirects(get_client, tmpdir, scheme):
"""Ensure that we can handle redirects"""
url = scheme + "://httpbin.org/redirect-to?url=bytes/1024"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
content = (yield get(get_client(), url)).body
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
assert content == (yield get(get_client(), url)).body
assert cass.play_count == 1
@pytest.mark.gen_test
def test_cross_scheme(get_client, tmpdir, scheme):
"""Ensure that requests between schemes are treated separately"""
    # First fetch a url under https, and then again under http, and then
    # ensure that we haven't served anything out of cache, and that we have
    # two request / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
yield get(get_client(), "https://httpbin.org/")
yield get(get_client(), "http://httpbin.org/")
assert cass.play_count == 0
assert len(cass) == 2
# Then repeat the same requests and ensure both were replayed.
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
yield get(get_client(), "https://httpbin.org/")
yield get(get_client(), "http://httpbin.org/")
assert cass.play_count == 2
@pytest.mark.gen_test
def test_gzip(get_client, tmpdir, scheme):
"""
Ensure that httpclient is able to automatically decompress the response
body
"""
url = scheme + "://httpbin.org/gzip"
# use_gzip was renamed to decompress_response in 4.0
kwargs = {}
if tornado.version_info < (4,):
kwargs["use_gzip"] = True
else:
kwargs["decompress_response"] = True
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = yield get(get_client(), url, **kwargs)
assert_is_json(response.body)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
response = yield get(get_client(), url, **kwargs)
assert_is_json(response.body)
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_https_with_cert_validation_disabled(get_client, tmpdir):
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
with vcr.use_cassette(cass_path):
yield get(get_client(), "https://httpbin.org", validate_cert=False)
with vcr.use_cassette(cass_path) as cass:
yield get(get_client(), "https://httpbin.org", validate_cert=False)
assert 1 == cass.play_count
@pytest.mark.gen_test
def test_unsupported_features_raises_in_future(get_client, tmpdir):
"""Ensure that the exception for an AsyncHTTPClient feature not being
supported is raised inside the future."""
def callback(chunk):
assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
future = get(get_client(), "http://httpbin.org", streaming_callback=callback)
with pytest.raises(Exception) as excinfo:
yield future
assert "not yet supported by VCR" in str(excinfo)
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.gen_test
def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
"""Ensure that the exception for an AsyncHTTPClient feature not being
supported is not raised if raise_error=False."""
def callback(chunk):
assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
response = yield get(
get_client(), "http://httpbin.org", streaming_callback=callback, raise_error=False
)
assert "not yet supported by VCR" in str(response.error)
@pytest.mark.gen_test
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
the future."""
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
yield get(get_client(), "http://httpbin.org/get")
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
future = get(get_client(), "http://httpbin.org/headers")
with pytest.raises(CannotOverwriteExistingCassetteException):
yield future
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.gen_test
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
raise_error=False in the fetch() call."""
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
yield get(get_client(), "http://httpbin.org/get", raise_error=False)
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
response = yield get(get_client(), "http://httpbin.org/headers", raise_error=False)
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
@pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
def test_tornado_with_decorator_use_cassette(get_client):
response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET"))
assert response.body.decode("utf-8") == "not actually google"
@pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
def test_tornado_exception_can_be_caught(get_client):
try:
yield get(get_client(), "http://httpbin.org/status/500")
except http.HTTPError as e:
assert e.code == 500
try:
yield get(get_client(), "http://httpbin.org/status/404")
except http.HTTPError as e:
assert e.code == 404
@pytest.mark.gen_test
def test_existing_references_get_patched(tmpdir):
from tornado.httpclient import AsyncHTTPClient
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
client = AsyncHTTPClient()
yield get(client, "http://httpbin.org/get")
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
yield get(client, "http://httpbin.org/get")
assert cass.play_count == 1
@pytest.mark.gen_test
def test_existing_instances_get_patched(get_client, tmpdir):
"""Ensure that existing instances of AsyncHTTPClient get patched upon
entering VCR context."""
client = get_client()
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
yield get(client, "http://httpbin.org/get")
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
yield get(client, "http://httpbin.org/get")
assert cass.play_count == 1
@pytest.mark.gen_test
def test_request_time_is_set(get_client, tmpdir):
"""Ensures that the request_time on HTTPResponses is set."""
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
client = get_client()
response = yield get(client, "http://httpbin.org/get")
assert response.request_time is not None
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
client = get_client()
response = yield get(client, "http://httpbin.org/get")
assert response.request_time is not None
assert cass.play_count == 1
|
import json
import pytest
from .common import MQTTMessage
from tests.async_mock import patch
from tests.common import load_fixture
from tests.components.light.conftest import mock_light_profiles # noqa
@pytest.fixture(name="generic_data", scope="session")
def generic_data_fixture():
"""Load generic MQTT data and return it."""
return load_fixture("ozw/generic_network_dump.csv")
@pytest.fixture(name="fan_data", scope="session")
def fan_data_fixture():
"""Load fan MQTT data and return it."""
return load_fixture("ozw/fan_network_dump.csv")
@pytest.fixture(name="light_data", scope="session")
def light_data_fixture():
"""Load light dimmer MQTT data and return it."""
return load_fixture("ozw/light_network_dump.csv")
@pytest.fixture(name="light_new_ozw_data", scope="session")
def light_new_ozw_data_fixture():
"""Load light dimmer MQTT data and return it."""
return load_fixture("ozw/light_new_ozw_network_dump.csv")
@pytest.fixture(name="light_no_ww_data", scope="session")
def light_no_ww_data_fixture():
"""Load light dimmer MQTT data and return it."""
return load_fixture("ozw/light_no_ww_network_dump.csv")
@pytest.fixture(name="light_no_cw_data", scope="session")
def light_no_cw_data_fixture():
"""Load light dimmer MQTT data and return it."""
return load_fixture("ozw/light_no_cw_network_dump.csv")
@pytest.fixture(name="light_wc_data", scope="session")
def light_wc_only_data_fixture():
"""Load light dimmer MQTT data and return it."""
return load_fixture("ozw/light_wc_network_dump.csv")
@pytest.fixture(name="cover_data", scope="session")
def cover_data_fixture():
"""Load cover MQTT data and return it."""
return load_fixture("ozw/cover_network_dump.csv")
@pytest.fixture(name="cover_gdo_data", scope="session")
def cover_gdo_data_fixture():
"""Load cover_gdo MQTT data and return it."""
return load_fixture("ozw/cover_gdo_network_dump.csv")
@pytest.fixture(name="climate_data", scope="session")
def climate_data_fixture():
"""Load climate MQTT data and return it."""
return load_fixture("ozw/climate_network_dump.csv")
@pytest.fixture(name="lock_data", scope="session")
def lock_data_fixture():
"""Load lock MQTT data and return it."""
return load_fixture("ozw/lock_network_dump.csv")
@pytest.fixture(name="string_sensor_data", scope="session")
def string_sensor_fixture():
"""Load string sensor MQTT data and return it."""
return load_fixture("ozw/sensor_string_value_network_dump.csv")
@pytest.fixture(name="sent_messages")
def sent_messages_fixture():
"""Fixture to capture sent messages."""
sent_messages = []
with patch(
"homeassistant.components.mqtt.async_publish",
side_effect=lambda hass, topic, payload: sent_messages.append(
{"topic": topic, "payload": json.loads(payload)}
),
):
yield sent_messages
@pytest.fixture(name="fan_msg")
async def fan_msg_fixture(hass):
"""Return a mock MQTT msg with a fan actuator message."""
fan_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/fan.json")
)
message = MQTTMessage(topic=fan_json["topic"], payload=fan_json["payload"])
message.encode()
return message
@pytest.fixture(name="light_msg")
async def light_msg_fixture(hass):
"""Return a mock MQTT msg with a light actuator message."""
light_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/light.json")
)
message = MQTTMessage(topic=light_json["topic"], payload=light_json["payload"])
message.encode()
return message
@pytest.fixture(name="light_no_rgb_msg")
async def light_no_rgb_msg_fixture(hass):
"""Return a mock MQTT msg with a light actuator message."""
light_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/light_no_rgb.json")
)
message = MQTTMessage(topic=light_json["topic"], payload=light_json["payload"])
message.encode()
return message
@pytest.fixture(name="light_rgb_msg")
async def light_rgb_msg_fixture(hass):
"""Return a mock MQTT msg with a light actuator message."""
light_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/light_rgb.json")
)
message = MQTTMessage(topic=light_json["topic"], payload=light_json["payload"])
message.encode()
return message
@pytest.fixture(name="light_pure_rgb_msg")
async def light_pure_rgb_msg_fixture(hass):
"""Return a mock MQTT msg with a pure rgb light actuator message."""
light_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/light_pure_rgb.json")
)
message = MQTTMessage(topic=light_json["topic"], payload=light_json["payload"])
message.encode()
return message
@pytest.fixture(name="switch_msg")
async def switch_msg_fixture(hass):
"""Return a mock MQTT msg with a switch actuator message."""
switch_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/switch.json")
)
message = MQTTMessage(topic=switch_json["topic"], payload=switch_json["payload"])
message.encode()
return message
@pytest.fixture(name="sensor_msg")
async def sensor_msg_fixture(hass):
"""Return a mock MQTT msg with a sensor change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/sensor.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="binary_sensor_msg")
async def binary_sensor_msg_fixture(hass):
"""Return a mock MQTT msg with a binary_sensor change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/binary_sensor.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="binary_sensor_alt_msg")
async def binary_sensor_alt_msg_fixture(hass):
"""Return a mock MQTT msg with a binary_sensor change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/binary_sensor_alt.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="cover_msg")
async def cover_msg_fixture(hass):
"""Return a mock MQTT msg with a cover level change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/cover.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="cover_gdo_msg")
async def cover_gdo_msg_fixture(hass):
"""Return a mock MQTT msg with a cover barrier state change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/cover_gdo.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="climate_msg")
async def climate_msg_fixture(hass):
"""Return a mock MQTT msg with a climate mode change message."""
sensor_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/climate.json")
)
message = MQTTMessage(topic=sensor_json["topic"], payload=sensor_json["payload"])
message.encode()
return message
@pytest.fixture(name="lock_msg")
async def lock_msg_fixture(hass):
"""Return a mock MQTT msg with a lock actuator message."""
lock_json = json.loads(
await hass.async_add_executor_job(load_fixture, "ozw/lock.json")
)
message = MQTTMessage(topic=lock_json["topic"], payload=lock_json["payload"])
message.encode()
return message
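# The message fixtures above all share one shape; a sketch of a factory that
# could generate them (hypothetical refactoring, shown for illustration only):
def _make_msg_fixture(name, fixture_file):
    """Build a fixture returning an encoded MQTTMessage from a JSON dump."""
    @pytest.fixture(name=name)
    async def _msg_fixture(hass):
        payload_json = json.loads(
            await hass.async_add_executor_job(load_fixture, fixture_file)
        )
        message = MQTTMessage(
            topic=payload_json["topic"], payload=payload_json["payload"]
        )
        message.encode()
        return message
    return _msg_fixture
# e.g.: light_msg = _make_msg_fixture("light_msg", "ozw/light.json")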
|
import io
import re
import gzip
import pprint
import os.path
import contextlib
import pathlib
import importlib.util
import importlib.machinery
import pytest
from PyQt5.QtCore import qVersion
try:
from PyQt5.QtWebEngine import PYQT_WEBENGINE_VERSION_STR
except ImportError:
PYQT_WEBENGINE_VERSION_STR = None
from qutebrowser.utils import qtutils, log
ON_CI = 'CI' in os.environ
qt514 = pytest.mark.skipif(
not qtutils.version_check('5.14'), reason="Needs Qt 5.14 or newer")
class PartialCompareOutcome:
"""Storage for a partial_compare error.
Evaluates to False if an error was found.
Attributes:
error: A string describing an error or None.
"""
def __init__(self, error=None):
self.error = error
def __bool__(self):
return self.error is None
def __repr__(self):
return 'PartialCompareOutcome(error={!r})'.format(self.error)
def __str__(self):
return 'true' if self.error is None else 'false'
def print_i(text, indent, error=False):
    """Print an indented line, visually flagging it when it marks an error."""
if error:
text = '| ****** {} ******'.format(text)
for line in text.splitlines():
print('| ' * indent + line)
def _partial_compare_dict(val1, val2, *, indent):
for key in val2:
if key not in val1:
outcome = PartialCompareOutcome(
"Key {!r} is in second dict but not in first!".format(key))
print_i(outcome.error, indent, error=True)
return outcome
outcome = partial_compare(val1[key], val2[key], indent=indent + 1)
if not outcome:
return outcome
return PartialCompareOutcome()
def _partial_compare_list(val1, val2, *, indent):
if len(val1) < len(val2):
outcome = PartialCompareOutcome(
"Second list is longer than first list")
print_i(outcome.error, indent, error=True)
return outcome
for item1, item2 in zip(val1, val2):
outcome = partial_compare(item1, item2, indent=indent + 1)
if not outcome:
return outcome
return PartialCompareOutcome()
def _partial_compare_float(val1, val2, *, indent):
if val1 == pytest.approx(val2):
return PartialCompareOutcome()
return PartialCompareOutcome("{!r} != {!r} (float comparison)".format(
val1, val2))
def _partial_compare_str(val1, val2, *, indent):
if pattern_match(pattern=val2, value=val1):
return PartialCompareOutcome()
return PartialCompareOutcome("{!r} != {!r} (pattern matching)".format(
val1, val2))
def _partial_compare_eq(val1, val2, *, indent):
if val1 == val2:
return PartialCompareOutcome()
return PartialCompareOutcome("{!r} != {!r}".format(val1, val2))
def gha_group_begin(name):
"""Get a string to begin a GitHub Actions group.
Should only be called on CI.
"""
assert ON_CI
return '::group::' + name
def gha_group_end():
"""Get a string to end a GitHub Actions group.
Should only be called on CI.
"""
assert ON_CI
return '::endgroup::'
def partial_compare(val1, val2, *, indent=0):
"""Do a partial comparison between the given values.
For dicts, keys in val2 are checked, others are ignored.
For lists, entries at the positions in val2 are checked, others ignored.
For other values, == is used.
This happens recursively.
"""
if ON_CI and indent == 0:
print(gha_group_begin('Comparison'))
print_i("Comparing", indent)
print_i(pprint.pformat(val1), indent + 1)
print_i("|---- to ----", indent)
print_i(pprint.pformat(val2), indent + 1)
if val2 is Ellipsis:
print_i("Ignoring ellipsis comparison", indent, error=True)
return PartialCompareOutcome()
elif type(val1) != type(val2): # pylint: disable=unidiomatic-typecheck
outcome = PartialCompareOutcome(
"Different types ({}, {}) -> False".format(type(val1).__name__,
type(val2).__name__))
print_i(outcome.error, indent, error=True)
return outcome
handlers = {
dict: _partial_compare_dict,
list: _partial_compare_list,
float: _partial_compare_float,
str: _partial_compare_str,
}
for typ, handler in handlers.items():
if isinstance(val2, typ):
print_i("|======= Comparing as {}".format(typ.__name__), indent)
outcome = handler(val1, val2, indent=indent)
break
else:
print_i("|======= Comparing via ==", indent)
outcome = _partial_compare_eq(val1, val2, indent=indent)
print_i("---> {}".format(outcome), indent)
if ON_CI and indent == 0:
print(gha_group_end())
return outcome
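# A small, self-checking illustration of the semantics described above
# (example values are arbitrary): extra keys in the first value are ignored,
# missing keys fail, and an Ellipsis on the right matches anything.
def _partial_compare_examples():
    assert partial_compare({'a': 1, 'b': 2}, {'a': 1})
    assert not partial_compare({'a': 1}, {'a': 1, 'b': 2})
    assert partial_compare({'a': 1}, {'a': ...})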
def pattern_match(*, pattern, value):
"""Do fnmatch.fnmatchcase like matching, but only with * active.
Return:
True on a match, False otherwise.
"""
re_pattern = '.*'.join(re.escape(part) for part in pattern.split('*'))
return re.fullmatch(re_pattern, value, flags=re.DOTALL) is not None
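# For example, only '*' is special; fnmatch's '?' and '[]' are matched
# literally:
#
#     pattern_match(pattern='http://*/data', value='http://foo/data')  # True
#     pattern_match(pattern='foo?', value='fooX')                      # False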
def abs_datapath():
"""Get the absolute path to the end2end data directory."""
file_abs = os.path.abspath(os.path.dirname(__file__))
return os.path.join(file_abs, '..', 'end2end', 'data')
@contextlib.contextmanager
def nop_contextmanager():
yield
@contextlib.contextmanager
def change_cwd(path):
"""Use a path as current working directory."""
old_cwd = pathlib.Path.cwd()
os.chdir(str(path))
try:
yield
finally:
os.chdir(str(old_cwd))
@contextlib.contextmanager
def ignore_bs4_warning():
"""WORKAROUND for https://bugs.launchpad.net/beautifulsoup/+bug/1847592."""
with log.py_warning_filter(
category=DeprecationWarning,
message="Using or importing the ABCs from 'collections' instead "
"of from 'collections.abc' is deprecated", module='bs4.element'):
yield
def blocked_hosts():
    """Yield lines from the gzipped blocked-hosts test data file."""
path = os.path.join(abs_datapath(), 'blocked-hosts.gz')
yield from io.TextIOWrapper(gzip.open(path), encoding='utf-8')
def seccomp_args(qt_flag):
"""Get necessary flags to disable the seccomp BPF sandbox.
This is needed for some QtWebEngine setups, with older Qt versions but
newer kernels.
Args:
qt_flag: Add a '--qt-flag' argument.
"""
affected_versions = set()
for base, patch_range in [
# 5.12.0 to 5.12.7 (inclusive)
('5.12', range(0, 8)),
# 5.13.0 to 5.13.2 (inclusive)
('5.13', range(0, 3)),
# 5.14.0
('5.14', [0]),
]:
for patch in patch_range:
affected_versions.add('{}.{}'.format(base, patch))
version = (PYQT_WEBENGINE_VERSION_STR
if PYQT_WEBENGINE_VERSION_STR is not None
else qVersion())
if version in affected_versions:
disable_arg = 'disable-seccomp-filter-sandbox'
return ['--qt-flag', disable_arg] if qt_flag else ['--' + disable_arg]
return []
def import_userscript(name):
"""Import a userscript via importlib.
This is needed because userscripts don't have a .py extension and violate
Python's module naming convention.
"""
repo_root = pathlib.Path(__file__).resolve().parents[2]
script_path = repo_root / 'misc' / 'userscripts' / name
module_name = name.replace('-', '_')
loader = importlib.machinery.SourceFileLoader(
module_name, str(script_path))
spec = importlib.util.spec_from_loader(module_name, loader)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
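# Hypothetical usage, assuming a 'view_in_mpv' userscript exists under
# misc/userscripts (as in the upstream repository layout):
#
#     module = import_userscript('view_in_mpv')
#     assert module.__name__ == 'view_in_mpv'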
|
from typing import Callable, List, Union
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .common import BaseWithingsSensor, async_create_entities
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
entities = await async_create_entities(
hass,
entry,
WithingsHealthSensor,
SENSOR_DOMAIN,
)
async_add_entities(entities, True)
class WithingsHealthSensor(BaseWithingsSensor):
"""Implementation of a Withings sensor."""
@property
def state(self) -> Union[None, str, int, float]:
"""Return the state of the entity."""
return self._state_data
|
import pytest
from lemur.tests.vectors import (
VALID_ADMIN_API_TOKEN,
VALID_ADMIN_HEADER_TOKEN,
VALID_USER_HEADER_TOKEN,
)
from lemur.logs.views import * # noqa
def test_private_key_audit(client, certificate):
from lemur.certificates.views import CertificatePrivateKey, api
assert len(certificate.logs) == 0
client.get(
api.url_for(CertificatePrivateKey, certificate_id=certificate.id),
headers=VALID_ADMIN_HEADER_TOKEN,
)
assert len(certificate.logs) == 1
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
(VALID_ADMIN_API_TOKEN, 200),
("", 401),
],
)
def test_get_logs(client, token, status):
assert client.get(api.url_for(LogsList), headers=token).status_code == status
|
from flask import Flask, jsonify, request
from flask.views import MethodView
from flasgger import Swagger
from flasgger.utils import swag_from
app = Flask(__name__)
# config your API specs
# you can define multiple specs in the case your api has multiple versions
# omit configs to get the default (all views exposed in /spec url)
# rule_filter is a callable that receives "Rule" object and
# returns a boolean to filter in only desired views
app.config['SWAGGER'] = {
"swagger_version": "2.0",
"title": "Flasgger",
"headers": [
('Access-Control-Allow-Origin', '*'),
('Access-Control-Allow-Methods', "GET, POST, PUT, DELETE, OPTIONS"),
('Access-Control-Allow-Credentials', "true"),
],
"specs": [
{
"version": "0.0.1",
"title": "Api v1",
"endpoint": 'v1_spec',
"description": 'This is the version 1 of our API',
"route": '/v1/spec',
# rule_filter is optional
# it is a callable to filter the views to extract
"rule_filter": lambda rule: rule.endpoint.startswith(
'should_be_v1_only'
),
# definition_filter is optional
# it is a callable to filter the definition models to include
"definition_filter": lambda definition: (
'v1_model' in definition.tags)
},
{
"version": "0.0.2",
"title": "Api v2",
"description": 'This is the version 2 of our API',
"endpoint": 'v2_spec',
"route": '/v2/spec',
"rule_filter": lambda rule: rule.endpoint.startswith(
'should_be_v2_only'
),
"definition_filter": lambda definition: (
'v2_model' in definition.tags)
}
]
}
swag = Swagger(app) # you can pass config here Swagger(config={})
@app.after_request
def allow_origin(response):
response.headers['Access-Control-Allow-Origin'] = 'http://example.com'
response.headers['Access-Control-Allow-Credentials'] = 'true'
return response
class UserAPI(MethodView):
def get(self, team_id):
"""
Get a list of users
First line is the summary
        All following lines until the hyphens are added to the description
---
tags:
- users
parameters:
- name: team_id
in: path
description: ID of team (type any number)
required: true
type: integer
default: 42
responses:
200:
description: Returns a list of users
schema:
type: array
items:
$ref: '#/definitions/User'
"""
data = {
"users": [
{"name": "Steven Wilson", "team": team_id},
{"name": "Mikael Akerfeldt", "team": team_id},
{"name": "Daniel Gildenlow", "team": team_id}
]
}
return jsonify(data)
def post(self):
"""
Create a new user
First line is the summary
        All following lines until the hyphens are added to the description
---
tags:
- users
parameters:
- in: body
name: body
schema:
id: User
required:
- team
- name
properties:
team:
type: integer
description: team for user
default: 42
name:
type: string
description: name for user
default: Russel Allen
responses:
201:
description: New user created
schema:
type: array
items:
$ref: '#/definitions/User'
import: "not_found.yaml"
"""
return jsonify(
{"data": request.json, "status": "New user created"}
), 201
view = UserAPI.as_view('users')
app.add_url_rule(
'/v1/users/<int:team_id>',
view_func=view,
methods=['GET'],
endpoint='should_be_v1_only'
)
app.add_url_rule(
'/v1/users',
view_func=view,
methods=['POST'],
endpoint='should_be_v1_only_post'
)
# LOADING SPECS FROM EXTERNAL FILE
@app.route('/v1/decorated/<username>', endpoint='should_be_v1_only_username')
@swag_from('username_specs.yml')
def fromfile_decorated(username):
return jsonify({'username': username})
@app.route('/v1/decorated_no_descr/<username>', endpoint='should_be_v1_only_username_no_descr')
@swag_from('username_specs_no_descr.yml')
def fromfile_decorated_no_descr(username):
return jsonify({'username': username})
@app.route('/v1/decorated_no_sep/<username>', endpoint='should_be_v1_only_username_no_sep')
@swag_from('username_specs_no_sep.yml')
def fromfile_decorated_no_sep(username):
return jsonify({'username': username})
@app.route('/v1/decorated_bom/<username>', endpoint='should_be_v1_only_username_bom')
@swag_from('username_specs_bom.yml')
def fromfile_decorated_bom(username):
return jsonify({'username': username})
@app.route('/v1/decorated_utf16/<username>', endpoint='should_be_v1_only_username_utf16')
@swag_from('username_specs_utf16.yml')
def fromfile_decorated_utf16(username):
return jsonify({'username': username})
@app.route('/v1/decorated_utf32/<username>', endpoint='should_be_v1_only_username_utf32')
@swag_from('username_specs_utf32.yml')
def fromfile_decorated_utf32(username):
return jsonify({'username': username})
try:
from pathlib import Path
@app.route('/v1/decorated_pathlib_path/<username>',
endpoint='should_be_v1_only_username_pathlib_path')
@swag_from(Path('username_specs.yml'))
def fromfile_decorated(username):
return jsonify({'username': username})
except ImportError:
pass
# OR
@app.route('/v1/fileindoc/<username>', endpoint='should_be_v1_only_username_1')
def fromfile_indocstring(username):
"""
file: username_specs.yml
"""
return jsonify({'username': username})
@app.route('/v1/fileindoc_no_descr/<username>', endpoint='should_be_v1_only_username_no_descr_1')
def fromfile_indocstring_no_descr(username):
"""
file: username_specs_no_descr.yml
"""
return jsonify({'username': username})
@app.route('/v1/fileindoc_no_sep/<username>', endpoint='should_be_v1_only_username_no_sep_1')
def fromfile_indocstring_no_sep(username):
"""
file: username_specs_no_sep.yml
"""
return jsonify({'username': username})
# DEFINITIONS FROM OBJECTS
@swag.definition('Hack', tags=['v2_model'])
def hack(subitems):
"""
Hack Object
---
properties:
hack:
type: string
description: it's a hack
subitems:
type: array
items:
$ref: '#/definitions/SubItem'
"""
return {
'hack': "string",
'subitems': [subitem.dump() for subitem in subitems]
}
@swag.definition('SubItem', tags=['v2_model'])
class SubItem(object):
"""
SubItem Object
---
properties:
bla:
type: string
description: Bla
blu:
type: integer
description: Blu
"""
def __init__(self, bla, blu):
self.bla = str(bla)
self.blu = int(blu)
def dump(self):
return dict(vars(self).items())
@app.route("/v2/resource", endpoint="should_be_v2_only")
def bla():
"""
An endpoint that isn't using method view
---
tags:
- hacking
responses:
200:
description: Hacked some hacks
schema:
$ref: '#/definitions/Hack'
"""
subitems = [SubItem("string", 0)]
return jsonify(hack(subitems))
@swag.definition('rec_query_context', tags=['v2_model'])
class RecQueryContext(object):
"""
Recommendation Query Context
---
required:
- origin
properties:
origin:
type: string
default: sugestao
last_event:
type: object
schema:
$ref: '#/definitions/rec_query_context_last_event'
"""
def __init__(self, origin, last_event=None):
self.origin = origin
self.last_event = last_event
def dump(self):
data = {'origin': self.origin}
if self.last_event:
data.update({'last_event': self.last_event.dump()})
return data
@swag.definition('rec_query_context_last_event', tags=['v2_model'])
class RecQueryContextLastEvent(object):
"""
RecQueryContext Last Event Definition
---
properties:
event:
type: string
default: apply
data:
type: object
schema:
$ref: '#/definitions/rec_query_context_last_event_data'
"""
def __init__(self, event=None, data=None):
self.event = event
self.data = data
def dump(self):
data = {}
if self.event:
data.update({'event': self.event})
if self.data:
data.update({'data': self.data.dump()})
return data
@swag.definition('rec_query_context_last_event_data', tags=['v2_model'])
class RecQueryContextLastEventData(object):
"""
RecQueryContextLastEvent Data Object
---
properties:
candidate_id:
type: integer
default: 123456
opening_id:
type: integer
default: 324345435
company_id:
type: integer
default: 324345435
datetime:
type: string
format: date-time
default: 2014-09-10T11:41:00.12343-03:00
recruiter_id:
type: integer
default: 435345
context:
$ref: '#/definitions/rec_query_context'
"""
def __init__(self, candidate_id=None, opening_id=None, company_id=None,
datetime=None, recruiter_id=None):
self.candidate_id = candidate_id
self.opening_id = opening_id
self.company_id = company_id
self.datetime = datetime
self.recruiter_id = recruiter_id
def dump(self):
data = {}
for var in ['candidate_id', 'opening_id', 'company_id', 'datetime',
'recruiter_id']:
if var in vars(self):
data.update({var: vars(self)[var]})
return data
@app.route("/v2/recommendation/<target_type>/<item_type>", methods=['POST'],
endpoint="should_be_v2_only_recommendation")
def recommend(target_type, item_type):
"""
Recommendation
Get a single item_type as recommendation for the target_type
---
tags:
- recommendation
parameters:
- name: target_type
in: path
description: currently only "candidate" is supported
required: true
type: string
default: candidate
- name: item_type
in: path
description: currently only "openings" is supported
required: true
type: string
default: openings
- in: body
name: body
schema:
id: rec_query
required:
- candidate_id
- context
properties:
candidate_id:
type: integer
description: Id of the target (candidate / user)
default: 123456
exclude:
type: array
description: item_ids to exclude from recs
default: [12345, 123456]
items:
type: integer
context:
type: object
schema:
$ref: '#/definitions/rec_query_context'
responses:
200:
description: A single recommendation item
schema:
id: rec_response
properties:
opening_id:
type: integer
description: The id of the opening
default: 123456
204:
description: No recommendation found
"""
data = {
"opening_id": 12312313434
}
return jsonify(data)
@app.route("/")
def hello():
return """
<h1> Welcome to Flasgger demo application</h1>
This app exposes two API versions
<ul>
<li><a href="/apidocs/index.html?url=/v1/spec">Api Version 1</a></li>
<li><a href="/apidocs/index.html?url=/v2/spec">Api Version 2</a></li>
</ul>
<p>
    As you can see, both API versions are served by the same swagger UI on
<a href="/apidocs/index.html">Api docs</a>
</p>
<p>
Flasgger <a href="https://github.com/rochacbruno/flasgger">
https://github.com/rochacbruno/flasgger</a>
</p>
"""
if __name__ == "__main__":
app.run(debug=True)
|
from json import loads
from homeassistant.components.advantage_air.climate import (
HASS_FAN_MODES,
HASS_HVAC_MODES,
)
from homeassistant.components.advantage_air.const import (
ADVANTAGE_AIR_STATE_OFF,
ADVANTAGE_AIR_STATE_ON,
)
from homeassistant.components.climate.const import (
ATTR_FAN_MODE,
ATTR_HVAC_MODE,
DOMAIN as CLIMATE_DOMAIN,
FAN_LOW,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
from tests.components.advantage_air import (
TEST_SET_RESPONSE,
TEST_SET_URL,
TEST_SYSTEM_DATA,
TEST_SYSTEM_URL,
add_mock_config,
)
async def test_climate_async_setup_entry(hass, aioclient_mock):
"""Test climate setup."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
aioclient_mock.get(
TEST_SET_URL,
text=TEST_SET_RESPONSE,
)
await add_mock_config(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
assert len(aioclient_mock.mock_calls) == 1
# Test Main Climate Entity
entity_id = "climate.ac_one"
state = hass.states.get(entity_id)
assert state
assert state.state == HVAC_MODE_FAN_ONLY
assert state.attributes.get("min_temp") == 16
assert state.attributes.get("max_temp") == 32
assert state.attributes.get("temperature") == 24
assert state.attributes.get("current_temperature") is None
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVAC_MODE_FAN_ONLY},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 3
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["state"] == ADVANTAGE_AIR_STATE_ON
assert data["ac1"]["info"]["mode"] == HASS_HVAC_MODES[HVAC_MODE_FAN_ONLY]
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVAC_MODE_OFF},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 5
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["state"] == ADVANTAGE_AIR_STATE_OFF
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: [entity_id], ATTR_FAN_MODE: FAN_LOW},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 7
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["fan"] == HASS_FAN_MODES[FAN_LOW]
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 9
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["setTemp"] == 25
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
# Test Climate Zone Entity
entity_id = "climate.zone_open_with_sensor"
state = hass.states.get(entity_id)
assert state
assert state.attributes.get("min_temp") == 16
assert state.attributes.get("max_temp") == 32
assert state.attributes.get("temperature") == 24
assert state.attributes.get("current_temperature") == 25
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z01"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVAC_MODE_FAN_ONLY},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 11
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVAC_MODE_OFF},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 13
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 15
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
async def test_climate_async_failed_update(hass, aioclient_mock):
"""Test climate change failure."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
aioclient_mock.get(
TEST_SET_URL,
exc=SyntaxError,
)
await add_mock_config(hass)
assert len(aioclient_mock.mock_calls) == 1
await hass.services.async_call(
CLIMATE_DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ["climate.ac_one"], ATTR_TEMPERATURE: 25},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 2
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/setAircon"
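# The paired /setAircon-then-/getSystemData assertions recur throughout the
# tests above; a sketch of a helper that could collapse them (hypothetical,
# shown for illustration only):
def assert_set_then_refetch(aioclient_mock):
    """Assert the last two calls were a /setAircon then a /getSystemData."""
    assert aioclient_mock.mock_calls[-2][0] == "GET"
    assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
    assert aioclient_mock.mock_calls[-1][0] == "GET"
    assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"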
|
import numpy as np
import unittest
from chainer.dataset import DatasetMixin
from chainercv.utils import assert_is_bbox_dataset
from chainercv.utils import generate_random_bbox
from chainercv.utils import testing
class BboxDataset(DatasetMixin):
def __init__(self, options=(), empty_bbox=False):
self.options = options
self.empty_bbox = empty_bbox
def __len__(self):
return 10
def get_example(self, i):
img = np.random.randint(0, 256, size=(3, 48, 64))
if self.empty_bbox:
n_bbox = 0
else:
n_bbox = np.random.randint(10, 20)
bbox = generate_random_bbox(n_bbox, (48, 64), 5, 20)
label = np.random.randint(0, 20, size=n_bbox).astype(np.int32)
return (img, bbox, label) + self.options
class InvalidSampleSizeDataset(BboxDataset):
def get_example(self, i):
img, bbox, label = super(
InvalidSampleSizeDataset, self).get_example(i)[:3]
return img, bbox
class InvalidImageDataset(BboxDataset):
def get_example(self, i):
img, bbox, label = super(InvalidImageDataset, self).get_example(i)[:3]
return img[0], bbox, label
class InvalidBboxDataset(BboxDataset):
def get_example(self, i):
img, bbox, label = super(InvalidBboxDataset, self).get_example(i)[:3]
bbox += 1000
return img, bbox, label
class InvalidLabelDataset(BboxDataset):
def get_example(self, i):
img, bbox, label = super(InvalidLabelDataset, self).get_example(i)[:3]
label += 1000
return img, bbox, label
class MismatchLengthDataset(BboxDataset):
def get_example(self, i):
img, bbox, label = super(
MismatchLengthDataset, self).get_example(i)[:3]
return img, bbox, label[1:]
@testing.parameterize(
{'dataset': BboxDataset(), 'valid': True},
{'dataset': BboxDataset(empty_bbox=True), 'valid': True},
{'dataset': BboxDataset(('option',)), 'valid': True},
{'dataset': InvalidSampleSizeDataset(), 'valid': False},
{'dataset': InvalidImageDataset(), 'valid': False},
{'dataset': InvalidBboxDataset(), 'valid': False},
{'dataset': InvalidLabelDataset(), 'valid': False},
{'dataset': MismatchLengthDataset(), 'valid': False},
)
class TestAssertIsBboxDataset(unittest.TestCase):
def test_assert_is_bbox_dataset(self):
if self.valid:
assert_is_bbox_dataset(self.dataset, 20)
else:
with self.assertRaises(AssertionError):
assert_is_bbox_dataset(self.dataset, 20)
testing.run_module(__name__, __file__)
|
from datetime import date
from factory import Sequence, post_generation, SubFactory
from factory.alchemy import SQLAlchemyModelFactory
from factory.fuzzy import FuzzyChoice, FuzzyText, FuzzyDate, FuzzyInteger
from lemur.database import db
from lemur.authorities.models import Authority
from lemur.certificates.models import Certificate
from lemur.destinations.models import Destination
from lemur.sources.models import Source
from lemur.notifications.models import Notification
from lemur.pending_certificates.models import PendingCertificate
from lemur.users.models import User
from lemur.roles.models import Role
from lemur.endpoints.models import Policy, Endpoint
from lemur.policies.models import RotationPolicy
from lemur.api_keys.models import ApiKey
from .vectors import (
SAN_CERT_STR,
SAN_CERT_KEY,
CSR_STR,
INTERMEDIATE_CERT_STR,
ROOTCA_CERT_STR,
INTERMEDIATE_KEY,
WILDCARD_CERT_KEY,
INVALID_CERT_STR,
)
class BaseFactory(SQLAlchemyModelFactory):
"""Base factory."""
class Meta:
"""Factory configuration."""
abstract = True
sqlalchemy_session = db.session
class RotationPolicyFactory(BaseFactory):
"""Rotation Factory."""
name = Sequence(lambda n: "policy{0}".format(n))
days = 30
class Meta:
"""Factory configuration."""
model = RotationPolicy
class CertificateFactory(BaseFactory):
"""Certificate factory."""
name = Sequence(lambda n: "certificate{0}".format(n))
chain = INTERMEDIATE_CERT_STR
body = SAN_CERT_STR
private_key = SAN_CERT_KEY
owner = "[email protected]"
status = FuzzyChoice(["valid", "revoked", "unknown"])
deleted = False
description = FuzzyText(length=128)
active = True
date_created = FuzzyDate(date(2016, 1, 1), date(2020, 1, 1))
rotation_policy = SubFactory(RotationPolicyFactory)
class Meta:
"""Factory Configuration."""
model = Certificate
@post_generation
def user(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.user_id = extracted.id
@post_generation
def authority(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.authority_id = extracted.id
@post_generation
def notifications(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for notification in extracted:
self.notifications.append(notification)
@post_generation
def destinations(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for destination in extracted:
                self.destinations.append(destination)
@post_generation
def replaces(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for replace in extracted:
self.replaces.append(replace)
@post_generation
def sources(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for source in extracted:
self.sources.append(source)
@post_generation
def domains(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for domain in extracted:
self.domains.append(domain)
@post_generation
def roles(self, create, extracted, **kwargs):
if not create:
return
if extracted:
            for role in extracted:
                self.roles.append(role)
class CACertificateFactory(CertificateFactory):
chain = ROOTCA_CERT_STR
body = INTERMEDIATE_CERT_STR
private_key = INTERMEDIATE_KEY
class InvalidCertificateFactory(CertificateFactory):
body = INVALID_CERT_STR
private_key = ""
chain = ""
class AuthorityFactory(BaseFactory):
"""Authority factory."""
name = Sequence(lambda n: "authority{0}".format(n))
owner = "[email protected]"
plugin = {"slug": "test-issuer"}
description = FuzzyText(length=128)
authority_certificate = SubFactory(CACertificateFactory)
class Meta:
"""Factory configuration."""
model = Authority
@post_generation
def roles(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for role in extracted:
self.roles.append(role)
class AsyncAuthorityFactory(AuthorityFactory):
"""Async Authority factory."""
name = Sequence(lambda n: "authority{0}".format(n))
owner = "[email protected]"
plugin = {"slug": "test-issuer-async"}
description = FuzzyText(length=128)
authority_certificate = SubFactory(CertificateFactory)
class CryptoAuthorityFactory(AuthorityFactory):
"""Authority factory based on 'cryptography' plugin."""
plugin = {"slug": "cryptography-issuer"}
class DestinationFactory(BaseFactory):
"""Destination factory."""
plugin_name = "test-destination"
label = Sequence(lambda n: "destination{0}".format(n))
class Meta:
"""Factory Configuration."""
model = Destination
class SourceFactory(BaseFactory):
"""Source factory."""
plugin_name = "test-source"
label = Sequence(lambda n: "source{0}".format(n))
class Meta:
"""Factory Configuration."""
model = Source
class NotificationFactory(BaseFactory):
"""Notification factory."""
plugin_name = "test-notification"
label = Sequence(lambda n: "notification{0}".format(n))
class Meta:
"""Factory Configuration."""
model = Notification
class RoleFactory(BaseFactory):
"""Role factory."""
name = Sequence(lambda n: "role{0}".format(n))
class Meta:
"""Factory Configuration."""
model = Role
@post_generation
def users(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for user in extracted:
self.users.append(user)
class UserFactory(BaseFactory):
"""User Factory."""
username = Sequence(lambda n: "user{0}".format(n))
email = Sequence(lambda n: "user{0}@example.com".format(n))
active = True
password = FuzzyText(length=24)
certificates = []
class Meta:
"""Factory Configuration."""
model = User
@post_generation
def roles(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for role in extracted:
self.roles.append(role)
@post_generation
def certificates(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for cert in extracted:
self.certificates.append(cert)
@post_generation
def authorities(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for authority in extracted:
self.authorities.append(authority)
class PolicyFactory(BaseFactory):
"""Policy Factory."""
name = Sequence(lambda n: "endpoint{0}".format(n))
class Meta:
"""Factory Configuration."""
model = Policy
class EndpointFactory(BaseFactory):
"""Endpoint Factory."""
owner = "[email protected]"
name = Sequence(lambda n: "endpoint{0}".format(n))
type = FuzzyChoice(["elb"])
active = True
port = FuzzyInteger(0, high=65535)
dnsname = "endpoint.example.com"
policy = SubFactory(PolicyFactory)
certificate = SubFactory(CertificateFactory)
source = SubFactory(SourceFactory)
class Meta:
"""Factory Configuration."""
model = Endpoint
class ApiKeyFactory(BaseFactory):
"""Api Key Factory."""
name = Sequence(lambda n: "api_key_{0}".format(n))
revoked = False
ttl = -1
issued_at = 1
class Meta:
"""Factory Configuration."""
model = ApiKey
@post_generation
def user(self, create, extracted, **kwargs):
if not create:
return
if extracted:
            self.user_id = extracted.id
class PendingCertificateFactory(BaseFactory):
"""PendingCertificate factory."""
name = Sequence(lambda n: "pending_certificate{0}".format(n))
external_id = 12345
csr = CSR_STR
chain = INTERMEDIATE_CERT_STR
private_key = WILDCARD_CERT_KEY
owner = "[email protected]"
status = FuzzyChoice(["valid", "revoked", "unknown"])
deleted = False
description = FuzzyText(length=128)
date_created = FuzzyDate(date(2016, 1, 1), date(2020, 1, 1))
number_attempts = 0
rename = False
class Meta:
"""Factory Configuration."""
model = PendingCertificate
@post_generation
def user(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.user_id = extracted.id
@post_generation
def authority(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.authority_id = extracted.id
@post_generation
def notifications(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for notification in extracted:
self.notifications.append(notification)
@post_generation
def destinations(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for destination in extracted:
                self.destinations.append(destination)
@post_generation
def replaces(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for replace in extracted:
self.replaces.append(replace)
@post_generation
def sources(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for source in extracted:
self.sources.append(source)
@post_generation
def domains(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for domain in extracted:
self.domains.append(domain)
@post_generation
def roles(self, create, extracted, **kwargs):
if not create:
return
if extracted:
            for role in extracted:
                self.roles.append(role)
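# --- Illustrative usage (a sketch, not part of the original factory suite) ---
# factory_boy routes a keyword argument whose name matches a @post_generation
# hook into that hook as ``extracted``; that is how the factories above attach
# related objects at creation time. The helper below assumes the factories in
# this module and the database session wired up by BaseFactory.
def _example_user_with_role():
    role = RoleFactory()
    # ``roles=[role]`` is delivered to UserFactory.roles() as ``extracted``
    return UserFactory(roles=[role])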
|
import copy
import pytest
import keras
import matchzoo as mz
@pytest.fixture(scope='module')
def data_gen():
return mz.DataGenerator(mz.datasets.toy.load_data())
@pytest.mark.parametrize('attr', [
'callbacks',
'num_neg',
'num_dup',
'mode',
'batch_size',
'shuffle',
])
def test_data_generator_getters_setters(data_gen, attr):
assert hasattr(data_gen, attr)
val = getattr(data_gen, attr)
setattr(data_gen, attr, val)
assert getattr(data_gen, attr) == val
def test_resample():
model = mz.models.Naive()
prpr = model.get_default_preprocessor()
data_raw = mz.datasets.toy.load_data()
data = prpr.fit_transform(data_raw)
model.params.update(prpr.context)
model.params['task'] = mz.tasks.Ranking()
model.build()
model.compile()
data_gen = mz.DataGenerator(
data_pack=data,
mode='pair',
resample=True,
batch_size=4
)
class CheckResample(keras.callbacks.Callback):
def __init__(self, data_gen):
super().__init__()
self._data_gen = data_gen
self._orig_indices = None
self._flags = []
def on_epoch_end(self, epoch, logs=None):
curr_indices = self._data_gen.batch_indices
if not self._orig_indices:
self._orig_indices = copy.deepcopy(curr_indices)
else:
self._flags.append(self._orig_indices != curr_indices)
self._orig_indices = curr_indices
check_resample = CheckResample(data_gen)
model.fit_generator(data_gen, epochs=5, callbacks=[check_resample])
assert check_resample._flags
assert all(check_resample._flags)
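# Reading aid (not part of the original test): with ``resample=True`` the generator
# re-draws its pair sampling at the end of each epoch, so ``batch_indices`` should
# differ between consecutive epochs; CheckResample verifies exactly that by
# snapshotting the indices on every epoch end and comparing them.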
|
import logging
from pyopnsense import diagnostics
from pyopnsense.exceptions import APIException
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)
CONF_API_SECRET = "api_secret"
CONF_TRACKER_INTERFACE = "tracker_interfaces"
DOMAIN = "opnsense"
OPNSENSE_DATA = DOMAIN
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_URL): cv.url,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_API_SECRET): cv.string,
vol.Optional(CONF_VERIFY_SSL, default=False): cv.boolean,
vol.Optional(CONF_TRACKER_INTERFACE, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the opnsense component."""
conf = config[DOMAIN]
url = conf[CONF_URL]
api_key = conf[CONF_API_KEY]
api_secret = conf[CONF_API_SECRET]
verify_ssl = conf[CONF_VERIFY_SSL]
tracker_interfaces = conf[CONF_TRACKER_INTERFACE]
interfaces_client = diagnostics.InterfaceClient(
api_key, api_secret, url, verify_ssl
)
try:
interfaces_client.get_arp()
except APIException:
_LOGGER.exception("Failure while connecting to OPNsense API endpoint")
return False
if tracker_interfaces:
# Verify that specified tracker interfaces are valid
netinsight_client = diagnostics.NetworkInsightClient(
api_key, api_secret, url, verify_ssl
)
interfaces = list(netinsight_client.get_interfaces().values())
for interface in tracker_interfaces:
if interface not in interfaces:
_LOGGER.error(
"Specified OPNsense tracker interface %s is not found", interface
)
return False
hass.data[OPNSENSE_DATA] = {
"interfaces": interfaces_client,
CONF_TRACKER_INTERFACE: tracker_interfaces,
}
load_platform(hass, "device_tracker", DOMAIN, tracker_interfaces, config)
return True
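# --- Illustrative configuration (a sketch; all values are placeholders) ---
# CONFIG_SCHEMA above validates a mapping of this shape, as produced from YAML:
_EXAMPLE_CONFIG = {
    DOMAIN: {
        CONF_URL: "https://opnsense.local/api",
        CONF_API_KEY: "your-api-key",
        CONF_API_SECRET: "your-api-secret",
        CONF_VERIFY_SSL: False,  # optional, defaults to False
        CONF_TRACKER_INTERFACE: ["LAN"],  # optional, defaults to []
    }
}
# CONFIG_SCHEMA(_EXAMPLE_CONFIG) would return the validated copy.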
|
from collections import UserDict
from unittest import TestCase
from httpobs.scanner.analyzer.misc import cross_origin_resource_sharing, redirection
from httpobs.tests.utils import empty_requests
class TestCORS(TestCase):
def setUp(self):
self.reqs = empty_requests()
def tearDown(self):
self.reqs = None
def test_acao_not_implemented(self):
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-not-implemented', result['result'])
self.assertTrue(result['pass'])
def test_xml_not_valid(self):
self.reqs['resources']['/crossdomain.xml'] = '<![..]>'
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('xml-not-parsable', result['result'])
self.assertFalse(result['pass'])
def test_acao_public(self):
self.reqs['responses']['cors'].headers['Access-Control-Allow-Origin'] = '*'
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-public-access', result['result'])
self.assertEquals('*', result['data']['acao'])
self.assertTrue(result['pass'])
def test_acao_restricted_with_acao(self):
self.reqs['responses']['cors'].request.headers['Origin'] = 'https://http-observatory.security.mozilla.org'
self.reqs['responses']['cors'].headers['Access-Control-Allow-Origin'] = 'https://mozilla.com'
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-restricted-access', result['result'])
self.assertTrue(result['pass'])
def test_acao_universal_with_acao(self):
self.reqs['responses']['cors'].request.headers['Origin'] = 'https://http-observatory.security.mozilla.org'
self.reqs['responses']['cors'].headers['Access-Control-Allow-Origin'] = \
'https://http-observatory.security.mozilla.org'
self.reqs['responses']['cors'].headers['Access-Control-Allow-Credentials'] = 'true'
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-universal-access', result['result'])
self.assertFalse(result['pass'])
def test_acao_restricted_with_crossdomain(self):
self.reqs['resources']['/crossdomain.xml'] = """
<cross-domain-policy>
<allow-access-from domain="http-observatory.security.mozilla.org" secure="true"/>
<allow-access-from domain="github.com" secure="true"/>
</cross-domain-policy>"""
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-restricted-access', result['result'])
self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'], result['data']['crossdomain'])
self.assertTrue(result['pass'])
def test_acao_universal_with_crossdomain(self):
self.reqs['resources']['/crossdomain.xml'] = """
<cross-domain-policy>
<allow-access-from domain="*" secure="true"/>
</cross-domain-policy>"""
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-universal-access', result['result'])
self.assertFalse(result['pass'])
def test_acao_restricted_with_clientaccess(self):
self.reqs['resources']['/clientaccesspolicy.xml'] = """
<access-policy>
<cross-domain-access>
<policy>
<allow-from http-methods="*">
<domain uri="http-observatory.security.mozilla.org"/>
<domain uri="github.com"/>
</allow-from>
</policy>
</cross-domain-access>
</access-policy>"""
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-restricted-access', result['result'])
self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'],
result['data']['clientaccesspolicy'])
self.assertTrue(result['pass'])
def test_acao_universal_with_clientaccess(self):
self.reqs['resources']['/clientaccesspolicy.xml'] = """
<access-policy>
<cross-domain-access>
<policy>
<allow-from http-methods="*">
<domain uri="*"/>
</allow-from>
</policy>
</cross-domain-access>
</access-policy>"""
result = cross_origin_resource_sharing(self.reqs)
self.assertEquals('cross-origin-resource-sharing-implemented-with-universal-access', result['result'])
self.assertFalse(result['pass'])
class TestRedirection(TestCase):
def setUp(self):
self.reqs = empty_requests()
def tearDown(self):
self.reqs = None
def test_no_http_but_does_have_https(self):
self.reqs['responses']['http'] = None
result = redirection(self.reqs)
self.assertEquals('redirection-not-needed-no-http', result['result'])
self.assertTrue(result['pass'])
def test_redirection_missing(self):
self.reqs['responses']['http'].url = 'http://http-observatory.security.mozilla.org'
result = redirection(self.reqs)
self.assertEquals('redirection-missing', result['result'])
self.assertFalse(result['pass'])
def test_redirection_not_to_https(self):
self.reqs['responses']['http'].url = 'http://http-observatory.security.mozilla.org/foo'
history1 = UserDict()
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
self.reqs['responses']['http'].history.append(history1)
result = redirection(self.reqs)
self.assertEquals('redirection-not-to-https', result['result'])
self.assertFalse(result['pass'])
def test_redirects_to_https(self):
# normal redirect to https
history1 = UserDict()
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
self.reqs['responses']['http'].history.append(history1)
result = redirection(self.reqs)
self.assertEquals('redirection-to-https', result['result'])
self.assertEquals(['http://http-observatory.security.mozilla.org/',
'https://http-observatory.security.mozilla.org/'], result['route'])
self.assertTrue(result['pass'])
def test_redirects_to_https_with_port_number(self):
# same thing, but with :443 on the URL, see issue #180
self.reqs['responses']['http'].url = 'https://http-observatory.security.mozilla.org:443/'
history1 = UserDict()
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
self.reqs['responses']['http'].history.append(history1)
result = redirection(self.reqs)
self.assertEquals('redirection-to-https', result['result'])
self.assertEquals(['http://http-observatory.security.mozilla.org/',
'https://http-observatory.security.mozilla.org:443/'], result['route'])
self.assertTrue(result['pass'])
def test_redirects_invalid_cert(self):
history1 = UserDict()
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
self.reqs['responses']['http'].history.append(history1)
# Mark it as verification failed
self.reqs['responses']['http'].verified = False
result = redirection(self.reqs)
self.assertEquals('redirection-invalid-cert', result['result'])
self.assertFalse(result['pass'])
def test_first_redirection_still_http(self):
self.reqs['responses']['http'].url = 'https://http-observatory.security.mozilla.org/foo'
history1 = UserDict()
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
history2 = UserDict()
history2.request = UserDict()
history2.request.url = 'http://http-observatory.security.mozilla.org/foo'
self.reqs['responses']['http'].history.append(history1)
self.reqs['responses']['http'].history.append(history2)
result = redirection(self.reqs)
self.assertEquals('redirection-not-to-https-on-initial-redirection', result['result'])
self.assertFalse(result['pass'])
def test_first_redirection_off_host(self):
self.reqs['responses']['http'].url = 'https://http-foo.services.mozilla.com/'
history1 = UserDict()
history1.status_code = 301
history1.request = UserDict()
history1.request.url = 'http://http-observatory.security.mozilla.org/'
self.reqs['responses']['http'].history.append(history1)
result = redirection(self.reqs)
self.assertEquals('redirection-off-host-from-http', result['result'])
self.assertFalse(result['pass'])
def test_all_redirections_preloaded(self):
self.reqs['responses']['http'].url = 'https://www.pokeinthe.io/foo/bar'
for url in ('http://pokeinthe.io/',
'https://pokeinthe.io/',
'https://www.pokeinthe.io/',
'https://baz.pokeinthe.io/foo'):
history = UserDict()
history.request = UserDict()
history.request.url = url
self.reqs['responses']['http'].history.append(history)
result = redirection(self.reqs)
self.assertEquals('redirection-all-redirects-preloaded', result['result'])
self.assertTrue(result['pass'])
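# Reading aid (not part of the original tests): UserDict instances stand in for
# requests.Response objects in the redirect history above: the analyzer only
# reads a few attributes (request.url, and occasionally status_code or verified),
# so a bare attribute container is enough.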
|
import socket
import ssl
from homeassistant import data_entry_flow
from homeassistant.components.cert_expiry.const import DEFAULT_PORT, DOMAIN
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from .const import HOST, PORT
from .helpers import future_timestamp
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_user(hass):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp"
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: HOST, CONF_PORT: PORT}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
assert result["result"].unique_id == f"{HOST}:{PORT}"
with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"):
await hass.async_block_till_done()
async def test_user_with_bad_cert(hass):
"""Test user config with bad certificate."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=ssl.SSLError("some error"),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: HOST, CONF_PORT: PORT}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
assert result["result"].unique_id == f"{HOST}:{PORT}"
with patch("homeassistant.components.cert_expiry.sensor.async_setup_entry"):
await hass.async_block_till_done()
async def test_import_host_only(hass):
"""Test import with host only."""
with patch(
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp"
), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=future_timestamp(1),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST}
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == DEFAULT_PORT
assert result["result"].unique_id == f"{HOST}:{DEFAULT_PORT}"
async def test_import_host_and_port(hass):
"""Test import with host and port."""
with patch(
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp"
), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=future_timestamp(1),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "import"},
data={CONF_HOST: HOST, CONF_PORT: PORT},
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
assert result["result"].unique_id == f"{HOST}:{PORT}"
async def test_import_non_default_port(hass):
"""Test import with host and non-default port."""
with patch(
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp"
), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=future_timestamp(1),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST, CONF_PORT: 888}
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"{HOST}:888"
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == 888
assert result["result"].unique_id == f"{HOST}:888"
async def test_import_with_name(hass):
"""Test import with name (deprecated)."""
with patch(
"homeassistant.components.cert_expiry.config_flow.get_cert_expiry_timestamp"
), patch(
"homeassistant.components.cert_expiry.get_cert_expiry_timestamp",
return_value=future_timestamp(1),
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "import"},
data={CONF_NAME: "legacy", CONF_HOST: HOST, CONF_PORT: PORT},
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_PORT] == PORT
assert result["result"].unique_id == f"{HOST}:{PORT}"
async def test_bad_import(hass):
"""Test import step."""
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=ConnectionRefusedError(),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "import_failed"
async def test_abort_if_already_setup(hass):
"""Test we abort if the cert is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: HOST, CONF_PORT: PORT},
unique_id=f"{HOST}:{PORT}",
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={CONF_HOST: HOST, CONF_PORT: PORT}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data={CONF_HOST: HOST, CONF_PORT: PORT}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_abort_on_socket_failed(hass):
"""Test we abort of we have errors during socket creation."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=socket.gaierror(),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: HOST}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_HOST: "resolve_failed"}
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=socket.timeout(),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: HOST}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_HOST: "connection_timeout"}
with patch(
"homeassistant.components.cert_expiry.helper.get_cert",
side_effect=ConnectionRefusedError,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: HOST}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_HOST: "connection_refused"}
|
from copy import copy
import asyncio
import discord
from redbot.core import Config, checks, commands
from redbot.core.commands.requires import PrivilegeLevel
from redbot.core.i18n import Translator
from redbot.core.utils.predicates import MessagePredicate
_ = Translator("Warnings", __file__)
async def warning_points_add_check(
config: Config, ctx: commands.Context, user: discord.Member, points: int
):
"""Handles any action that needs to be taken or not based on the points"""
guild = ctx.guild
guild_settings = config.guild(guild)
act = {}
async with guild_settings.actions() as registered_actions:
for a in registered_actions:
# Actions are sorted in decreasing order of points.
# The first action we find where the user is above the threshold will be the
# highest action we can take.
if points >= a["points"]:
act = a
break
if act and act["exceed_command"] is not None: # some action needs to be taken
await create_and_invoke_context(ctx, act["exceed_command"], user)
async def warning_points_remove_check(
config: Config, ctx: commands.Context, user: discord.Member, points: int
):
guild = ctx.guild
guild_settings = config.guild(guild)
act = {}
async with guild_settings.actions() as registered_actions:
for a in registered_actions:
if points >= a["points"]:
act = a
else:
break
if act and act["drop_command"] is not None: # some action needs to be taken
await create_and_invoke_context(ctx, act["drop_command"], user)
async def create_and_invoke_context(
realctx: commands.Context, command_str: str, user: discord.Member
):
m = copy(realctx.message)
m.content = command_str.format(user=user.mention, prefix=realctx.prefix)
fctx = await realctx.bot.get_context(m, cls=commands.Context)
try:
await realctx.bot.invoke(fctx)
except (commands.CheckFailure, commands.CommandOnCooldown):
# reinvoke bypasses checks and we don't want to run bot owner only commands here
if fctx.command.requires.privilege_level < PrivilegeLevel.BOT_OWNER:
await fctx.reinvoke()
def get_command_from_input(bot, userinput: str):
com = None
orig = userinput
while com is None:
com = bot.get_command(userinput)
if com is None:
userinput = " ".join(userinput.split(" ")[:-1])
if len(userinput) == 0:
break
if com is None:
return None, _("I could not find a command from that input!")
if com.requires.privilege_level >= PrivilegeLevel.BOT_OWNER:
return (
None,
_("That command requires bot owner. I can't allow you to use that for an action"),
)
return "{prefix}" + orig, None
async def get_command_for_exceeded_points(ctx: commands.Context):
"""Gets the command to be executed when the user is at or exceeding
the points threshold for the action"""
await ctx.send(
_(
"Enter the command to be run when the user **exceeds the points for "
"this action to occur.**\n**If you do not wish to have a command run, enter** "
"`none`.\n\nEnter it exactly as you would if you were "
"actually trying to run the command, except don't put a prefix and "
"use `{user}` in place of any user/member arguments\n\n"
"WARNING: The command entered will be run without regard to checks or cooldowns. "
"Commands requiring bot owner are not allowed for security reasons.\n\n"
"Please wait 15 seconds before entering your response."
)
)
await asyncio.sleep(15)
await ctx.send(_("You may enter your response now."))
try:
msg = await ctx.bot.wait_for(
"message", check=MessagePredicate.same_context(ctx), timeout=30
)
except asyncio.TimeoutError:
return None
else:
if msg.content == "none":
return None
command, m = get_command_from_input(ctx.bot, msg.content)
if command is None:
await ctx.send(m)
return None
return command
async def get_command_for_dropping_points(ctx: commands.Context):
"""
Gets the command to be executed when the user drops below the points
threshold
This is intended to be used for reversal of the action that was executed
when the user exceeded the threshold
"""
await ctx.send(
_(
"Enter the command to be run when the user **returns to a value below "
"the points for this action to occur.** Please note that this is "
"intended to be used for reversal of the action taken when the user "
"exceeded the action's point value.\n**If you do not wish to have a command run "
"on dropping points, enter** `none`.\n\nEnter it exactly as you would "
"if you were actually trying to run the command, except don't put a prefix "
"and use `{user}` in place of any user/member arguments\n\n"
"WARNING: The command entered will be run without regard to checks or cooldowns. "
"Commands requiring bot owner are not allowed for security reasons.\n\n"
"Please wait 15 seconds before entering your response."
)
)
await asyncio.sleep(15)
await ctx.send(_("You may enter your response now."))
try:
msg = await ctx.bot.wait_for(
"message", check=MessagePredicate.same_context(ctx), timeout=30
)
except asyncio.TimeoutError:
return None
else:
if msg.content == "none":
return None
command, m = get_command_from_input(ctx.bot, msg.content)
if command is None:
await ctx.send(m)
return None
return command
|
from homeassistant.components.script import DOMAIN, SCRIPT_ENTRY_SCHEMA
from homeassistant.components.script.config import async_validate_config_item
from homeassistant.config import SCRIPT_CONFIG_PATH
from homeassistant.const import SERVICE_RELOAD
import homeassistant.helpers.config_validation as cv
from . import EditKeyBasedConfigView
async def async_setup(hass):
"""Set up the script config API."""
async def hook(action, config_key):
"""post_write_hook for Config View that reloads scripts."""
await hass.services.async_call(DOMAIN, SERVICE_RELOAD)
hass.http.register_view(
EditKeyBasedConfigView(
DOMAIN,
"config",
SCRIPT_CONFIG_PATH,
cv.slug,
SCRIPT_ENTRY_SCHEMA,
post_write_hook=hook,
data_validator=async_validate_config_item,
)
)
return True
|
import asyncio
import logging
from aiopylgtv import PyLGTVCmdException, PyLGTVPairException, WebOsClient
import voluptuous as vol
from websockets.exceptions import ConnectionClosed
from homeassistant.components.webostv.const import (
ATTR_BUTTON,
ATTR_COMMAND,
ATTR_PAYLOAD,
CONF_ON_ACTION,
CONF_SOURCES,
DEFAULT_NAME,
DOMAIN,
SERVICE_BUTTON,
SERVICE_COMMAND,
SERVICE_SELECT_SOUND_OUTPUT,
WEBOSTV_CONFIG_FILE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CUSTOMIZE,
CONF_HOST,
CONF_ICON,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import ATTR_SOUND_OUTPUT
CUSTOMIZE_SCHEMA = vol.Schema(
{vol.Optional(CONF_SOURCES, default=[]): vol.All(cv.ensure_list, [cv.string])}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Optional(CONF_CUSTOMIZE, default={}): CUSTOMIZE_SCHEMA,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_ICON): cv.string,
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
CALL_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids})
BUTTON_SCHEMA = CALL_SCHEMA.extend({vol.Required(ATTR_BUTTON): cv.string})
COMMAND_SCHEMA = CALL_SCHEMA.extend(
{vol.Required(ATTR_COMMAND): cv.string, vol.Optional(ATTR_PAYLOAD): dict}
)
SOUND_OUTPUT_SCHEMA = CALL_SCHEMA.extend({vol.Required(ATTR_SOUND_OUTPUT): cv.string})
SERVICE_TO_METHOD = {
SERVICE_BUTTON: {"method": "async_button", "schema": BUTTON_SCHEMA},
SERVICE_COMMAND: {"method": "async_command", "schema": COMMAND_SCHEMA},
SERVICE_SELECT_SOUND_OUTPUT: {
"method": "async_select_sound_output",
"schema": SOUND_OUTPUT_SCHEMA,
},
}
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Set up the LG WebOS TV platform."""
hass.data[DOMAIN] = {}
async def async_service_handler(service):
method = SERVICE_TO_METHOD.get(service.service)
data = service.data.copy()
data["method"] = method["method"]
async_dispatcher_send(hass, DOMAIN, data)
for service in SERVICE_TO_METHOD:
schema = SERVICE_TO_METHOD[service]["schema"]
hass.services.async_register(
DOMAIN, service, async_service_handler, schema=schema
)
tasks = [async_setup_tv(hass, config, conf) for conf in config[DOMAIN]]
if tasks:
await asyncio.gather(*tasks)
return True
async def async_setup_tv(hass, config, conf):
"""Set up a LG WebOS TV based on host parameter."""
host = conf[CONF_HOST]
config_file = hass.config.path(WEBOSTV_CONFIG_FILE)
client = WebOsClient(host, config_file)
hass.data[DOMAIN][host] = {"client": client}
if client.is_registered():
await async_setup_tv_finalize(hass, config, conf, client)
else:
_LOGGER.warning("LG webOS TV %s needs to be paired", host)
await async_request_configuration(hass, config, conf, client)
async def async_connect(client):
"""Attempt a connection, but fail gracefully if tv is off for example."""
try:
await client.connect()
except (
OSError,
ConnectionClosed,
ConnectionRefusedError,
asyncio.TimeoutError,
asyncio.CancelledError,
PyLGTVPairException,
PyLGTVCmdException,
):
pass
async def async_setup_tv_finalize(hass, config, conf, client):
"""Make initial connection attempt and call platform setup."""
async def async_on_stop(event):
"""Unregister callbacks and disconnect."""
client.clear_state_update_callbacks()
await client.disconnect()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_on_stop)
await async_connect(client)
hass.async_create_task(
hass.helpers.discovery.async_load_platform("media_player", DOMAIN, conf, config)
)
hass.async_create_task(
hass.helpers.discovery.async_load_platform("notify", DOMAIN, conf, config)
)
async def async_request_configuration(hass, config, conf, client):
"""Request configuration steps from the user."""
host = conf.get(CONF_HOST)
name = conf.get(CONF_NAME)
configurator = hass.components.configurator
async def lgtv_configuration_callback(data):
"""Handle actions when configuration callback is called."""
try:
await client.connect()
except PyLGTVPairException:
_LOGGER.warning("Connected to LG webOS TV %s but not paired", host)
return
except (
OSError,
ConnectionClosed,
ConnectionRefusedError,
asyncio.TimeoutError,
asyncio.CancelledError,
PyLGTVCmdException,
):
_LOGGER.error("Unable to connect to host %s", host)
return
await async_setup_tv_finalize(hass, config, conf, client)
configurator.async_request_done(request_id)
request_id = configurator.async_request_config(
name,
lgtv_configuration_callback,
description="Click start and accept the pairing request on your TV.",
description_image="/static/images/config_webos.png",
submit_caption="Start pairing request",
)
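# Reading aid (not part of the original module): async_connect() above deliberately
# swallows the listed connection errors so that setup completes even when the TV is
# powered off at start-up, as its docstring notes.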
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE, CONF_NAME
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN, HOME_LOCATION_NAME
from .weather import FORECAST_MODE
@config_entries.HANDLERS.register(DOMAIN)
class IpmaFlowHandler(config_entries.ConfigFlow):
"""Config flow for IPMA component."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Init IpmaFlowHandler."""
self._errors = {}
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
self._errors = {}
if user_input is not None:
if user_input[CONF_NAME] not in self.hass.config_entries.async_entries(
DOMAIN
):
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
self._errors[CONF_NAME] = "name_exists"
        # default location is set to the hass configuration
return await self._show_config_form(
name=HOME_LOCATION_NAME,
latitude=self.hass.config.latitude,
longitude=self.hass.config.longitude,
)
async def _show_config_form(self, name=None, latitude=None, longitude=None):
"""Show the configuration form to edit location data."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=name): str,
vol.Required(CONF_LATITUDE, default=latitude): cv.latitude,
vol.Required(CONF_LONGITUDE, default=longitude): cv.longitude,
vol.Required(CONF_MODE, default="daily"): vol.In(FORECAST_MODE),
}
),
errors=self._errors,
)
|
from pathlib import Path
import re
import pytest
from homeassistant import config_entries
from homeassistant.components import mqtt
from homeassistant.components.mqtt.abbreviations import (
ABBREVIATIONS,
DEVICE_ABBREVIATIONS,
)
from homeassistant.components.mqtt.discovery import ALREADY_DISCOVERED, async_start
from homeassistant.const import STATE_OFF, STATE_ON
from tests.async_mock import patch
from tests.common import (
async_fire_mqtt_message,
mock_device_registry,
mock_entity_platform,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.mark.parametrize(
"mqtt_config",
[{mqtt.CONF_BROKER: "mock-broker", mqtt.CONF_DISCOVERY: False}],
)
async def test_subscribing_config_topic(hass, mqtt_mock):
"""Test setting up discovery."""
entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
discovery_topic = "homeassistant"
await async_start(hass, discovery_topic, entry)
assert mqtt_mock.async_subscribe.called
call_args = mqtt_mock.async_subscribe.mock_calls[0][1]
assert call_args[0] == discovery_topic + "/#"
assert call_args[2] == 0
async def test_invalid_topic(hass, mqtt_mock):
"""Test sending to invalid topic."""
with patch(
"homeassistant.components.mqtt.discovery.async_dispatcher_send"
) as mock_dispatcher_send:
async_fire_mqtt_message(
hass, "homeassistant/binary_sensor/bla/not_config", "{}"
)
await hass.async_block_till_done()
assert not mock_dispatcher_send.called
async def test_invalid_json(hass, mqtt_mock, caplog):
"""Test sending in invalid JSON."""
with patch(
"homeassistant.components.mqtt.discovery.async_dispatcher_send"
) as mock_dispatcher_send:
async_fire_mqtt_message(
hass, "homeassistant/binary_sensor/bla/config", "not json"
)
await hass.async_block_till_done()
assert "Unable to parse JSON" in caplog.text
assert not mock_dispatcher_send.called
async def test_only_valid_components(hass, mqtt_mock, caplog):
"""Test for a valid component."""
with patch(
"homeassistant.components.mqtt.discovery.async_dispatcher_send"
) as mock_dispatcher_send:
invalid_component = "timer"
async_fire_mqtt_message(
hass, f"homeassistant/{invalid_component}/bla/config", "{}"
)
await hass.async_block_till_done()
assert f"Integration {invalid_component} is not supported" in caplog.text
assert not mock_dispatcher_send.called
async def test_correct_config_discovery(hass, mqtt_mock, caplog):
"""Test sending in correct JSON."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Beer"
assert ("binary_sensor", "bla") in hass.data[ALREADY_DISCOVERED]
async def test_discover_fan(hass, mqtt_mock, caplog):
"""Test discovering an MQTT fan."""
async_fire_mqtt_message(
hass,
"homeassistant/fan/bla/config",
('{ "name": "Beer",' ' "command_topic": "test_topic" }'),
)
await hass.async_block_till_done()
state = hass.states.get("fan.beer")
assert state is not None
assert state.name == "Beer"
assert ("fan", "bla") in hass.data[ALREADY_DISCOVERED]
async def test_discover_climate(hass, mqtt_mock, caplog):
"""Test discovering an MQTT climate component."""
data = (
'{ "name": "ClimateTest",'
' "current_temperature_topic": "climate/bla/current_temp",'
' "temperature_command_topic": "climate/bla/target_temp" }'
)
async_fire_mqtt_message(hass, "homeassistant/climate/bla/config", data)
await hass.async_block_till_done()
state = hass.states.get("climate.ClimateTest")
assert state is not None
assert state.name == "ClimateTest"
assert ("climate", "bla") in hass.data[ALREADY_DISCOVERED]
async def test_discover_alarm_control_panel(hass, mqtt_mock, caplog):
"""Test discovering an MQTT alarm control panel component."""
data = (
'{ "name": "AlarmControlPanelTest",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
async_fire_mqtt_message(hass, "homeassistant/alarm_control_panel/bla/config", data)
await hass.async_block_till_done()
state = hass.states.get("alarm_control_panel.AlarmControlPanelTest")
assert state is not None
assert state.name == "AlarmControlPanelTest"
assert ("alarm_control_panel", "bla") in hass.data[ALREADY_DISCOVERED]
async def test_discovery_incl_nodeid(hass, mqtt_mock, caplog):
"""Test sending in correct JSON with optional node_id included."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/my_node_id/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Beer"
assert ("binary_sensor", "my_node_id bla") in hass.data[ALREADY_DISCOVERED]
async def test_non_duplicate_discovery(hass, mqtt_mock, caplog):
"""Test for a non duplicate component."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
state_duplicate = hass.states.get("binary_sensor.beer1")
assert state is not None
assert state.name == "Beer"
assert state_duplicate is None
assert "Component has already been discovered: binary_sensor bla" in caplog.text
async def test_removal(hass, mqtt_mock, caplog):
"""Test removal of component through empty discovery message."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is None
async def test_rediscover(hass, mqtt_mock, caplog):
"""Test rediscover of removed component."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is None
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
async def test_duplicate_removal(hass, mqtt_mock, caplog):
"""Test for a non duplicate component."""
async_fire_mqtt_message(
hass,
"homeassistant/binary_sensor/bla/config",
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
await hass.async_block_till_done()
assert "Component has already been discovered: binary_sensor bla" in caplog.text
caplog.clear()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
await hass.async_block_till_done()
assert "Component has already been discovered: binary_sensor bla" not in caplog.text
async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock):
"""Test discvered device is cleaned up when removed from registry."""
data = (
'{ "device":{"identifiers":["0AFFD2"]},'
' "state_topic": "foobar/sensor",'
' "unique_id": "unique" }'
)
async_fire_mqtt_message(hass, "homeassistant/sensor/bla/config", data)
await hass.async_block_till_done()
# Verify device and registry entries are created
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
assert device_entry is not None
entity_entry = entity_reg.async_get("sensor.mqtt_sensor")
assert entity_entry is not None
state = hass.states.get("sensor.mqtt_sensor")
assert state is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
# Verify device and registry entries are cleared
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
assert device_entry is None
entity_entry = entity_reg.async_get("sensor.mqtt_sensor")
assert entity_entry is None
# Verify state is removed
state = hass.states.get("sensor.mqtt_sensor")
assert state is None
await hass.async_block_till_done()
# Verify retained discovery topic has been cleared
mqtt_mock.async_publish.assert_called_once_with(
"homeassistant/sensor/bla/config", "", 0, True
)
async def test_discovery_expansion(hass, mqtt_mock, caplog):
"""Test expansion of abbreviated discovery payload."""
data = (
'{ "~": "some/base/topic",'
' "name": "DiscoveryExpansionTest1",'
' "stat_t": "test_topic/~",'
' "cmd_t": "~/test_topic",'
' "dev":{'
' "ids":["5706DF"],'
' "name":"DiscoveryExpansionTest1 Device",'
' "mdl":"Generic",'
' "sw":"1.2.3.4",'
' "mf":"None"'
" }"
"}"
)
async_fire_mqtt_message(hass, "homeassistant/switch/bla/config", data)
await hass.async_block_till_done()
state = hass.states.get("switch.DiscoveryExpansionTest1")
assert state is not None
assert state.name == "DiscoveryExpansionTest1"
assert ("switch", "bla") in hass.data[ALREADY_DISCOVERED]
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "test_topic/some/base/topic", "ON")
state = hass.states.get("switch.DiscoveryExpansionTest1")
assert state.state == STATE_ON
ABBREVIATIONS_WHITE_LIST = [
# MQTT client/server/trigger settings
"CONF_BIRTH_MESSAGE",
"CONF_BROKER",
"CONF_CERTIFICATE",
"CONF_CLIENT_CERT",
"CONF_CLIENT_ID",
"CONF_CLIENT_KEY",
"CONF_DISCOVERY",
"CONF_DISCOVERY_ID",
"CONF_DISCOVERY_PREFIX",
"CONF_EMBEDDED",
"CONF_KEEPALIVE",
"CONF_TLS_INSECURE",
"CONF_TLS_VERSION",
"CONF_WILL_MESSAGE",
# Undocumented device configuration
"CONF_DEPRECATED_VIA_HUB",
"CONF_VIA_DEVICE",
# Already short
"CONF_FAN_MODE_LIST",
"CONF_HOLD_LIST",
"CONF_HS",
"CONF_MODE_LIST",
"CONF_PRECISION",
"CONF_QOS",
"CONF_SCHEMA",
"CONF_SWING_MODE_LIST",
"CONF_TEMP_STEP",
]
async def test_missing_discover_abbreviations(hass, mqtt_mock, caplog):
"""Check MQTT platforms for missing abbreviations."""
missing = []
regex = re.compile(r"(CONF_[a-zA-Z\d_]*) *= *[\'\"]([a-zA-Z\d_]*)[\'\"]")
for fil in Path(mqtt.__file__).parent.rglob("*.py"):
if fil.name == "trigger.py":
continue
with open(fil) as file:
matches = re.findall(regex, file.read())
for match in matches:
if (
match[1] not in ABBREVIATIONS.values()
and match[1] not in DEVICE_ABBREVIATIONS.values()
and match[0] not in ABBREVIATIONS_WHITE_LIST
):
missing.append(
"{}: no abbreviation for {} ({})".format(
fil, match[1], match[0]
)
)
assert not missing
async def test_no_implicit_state_topic_switch(hass, mqtt_mock, caplog):
"""Test no implicit state topic for switch."""
data = '{ "name": "Test1",' ' "command_topic": "cmnd"' "}"
async_fire_mqtt_message(hass, "homeassistant/switch/bla/config", data)
await hass.async_block_till_done()
assert "implicit state_topic is deprecated" not in caplog.text
state = hass.states.get("switch.Test1")
assert state is not None
assert state.name == "Test1"
assert ("switch", "bla") in hass.data[ALREADY_DISCOVERED]
assert state.state == "off"
assert state.attributes["assumed_state"] is True
async_fire_mqtt_message(hass, "homeassistant/switch/bla/state", "ON")
state = hass.states.get("switch.Test1")
assert state.state == "off"
@pytest.mark.parametrize(
"mqtt_config",
[
{
mqtt.CONF_BROKER: "mock-broker",
mqtt.CONF_DISCOVERY_PREFIX: "my_home/homeassistant/register",
}
],
)
async def test_complex_discovery_topic_prefix(hass, mqtt_mock, caplog):
"""Tests handling of discovery topic prefix with multiple slashes."""
async_fire_mqtt_message(
hass,
("my_home/homeassistant/register/binary_sensor/node1/object1/config"),
'{ "name": "Beer", "state_topic": "test-topic" }',
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.beer")
assert state is not None
assert state.name == "Beer"
assert ("binary_sensor", "node1 object1") in hass.data[ALREADY_DISCOVERED]
async def test_mqtt_integration_discovery_subscribe_unsubscribe(
hass, mqtt_client_mock, mqtt_mock
):
"""Check MQTT integration discovery subscribe and unsubscribe."""
mock_entity_platform(hass, "config_flow.comp", None)
entry = hass.config_entries.async_entries("mqtt")[0]
mqtt_mock().connected = True
with patch(
"homeassistant.components.mqtt.discovery.async_get_mqtt",
return_value={"comp": ["comp/discovery/#"]},
):
await async_start(hass, "homeassistant", entry)
await hass.async_block_till_done()
mqtt_client_mock.subscribe.assert_any_call("comp/discovery/#", 0)
assert not mqtt_client_mock.unsubscribe.called
class TestFlow(config_entries.ConfigFlow):
"""Test flow."""
async def async_step_mqtt(self, discovery_info):
"""Test mqtt step."""
return self.async_abort(reason="already_configured")
with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
mqtt_client_mock.subscribe.assert_any_call("comp/discovery/#", 0)
assert not mqtt_client_mock.unsubscribe.called
async_fire_mqtt_message(hass, "comp/discovery/bla/config", "")
await hass.async_block_till_done()
mqtt_client_mock.unsubscribe.assert_called_once_with("comp/discovery/#")
mqtt_client_mock.unsubscribe.reset_mock()
async_fire_mqtt_message(hass, "comp/discovery/bla/config", "")
await hass.async_block_till_done()
assert not mqtt_client_mock.unsubscribe.called
async def test_mqtt_discovery_unsubscribe_once(hass, mqtt_client_mock, mqtt_mock):
"""Check MQTT integration discovery unsubscribe once."""
mock_entity_platform(hass, "config_flow.comp", None)
entry = hass.config_entries.async_entries("mqtt")[0]
mqtt_mock().connected = True
with patch(
"homeassistant.components.mqtt.discovery.async_get_mqtt",
return_value={"comp": ["comp/discovery/#"]},
):
await async_start(hass, "homeassistant", entry)
await hass.async_block_till_done()
mqtt_client_mock.subscribe.assert_any_call("comp/discovery/#", 0)
assert not mqtt_client_mock.unsubscribe.called
class TestFlow(config_entries.ConfigFlow):
"""Test flow."""
async def async_step_mqtt(self, discovery_info):
"""Test mqtt step."""
return self.async_abort(reason="already_configured")
with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
async_fire_mqtt_message(hass, "comp/discovery/bla/config", "")
async_fire_mqtt_message(hass, "comp/discovery/bla/config", "")
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_client_mock.unsubscribe.assert_called_once_with("comp/discovery/#")
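# Reading aid (not part of the original tests): both tests above verify that the
# integration discovery topic is unsubscribed exactly once, as soon as the first
# matching message arrives; a second message must not trigger another unsubscribe.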
|
import subprocess
import os
import threading
from kalliope.core.NeuronModule import NeuronModule, MissingParameterException, InvalidParameterException
class AsyncShell(threading.Thread):
"""
Class used to run an asynchronous Shell command
    .. note:: It is impossible to get the return code of the command
"""
def __init__(self, path):
self.stdout = None
self.stderr = None
self.path = path
threading.Thread.__init__(self)
def run(self):
p = subprocess.Popen(self.path,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.stdout, self.stderr = p.communicate()
class Script(NeuronModule):
def __init__(self, **kwargs):
super(Script, self).__init__(**kwargs)
self.path = kwargs.get("path", None)
        # get whether the user selected a blocking command or not
self.async_mode = kwargs.get('async', False)
# check parameters
if self._is_parameters_ok():
# run the command
if not self.async_mode:
p = subprocess.Popen(self.path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
(output, err) = p.communicate()
self.output = output.decode()
self.returncode = p.returncode
message = {
"output": self.output,
"returncode": self.returncode
}
self.say(message)
else:
async_shell = AsyncShell(path=self.path)
async_shell.start()
def _is_parameters_ok(self):
"""
Check if received parameters are ok to perform operations in the neuron
:return: true if parameters are ok, raise an exception otherwise
.. raises:: MissingParameterException, InvalidParameterException
"""
if self.path is None:
raise MissingParameterException("You must provide a script path.")
if not os.path.isfile(self.path):
raise InvalidParameterException("Script not found or is not a file.")
if not os.access(self.path, os.X_OK):
raise InvalidParameterException("Script not Executable.")
return True
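# --- Illustrative usage (a sketch; the script path below is hypothetical) ---
# Because ``async`` became a reserved keyword in Python 3.7, the parameter is
# easiest to supply through dict unpacking rather than as a literal keyword:
#
#     params = {"path": "/usr/local/bin/backup.sh", "async": True}
#     Script(**params)  # runs the script in a background AsyncShell thread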
|
import importlib
import logging
from threading import RLock
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_NAME, CONF_SWITCHES, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_CODE_OFF = "code_off"
CONF_CODE_ON = "code_on"
CONF_GPIO = "gpio"
CONF_PROTOCOL = "protocol"
CONF_PULSELENGTH = "pulselength"
CONF_SIGNAL_REPETITIONS = "signal_repetitions"
DEFAULT_PROTOCOL = 1
DEFAULT_SIGNAL_REPETITIONS = 10
SWITCH_SCHEMA = vol.Schema(
{
vol.Required(CONF_CODE_OFF): vol.All(cv.ensure_list_csv, [cv.positive_int]),
vol.Required(CONF_CODE_ON): vol.All(cv.ensure_list_csv, [cv.positive_int]),
vol.Optional(CONF_PULSELENGTH): cv.positive_int,
vol.Optional(
CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS
): cv.positive_int,
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): cv.positive_int,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_GPIO): cv.positive_int,
vol.Required(CONF_SWITCHES): vol.Schema({cv.string: SWITCH_SCHEMA}),
}
)
# pylint: disable=no-member
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Find and return switches controlled by a generic RF device via GPIO."""
rpi_rf = importlib.import_module("rpi_rf")
gpio = config.get(CONF_GPIO)
rfdevice = rpi_rf.RFDevice(gpio)
rfdevice_lock = RLock()
switches = config.get(CONF_SWITCHES)
devices = []
for dev_name, properties in switches.items():
devices.append(
RPiRFSwitch(
properties.get(CONF_NAME, dev_name),
rfdevice,
rfdevice_lock,
properties.get(CONF_PROTOCOL),
properties.get(CONF_PULSELENGTH),
properties.get(CONF_SIGNAL_REPETITIONS),
properties.get(CONF_CODE_ON),
properties.get(CONF_CODE_OFF),
)
)
if devices:
rfdevice.enable_tx()
add_entities(devices)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: rfdevice.cleanup())
class RPiRFSwitch(SwitchEntity):
"""Representation of a GPIO RF switch."""
def __init__(
self,
name,
rfdevice,
lock,
protocol,
pulselength,
signal_repetitions,
code_on,
code_off,
):
"""Initialize the switch."""
self._name = name
self._state = False
self._rfdevice = rfdevice
self._lock = lock
self._protocol = protocol
self._pulselength = pulselength
self._code_on = code_on
self._code_off = code_off
self._rfdevice.tx_repeat = signal_repetitions
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def _send_code(self, code_list, protocol, pulselength):
"""Send the code(s) with a specified pulselength."""
with self._lock:
_LOGGER.info("Sending code(s): %s", code_list)
for code in code_list:
self._rfdevice.tx_code(code, protocol, pulselength)
return True
def turn_on(self, **kwargs):
"""Turn the switch on."""
if self._send_code(self._code_on, self._protocol, self._pulselength):
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the switch off."""
if self._send_code(self._code_off, self._protocol, self._pulselength):
self._state = False
self.schedule_update_ha_state()
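# --- Illustrative platform entry (a sketch; the GPIO pin and codes are placeholders) ---
# PLATFORM_SCHEMA above accepts a mapping of this shape once loaded from YAML:
def _example_platform_config():
    return PLATFORM_SCHEMA(
        {
            "platform": "rpi_rf",
            CONF_GPIO: 17,
            CONF_SWITCHES: {
                "living_room": {CONF_CODE_ON: "1234567", CONF_CODE_OFF: "7654321"},
            },
        }
    )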
|
from copy import deepcopy
###############################################################################
# Create a Bunch class that acts like a struct (mybunch.key = val)
class Bunch(dict):
"""Dictionary-like object that exposes its keys as attributes."""
def __init__(self, **kwargs): # noqa: D102
dict.__init__(self, kwargs)
self.__dict__ = self
###############################################################################
# A protected version that prevents overwriting
class BunchConst(Bunch):
"""Class to prevent us from re-defining constants (DRY)."""
def __setattr__(self, attr, val): # noqa: D105
if attr != '__dict__' and hasattr(self, attr):
raise AttributeError('Attribute "%s" already set' % attr)
super().__setattr__(attr, val)
###############################################################################
# A version that tweaks the __repr__ of its values based on keys
class BunchConstNamed(BunchConst):
"""Class to provide nice __repr__ for our integer constants.
Only supports string keys and int or float values.
"""
def __setattr__(self, attr, val): # noqa: D105
assert isinstance(attr, str)
if isinstance(val, int):
val = NamedInt(attr, val)
elif isinstance(val, float):
val = NamedFloat(attr, val)
else:
assert isinstance(val, BunchConstNamed), type(val)
super().__setattr__(attr, val)
class _Named(object):
"""Provide shared methods for giving named-representation subclasses."""
def __new__(cls, name, val): # noqa: D102,D105
out = _named_subclass(cls).__new__(cls, val)
out._name = name
return out
def __str__(self): # noqa: D105
return '%s (%s)' % (str(self.__class__.mro()[-2](self)), self._name)
__repr__ = __str__
# see https://stackoverflow.com/a/15774013/2175965
def __copy__(self): # noqa: D105
cls = self.__class__
result = cls.__new__(cls)
result.__dict__.update(self.__dict__)
return result
def __deepcopy__(self, memo): # noqa: D105
cls = self.__class__
result = cls.__new__(cls, self._name, self)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
def __getnewargs__(self): # noqa: D105
return self._name, _named_subclass(self)(self)
def _named_subclass(klass):
if not isinstance(klass, type):
klass = klass.__class__
subklass = klass.mro()[-2]
assert subklass in (int, float)
return subklass
class NamedInt(_Named, int):
"""Int with a name in __repr__."""
pass # noqa
class NamedFloat(_Named, float):
"""Float with a name in __repr__."""
pass # noqa
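# --- Illustrative behavior (a sketch appended for clarity; constant names are made up) ---
if __name__ == '__main__':
    b = Bunch(sfreq=1000.0)
    assert b.sfreq == b['sfreq'] == 1000.0  # attribute and key access coincide
    consts = BunchConstNamed()
    consts.FIFF_EXAMPLE = 3  # stored as a NamedInt
    print(consts.FIFF_EXAMPLE)  # -> "3 (FIFF_EXAMPLE)"
    try:
        consts.FIFF_EXAMPLE = 4  # BunchConst forbids re-definition
    except AttributeError as err:
        print(err)  # -> Attribute "FIFF_EXAMPLE" already set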
|
from django.core.exceptions import ValidationError
from django.forms import fields, widgets
from django.template import engines
from django.template.loader import select_template
from django.utils.translation import gettext_lazy as _, gettext
from entangled.forms import EntangledModelFormMixin
from cms.plugin_pool import plugin_pool
from shop.cascade.plugin_base import ShopPluginBase
from shop.conf import app_settings
class ShopSearchResultsFormMixin(EntangledModelFormMixin):
CHOICES = [
('paginator', _("Use Paginator")),
('manual', _("Manual Infinite")),
('auto', _("Auto Infinite")),
]
pagination = fields.ChoiceField(
label=_("Pagination"),
choices=CHOICES,
widget=widgets.RadioSelect,
help_text=_("Shall the list of search results use a paginator or scroll infinitely?"),
)
class Meta:
entangled_fields = {'glossary': ['pagination']}
def clean(self):
cleaned_data = super().clean()
page = self.instance.placeholder.page if self.instance.placeholder_id else None
if page and page.application_urls != 'CatalogSearchApp':
raise ValidationError("This plugin can only be used on a CMS page with an application of type 'Search'.")
return cleaned_data
class ShopSearchResultsPlugin(ShopPluginBase):
name = _("Search Results")
require_parent = True
parent_classes = ['BootstrapColumnPlugin']
form = ShopSearchResultsFormMixin
cache = False
def get_render_template(self, context, instance, placeholder):
if instance.placeholder.page.application_urls != 'CatalogSearchApp':
alert_msg = '''<div class="alert alert-danger">
Plugin "{}" is used on a CMS page without an application of type "Catalog Search".
</div>'''
return engines['django'].from_string(alert_msg.format(self.name))
return select_template([
'{}/search/results.html'.format(app_settings.APP_LABEL),
'shop/search/results.html',
])
def render(self, context, instance, placeholder):
        super().render(context, instance, placeholder)
context['pagination'] = instance.glossary.get('pagination', 'paginator')
return context
@classmethod
def get_identifier(cls, obj):
pagination = obj.glossary.get('pagination')
if pagination == 'paginator':
return gettext("Manual Pagination")
return gettext("Infinite Scroll")
plugin_pool.register_plugin(ShopSearchResultsPlugin)
|
from email.mime.application import MIMEApplication
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import email.utils
import logging
import os
import smtplib
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_PORT,
CONF_RECIPIENT,
CONF_SENDER,
CONF_TIMEOUT,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.reload import setup_reload_service
import homeassistant.util.dt as dt_util
from . import DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
ATTR_IMAGES = "images" # optional embedded image file attachments
ATTR_HTML = "html"
CONF_ENCRYPTION = "encryption"
CONF_DEBUG = "debug"
CONF_SERVER = "server"
CONF_SENDER_NAME = "sender_name"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 587
DEFAULT_TIMEOUT = 5
DEFAULT_DEBUG = False
DEFAULT_ENCRYPTION = "starttls"
ENCRYPTION_OPTIONS = ["tls", "starttls", "none"]
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RECIPIENT): vol.All(cv.ensure_list, [vol.Email()]),
vol.Required(CONF_SENDER): vol.Email(),
vol.Optional(CONF_SERVER, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_ENCRYPTION, default=DEFAULT_ENCRYPTION): vol.In(
ENCRYPTION_OPTIONS
),
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SENDER_NAME): cv.string,
vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the mail notification service."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
mail_service = MailNotificationService(
config.get(CONF_SERVER),
config.get(CONF_PORT),
config.get(CONF_TIMEOUT),
config.get(CONF_SENDER),
config.get(CONF_ENCRYPTION),
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD),
config.get(CONF_RECIPIENT),
config.get(CONF_SENDER_NAME),
config.get(CONF_DEBUG),
)
if mail_service.connection_is_valid():
return mail_service
return None
class MailNotificationService(BaseNotificationService):
"""Implement the notification service for E-mail messages."""
def __init__(
self,
server,
port,
timeout,
sender,
encryption,
username,
password,
recipients,
sender_name,
debug,
):
"""Initialize the SMTP service."""
self._server = server
self._port = port
self._timeout = timeout
self._sender = sender
self.encryption = encryption
self.username = username
self.password = password
self.recipients = recipients
self._sender_name = sender_name
self.debug = debug
self.tries = 2
def connect(self):
"""Connect/authenticate to SMTP Server."""
if self.encryption == "tls":
mail = smtplib.SMTP_SSL(self._server, self._port, timeout=self._timeout)
else:
mail = smtplib.SMTP(self._server, self._port, timeout=self._timeout)
mail.set_debuglevel(self.debug)
mail.ehlo_or_helo_if_needed()
if self.encryption == "starttls":
mail.starttls()
mail.ehlo()
if self.username and self.password:
mail.login(self.username, self.password)
return mail
def connection_is_valid(self):
"""Check for valid config, verify connectivity."""
server = None
try:
server = self.connect()
except (smtplib.socket.gaierror, ConnectionRefusedError):
_LOGGER.exception(
"SMTP server not found or refused connection (%s:%s). "
"Please check the IP address, hostname, and availability of your SMTP server",
self._server,
self._port,
)
except smtplib.SMTPAuthenticationError:
_LOGGER.exception(
"Login not possible. "
"Please check your setting and/or your credentials"
)
return False
finally:
if server:
server.quit()
return True
def send_message(self, message="", **kwargs):
"""
Build and send a message to a user.
        Send plain text by default; build a multipart HTML message with
        inline image attachments when the "images" data key is set, or a
        rich HTML multipart message when the "html" data key is set.
"""
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA)
if data:
if ATTR_HTML in data:
msg = _build_html_msg(
message, data[ATTR_HTML], images=data.get(ATTR_IMAGES, [])
)
else:
msg = _build_multipart_msg(message, images=data.get(ATTR_IMAGES, []))
else:
msg = _build_text_msg(message)
msg["Subject"] = subject
msg["To"] = ",".join(self.recipients)
if self._sender_name:
msg["From"] = f"{self._sender_name} <{self._sender}>"
else:
msg["From"] = self._sender
msg["X-Mailer"] = "Home Assistant"
msg["Date"] = email.utils.format_datetime(dt_util.now())
msg["Message-Id"] = email.utils.make_msgid()
return self._send_email(msg)
def _send_email(self, msg):
"""Send the message."""
mail = self.connect()
for _ in range(self.tries):
try:
mail.sendmail(self._sender, self.recipients, msg.as_string())
break
except smtplib.SMTPServerDisconnected:
_LOGGER.warning(
"SMTPServerDisconnected sending mail: retrying connection"
)
mail.quit()
mail = self.connect()
except smtplib.SMTPException:
_LOGGER.warning("SMTPException sending mail: retrying connection")
mail.quit()
mail = self.connect()
mail.quit()
def _build_text_msg(message):
"""Build plaintext email."""
_LOGGER.debug("Building plain text email")
return MIMEText(message)
def _attach_file(atch_name, content_id):
"""Create a message attachment."""
try:
with open(atch_name, "rb") as attachment_file:
file_bytes = attachment_file.read()
except FileNotFoundError:
_LOGGER.warning("Attachment %s not found. Skipping", atch_name)
return None
try:
attachment = MIMEImage(file_bytes)
except TypeError:
_LOGGER.warning(
"Attachment %s has an unknown MIME type. " "Falling back to file",
atch_name,
)
attachment = MIMEApplication(file_bytes, Name=atch_name)
attachment["Content-Disposition"] = "attachment; " 'filename="%s"' % atch_name
attachment.add_header("Content-ID", f"<{content_id}>")
return attachment
def _build_multipart_msg(message, images):
"""Build Multipart message with in-line images."""
_LOGGER.debug("Building multipart email with embedded attachment(s)")
msg = MIMEMultipart("related")
msg_alt = MIMEMultipart("alternative")
msg.attach(msg_alt)
body_txt = MIMEText(message)
msg_alt.attach(body_txt)
body_text = [f"<p>{message}</p><br>"]
for atch_num, atch_name in enumerate(images):
cid = f"image{atch_num}"
body_text.append(f'<img src="cid:{cid}"><br>')
attachment = _attach_file(atch_name, cid)
if attachment:
msg.attach(attachment)
body_html = MIMEText("".join(body_text), "html")
msg_alt.attach(body_html)
return msg
def _build_html_msg(text, html, images):
"""Build Multipart message with in-line images and rich HTML (UTF-8)."""
_LOGGER.debug("Building HTML rich email")
msg = MIMEMultipart("related")
alternative = MIMEMultipart("alternative")
alternative.attach(MIMEText(text, _charset="utf-8"))
alternative.attach(MIMEText(html, ATTR_HTML, _charset="utf-8"))
msg.attach(alternative)
for atch_name in images:
name = os.path.basename(atch_name)
attachment = _attach_file(atch_name, name)
if attachment:
msg.attach(attachment)
return msg
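# --- Illustrative sketch (not part of the platform above) ---
# How send_message() picks a message builder, shown directly on the
# module-level helpers; "/tmp/graph.png" is a hypothetical path (a
# missing file is logged and skipped by _attach_file, not raised).
if __name__ == '__main__':
    for demo in (
        _build_text_msg("Backup finished"),
        _build_multipart_msg("Nightly graph:", images=["/tmp/graph.png"]),
        _build_html_msg("fallback text", "<h1>Report</h1>", images=[]),
    ):
        print(type(demo).__name__, demo.get_content_type())
    # MIMEText text/plain
    # MIMEMultipart multipart/related
    # MIMEMultipart multipart/related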
|
import math
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_ID, CONF_NAME
import homeassistant.helpers.config_validation as cv
from .device import EnOceanEntity
CONF_SENDER_ID = "sender_id"
DEFAULT_NAME = "EnOcean Light"
SUPPORT_ENOCEAN = SUPPORT_BRIGHTNESS
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ID, default=[]): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Required(CONF_SENDER_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the EnOcean light platform."""
sender_id = config.get(CONF_SENDER_ID)
dev_name = config.get(CONF_NAME)
dev_id = config.get(CONF_ID)
add_entities([EnOceanLight(sender_id, dev_id, dev_name)])
class EnOceanLight(EnOceanEntity, LightEntity):
"""Representation of an EnOcean light source."""
def __init__(self, sender_id, dev_id, dev_name):
"""Initialize the EnOcean light source."""
super().__init__(dev_id, dev_name)
self._on_state = False
self._brightness = 50
self._sender_id = sender_id
@property
def name(self):
"""Return the name of the device if any."""
return self.dev_name
@property
def brightness(self):
"""Brightness of the light.
This method is optional. Removing it indicates to Home Assistant
that brightness is not supported for this light.
"""
return self._brightness
@property
def is_on(self):
"""If light is on."""
return self._on_state
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_ENOCEAN
def turn_on(self, **kwargs):
"""Turn the light source on or sets a specific dimmer value."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
if brightness is not None:
self._brightness = brightness
bval = math.floor(self._brightness / 256.0 * 100.0)
if bval == 0:
bval = 1
command = [0xA5, 0x02, bval, 0x01, 0x09]
command.extend(self._sender_id)
command.extend([0x00])
self.send_command(command, [], 0x01)
self._on_state = True
def turn_off(self, **kwargs):
"""Turn the light source off."""
command = [0xA5, 0x02, 0x00, 0x01, 0x09]
command.extend(self._sender_id)
command.extend([0x00])
self.send_command(command, [], 0x01)
self._on_state = False
def value_changed(self, packet):
"""Update the internal state of this device.
        Dimmer devices like the Eltako FUD61 send telegrams in different RORGs.
We only care about the 4BS (0xA5).
"""
if packet.data[0] == 0xA5 and packet.data[1] == 0x02:
val = packet.data[2]
self._brightness = math.floor(val / 100.0 * 256.0)
self._on_state = bool(val != 0)
self.schedule_update_ha_state()
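# --- Illustrative sketch (not part of the platform above) ---
# The 4BS dimmer telegram carries brightness as a percentage (1-100),
# while Home Assistant uses 0-255; turn_on() and value_changed() apply
# the two directions of that scaling:
if __name__ == '__main__':
    assert math.floor(128 / 256.0 * 100.0) == 50   # HA 128 -> telegram 50
    assert math.floor(50 / 100.0 * 256.0) == 128   # telegram 50 -> HA 128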
|
import argparse
import openrazer.client
import sys
def _clamp_u8(value):
if value > 255:
return 255
elif value < 0:
return 0
else:
return value
def _print_table(header_list, rows):
column_lengths = [len(header_item) for header_item in header_list]
for row in rows:
for index, column in enumerate(row):
column = str(column)
cell_length = len(column)
try:
if column_lengths[index] < cell_length:
column_lengths[index] = cell_length
except IndexError:
pass
# spaces in between columns + total column length
max_line_length = ((len(column_lengths) - 1) * 4) + sum(column_lengths)
# Got maximum column widths
result = ''
for index, header_item in enumerate(header_list):
pad = ' ' * (column_lengths[index] - len(header_item))
result += '{0}{1} '.format(header_item, pad)
    # Pad the header row out to the full line width, then end the line
result += (' ' * (max_line_length - len(result))) + '\n'
# Add ----- separator and newline
result += ('-' * max_line_length) + '\n'
for row in rows:
line = ''
for index, column in enumerate(row):
column = str(column)
pad = ' ' * (column_lengths[index] - len(column))
line += '{0}{1} '.format(column, pad)
line += (' ' * (max_line_length - len(line))) + '\n'
result += line
print(result)
def _get_devices() -> list:
"""
Gets devices ordered by serial
:return: List of devices ordered by serial
:rtype: list
"""
device_manager = openrazer.client.DeviceManager()
devices = sorted(device_manager.devices, key=lambda x: (str(x.serial), str(x.type)))
return devices
def _get_devices_by_serial() -> dict:
device_manager = openrazer.client.DeviceManager()
devices = {device.serial: device for device in device_manager.devices}
return devices
def _get_devices_by_type() -> dict:
device_manager = openrazer.client.DeviceManager()
devices = {}
for device in device_manager.devices:
dev_type = device.type
try:
devices[dev_type].append(device)
except KeyError:
devices[dev_type] = [device]
# Sort devices
for key in devices:
devices[key] = sorted(devices[key], key=lambda x: str(x.serial))
return devices
def _get_device_from_filter(args):
if args.serial is not None:
device = _get_devices_by_serial().get(args.serial, None)
elif '--keyboard' in sys.argv:
if args.keyboard is None:
args.keyboard = 0
try:
device = _get_devices_by_type().get('keyboard', [])[args.keyboard]
except IndexError:
device = None
elif '--mouse' in sys.argv:
if args.mouse is None:
args.mouse = 0
try:
device = _get_devices_by_type().get('mouse', [])[args.mouse]
except IndexError:
device = None
elif '--firefly' in sys.argv:
if args.firefly is None:
args.firefly = 0
try:
device = _get_devices_by_type().get('firefly', [])[args.firefly]
except IndexError:
device = None
elif '--tartarus' in sys.argv:
if args.tartarus is None:
args.tartarus = 0
try:
device = _get_devices_by_type().get('tartarus', [])[args.tartarus]
except IndexError:
device = None
else:
# Theoretically impossible to land here
device = None
if device is None:
print("Could not find device")
sys.exit(1)
else:
return device
def list_devices(args):
"""
Subcommand to list all devices
:param args: Argparse arguments
"""
devices = _get_devices()
headers = ['ID', 'Device Name', 'Device Type', 'Serial']
rows = []
for index, device in enumerate(devices):
rows.append([
index,
device.name,
device.type.title(),
device.serial
])
_print_table(headers, rows)
def brightness_func(args):
device = _get_device_from_filter(args)
if args.set is None:
# Get brightness
if args.raw:
print(str(device.brightness))
else:
print("Brightness: {0}%".format(device.brightness))
else:
brightness_value = float(_clamp_u8(args.set))
if not args.raw:
print("Setting brightness to {0}%".format(brightness_value))
device.brightness = brightness_value
def parse_args():
def add_filter_group(sub):
group = sub.add_mutually_exclusive_group(required=True)
group.add_argument('--serial', help='Select device via its serial')
group.add_argument('--keyboard', nargs='?', default=0, type=int, help='Select keyboard, if ID is omitted the first is used')
group.add_argument('--mouse', nargs='?', default=0, type=int, help='Select mouse, if ID is omitted the first is used')
group.add_argument('--firefly', nargs='?', default=0, type=int, help='Select Firefly, if ID is omitted the first is used')
group.add_argument('--tartarus', nargs='?', default=0, type=int, help='Select Tartarus, if ID is omitted the first is used')
def add_raw(sub):
sub.add_argument('--raw', action='store_true', help="Raw output")
parser = argparse.ArgumentParser()
subparser = parser.add_subparsers(dest='command', help='commands')
subparser.required = True
help_parser = subparser.add_parser('help', help='The help command will display help, running "help <command>" will display more detailed help')
help_parser.add_argument('help', nargs='?', metavar='COMMAND', default=None, type=str)
# No need to assign to a var as it has no args
subparser.add_parser('list', help='Lists Razer Devices')
# Brightness
brightness_parser = subparser.add_parser('brightness', help='Get or set the brightness')
add_filter_group(brightness_parser)
add_raw(brightness_parser)
brightness_parser.add_argument('--set', metavar='BRIGHTNESS', type=float, default=None, help='Gets brightness if omitted')
# Macro
macro_parser = subparser.add_parser('macro', help='Manage macros')
add_filter_group(macro_parser)
macro_exclusive_group = macro_parser.add_mutually_exclusive_group(required=True)
macro_exclusive_group.add_argument('--list', action='store_true', help="List active macros")
macro_exclusive_group.add_argument('--add-script', nargs=2, type=str, metavar=('BIND_KEY', 'SCRIPT_PATH'), help="Bind the given script to the given macro key. If you require script arguments either create a wrapper or use the API direct.")
macro_exclusive_group.add_argument('--add-url', nargs=2, type=str, metavar=('BIND_KEY', 'URL'), help="Bind the given URL to the given macro key, so that xdg-open will open a tab.")
macro_exclusive_group.add_argument('--add-keys', nargs='+', type=str, metavar=('BIND_KEY', 'KEYS'), help="Bind the given key string to the given macro key.")
args = parser.parse_args()
if args.command == 'help':
if args.help == 'brightness':
brightness_parser.print_help()
elif args.help == 'macro':
macro_parser.print_help()
else:
parser.print_help()
sys.exit(0)
return args
CMD_MAP = {
'list': list_devices,
'brightness': brightness_func
}
def run():
args = parse_args()
if args.command in CMD_MAP:
CMD_MAP[args.command](args)
else:
print('Someone forgot to add mapping for command "{0}"'.format(args.command))
print()
if __name__ == '__main__':
run()
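# --- Illustrative sketch (not part of the CLI above) ---
# _print_table pads every column to its widest cell and underlines the
# header with dashes; with hypothetical device data,
#
#   _print_table(['ID', 'Device Name', 'Serial'],
#                [[0, 'Razer BlackWidow', 'PM1946H00000001']])
#
# prints roughly:
#
#   ID    Device Name         Serial
#   -----------------------------------------
#   0     Razer BlackWidow    PM1946H00000001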
|
from redis import Redis
import json
from datetime import datetime
from flask import current_app
from lemur.plugins import lemur_atlas as atlas
from lemur.plugins.bases.metric import MetricPlugin
def millis_since_epoch():
"""
current time since epoch in milliseconds
"""
epoch = datetime.utcfromtimestamp(0)
delta = datetime.now() - epoch
return int(delta.total_seconds() * 1000.0)
class AtlasMetricRedisPlugin(MetricPlugin):
title = "AtlasRedis"
slug = "atlas-metric-redis"
description = "Adds support for sending key metrics to Atlas via local Redis"
version = atlas.VERSION
author = "Jay Zarfoss"
author_url = "https://github.com/netflix/lemur"
options = [
{
"name": "redis_host",
"type": "str",
"required": False,
"help_message": "If no host is provided localhost is assumed",
"default": "localhost",
},
{"name": "redis_port", "type": "int", "required": False, "default": 28527},
]
metric_data = {}
redis_host = None
redis_port = None
def submit(
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
):
if not options:
options = self.options
valid_types = ["COUNTER", "GAUGE", "TIMER"]
if metric_type.upper() not in valid_types:
raise Exception(
"Invalid Metric Type for Atlas: '{metric}' choose from: {options}".format(
metric=metric_type, options=",".join(valid_types)
)
)
if metric_tags:
if not isinstance(metric_tags, dict):
raise Exception(
"Invalid Metric Tags for Atlas: Tags must be in dict format"
)
self.metric_data["timestamp"] = millis_since_epoch()
self.metric_data["type"] = metric_type.upper()
self.metric_data["name"] = str(metric_name)
self.metric_data["tags"] = metric_tags
if (
metric_value == "NaN"
or isinstance(metric_value, int)
or isinstance(metric_value, float)
):
self.metric_data["value"] = metric_value
else:
raise Exception("Invalid Metric Value for Atlas: Metric must be a number")
self.redis_host = self.get_option("redis_host", options)
self.redis_port = self.get_option("redis_port", options)
try:
r = Redis(host=self.redis_host, port=self.redis_port, socket_timeout=0.1)
r.rpush('atlas-agent', json.dumps(self.metric_data))
except Exception as e:
current_app.logger.warning(
"AtlasMetricsRedis: exception [{exception}] could not post atlas metrics to AtlasRedis [{host}:{port}], metric [{metricdata}]".format(
exception=e, host=self.redis_host, port=self.redis_port, metricdata=json.dumps(self.metric_data)
)
)
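# --- Illustrative sketch (not part of the plugin above) ---
# Each submit() call RPUSHes one JSON document onto the local
# 'atlas-agent' list for an external agent to drain; a payload looks
# roughly like (hypothetical values):
#
#   {"timestamp": 1596231600000, "type": "COUNTER",
#    "name": "certificates_issued", "tags": {"owner": "secops"}, "value": 1}
#
# and can be inspected with a plain client, e.g.:
#   Redis(host="localhost", port=28527).lpop("atlas-agent")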
|
import os
import sys
import re
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'snmp'))
from snmp import SNMPCollector as parent_SNMPCollector
import diamond.convertor
class SNMPInterfaceCollector(parent_SNMPCollector):
# IF-MIB OID
IF_MIB_INDEX_OID = "1.3.6.1.2.1.2.2.1.1"
IF_MIB_NAME_OID = "1.3.6.1.2.1.31.1.1.1.1"
IF_MIB_TYPE_OID = "1.3.6.1.2.1.2.2.1.3"
# A list of IF-MIB 32bit counters to walk
IF_MIB_GAUGE_OID_TABLE = {'ifInDiscards': "1.3.6.1.2.1.2.2.1.13",
'ifInErrors': "1.3.6.1.2.1.2.2.1.14",
'ifOutDiscards': "1.3.6.1.2.1.2.2.1.19",
'ifOutErrors': "1.3.6.1.2.1.2.2.1.20"}
    # A list of IF-MIB 64bit counters to walk
IF_MIB_COUNTER_OID_TABLE = {'ifHCInOctets': "1.3.6.1.2.1.31.1.1.1.6",
'ifInUcastPkts': "1.3.6.1.2.1.31.1.1.1.7",
'ifInMulticastPkts': "1.3.6.1.2.1.31.1.1.1.8",
'ifInBroadcastPkts': "1.3.6.1.2.1.31.1.1.1.9",
'ifHCOutOctets': "1.3.6.1.2.1.31.1.1.1.10",
'ifOutUcastPkts': "1.3.6.1.2.1.31.1.1.1.11",
'ifOutMulticastPkts': "1.3.6.1.2.1.31.1.1.1.12",
'ifOutBroadcastPkts': "1.3.6.1.2.1.31.1.1.1.13"}
# A list of interface types we care about
IF_TYPES = ["6"]
def get_default_config_help(self):
config_help = super(SNMPInterfaceCollector,
self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Override SNMPCollector.get_default_config method to provide
default_config for the SNMPInterfaceCollector
"""
default_config = super(SNMPInterfaceCollector,
self).get_default_config()
default_config['path'] = 'interface'
default_config['byte_unit'] = ['bit', 'byte']
return default_config
def collect_snmp(self, device, host, port, community):
"""
Collect SNMP interface data from device
"""
# Log
self.log.info("Collecting SNMP interface statistics from: %s", device)
        # Get Interface Indexes
        ifIndexData = self.walk(self.IF_MIB_INDEX_OID, host, port, community)
        ifIndexes = list(ifIndexData.values())
for ifIndex in ifIndexes:
# Get Interface Type
ifTypeOid = '.'.join([self.IF_MIB_TYPE_OID, ifIndex])
ifTypeData = self.get(ifTypeOid, host, port, community)
if ifTypeData[ifTypeOid] not in self.IF_TYPES:
# Skip Interface
continue
# Get Interface Name
ifNameOid = '.'.join([self.IF_MIB_NAME_OID, ifIndex])
ifNameData = self.get(ifNameOid, host, port, community)
ifName = ifNameData[ifNameOid]
# Remove quotes from string
ifName = re.sub(r'(\"|\')', '', ifName)
# Get Gauges
for gaugeName, gaugeOid in self.IF_MIB_GAUGE_OID_TABLE.items():
                ifGaugeOid = '.'.join([gaugeOid, ifIndex])
ifGaugeData = self.get(ifGaugeOid, host, port, community)
ifGaugeValue = ifGaugeData[ifGaugeOid]
if not ifGaugeValue:
continue
# Get Metric Name and Value
metricIfDescr = re.sub(r'\W', '_', ifName)
metricName = '.'.join([metricIfDescr, gaugeName])
metricValue = int(ifGaugeValue)
# Get Metric Path
metricPath = '.'.join(['devices',
device,
self.config['path'],
metricName])
# Publish Metric
self.publish_gauge(metricPath, metricValue)
# Get counters (64bit)
counterItems = self.IF_MIB_COUNTER_OID_TABLE.items()
for counterName, counterOid in counterItems:
                ifCounterOid = '.'.join([counterOid, ifIndex])
ifCounterData = self.get(ifCounterOid, host, port, community)
ifCounterValue = ifCounterData[ifCounterOid]
if not ifCounterValue:
continue
# Get Metric Name and Value
metricIfDescr = re.sub(r'\W', '_', ifName)
if counterName in ['ifHCInOctets', 'ifHCOutOctets']:
for unit in self.config['byte_unit']:
# Convert Metric
metricName = '.'.join([metricIfDescr,
counterName.replace('Octets',
unit)])
metricValue = diamond.convertor.binary.convert(
value=ifCounterValue,
oldUnit='byte',
newUnit=unit)
# Get Metric Path
metricPath = '.'.join(['devices',
device,
self.config['path'],
metricName])
# Publish Metric
self.publish_counter(metricPath,
metricValue,
max_value=18446744073709600000,
)
else:
metricName = '.'.join([metricIfDescr, counterName])
metricValue = int(ifCounterValue)
# Get Metric Path
metricPath = '.'.join(['devices',
device,
self.config['path'],
metricName])
# Publish Metric
self.publish_counter(metricPath,
metricValue,
max_value=18446744073709600000,
)
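# --- Illustrative sketch (not part of the collector above) ---
# For the 64bit octet counters, one metric is published per configured
# byte_unit using the convertor call above; e.g. with ifHCInOctets=1024
# on eth0 (hypothetical values):
#   diamond.convertor.binary.convert(value=1024, oldUnit='byte', newUnit='bit')
#   -> 8192.0, published as 'devices.<device>.interface.eth0.ifHCInbit'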
|
import logging
import pywink
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OCCUPANCY,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_SOUND,
DEVICE_CLASS_VIBRATION,
BinarySensorEntity,
)
from . import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
# These are the available sensors mapped to binary_sensor class
SENSOR_TYPES = {
"brightness": "light",
"capturing_audio": DEVICE_CLASS_SOUND,
"capturing_video": None,
"co_detected": "gas",
"liquid_detected": DEVICE_CLASS_MOISTURE,
"loudness": DEVICE_CLASS_SOUND,
"motion": DEVICE_CLASS_MOTION,
"noise": DEVICE_CLASS_SOUND,
"opened": DEVICE_CLASS_OPENING,
"presence": DEVICE_CLASS_OCCUPANCY,
"smoke_detected": DEVICE_CLASS_SMOKE,
"vibration": DEVICE_CLASS_VIBRATION,
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink binary sensor platform."""
for sensor in pywink.get_sensors():
_id = sensor.object_id() + sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
if sensor.capability() in SENSOR_TYPES:
add_entities([WinkBinarySensorEntity(sensor, hass)])
for key in pywink.get_keys():
_id = key.object_id() + key.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkBinarySensorEntity(key, hass)])
for sensor in pywink.get_smoke_and_co_detectors():
_id = sensor.object_id() + sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkSmokeDetector(sensor, hass)])
for hub in pywink.get_hubs():
_id = hub.object_id() + hub.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkHub(hub, hass)])
for remote in pywink.get_remotes():
_id = remote.object_id() + remote.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkRemote(remote, hass)])
for button in pywink.get_buttons():
_id = button.object_id() + button.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkButton(button, hass)])
for gang in pywink.get_gangs():
_id = gang.object_id() + gang.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkGang(gang, hass)])
for door_bell_sensor in pywink.get_door_bells():
_id = door_bell_sensor.object_id() + door_bell_sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkBinarySensorEntity(door_bell_sensor, hass)])
for camera_sensor in pywink.get_cameras():
_id = camera_sensor.object_id() + camera_sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
try:
if camera_sensor.capability() in SENSOR_TYPES:
add_entities([WinkBinarySensorEntity(camera_sensor, hass)])
except AttributeError:
_LOGGER.info("Device isn't a sensor, skipping")
class WinkBinarySensorEntity(WinkDevice, BinarySensorEntity):
"""Representation of a Wink binary sensor."""
def __init__(self, wink, hass):
"""Initialize the Wink binary sensor."""
super().__init__(wink, hass)
if hasattr(self.wink, "unit"):
self._unit_of_measurement = self.wink.unit()
else:
self._unit_of_measurement = None
if hasattr(self.wink, "capability"):
self.capability = self.wink.capability()
else:
self.capability = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["binary_sensor"].append(self)
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.wink.state()
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return SENSOR_TYPES.get(self.capability)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return super().device_state_attributes
class WinkSmokeDetector(WinkBinarySensorEntity):
"""Representation of a Wink Smoke detector."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["test_activated"] = self.wink.test_activated()
return _attributes
class WinkHub(WinkBinarySensorEntity):
"""Representation of a Wink Hub."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["update_needed"] = self.wink.update_needed()
_attributes["firmware_version"] = self.wink.firmware_version()
_attributes["pairing_mode"] = self.wink.pairing_mode()
_kidde_code = self.wink.kidde_radio_code()
if _kidde_code is not None:
# The service call to set the Kidde code
# takes a string of 1s and 0s so it makes
# sense to display it to the user that way
_formatted_kidde_code = f"{_kidde_code:b}".zfill(8)
_attributes["kidde_radio_code"] = _formatted_kidde_code
return _attributes
class WinkRemote(WinkBinarySensorEntity):
"""Representation of a Wink Lutron Connected bulb remote."""
@property
def device_state_attributes(self):
"""Return the state attributes."""
_attributes = super().device_state_attributes
_attributes["button_on_pressed"] = self.wink.button_on_pressed()
_attributes["button_off_pressed"] = self.wink.button_off_pressed()
_attributes["button_up_pressed"] = self.wink.button_up_pressed()
_attributes["button_down_pressed"] = self.wink.button_down_pressed()
return _attributes
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return None
class WinkButton(WinkBinarySensorEntity):
"""Representation of a Wink Relay button."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["pressed"] = self.wink.pressed()
_attributes["long_pressed"] = self.wink.long_pressed()
return _attributes
class WinkGang(WinkBinarySensorEntity):
"""Representation of a Wink Relay gang."""
@property
def is_on(self):
"""Return true if the gang is connected."""
return self.wink.state()
|
import logging
import re
from typing import Any, Callable, Dict, List, Optional, Set, cast
import attr
from stringcase import snakecase
from homeassistant.components.device_tracker.config_entry import ScannerEntity
from homeassistant.components.device_tracker.const import (
DOMAIN as DEVICE_TRACKER_DOMAIN,
SOURCE_TYPE_ROUTER,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.core import callback
from homeassistant.helpers import entity_registry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from . import HuaweiLteBaseEntity
from .const import DOMAIN, KEY_WLAN_HOST_LIST, UPDATE_SIGNAL
_LOGGER = logging.getLogger(__name__)
_DEVICE_SCAN = f"{DEVICE_TRACKER_DOMAIN}/device_scan"
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up from config entry."""
# Grab hosts list once to examine whether the initial fetch has got some data for
# us, i.e. if wlan host list is supported. Only set up a subscription and proceed
# with adding and tracking entities if it is.
router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]]
try:
_ = router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
except KeyError:
_LOGGER.debug("%s[%s][%s] not in data", KEY_WLAN_HOST_LIST, "Hosts", "Host")
return
# Initialize already tracked entities
tracked: Set[str] = set()
registry = await entity_registry.async_get_registry(hass)
known_entities: List[Entity] = []
for entity in registry.entities.values():
if (
entity.domain == DEVICE_TRACKER_DOMAIN
and entity.config_entry_id == config_entry.entry_id
):
tracked.add(entity.unique_id)
known_entities.append(
HuaweiLteScannerEntity(router, entity.unique_id.partition("-")[2])
)
async_add_entities(known_entities, True)
# Tell parent router to poll hosts list to gather new devices
router.subscriptions[KEY_WLAN_HOST_LIST].add(_DEVICE_SCAN)
async def _async_maybe_add_new_entities(url: str) -> None:
"""Add new entities if the update signal comes from our router."""
if url == router.url:
async_add_new_entities(hass, url, async_add_entities, tracked)
# Register to handle router data updates
disconnect_dispatcher = async_dispatcher_connect(
hass, UPDATE_SIGNAL, _async_maybe_add_new_entities
)
router.unload_handlers.append(disconnect_dispatcher)
# Add new entities from initial scan
async_add_new_entities(hass, router.url, async_add_entities, tracked)
@callback
def async_add_new_entities(
hass: HomeAssistantType,
router_url: str,
async_add_entities: Callable[[List[Entity], bool], None],
tracked: Set[str],
) -> None:
"""Add new entities that are not already being tracked."""
router = hass.data[DOMAIN].routers[router_url]
try:
hosts = router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
except KeyError:
_LOGGER.debug("%s[%s][%s] not in data", KEY_WLAN_HOST_LIST, "Hosts", "Host")
return
new_entities: List[Entity] = []
for host in (x for x in hosts if x.get("MacAddress")):
entity = HuaweiLteScannerEntity(router, host["MacAddress"])
if entity.unique_id in tracked:
continue
tracked.add(entity.unique_id)
new_entities.append(entity)
async_add_entities(new_entities, True)
def _better_snakecase(text: str) -> str:
if text == text.upper():
# All uppercase to all lowercase to get http for HTTP, not h_t_t_p
text = text.lower()
else:
# Three or more consecutive uppercase with middle part lowercased
# to get http_response for HTTPResponse, not h_t_t_p_response
text = re.sub(
r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))",
lambda match: f"{match.group(1)}{match.group(2).lower()}{match.group(3)}",
text,
)
return cast(str, snakecase(text))
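# A few illustrative inputs/outputs for _better_snakecase (the final
# step assumes the stringcase package's snakecase):
#   "HTTP"         -> "http"           (all-caps lowered up front)
#   "HTTPResponse" -> "http_response"  (caps run collapsed first)
#   "HostName"     -> "host_name"      (plain CamelCase falls through)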
@attr.s
class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity):
"""Huawei LTE router scanner entity."""
mac: str = attr.ib()
_is_connected: bool = attr.ib(init=False, default=False)
_hostname: Optional[str] = attr.ib(init=False, default=None)
_device_state_attributes: Dict[str, Any] = attr.ib(init=False, factory=dict)
def __attrs_post_init__(self) -> None:
"""Initialize internal state."""
self._device_state_attributes["mac_address"] = self.mac
@property
def _entity_name(self) -> str:
return self._hostname or self.mac
@property
def _device_unique_id(self) -> str:
return self.mac
@property
def source_type(self) -> str:
"""Return SOURCE_TYPE_ROUTER."""
return SOURCE_TYPE_ROUTER
@property
def is_connected(self) -> bool:
"""Get whether the entity is connected."""
return self._is_connected
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Get additional attributes related to entity state."""
return self._device_state_attributes
async def async_update(self) -> None:
"""Update state."""
hosts = self.router.data[KEY_WLAN_HOST_LIST]["Hosts"]["Host"]
host = next((x for x in hosts if x.get("MacAddress") == self.mac), None)
self._is_connected = host is not None
if host is not None:
self._hostname = host.get("HostName")
self._device_state_attributes = {
_better_snakecase(k): v for k, v in host.items() if k != "HostName"
}
|
import asyncio
from datetime import datetime, timedelta
import logging
import aiohttp
import async_timeout
from buienradar.buienradar import parse_data
from buienradar.constants import (
ATTRIBUTION,
CONDITION,
CONTENT,
DATA,
FORECAST,
HUMIDITY,
MESSAGE,
PRESSURE,
STATIONNAME,
STATUS_CODE,
SUCCESS,
TEMPERATURE,
VISIBILITY,
WINDAZIMUTH,
WINDSPEED,
)
from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, HTTP_OK
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util import dt as dt_util
from .const import SCHEDULE_NOK, SCHEDULE_OK
__all__ = ["BrData"]
_LOGGER = logging.getLogger(__name__)
"""
Log at WARN level after WARN_THRESHOLD failures, otherwise log at
DEBUG level.
"""
WARN_THRESHOLD = 4
def threshold_log(count: int, *args, **kwargs) -> None:
"""Log at warn level after WARN_THRESHOLD failures, debug otherwise."""
if count >= WARN_THRESHOLD:
_LOGGER.warning(*args, **kwargs)
else:
_LOGGER.debug(*args, **kwargs)
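# For example (hypothetical counts): threshold_log(3, "fetch failed")
# logs at DEBUG, while threshold_log(4, "fetch failed") logs at WARNING.
# BrData below starts its counters at WARN_THRESHOLD so the very first
# failure is warned about, and resets them to 0 after each success.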
class BrData:
"""Get the latest data and updates the states."""
# Initialize to warn immediately if the first call fails.
load_error_count: int = WARN_THRESHOLD
rain_error_count: int = WARN_THRESHOLD
def __init__(self, hass, coordinates, timeframe, devices):
"""Initialize the data object."""
self.devices = devices
self.data = {}
self.hass = hass
self.coordinates = coordinates
self.timeframe = timeframe
async def update_devices(self):
"""Update all devices/sensors."""
if not self.devices:
return
# Update all devices
for dev in self.devices:
dev.data_updated(self.data)
async def schedule_update(self, minute=1):
"""Schedule an update after minute minutes."""
_LOGGER.debug("Scheduling next update in %s minutes", minute)
nxt = dt_util.utcnow() + timedelta(minutes=minute)
async_track_point_in_utc_time(self.hass, self.async_update, nxt)
async def get_data(self, url):
"""Load data from specified url."""
_LOGGER.debug("Calling url: %s...", url)
result = {SUCCESS: False, MESSAGE: None}
resp = None
try:
websession = async_get_clientsession(self.hass)
with async_timeout.timeout(10):
resp = await websession.get(url)
result[STATUS_CODE] = resp.status
result[CONTENT] = await resp.text()
if resp.status == HTTP_OK:
result[SUCCESS] = True
else:
result[MESSAGE] = "Got http statuscode: %d" % (resp.status)
return result
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
result[MESSAGE] = "%s" % err
return result
finally:
if resp is not None:
await resp.release()
async def async_update(self, *_):
"""Update the data from buienradar."""
content = await self.get_data(JSON_FEED_URL)
if content.get(SUCCESS) is not True:
# unable to get the data
self.load_error_count += 1
threshold_log(
self.load_error_count,
"Unable to retrieve json data from Buienradar" "(Msg: %s, status: %s,)",
content.get(MESSAGE),
content.get(STATUS_CODE),
)
# schedule new call
await self.schedule_update(SCHEDULE_NOK)
return
self.load_error_count = 0
# rounding coordinates prevents unnecessary redirects/calls
lat = self.coordinates[CONF_LATITUDE]
lon = self.coordinates[CONF_LONGITUDE]
rainurl = json_precipitation_forecast_url(lat, lon)
raincontent = await self.get_data(rainurl)
if raincontent.get(SUCCESS) is not True:
self.rain_error_count += 1
# unable to get the data
threshold_log(
self.rain_error_count,
"Unable to retrieve rain data from Buienradar" "(Msg: %s, status: %s)",
raincontent.get(MESSAGE),
raincontent.get(STATUS_CODE),
)
# schedule new call
await self.schedule_update(SCHEDULE_NOK)
return
self.rain_error_count = 0
result = parse_data(
content.get(CONTENT),
raincontent.get(CONTENT),
self.coordinates[CONF_LATITUDE],
self.coordinates[CONF_LONGITUDE],
self.timeframe,
False,
)
_LOGGER.debug("Buienradar parsed data: %s", result)
if result.get(SUCCESS) is not True:
if int(datetime.now().strftime("%H")) > 0:
_LOGGER.warning(
"Unable to parse data from Buienradar. (Msg: %s)",
result.get(MESSAGE),
)
await self.schedule_update(SCHEDULE_NOK)
return
self.data = result.get(DATA)
await self.update_devices()
await self.schedule_update(SCHEDULE_OK)
@property
def attribution(self):
"""Return the attribution."""
return self.data.get(ATTRIBUTION)
@property
def stationname(self):
"""Return the name of the selected weatherstation."""
return self.data.get(STATIONNAME)
@property
def condition(self):
"""Return the condition."""
return self.data.get(CONDITION)
@property
def temperature(self):
"""Return the temperature, or None."""
try:
return float(self.data.get(TEMPERATURE))
except (ValueError, TypeError):
return None
@property
def pressure(self):
"""Return the pressure, or None."""
try:
return float(self.data.get(PRESSURE))
except (ValueError, TypeError):
return None
@property
def humidity(self):
"""Return the humidity, or None."""
try:
return int(self.data.get(HUMIDITY))
except (ValueError, TypeError):
return None
@property
def visibility(self):
"""Return the visibility, or None."""
try:
return int(self.data.get(VISIBILITY))
except (ValueError, TypeError):
return None
@property
def wind_speed(self):
"""Return the windspeed, or None."""
try:
return float(self.data.get(WINDSPEED))
except (ValueError, TypeError):
return None
@property
def wind_bearing(self):
"""Return the wind bearing, or None."""
try:
return int(self.data.get(WINDAZIMUTH))
except (ValueError, TypeError):
return None
@property
def forecast(self):
"""Return the forecast data."""
return self.data.get(FORECAST)
|
from celery.schedules import crontab
from django.core.checks import run_checks
from weblate.utils.celery import app
from weblate.wladmin.models import BackupService, ConfigurationError, SupportStatus
@app.task(trail=False)
def configuration_health_check(checks=None):
# Run deployment checks if needed
if checks is None:
checks = run_checks(include_deployment_checks=True)
checks_dict = {check.id: check for check in checks}
criticals = {
"weblate.E002",
"weblate.E003",
"weblate.E007",
"weblate.E009",
"weblate.E012",
"weblate.E013",
"weblate.E014",
"weblate.E015",
"weblate.E017",
"weblate.E018",
"weblate.E019",
"weblate.C023",
"weblate.C029",
"weblate.C030",
"weblate.C031",
"weblate.C032",
"weblate.E034",
"weblate.C035",
"weblate.C036",
}
removals = []
existing = {error.name: error for error in ConfigurationError.objects.all()}
for check_id in criticals:
if check_id in checks_dict:
check = checks_dict[check_id]
if check_id in existing:
error = existing[check_id]
if error.message != check.msg:
error.message = check.msg
error.save(update_fields=["message"])
else:
ConfigurationError.objects.create(name=check_id, message=check.msg)
elif check_id in existing:
removals.append(check_id)
if removals:
ConfigurationError.objects.filter(name__in=removals).delete()
@app.task(trail=False)
def support_status_update():
support = SupportStatus.objects.get_current()
if support.secret:
support.refresh()
support.save()
@app.task(trail=False)
def backup():
for service in BackupService.objects.filter(enabled=True):
backup_service.delay(service.pk)
@app.task(trail=False)
def backup_service(pk):
service = BackupService.objects.get(pk=pk)
service.ensure_init()
service.backup()
service.prune()
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(
3600, configuration_health_check.s(), name="configuration-health-check"
)
sender.add_periodic_task(
24 * 3600, support_status_update.s(), name="support-status-update"
)
sender.add_periodic_task(crontab(hour=2, minute=0), backup.s(), name="backup")
|
import os
import pytest
from molecule import util
from molecule.command import cleanup
@pytest.fixture
def _command_provisioner_section_with_cleanup_data():
return {
'provisioner': {
'name': 'ansible',
'playbooks': {
'cleanup': 'cleanup.yml',
},
}
}
@pytest.fixture
def _patched_ansible_cleanup(mocker):
return mocker.patch('molecule.provisioner.ansible.Ansible.cleanup')
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.mark.parametrize(
'config_instance', ['_command_provisioner_section_with_cleanup_data'],
indirect=True)
def test_execute(mocker, _patched_ansible_cleanup, patched_logger_info,
patched_config_validate, config_instance):
pb = os.path.join(config_instance.scenario.directory, 'cleanup.yml')
util.write_file(pb, '')
cu = cleanup.Cleanup(config_instance)
cu.execute()
x = [
mocker.call("Scenario: 'default'"),
mocker.call("Action: 'cleanup'"),
]
assert x == patched_logger_info.mock_calls
_patched_ansible_cleanup.assert_called_once_with()
def test_execute_skips_when_playbook_not_configured(
patched_logger_warn, _patched_ansible_cleanup, config_instance):
cu = cleanup.Cleanup(config_instance)
cu.execute()
msg = 'Skipping, cleanup playbook not configured.'
patched_logger_warn.assert_called_once_with(msg)
assert not _patched_ansible_cleanup.called
|
import numpy as np
import unittest
import chainer
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.links import LightHeadRCNNResNet101
from chainercv.links.model.light_head_rcnn import LightHeadRCNNTrainChain
from chainercv.utils import generate_random_bbox
def _random_array(shape):
return np.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
@testing.parameterize(
{'train': False},
{'train': True}
)
class TestLightHeadRCNNResNet101(unittest.TestCase):
B = 1
n_fg_class = 20
n_class = n_fg_class + 1
n_anchor = 9
n_train_post_nms = 12
n_test_post_nms = 8
def setUp(self):
proposal_creator_params = {
'n_train_post_nms': self.n_train_post_nms,
'n_test_post_nms': self.n_test_post_nms
}
self.link = LightHeadRCNNResNet101(
self.n_fg_class, pretrained_model=None,
proposal_creator_params=proposal_creator_params)
chainer.config.train = self.train
def check_call(self):
xp = self.link.xp
feat_size = (12, 16)
x = chainer.Variable(
xp.random.uniform(
low=-1., high=1.,
size=(self.B, 3, feat_size[0] * 16, feat_size[1] * 16)
).astype(np.float32))
roi_cls_locs, roi_scores, rois, roi_indices = self.link(x)
n_roi = roi_scores.shape[0]
if self.train:
self.assertGreaterEqual(self.B * self.n_train_post_nms, n_roi)
else:
self.assertGreaterEqual(self.B * self.n_test_post_nms * 2, n_roi)
self.assertIsInstance(roi_cls_locs, chainer.Variable)
self.assertIsInstance(roi_cls_locs.array, xp.ndarray)
self.assertEqual(roi_cls_locs.shape, (n_roi, self.n_class * 4))
self.assertIsInstance(roi_scores, chainer.Variable)
self.assertIsInstance(roi_scores.array, xp.ndarray)
self.assertEqual(roi_scores.shape, (n_roi, self.n_class))
self.assertIsInstance(rois, xp.ndarray)
self.assertEqual(rois.shape, (n_roi, 4))
self.assertIsInstance(roi_indices, xp.ndarray)
self.assertEqual(roi_indices.shape, (n_roi,))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
class TestLightHeadRCNNResNet101Loss(unittest.TestCase):
B = 1
n_fg_class = 20
n_anchor = 9
n_train_post_nms = 12
n_test_post_nms = 8
n_bbox = 3
def setUp(self):
proposal_creator_params = {
'n_train_post_nms': self.n_train_post_nms,
'n_test_post_nms': self.n_test_post_nms
}
self.model = LightHeadRCNNTrainChain(
LightHeadRCNNResNet101(
self.n_fg_class, pretrained_model=None,
proposal_creator_params=proposal_creator_params))
self.bboxes = generate_random_bbox(
self.n_bbox, (600, 800), 16, 350)[np.newaxis]
self.labels = np.random.randint(
0, self.n_fg_class, size=(1, self.n_bbox)).astype(np.int32)
self.imgs = _random_array((1, 3, 600, 800))
self.scales = np.array([1.])
def check_call(self, model, imgs, bboxes, labels, scales):
        loss = model(imgs, bboxes, labels, scales)
self.assertEqual(loss.shape, ())
def test_call_cpu(self):
self.check_call(
self.model, self.imgs, self.bboxes, self.labels, self.scales)
@attr.gpu
def test_call_gpu(self):
self.model.to_gpu()
self.check_call(
self.model, cuda.to_gpu(self.imgs),
self.bboxes, self.labels, self.scales)
@testing.parameterize(*testing.product({
'n_fg_class': [None, 10, 20, 80],
'anchor_scales': [(8, 16, 32), (4, 8, 16, 32), (2, 4, 8, 16, 32)],
'pretrained_model': ['coco'],
}))
class TestLightHeadRCNNResNet101Pretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_fg_class': self.n_fg_class,
'anchor_scales': self.anchor_scales,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'coco':
valid = self.n_fg_class in [None, 80]
valid = valid and self.anchor_scales == (2, 4, 8, 16, 32)
if valid:
LightHeadRCNNResNet101(**kwargs)
else:
with self.assertRaises(ValueError):
LightHeadRCNNResNet101(**kwargs)
testing.run_module(__name__, __file__)
|
import re
import sys
import collections
def get_stats(vocab):
pairs = collections.defaultdict(int)
for word, freq in vocab.items():
symbols = word.split()
for i in range(len(symbols)-1):
pairs[symbols[i],symbols[i+1]] += freq
return pairs
def merge_vocab(pair, v_in):
v_out = {}
bigram_pattern = re.escape(' '.join(pair))
p = re.compile(r'(?<!\S)' + bigram_pattern + r'(?!\S)')
for word in v_in:
w_out = p.sub(''.join(pair), word)
v_out[w_out] = v_in[word]
return v_out
vocab = {'l o w</w>' : 5, 'l o w e r</w>' : 2,
'n e w e s t</w>' : 6, 'w i d e s t</w>' : 3}
num_merges = 15
for i in range(num_merges):
pairs = get_stats(vocab)
try:
best = max(pairs, key=pairs.get)
except ValueError:
break
if pairs[best] < 2:
sys.stderr.write('no pair has frequency > 1. Stopping\n')
break
vocab = merge_vocab(best, vocab)
print(best)
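# --- Illustrative walk-through (not part of the script above) ---
# With the toy vocabulary above, the first pass counts pair frequencies
# such as ('e', 's') -> 9 (6 from 'n e w e s t</w>' + 3 from
# 'w i d e s t</w>'), so the first merges printed are expected to be:
#   ('e', 's')
#   ('es', 't</w>')
#   ('l', 'o')
# ...continuing until no remaining pair occurs at least twice.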
|
from unittest import mock
import pytest
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QTextDocument, QColor
from PyQt5.QtWidgets import QTextEdit
from qutebrowser.completion import completiondelegate
@pytest.mark.parametrize('pat,txt,segments', [
('foo', 'foo', [(0, 3)]),
('foo', 'foobar', [(0, 3)]),
('foo', 'FOObar', [(0, 3)]), # re.IGNORECASE
('foo', 'barfoo', [(3, 3)]),
('foo', 'barfoobaz', [(3, 3)]),
('foo', 'barfoobazfoo', [(3, 3), (9, 3)]),
('foo', 'foofoo', [(0, 3), (3, 3)]),
('a b', 'cadb', [(1, 1), (3, 1)]),
('foo', '<foo>', [(1, 3)]),
('<a>', "<a>bc", [(0, 3)]),
# https://github.com/qutebrowser/qutebrowser/issues/4199
('foo', "'foo'", [(1, 3)]),
('x', "'x'", [(1, 1)]),
('lt', "<lt", [(1, 2)]),
# See https://github.com/qutebrowser/qutebrowser/pull/5111
('bar', '\U0001d65b\U0001d664\U0001d664bar', [(6, 3)]),
('an anomaly', 'an anomaly', [(0, 2), (3, 7)]),
])
def test_highlight(pat, txt, segments):
doc = QTextDocument(txt)
highlighter = completiondelegate._Highlighter(doc, pat, Qt.red)
highlighter.setFormat = mock.Mock()
highlighter.highlightBlock(txt)
highlighter.setFormat.assert_has_calls([
mock.call(s[0], s[1], mock.ANY) for s in segments
])
def test_benchmark_highlight(benchmark):
txt = 'boofoobar'
pat = 'foo bar'
doc = QTextDocument(txt)
def bench():
highlighter = completiondelegate._Highlighter(doc, pat, Qt.red)
highlighter.highlightBlock(txt)
benchmark(bench)
def test_highlighted(qtbot):
"""Make sure highlighting works.
Note that with Qt > 5.12.1 we need to call setPlainText *after*
creating the highlighter for highlighting to work. Ideally, we'd test
whether CompletionItemDelegate._get_textdoc() works properly, but testing
that is kind of hard, so we just test it in isolation here.
"""
doc = QTextDocument()
completiondelegate._Highlighter(doc, 'Hello', Qt.red)
doc.setPlainText('Hello World')
# Needed so the highlighting actually works.
edit = QTextEdit()
qtbot.addWidget(edit)
edit.setDocument(doc)
colors = [f.foreground().color() for f in doc.allFormats()]
assert QColor('red') in colors
|
import pytest
from homeassistant.components.mobile_app.const import DOMAIN
from homeassistant.setup import async_setup_component
from .const import REGISTER, REGISTER_CLEARTEXT
from tests.common import mock_device_registry
@pytest.fixture
def registry(hass):
"""Return a configured device registry."""
return mock_device_registry(hass)
@pytest.fixture
async def create_registrations(hass, authed_api_client):
"""Return two new registrations."""
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
enc_reg = await authed_api_client.post(
"/api/mobile_app/registrations", json=REGISTER
)
assert enc_reg.status == 201
enc_reg_json = await enc_reg.json()
clear_reg = await authed_api_client.post(
"/api/mobile_app/registrations", json=REGISTER_CLEARTEXT
)
assert clear_reg.status == 201
clear_reg_json = await clear_reg.json()
await hass.async_block_till_done()
return (enc_reg_json, clear_reg_json)
@pytest.fixture
async def webhook_client(hass, authed_api_client, aiohttp_client):
"""mobile_app mock client."""
# We pass in the authed_api_client server instance because
# it is used inside create_registrations and just passing in
# the app instance would cause the server to start twice,
# which caused deprecation warnings to be printed.
return await aiohttp_client(authed_api_client.server)
@pytest.fixture
async def authed_api_client(hass, hass_client):
"""Provide an authenticated client for mobile_app to use."""
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
return await hass_client()
@pytest.fixture(autouse=True)
async def setup_ws(hass):
"""Configure the websocket_api component."""
assert await async_setup_component(hass, "websocket_api", {})
await hass.async_block_till_done()
|
from collections import Counter
from unittest import TestCase
from scattertext import whitespace_nlp_with_sentences
from scattertext.features.UseFullDocAsFeature import UseFullDocAsFeature
class TestUseFullDocAsFeature(TestCase):
def test_get_feats(self):
doc = whitespace_nlp_with_sentences("A a bb cc.")
term_freq = UseFullDocAsFeature().get_feats(doc)
self.assertEqual(Counter({"A a bb cc.": 1}), term_freq)
|
import pandas as pd
class CategoricalEnsembler(object):
def __init__(self, trained_models, transformation_pipeline, categorical_column, default_category):
self.trained_models = trained_models
self.categorical_column = categorical_column
self.transformation_pipeline = transformation_pipeline
self.default_category = default_category
self.is_categorical_ensembler = True
def get(self, prop_name, default=None):
try:
return getattr(self, prop_name)
except AttributeError:
return default
def predict(self, data):
# For now, we are assuming that data is a list of dictionaries, so if we have a single dict, put it in a list
if isinstance(data, dict):
data = [data]
if isinstance(data, pd.DataFrame):
data = data.to_dict('records')
predictions = []
for row in data:
category = row[self.categorical_column]
if str(category) == 'nan':
category = 'nan'
try:
model = self.trained_models[category]
except KeyError as e:
if self.default_category == '_RAISE_ERROR':
raise(e)
model = self.trained_models[self.default_category]
transformed_row = self.transformation_pipeline.transform(row)
prediction = model.predict(transformed_row)
predictions.append(prediction)
if len(predictions) == 1:
return predictions[0]
else:
return predictions
def predict_proba(self, data):
# For now, we are assuming that data is a list of dictionaries, so if we have a single dict, put it in a list
if isinstance(data, dict):
data = [data]
if isinstance(data, pd.DataFrame):
data = data.to_dict('records')
predictions = []
for row in data:
category = row[self.categorical_column]
if str(category) == 'nan':
category = 'nan'
try:
model = self.trained_models[category]
except KeyError as e:
if self.default_category == '_RAISE_ERROR':
raise(e)
model = self.trained_models[self.default_category]
transformed_row = self.transformation_pipeline.transform(row)
prediction = model.predict_proba(transformed_row)
predictions.append(prediction)
if len(predictions) == 1:
return predictions[0]
else:
return predictions
# Remove nans from our categorical ensemble column
def clean_categorical_definitions(df, categorical_column):
sum_of_nan_values = df[categorical_column].isnull().sum().sum()
if sum_of_nan_values > 0:
print('Found ' + str(sum_of_nan_values) + ' nan values in the categorical_column.')
print('We will default to making these values a string "nan" instead, since that can be used as a key')
print('If this is not the behavior you want, consider changing these categorical_column values yourself')
df[categorical_column].fillna('nan', inplace=True)
return df
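# --- Illustrative usage sketch (not part of the module above) ---
# The pipeline and models below are hypothetical stand-ins for whatever
# was actually trained, just to show how predict() routes each row by
# its categorical column and falls back to default_category.
if __name__ == '__main__':
    class IdentityPipeline(object):
        def transform(self, row):
            return [list(row.values())]

    class ConstantModel(object):
        def __init__(self, value):
            self.value = value

        def predict(self, rows):
            return self.value

    ensembler = CategoricalEnsembler(
        trained_models={'store_a': ConstantModel(1.0), 'nan': ConstantModel(0.0)},
        transformation_pipeline=IdentityPipeline(),
        categorical_column='store',
        default_category='nan',
    )
    # A single dict is wrapped in a list and routed to the 'store_a' model:
    assert ensembler.predict({'store': 'store_a', 'units': 3}) == 1.0
    # Unseen categories fall back to default_category ('nan' here):
    assert ensembler.predict({'store': 'store_z', 'units': 3}) == 0.0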
|
import warnings
from functools import partial
from typing import Any, Hashable
import numpy as np
import pandas as pd
from ..core import dtypes, duck_array_ops, indexing
from ..core.pycompat import is_duck_dask_array
from ..core.variable import Variable
class SerializationWarning(RuntimeWarning):
"""Warnings about encoding/decoding issues in serialization."""
class VariableCoder:
"""Base class for encoding and decoding transformations on variables.
We use coders for transforming variables between xarray's data model and
a format suitable for serialization. For example, coders apply CF
conventions for how data should be represented in netCDF files.
Subclasses should implement encode() and decode(), which should satisfy
the identity ``coder.decode(coder.encode(variable)) == variable``. If any
options are necessary, they should be implemented as arguments to the
__init__ method.
The optional name argument to encode() and decode() exists solely for the
sake of better error messages, and should correspond to the name of
variables in the underlying store.
"""
def encode(
self, variable: Variable, name: Hashable = None
) -> Variable: # pragma: no cover
"""Convert an encoded variable to a decoded variable"""
raise NotImplementedError()
def decode(
self, variable: Variable, name: Hashable = None
) -> Variable: # pragma: no cover
"""Convert an decoded variable to a encoded variable"""
raise NotImplementedError()
class _ElementwiseFunctionArray(indexing.ExplicitlyIndexedNDArrayMixin):
"""Lazily computed array holding values of elemwise-function.
Do not construct this object directly: call lazy_elemwise_func instead.
Values are computed upon indexing or coercion to a NumPy array.
"""
def __init__(self, array, func, dtype):
assert not is_duck_dask_array(array)
self.array = indexing.as_indexable(array)
self.func = func
self._dtype = dtype
@property
def dtype(self):
return np.dtype(self._dtype)
def __getitem__(self, key):
return type(self)(self.array[key], self.func, self.dtype)
def __array__(self, dtype=None):
return self.func(self.array)
def __repr__(self):
return "{}({!r}, func={!r}, dtype={!r})".format(
type(self).__name__, self.array, self.func, self.dtype
)
def lazy_elemwise_func(array, func, dtype):
"""Lazily apply an element-wise function to an array.
Parameters
----------
array : any valid value of Variable._data
func : callable
Function to apply to indexed slices of an array. For use with dask,
this should be a pickle-able object.
dtype : coercible to np.dtype
Dtype for the result of this function.
Returns
-------
Either a dask.array.Array or _ElementwiseFunctionArray.
"""
if is_duck_dask_array(array):
import dask.array as da
return da.map_blocks(func, array, dtype=dtype)
else:
return _ElementwiseFunctionArray(array, func, dtype)
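# A minimal sketch: with a plain NumPy array, lazy_elemwise_func defers all
# computation until the result is indexed or coerced to an array.
def _demo_lazy_elemwise_func():
    doubled = lazy_elemwise_func(np.arange(4), lambda x: np.asarray(x) * 2, np.int64)
    assert isinstance(doubled, _ElementwiseFunctionArray)  # nothing computed yet
    assert np.asarray(doubled).tolist() == [0, 2, 4, 6]  # computed on coercion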
def unpack_for_encoding(var):
return var.dims, var.data, var.attrs.copy(), var.encoding.copy()
def unpack_for_decoding(var):
return var.dims, var._data, var.attrs.copy(), var.encoding.copy()
def safe_setitem(dest, key, value, name=None):
if key in dest:
var_str = f" on variable {name!r}" if name else ""
raise ValueError(
"failed to prevent overwriting existing key {} in attrs{}. "
"This is probably an encoding field used by xarray to describe "
"how a variable is serialized. To proceed, remove this key from "
"the variable's attributes manually.".format(key, var_str)
)
dest[key] = value
def pop_to(source, dest, key, name=None):
"""
A convenience function which pops a key k from source to dest.
None values are not passed on. If k already exists in dest an
error is raised.
"""
value = source.pop(key, None)
if value is not None:
safe_setitem(dest, key, value, name=name)
return value
def _apply_mask(
data: np.ndarray, encoded_fill_values: list, decoded_fill_value: Any, dtype: Any
) -> np.ndarray:
"""Mask all matching values in a NumPy arrays."""
data = np.asarray(data, dtype=dtype)
condition = False
for fv in encoded_fill_values:
condition |= data == fv
return np.where(condition, decoded_fill_value, data)
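# A minimal sketch: every listed fill value is replaced by the decoded fill
# value (typically NaN after dtype promotion).
def _demo_apply_mask():
    raw = np.array([1, -999, 3], dtype="int16")
    out = _apply_mask(raw, encoded_fill_values=[-999],
                      decoded_fill_value=np.nan, dtype=np.float64)
    assert np.isnan(out[1]) and out[0] == 1.0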
class CFMaskCoder(VariableCoder):
"""Mask or unmask fill values according to CF conventions."""
def encode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_encoding(variable)
dtype = np.dtype(encoding.get("dtype", data.dtype))
fv = encoding.get("_FillValue")
mv = encoding.get("missing_value")
if (
fv is not None
and mv is not None
and not duck_array_ops.allclose_or_equiv(fv, mv)
):
raise ValueError(
f"Variable {name!r} has conflicting _FillValue ({fv}) and missing_value ({mv}). Cannot encode data."
)
if fv is not None:
# Ensure _FillValue is cast to the same dtype as the data
encoding["_FillValue"] = dtype.type(fv)
fill_value = pop_to(encoding, attrs, "_FillValue", name=name)
if not pd.isnull(fill_value):
data = duck_array_ops.fillna(data, fill_value)
if mv is not None:
# Ensure missing_value is cast to the same dtype as the data
encoding["missing_value"] = dtype.type(mv)
fill_value = pop_to(encoding, attrs, "missing_value", name=name)
if not pd.isnull(fill_value) and fv is None:
data = duck_array_ops.fillna(data, fill_value)
return Variable(dims, data, attrs, encoding)
def decode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_decoding(variable)
raw_fill_values = [
pop_to(attrs, encoding, attr, name=name)
for attr in ("missing_value", "_FillValue")
]
if raw_fill_values:
encoded_fill_values = {
fv
for option in raw_fill_values
for fv in np.ravel(option)
if not pd.isnull(fv)
}
if len(encoded_fill_values) > 1:
warnings.warn(
"variable {!r} has multiple fill values {}, "
"decoding all values to NaN.".format(name, encoded_fill_values),
SerializationWarning,
stacklevel=3,
)
dtype, decoded_fill_value = dtypes.maybe_promote(data.dtype)
if encoded_fill_values:
transform = partial(
_apply_mask,
encoded_fill_values=encoded_fill_values,
decoded_fill_value=decoded_fill_value,
dtype=dtype,
)
data = lazy_elemwise_func(data, transform, dtype)
return Variable(dims, data, attrs, encoding)
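# A minimal sketch of CFMaskCoder.decode: the dtype is promoted and values
# matching _FillValue come back as NaN.
def _demo_mask_decode():
    var = Variable(("x",), np.array([1, -999], dtype="int16"),
                   attrs={"_FillValue": np.int16(-999)})
    decoded = CFMaskCoder().decode(var)
    assert np.isnan(decoded.values[1]) and decoded.values[0] == 1.0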
def _scale_offset_decoding(data, scale_factor, add_offset, dtype):
data = np.array(data, dtype=dtype, copy=True)
if scale_factor is not None:
data *= scale_factor
if add_offset is not None:
data += add_offset
return data
def _choose_float_dtype(dtype, has_offset):
"""Return a float dtype that can losslessly represent `dtype` values."""
# Keep float32 as-is. Upcast half-precision to single-precision,
# because float16 is "intended for storage but not computation"
if dtype.itemsize <= 4 and np.issubdtype(dtype, np.floating):
return np.float32
# float32 can exactly represent all integers up to 24 bits
if dtype.itemsize <= 2 and np.issubdtype(dtype, np.integer):
# A scale factor is entirely safe (vanishing into the mantissa),
# but a large integer offset could lead to loss of precision.
# Sensitivity analysis can be tricky, so we just use a float64
# if there's any offset at all - better unoptimised than wrong!
if not has_offset:
return np.float32
# For all other types and circumstances, we just use float64.
# (safe because eg. complex numbers are not supported in NetCDF)
return np.float64
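# A minimal sketch of the dtype choices described in the comments above:
# small floats and offset-free small integers fit in float32, everything
# else (including any integer with an offset) is promoted to float64.
def _demo_choose_float_dtype():
    assert _choose_float_dtype(np.dtype("float16"), has_offset=False) == np.float32
    assert _choose_float_dtype(np.dtype("int16"), has_offset=False) == np.float32
    assert _choose_float_dtype(np.dtype("int16"), has_offset=True) == np.float64
    assert _choose_float_dtype(np.dtype("int64"), has_offset=False) == np.float64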
class CFScaleOffsetCoder(VariableCoder):
"""Scale and offset variables according to CF conventions.
Follows the formula:
decode_values = encoded_values * scale_factor + add_offset
"""
def encode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_encoding(variable)
if "scale_factor" in encoding or "add_offset" in encoding:
dtype = _choose_float_dtype(data.dtype, "add_offset" in encoding)
data = data.astype(dtype=dtype, copy=True)
if "add_offset" in encoding:
data -= pop_to(encoding, attrs, "add_offset", name=name)
if "scale_factor" in encoding:
data /= pop_to(encoding, attrs, "scale_factor", name=name)
return Variable(dims, data, attrs, encoding)
def decode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_decoding(variable)
if "scale_factor" in attrs or "add_offset" in attrs:
scale_factor = pop_to(attrs, encoding, "scale_factor", name=name)
add_offset = pop_to(attrs, encoding, "add_offset", name=name)
dtype = _choose_float_dtype(data.dtype, "add_offset" in attrs)
if np.ndim(scale_factor) > 0:
scale_factor = scale_factor.item()
if np.ndim(add_offset) > 0:
add_offset = add_offset.item()
transform = partial(
_scale_offset_decoding,
scale_factor=scale_factor,
add_offset=add_offset,
dtype=dtype,
)
data = lazy_elemwise_func(data, transform, dtype)
return Variable(dims, data, attrs, encoding)
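# A minimal sketch of the CF scale/offset formula above, round-tripping an
# in-memory Variable through CFScaleOffsetCoder:
def _demo_scale_offset_roundtrip():
    var = Variable(("x",), np.array([10.0, 20.0]),
                   encoding={"scale_factor": 2.0, "add_offset": 5.0})
    coder = CFScaleOffsetCoder()
    encoded = coder.encode(var)      # (data - 5.0) / 2.0
    decoded = coder.decode(encoded)  # data * 2.0 + 5.0
    assert np.allclose(decoded.data, [10.0, 20.0])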
class UnsignedIntegerCoder(VariableCoder):
def encode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_encoding(variable)
# from netCDF best practices
# https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
# "_Unsigned = "true" to indicate that
# integer data should be treated as unsigned"
if encoding.get("_Unsigned", "false") == "true":
pop_to(encoding, attrs, "_Unsigned")
signed_dtype = np.dtype("i%s" % data.dtype.itemsize)
if "_FillValue" in attrs:
new_fill = signed_dtype.type(attrs["_FillValue"])
attrs["_FillValue"] = new_fill
data = duck_array_ops.around(data).astype(signed_dtype)
return Variable(dims, data, attrs, encoding)
def decode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_decoding(variable)
if "_Unsigned" in attrs:
unsigned = pop_to(attrs, encoding, "_Unsigned")
if data.dtype.kind == "i":
if unsigned == "true":
unsigned_dtype = np.dtype("u%s" % data.dtype.itemsize)
transform = partial(np.asarray, dtype=unsigned_dtype)
data = lazy_elemwise_func(data, transform, unsigned_dtype)
if "_FillValue" in attrs:
new_fill = unsigned_dtype.type(attrs["_FillValue"])
attrs["_FillValue"] = new_fill
else:
warnings.warn(
"variable %r has _Unsigned attribute but is not "
"of integer type. Ignoring attribute." % name,
SerializationWarning,
stacklevel=3,
)
return Variable(dims, data, attrs, encoding)
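# A minimal sketch of the netCDF _Unsigned convention: a signed byte written
# by netCDF-3 decodes back to its unsigned interpretation.
def _demo_unsigned_decode():
    var = Variable(("x",), np.array([-1], dtype="int8"),
                   attrs={"_Unsigned": "true"})
    decoded = UnsignedIntegerCoder().decode(var)
    assert decoded.dtype == np.uint8 and int(decoded.values[0]) == 255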
|
import unittest
from functional.io import ReusableFile, GZFile, BZ2File, XZFile, universal_write_open
class TestUtil(unittest.TestCase):
def test_reusable_file(self):
license_file_lf = ReusableFile("LICENSE.txt")
with open("LICENSE.txt") as license_file:
self.assertEqual(list(license_file), list(license_file_lf))
iter_1 = iter(license_file_lf)
iter_2 = iter(license_file_lf)
self.assertEqual(list(iter_1), list(iter_2))
def test_gzip_file(self):
file_name = "functional/test/data/test.txt.gz"
expect = [
"line0\n",
"line1\n",
"line2",
]
self.assertListEqual(
expect, list(GZFile(file_name, mode="rt", encoding="utf-8"))
)
expect = [
b"line0\n",
b"line1\n",
b"line2",
]
self.assertListEqual(expect, list(GZFile(file_name, mode="rb")))
def test_bz2_file(self):
file_name = "functional/test/data/test.txt.bz2"
expect = [
"line0\n",
"line1\n",
"line2",
]
self.assertListEqual(
expect, list(BZ2File(file_name, mode="rt", encoding="utf-8"))
)
expect = [
b"line0\n",
b"line1\n",
b"line2",
]
self.assertListEqual(expect, list(BZ2File(file_name, mode="rb")))
def test_xz_file(self):
file_name = "functional/test/data/test.txt.xz"
expect = [
"line0\n",
"line1\n",
"line2",
]
self.assertListEqual(
expect, list(XZFile(file_name, mode="rt", encoding="utf-8"))
)
expect = [
b"line0\n",
b"line1\n",
b"line2",
]
self.assertListEqual(expect, list(XZFile(file_name, mode="rb")))
def test_universal_write_open(self):
with self.assertRaises(ValueError):
universal_write_open("", "", compression=1)
|
from homematicip.base.enums import ValveState
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.homematicip_cloud.generic_entity import (
ATTR_CONFIG_PENDING,
ATTR_DEVICE_OVERHEATED,
ATTR_DEVICE_OVERLOADED,
ATTR_DEVICE_UNTERVOLTAGE,
ATTR_DUTY_CYCLE_REACHED,
ATTR_RSSI_DEVICE,
ATTR_RSSI_PEER,
)
from homeassistant.components.homematicip_cloud.sensor import (
ATTR_CURRENT_ILLUMINATION,
ATTR_HIGHEST_ILLUMINATION,
ATTR_LEFT_COUNTER,
ATTR_LOWEST_ILLUMINATION,
ATTR_RIGHT_COUNTER,
ATTR_TEMPERATURE_OFFSET,
ATTR_WIND_DIRECTION,
ATTR_WIND_DIRECTION_VARIATION,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
LENGTH_MILLIMETERS,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
SPEED_KILOMETERS_PER_HOUR,
TEMP_CELSIUS,
)
from homeassistant.setup import async_setup_component
from .helper import async_manipulate_test_data, get_and_check_entity_basics
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, SENSOR_DOMAIN, {SENSOR_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_accesspoint_status(hass, default_mock_hap_factory):
"""Test HomematicipSwitch."""
entity_id = "sensor.access_point_duty_cycle"
entity_name = "Access Point Duty Cycle"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == "8.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
await async_manipulate_test_data(hass, hmip_device, "dutyCycle", 17.3)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "17.3"
async def test_hmip_heating_thermostat(hass, default_mock_hap_factory):
"""Test HomematicipHeatingThermostat."""
entity_id = "sensor.heizkorperthermostat_heating"
entity_name = "Heizkörperthermostat Heating"
device_model = "HMIP-eTRV"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Heizkörperthermostat"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.37)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "37"
await async_manipulate_test_data(hass, hmip_device, "valveState", "nn")
ha_state = hass.states.get(entity_id)
assert ha_state.state == "nn"
await async_manipulate_test_data(
hass, hmip_device, "valveState", ValveState.ADAPTION_DONE
)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "37"
await async_manipulate_test_data(hass, hmip_device, "lowBat", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes["icon"] == "mdi:battery-outline"
async def test_hmip_humidity_sensor(hass, default_mock_hap_factory):
"""Test HomematicipHumiditySensor."""
entity_id = "sensor.bwth_1_humidity"
entity_name = "BWTH 1 Humidity"
device_model = "HmIP-BWTH"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["BWTH 1"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "40"
assert ha_state.attributes["unit_of_measurement"] == PERCENTAGE
await async_manipulate_test_data(hass, hmip_device, "humidity", 45)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "45"
# test common attributes
assert ha_state.attributes[ATTR_RSSI_DEVICE] == -76
assert ha_state.attributes[ATTR_RSSI_PEER] == -77
async def test_hmip_temperature_sensor1(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.bwth_1_temperature"
entity_name = "BWTH 1 Temperature"
device_model = "HmIP-BWTH"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["BWTH 1"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "21.0"
assert ha_state.attributes["unit_of_measurement"] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get("temperature_offset")
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10
async def test_hmip_temperature_sensor2(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.heizkorperthermostat_temperature"
entity_name = "Heizkörperthermostat Temperature"
device_model = "HMIP-eTRV"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Heizkörperthermostat"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "20.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "valveActualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get(ATTR_TEMPERATURE_OFFSET)
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10
async def test_hmip_temperature_sensor3(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.raumbediengerat_analog_temperature"
entity_name = "Raumbediengerät Analog Temperature"
device_model = "ALPHA-IP-RBGa"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Raumbediengerät Analog"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "23.3"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get(ATTR_TEMPERATURE_OFFSET)
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10
async def test_hmip_power_sensor(hass, default_mock_hap_factory):
"""Test HomematicipPowerSensor."""
entity_id = "sensor.flur_oben_power"
entity_name = "Flur oben Power"
device_model = "HmIP-BSM"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Flur oben"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "0.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == POWER_WATT
await async_manipulate_test_data(hass, hmip_device, "currentPowerConsumption", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
# test common attributes
assert not ha_state.attributes.get(ATTR_DEVICE_OVERHEATED)
assert not ha_state.attributes.get(ATTR_DEVICE_OVERLOADED)
assert not ha_state.attributes.get(ATTR_DEVICE_UNTERVOLTAGE)
assert not ha_state.attributes.get(ATTR_DUTY_CYCLE_REACHED)
assert not ha_state.attributes.get(ATTR_CONFIG_PENDING)
await async_manipulate_test_data(hass, hmip_device, "deviceOverheated", True)
await async_manipulate_test_data(hass, hmip_device, "deviceOverloaded", True)
await async_manipulate_test_data(hass, hmip_device, "deviceUndervoltage", True)
await async_manipulate_test_data(hass, hmip_device, "dutyCycle", True)
await async_manipulate_test_data(hass, hmip_device, "configPending", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_DEVICE_OVERHEATED]
assert ha_state.attributes[ATTR_DEVICE_OVERLOADED]
assert ha_state.attributes[ATTR_DEVICE_UNTERVOLTAGE]
assert ha_state.attributes[ATTR_DUTY_CYCLE_REACHED]
assert ha_state.attributes[ATTR_CONFIG_PENDING]
async def test_hmip_illuminance_sensor1(hass, default_mock_hap_factory):
"""Test HomematicipIlluminanceSensor."""
entity_id = "sensor.wettersensor_illuminance"
entity_name = "Wettersensor Illuminance"
device_model = "HmIP-SWO-B"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wettersensor"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "4890.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LIGHT_LUX
await async_manipulate_test_data(hass, hmip_device, "illumination", 231)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "231"
async def test_hmip_illuminance_sensor2(hass, default_mock_hap_factory):
"""Test HomematicipIlluminanceSensor."""
entity_id = "sensor.lichtsensor_nord_illuminance"
entity_name = "Lichtsensor Nord Illuminance"
device_model = "HmIP-SLO"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Lichtsensor Nord"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "807.3"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LIGHT_LUX
await async_manipulate_test_data(hass, hmip_device, "averageIllumination", 231)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "231"
assert ha_state.attributes[ATTR_CURRENT_ILLUMINATION] == 785.2
assert ha_state.attributes[ATTR_HIGHEST_ILLUMINATION] == 837.1
assert ha_state.attributes[ATTR_LOWEST_ILLUMINATION] == 785.2
async def test_hmip_windspeed_sensor(hass, default_mock_hap_factory):
"""Test HomematicipWindspeedSensor."""
entity_id = "sensor.wettersensor_pro_windspeed"
entity_name = "Wettersensor - pro Windspeed"
device_model = "HmIP-SWO-PR"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wettersensor - pro"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "2.6"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == SPEED_KILOMETERS_PER_HOUR
await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "9.4"
assert ha_state.attributes[ATTR_WIND_DIRECTION_VARIATION] == 56.25
assert ha_state.attributes[ATTR_WIND_DIRECTION] == "WNW"
wind_directions = {
25: "NNE",
37.5: "NE",
70: "ENE",
92.5: "E",
115: "ESE",
137.5: "SE",
160: "SSE",
182.5: "S",
205: "SSW",
227.5: "SW",
250: "WSW",
272.5: "W",
295: "WNW",
317.5: "NW",
340: "NNW",
0: "N",
}
for direction, txt in wind_directions.items():
await async_manipulate_test_data(hass, hmip_device, "windDirection", direction)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_WIND_DIRECTION] == txt
async def test_hmip_today_rain_sensor(hass, default_mock_hap_factory):
"""Test HomematicipTodayRainSensor."""
entity_id = "sensor.weather_sensor_plus_today_rain"
entity_name = "Weather Sensor – plus Today Rain"
device_model = "HmIP-SWO-PL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Weather Sensor – plus"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "3.9"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LENGTH_MILLIMETERS
await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "14.2"
async def test_hmip_passage_detector_delta_counter(hass, default_mock_hap_factory):
"""Test HomematicipPassageDetectorDeltaCounter."""
entity_id = "sensor.spdr_1"
entity_name = "SPDR_1"
device_model = "HmIP-SPDR"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "164"
assert ha_state.attributes[ATTR_LEFT_COUNTER] == 966
assert ha_state.attributes[ATTR_RIGHT_COUNTER] == 802
await async_manipulate_test_data(hass, hmip_device, "leftRightCounterDelta", 190)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "190"
|
from typing import List, Optional
from homeassistant.const import (
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PRESSURE_PSI,
TEMP_CELSIUS,
VOLUME_GALLONS,
)
from homeassistant.util.temperature import fahrenheit_to_celsius
from .const import DOMAIN as FLO_DOMAIN
from .device import FloDeviceDataUpdateCoordinator
from .entity import FloEntity
WATER_ICON = "mdi:water"
GAUGE_ICON = "mdi:gauge"
NAME_DAILY_USAGE = "Today's Water Usage"
NAME_CURRENT_SYSTEM_MODE = "Current System Mode"
NAME_FLOW_RATE = "Water Flow Rate"
NAME_TEMPERATURE = "Water Temperature"
NAME_WATER_PRESSURE = "Water Pressure"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Flo sensors from config entry."""
devices: List[FloDeviceDataUpdateCoordinator] = hass.data[FLO_DOMAIN][
config_entry.entry_id
]["devices"]
entities = []
entities.extend([FloDailyUsageSensor(device) for device in devices])
entities.extend([FloSystemModeSensor(device) for device in devices])
entities.extend([FloCurrentFlowRateSensor(device) for device in devices])
entities.extend([FloTemperatureSensor(device) for device in devices])
entities.extend([FloPressureSensor(device) for device in devices])
async_add_entities(entities)
class FloDailyUsageSensor(FloEntity):
"""Monitors the daily water usage."""
def __init__(self, device):
"""Initialize the daily water usage sensor."""
super().__init__("daily_consumption", NAME_DAILY_USAGE, device)
self._state: Optional[float] = None
@property
def icon(self) -> str:
"""Return the daily usage icon."""
return WATER_ICON
@property
def state(self) -> Optional[float]:
"""Return the current daily usage."""
if self._device.consumption_today is None:
return None
return round(self._device.consumption_today, 1)
@property
def unit_of_measurement(self) -> str:
"""Return gallons as the unit measurement for water."""
return VOLUME_GALLONS
class FloSystemModeSensor(FloEntity):
"""Monitors the current Flo system mode."""
def __init__(self, device):
"""Initialize the system mode sensor."""
super().__init__("current_system_mode", NAME_CURRENT_SYSTEM_MODE, device)
self._state: Optional[str] = None
@property
def state(self) -> Optional[str]:
"""Return the current system mode."""
if not self._device.current_system_mode:
return None
return self._device.current_system_mode
class FloCurrentFlowRateSensor(FloEntity):
"""Monitors the current water flow rate."""
def __init__(self, device):
"""Initialize the flow rate sensor."""
super().__init__("current_flow_rate", NAME_FLOW_RATE, device)
self._state: Optional[float] = None
@property
def icon(self) -> str:
"""Return the daily usage icon."""
return GAUGE_ICON
@property
def state(self) -> Optional[float]:
"""Return the current flow rate."""
if self._device.current_flow_rate is None:
return None
return round(self._device.current_flow_rate, 1)
@property
def unit_of_measurement(self) -> str:
"""Return the unit measurement."""
return "gpm"
class FloTemperatureSensor(FloEntity):
"""Monitors the temperature."""
def __init__(self, device):
"""Initialize the temperature sensor."""
super().__init__("temperature", NAME_TEMPERATURE, device)
self._state: Optional[float] = None
@property
def state(self) -> Optional[float]:
"""Return the current temperature."""
if self._device.temperature is None:
return None
return round(fahrenheit_to_celsius(self._device.temperature), 1)
@property
def unit_of_measurement(self) -> str:
"""Return gallons as the unit measurement for water."""
return TEMP_CELSIUS
@property
def device_class(self) -> Optional[str]:
"""Return the device class for this sensor."""
return DEVICE_CLASS_TEMPERATURE
class FloPressureSensor(FloEntity):
"""Monitors the water pressure."""
def __init__(self, device):
"""Initialize the pressure sensor."""
super().__init__("water_pressure", NAME_WATER_PRESSURE, device)
self._state: Optional[float] = None
@property
def state(self) -> Optional[float]:
"""Return the current water pressure."""
if self._device.current_psi is None:
return None
return round(self._device.current_psi, 1)
@property
def unit_of_measurement(self) -> str:
"""Return gallons as the unit measurement for water."""
return PRESSURE_PSI
@property
def device_class(self) -> Optional[str]:
"""Return the device class for this sensor."""
return DEVICE_CLASS_PRESSURE
|
from django.db import models, router
from django.db.models import Case, IntegerField, Sum, When
from django.db.models.deletion import Collector
from django.db.models.lookups import PatternLookup
ESCAPED = frozenset(".\\+*?[^]$(){}=!<>|:-")
def conditional_sum(value=1, **cond):
"""Wrapper to generate SUM on boolean/enum values."""
return Sum(Case(When(then=value, **cond), default=0, output_field=IntegerField()))
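# A minimal usage sketch: the expression can be built standalone; the field
# name here ("state") is hypothetical and is only resolved inside a real
# queryset, e.g. SomeModel.objects.aggregate(approved=conditional_sum(1, state=30)).
def _demo_conditional_sum():
    expr = conditional_sum(1, state=30)
    assert isinstance(expr, Sum)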
class PostgreSQLSearchLookup(PatternLookup):
lookup_name = "search"
param_pattern = "%s"
def as_sql(self, qn, connection):
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return f"{lhs} %% {rhs} = true", params
class MySQLSearchLookup(models.Lookup):
lookup_name = "search"
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return f"MATCH ({lhs}) AGAINST ({rhs} IN NATURAL LANGUAGE MODE)", params
class MySQLSubstringLookup(MySQLSearchLookup):
lookup_name = "substring"
class PostgreSQLSubstringLookup(PatternLookup):
"""
Case insensitive substring lookup.
This is essentially same as icontains in Django, but utilizes ILIKE
operator which can use pg_trgm index.
"""
lookup_name = "substring"
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return f"{lhs} ILIKE {rhs}", params
def table_has_row(connection, table, rowname):
"""Check whether actual table has row."""
with connection.cursor() as cursor:
table_description = connection.introspection.get_table_description(
cursor, table
)
for row in table_description:
if row.name == rowname:
return True
return False
def re_escape(pattern):
"""Escape for use in database regexp match.
This is based on re.escape, but that one escapes too much.
"""
string = list(pattern)
for i, char in enumerate(pattern):
if char == "\000":
string[i] = "\\000"
elif char in ESCAPED:
string[i] = "\\" + char
return "".join(string)
class FastCollector(Collector):
"""
Fast delete collector skipping some signals.
It allows fast deletion for models flagged with weblate_unsafe_delete.
This is needed because removing a check triggers a check run, which can
create new checks for units that were just removed.
"""
@staticmethod
def is_weblate_unsafe(model):
return getattr(model, "weblate_unsafe_delete", False)
def can_fast_delete(self, objs, from_field=None):
if hasattr(objs, "model") and self.is_weblate_unsafe(objs.model):
return True
return super().can_fast_delete(objs, from_field)
def delete(self):
from weblate.trans.models import Change, Suggestion, Vote
fast_deletes = []
for item in self.fast_deletes:
if item.model is Suggestion:
fast_deletes.append(Vote.objects.filter(suggestion__in=item))
fast_deletes.append(Change.objects.filter(suggestion__in=item))
fast_deletes.append(item)
self.fast_deletes = fast_deletes
return super().delete()
class FastDeleteModelMixin:
"""Model mixin to use FastCollector."""
def delete(self, using=None, keep_parents=False):
"""Copy of Django delete with changed collector."""
using = using or router.db_for_write(self.__class__, instance=self)
collector = FastCollector(using=using)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
class FastDeleteQuerySetMixin:
"""QuerySet mixin to use FastCollector."""
def delete(self):
"""
Delete the records in the current QuerySet.
Copied from Django, the only difference is using custom collector.
"""
assert not self.query.is_sliced, "Cannot use 'limit' or 'offset' with delete."
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
del_query = self._chain()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force_empty=True)
collector = FastCollector(using=del_query.db)
collector.collect(del_query)
deleted, _rows_count = collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
return deleted, _rows_count
|
from unittest import mock
import hypothesis
from hypothesis import strategies
import pytest
from PyQt5.QtCore import QModelIndex
from qutebrowser.completion.models import completionmodel, listcategory
from qutebrowser.utils import qtutils
from qutebrowser.api import cmdutils
@hypothesis.given(strategies.lists(
min_size=0, max_size=3,
elements=strategies.integers(min_value=0, max_value=2**31)))
def test_first_last_item(counts):
"""Test that first() and last() index to the first and last items."""
model = completionmodel.CompletionModel()
for c in counts:
cat = mock.Mock(spec=['layoutChanged', 'layoutAboutToBeChanged'])
cat.rowCount = mock.Mock(return_value=c, spec=[])
model.add_category(cat)
data = [i for i, row_count in enumerate(counts) if row_count > 0]
if not data:
# with no items, first and last should be an invalid index
assert not model.first_item().isValid()
assert not model.last_item().isValid()
else:
first = data[0]
last = data[-1]
# first item of the first data category
assert model.first_item().row() == 0
assert model.first_item().parent().row() == first
# last item of the last data category
assert model.last_item().row() == counts[last] - 1
assert model.last_item().parent().row() == last
@hypothesis.given(strategies.lists(elements=strategies.integers(),
min_size=0, max_size=3))
def test_count(counts):
model = completionmodel.CompletionModel()
for c in counts:
cat = mock.Mock(spec=['rowCount', 'layoutChanged',
'layoutAboutToBeChanged'])
cat.rowCount = mock.Mock(return_value=c, spec=[])
model.add_category(cat)
assert model.count() == sum(counts)
@hypothesis.given(pat=strategies.text())
def test_set_pattern(pat, qtbot):
"""Validate the filtering and sorting results of set_pattern."""
model = completionmodel.CompletionModel()
cats = [mock.Mock(spec=['set_pattern']) for _ in range(3)]
for c in cats:
c.set_pattern = mock.Mock(spec=[])
model.add_category(c)
with qtbot.waitSignal(model.layoutAboutToBeChanged), \
qtbot.waitSignal(model.layoutChanged):
model.set_pattern(pat)
for c in cats:
c.set_pattern.assert_called_with(pat)
def test_delete_cur_item():
func = mock.Mock(spec=[])
model = completionmodel.CompletionModel()
cat = listcategory.ListCategory('', [('foo', 'bar')], delete_func=func)
model.add_category(cat)
parent = model.index(0, 0)
model.delete_cur_item(model.index(0, 0, parent))
func.assert_called_once_with(['foo', 'bar'])
def test_delete_cur_item_no_func():
callback = mock.Mock(spec=[])
model = completionmodel.CompletionModel()
cat = listcategory.ListCategory('', [('foo', 'bar')], delete_func=None)
model.rowsAboutToBeRemoved.connect(callback)
model.rowsRemoved.connect(callback)
model.add_category(cat)
parent = model.index(0, 0)
with pytest.raises(cmdutils.CommandError):
model.delete_cur_item(model.index(0, 0, parent))
callback.assert_not_called()
def test_delete_cur_item_no_cat():
"""Test completion_item_del with no selected category."""
callback = mock.Mock(spec=[])
model = completionmodel.CompletionModel()
model.rowsAboutToBeRemoved.connect(callback)
model.rowsRemoved.connect(callback)
with pytest.raises(qtutils.QtValueError):
model.delete_cur_item(QModelIndex())
callback.assert_not_called()
|
import logging
from rfk101py.rfk101py import rfk101py
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "idteck_prox"
EVENT_IDTECK_PROX_KEYCARD = "idteck_prox_keycard"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_NAME): cv.string,
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the IDTECK proximity card component."""
conf = config[DOMAIN]
for unit in conf:
host = unit[CONF_HOST]
port = unit[CONF_PORT]
name = unit[CONF_NAME]
try:
reader = IdteckReader(hass, host, port, name)
reader.connect()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, reader.stop)
except OSError as error:
_LOGGER.error("Error creating %s. %s", name, error)
return False
return True
class IdteckReader:
"""Representation of an IDTECK proximity card reader."""
def __init__(self, hass, host, port, name):
"""Initialize the reader."""
self.hass = hass
self._host = host
self._port = port
self._name = name
self._connection = None
def connect(self):
"""Connect to the reader."""
self._connection = rfk101py(self._host, self._port, self._callback)
def _callback(self, card):
"""Send a keycard event message into Home Assistant whenever a card is read."""
self.hass.bus.fire(
EVENT_IDTECK_PROX_KEYCARD, {"card": card, "name": self._name}
)
def stop(self, event=None):
    """Close resources when Home Assistant stops."""
if self._connection:
self._connection.close()
self._connection = None
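# A minimal sketch (hypothetical helper, not part of the component) showing
# how an automation-style listener could consume the event fired above:
def demo_listen_for_keycards(hass):
    def handle(event):
        _LOGGER.info("Card %s read by %s", event.data["card"], event.data["name"])

    hass.bus.listen(EVENT_IDTECK_PROX_KEYCARD, handle)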
|
import sys
import email
from os.path import join, dirname, abspath
from six import text_type
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.umessage import UMessage, decode_QP, message_from_string
DATA = join(dirname(abspath(__file__)), 'data')
class UMessageTC(TestCase):
def setUp(self):
if sys.version_info >= (3, 2):
import io
msg1 = email.message_from_file(io.open(join(DATA, 'test1.msg'), encoding='utf8'))
msg2 = email.message_from_file(io.open(join(DATA, 'test2.msg'), encoding='utf8'))
else:
msg1 = email.message_from_file(open(join(DATA, 'test1.msg')))
msg2 = email.message_from_file(open(join(DATA, 'test2.msg')))
self.umessage1 = UMessage(msg1)
self.umessage2 = UMessage(msg2)
def test_get_subject(self):
subj = self.umessage2.get('Subject')
self.assertEqual(type(subj), text_type)
self.assertEqual(subj, u'À LA MER')
def test_get_all(self):
to = self.umessage2.get_all('To')
self.assertEqual(type(to[0]), text_type)
self.assertEqual(to, [u'élément à accents <[email protected]>'])
def test_get_payload_no_multi(self):
payload = self.umessage1.get_payload()
self.assertEqual(type(payload), text_type)
def test_get_payload_decode(self):
msg = """\
MIME-Version: 1.0
Content-Type: text/plain; charset="utf-8"
Content-Transfer-Encoding: base64
Subject: =?utf-8?q?b=C3=AFjour?=
From: =?utf-8?q?oim?= <[email protected]>
Reply-to: =?utf-8?q?oim?= <[email protected]>, =?utf-8?q?BimBam?= <[email protected]>
X-CW: data
To: [email protected]
Date: now
dW4gcGV0aXQgY8O2dWNvdQ==
"""
msg = message_from_string(msg)
self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou')
def test_decode_QP(self):
test_line = '=??b?UmFwaGHrbA==?= DUPONT<[email protected]>'
test = decode_QP(test_line)
self.assertEqual(type(test), text_type)
self.assertEqual(test, u'Raphaël DUPONT<[email protected]>')
def test_decode_QP_utf8(self):
test_line = '=?utf-8?q?o=C3=AEm?= <[email protected]>'
test = decode_QP(test_line)
self.assertEqual(type(test), text_type)
self.assertEqual(test, u'oîm <[email protected]>')
def test_decode_QP_ascii(self):
test_line = 'test <[email protected]>'
test = decode_QP(test_line)
self.assertEqual(type(test), text_type)
self.assertEqual(test, u'test <[email protected]>')
if __name__ == '__main__':
unittest_main()
|
import Levenshtein_search
from .index import Index
from .core import Enumerator
class LevenshteinIndex(Index):
def __init__(self):
self.index_key = Levenshtein_search.populate_wordset(-1, [])
self._doc_to_id = Enumerator(start=1)
def index(self, doc):
if doc not in self._doc_to_id:
self._doc_to_id[doc]  # looking up a new doc assigns it the next id
Levenshtein_search.add_string(self.index_key, doc)
def unindex(self, doc):
del self._doc_to_id[doc]
Levenshtein_search.clear_wordset(self.index_key)
self.index_key = Levenshtein_search.populate_wordset(-1, list(self._doc_to_id))
def initSearch(self):
pass
def search(self, doc, threshold=0):
matching_docs = Levenshtein_search.lookup(self.index_key, doc, threshold)
if matching_docs:
return [self._doc_to_id[match] for match, _, _ in matching_docs]
else:
return []
def __del__(self):
Levenshtein_search.clear_wordset(self.index_key)
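# A minimal usage sketch (assuming the Levenshtein_search C extension is
# installed): ids are assigned by the internal Enumerator, starting at 1.
def _demo_levenshtein_index():
    idx = LevenshteinIndex()
    idx.index("apple")
    idx.index("apply")
    assert sorted(idx.search("appla", threshold=1)) == [1, 2]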
|
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.lighting.kraken', 'setCustom', in_sig='ai')
def set_custom_kraken(self, rgbi):
"""
Set a custom colour on the Kraken.
:param rgbi: list of 3 or 4 RGB(I) byte values
:type rgbi: list of int
"""
self.logger.debug("DBus call set custom")
driver_path = self.get_driver_path('matrix_effect_custom')
if len(rgbi) not in (3, 4):
raise ValueError("List must be of 3 or 4 bytes")
# Clamp each channel into the valid 0-255 byte range
rgbi_list = [min(max(int(item), 0), 255) for item in rgbi]
with open(driver_path, 'wb') as driver_file:
driver_file.write(bytes(rgbi_list))
|
from socket import gaierror
import pytest
from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.components.xiaomi_aqara import config_flow, const
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_PORT
from tests.async_mock import Mock, patch
ZEROCONF_NAME = "name"
ZEROCONF_PROP = "properties"
ZEROCONF_MAC = "mac"
TEST_HOST = "1.2.3.4"
TEST_HOST_2 = "5.6.7.8"
TEST_KEY = "1234567890123456"
TEST_PORT = 1234
TEST_NAME = "Test_Aqara_Gateway"
TEST_SID = "abcdefghijkl"
TEST_PROTOCOL = "1.1.1"
TEST_MAC = "ab:cd:ef:gh:ij:kl"
TEST_GATEWAY_ID = TEST_MAC
TEST_ZEROCONF_NAME = "lumi-gateway-v3_miio12345678._miio._udp.local."
@pytest.fixture(name="xiaomi_aqara", autouse=True)
def xiaomi_aqara_fixture():
"""Mock xiaomi_aqara discovery and entry setup."""
mock_gateway_discovery = get_mock_discovery([TEST_HOST])
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
), patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGateway",
return_value=mock_gateway_discovery.gateways[TEST_HOST],
), patch(
"homeassistant.components.xiaomi_aqara.async_setup_entry", return_value=True
):
yield
def get_mock_discovery(
host_list,
invalid_interface=False,
invalid_key=False,
invalid_host=False,
invalid_mac=False,
):
"""Return a mock gateway info instance."""
gateway_discovery = Mock()
gateway_dict = {}
for host in host_list:
gateway = Mock()
gateway.ip_adress = host  # sic: attribute name as spelled by the upstream library
gateway.port = TEST_PORT
gateway.sid = TEST_SID
gateway.proto = TEST_PROTOCOL
gateway.connection_error = invalid_host
gateway.mac_error = invalid_mac
if invalid_key:
gateway.write_to_hub = Mock(return_value=False)
gateway_dict[host] = gateway
gateway_discovery.gateways = gateway_dict
if invalid_interface:
gateway_discovery.discover_gateways = Mock(side_effect=gaierror)
return gateway_discovery
async def test_config_flow_user_success(hass):
"""Test a successful config flow initialized by the user."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
CONF_HOST: TEST_HOST,
CONF_PORT: TEST_PORT,
CONF_MAC: TEST_MAC,
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
const.CONF_PROTOCOL: TEST_PROTOCOL,
const.CONF_KEY: TEST_KEY,
const.CONF_SID: TEST_SID,
}
async def test_config_flow_user_multiple_success(hass):
"""Test a successful config flow initialized by the user with multiple gateways discoverd."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([TEST_HOST, TEST_HOST_2])
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "select"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"select_ip": TEST_HOST_2},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
CONF_HOST: TEST_HOST_2,
CONF_PORT: TEST_PORT,
CONF_MAC: TEST_MAC,
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
const.CONF_PROTOCOL: TEST_PROTOCOL,
const.CONF_KEY: TEST_KEY,
const.CONF_SID: TEST_SID,
}
async def test_config_flow_user_no_key_success(hass):
"""Test a successful config flow initialized by the user without a key."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_NAME: TEST_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
CONF_HOST: TEST_HOST,
CONF_PORT: TEST_PORT,
CONF_MAC: TEST_MAC,
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
const.CONF_PROTOCOL: TEST_PROTOCOL,
const.CONF_KEY: None,
const.CONF_SID: TEST_SID,
}
async def test_config_flow_user_host_mac_success(hass):
"""Test a successful config flow initialized by the user with a host and mac specified."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([])
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
CONF_HOST: TEST_HOST,
CONF_MAC: TEST_MAC,
},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_NAME: TEST_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
CONF_HOST: TEST_HOST,
CONF_PORT: TEST_PORT,
CONF_MAC: TEST_MAC,
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
const.CONF_PROTOCOL: TEST_PROTOCOL,
const.CONF_KEY: None,
const.CONF_SID: TEST_SID,
}
async def test_config_flow_user_discovery_error(hass):
"""Test a failed config flow initialized by the user with no gateways discoverd."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([])
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "discovery_error"}
async def test_config_flow_user_invalid_interface(hass):
"""Test a failed config flow initialized by the user with an invalid interface."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([], invalid_interface=True)
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {const.CONF_INTERFACE: "invalid_interface"}
async def test_config_flow_user_invalid_host(hass):
"""Test a failed config flow initialized by the user with an invalid host."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([TEST_HOST], invalid_host=True)
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGateway",
return_value=mock_gateway_discovery.gateways[TEST_HOST],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
CONF_HOST: "0.0.0.0",
CONF_MAC: TEST_MAC,
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"host": "invalid_host"}
async def test_config_flow_user_invalid_mac(hass):
"""Test a failed config flow initialized by the user with an invalid mac."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([TEST_HOST], invalid_mac=True)
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGateway",
return_value=mock_gateway_discovery.gateways[TEST_HOST],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
CONF_HOST: TEST_HOST,
CONF_MAC: "in:va:li:d0:0m:ac",
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"mac": "invalid_mac"}
async def test_config_flow_user_invalid_key(hass):
"""Test a failed config flow initialized by the user with an invalid key."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
mock_gateway_discovery = get_mock_discovery([TEST_HOST], invalid_key=True)
with patch(
"homeassistant.components.xiaomi_aqara.config_flow.XiaomiGatewayDiscovery",
return_value=mock_gateway_discovery,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {const.CONF_KEY: "invalid_key"}
async def test_zeroconf_success(hass):
"""Test a successful zeroconf discovery of a xiaomi aqara gateway."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={
zeroconf.ATTR_HOST: TEST_HOST,
ZEROCONF_NAME: TEST_ZEROCONF_NAME,
ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC},
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE},
)
assert result["type"] == "form"
assert result["step_id"] == "settings"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{const.CONF_KEY: TEST_KEY, CONF_NAME: TEST_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == TEST_NAME
assert result["data"] == {
CONF_HOST: TEST_HOST,
CONF_PORT: TEST_PORT,
CONF_MAC: TEST_MAC,
const.CONF_INTERFACE: config_flow.DEFAULT_INTERFACE,
const.CONF_PROTOCOL: TEST_PROTOCOL,
const.CONF_KEY: TEST_KEY,
const.CONF_SID: TEST_SID,
}
async def test_zeroconf_missing_data(hass):
"""Test a failed zeroconf discovery because of missing data."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={zeroconf.ATTR_HOST: TEST_HOST, ZEROCONF_NAME: TEST_ZEROCONF_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "not_xiaomi_aqara"
async def test_zeroconf_unknown_device(hass):
"""Test a failed zeroconf discovery because of a unknown device."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": config_entries.SOURCE_ZEROCONF},
data={
zeroconf.ATTR_HOST: TEST_HOST,
ZEROCONF_NAME: "not-a-xiaomi-aqara-gateway",
ZEROCONF_PROP: {ZEROCONF_MAC: TEST_MAC},
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_xiaomi_aqara"
|