import asyncio
from uuid import uuid4
import async_timeout
from pytradfri import Gateway, RequestError
from pytradfri.api.aiocoap_api import APIFactory
import voluptuous as vol
from homeassistant import config_entries
from .const import (
CONF_GATEWAY_ID,
CONF_HOST,
CONF_IDENTITY,
CONF_IMPORT_GROUPS,
CONF_KEY,
KEY_SECURITY_CODE,
)
class AuthError(Exception):
"""Exception if authentication occurs."""
def __init__(self, code):
"""Initialize exception."""
super().__init__()
self.code = code
@config_entries.HANDLERS.register("tradfri")
class FlowHandler(config_entries.ConfigFlow):
"""Handle a config flow."""
VERSION = 1
def __init__(self):
"""Initialize flow."""
self._host = None
self._import_groups = False
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
return await self.async_step_auth()
async def async_step_auth(self, user_input=None):
"""Handle the authentication with a gateway."""
errors = {}
if user_input is not None:
host = user_input.get(CONF_HOST, self._host)
try:
auth = await authenticate(
self.hass, host, user_input[KEY_SECURITY_CODE]
)
                # We no longer ask the user whether to import groups, since
                # group state is unreliable and we don't want to surface it.
                # Importing groups can still be enabled via configuration.yaml.
auth[CONF_IMPORT_GROUPS] = self._import_groups
return await self._entry_from_data(auth)
except AuthError as err:
if err.code == "invalid_security_code":
errors[KEY_SECURITY_CODE] = err.code
else:
errors["base"] = err.code
else:
user_input = {}
fields = {}
if self._host is None:
fields[vol.Required(CONF_HOST, default=user_input.get(CONF_HOST))] = str
fields[
vol.Required(KEY_SECURITY_CODE, default=user_input.get(KEY_SECURITY_CODE))
] = str
return self.async_show_form(
step_id="auth", data_schema=vol.Schema(fields), errors=errors
)
async def async_step_homekit(self, discovery_info):
"""Handle homekit discovery."""
await self.async_set_unique_id(discovery_info["properties"]["id"])
self._abort_if_unique_id_configured({CONF_HOST: discovery_info["host"]})
host = discovery_info["host"]
for entry in self._async_current_entries():
if entry.data.get(CONF_HOST) != host:
continue
# Backwards compat, we update old entries
if not entry.unique_id:
self.hass.config_entries.async_update_entry(
entry, unique_id=discovery_info["properties"]["id"]
)
return self.async_abort(reason="already_configured")
self._host = host
return await self.async_step_auth()
async def async_step_import(self, user_input):
"""Import a config entry."""
for entry in self._async_current_entries():
if entry.data.get(CONF_HOST) == user_input["host"]:
return self.async_abort(reason="already_configured")
# Happens if user has host directly in configuration.yaml
if "key" not in user_input:
self._host = user_input["host"]
self._import_groups = user_input[CONF_IMPORT_GROUPS]
return await self.async_step_auth()
try:
data = await get_gateway_info(
self.hass,
user_input["host"],
# Old config format had a fixed identity
user_input.get("identity", "homeassistant"),
user_input["key"],
)
data[CONF_IMPORT_GROUPS] = user_input[CONF_IMPORT_GROUPS]
return await self._entry_from_data(data)
except AuthError:
# If we fail to connect, just pass it on to discovery
self._host = user_input["host"]
return await self.async_step_auth()
async def _entry_from_data(self, data):
"""Create an entry from data."""
host = data[CONF_HOST]
gateway_id = data[CONF_GATEWAY_ID]
same_hub_entries = [
entry.entry_id
for entry in self._async_current_entries()
if entry.data.get(CONF_GATEWAY_ID) == gateway_id
or entry.data.get(CONF_HOST) == host
]
if same_hub_entries:
await asyncio.wait(
[
self.hass.config_entries.async_remove(entry_id)
for entry_id in same_hub_entries
]
)
return self.async_create_entry(title=host, data=data)
async def authenticate(hass, host, security_code):
"""Authenticate with a Tradfri hub."""
identity = uuid4().hex
api_factory = await APIFactory.init(host, psk_id=identity)
try:
with async_timeout.timeout(5):
key = await api_factory.generate_psk(security_code)
except RequestError as err:
raise AuthError("invalid_security_code") from err
except asyncio.TimeoutError as err:
raise AuthError("timeout") from err
finally:
await api_factory.shutdown()
return await get_gateway_info(hass, host, identity, key)
async def get_gateway_info(hass, host, identity, key):
"""Return info for the gateway."""
try:
factory = await APIFactory.init(host, psk_id=identity, psk=key)
api = factory.request
gateway = Gateway()
gateway_info_result = await api(gateway.get_gateway_info())
await factory.shutdown()
except (OSError, RequestError) as err:
# We're also catching OSError as PyTradfri doesn't catch that one yet
# Upstream PR: https://github.com/ggravlingen/pytradfri/pull/189
raise AuthError("cannot_connect") from err
return {
CONF_HOST: host,
CONF_IDENTITY: identity,
CONF_KEY: key,
CONF_GATEWAY_ID: gateway_info_result.id,
}
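# A minimal manual sketch (not part of the integration) for exercising
# authenticate() outside Home Assistant. GATEWAY_IP and SECURITY_CODE are
# placeholders; the hass argument is only passed through unused by these
# helpers, so None suffices for ad-hoc testing:
#
#     asyncio.run(authenticate(None, "GATEWAY_IP", "SECURITY_CODE"))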
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.windows_packages import diskspd
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'diskspd'
BENCHMARK_CONFIG = """
diskspd:
description: Run diskspd on a single machine
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 1
disk_spec: *default_500_gb
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
vm = benchmark_spec.vms[0]
vm.Install('diskspd')
def Run(benchmark_spec):
"""Measure the disk performance in one VM.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects with the benchmark results.
"""
vm = benchmark_spec.vms[0]
results = []
results.extend(diskspd.RunDiskSpd(vm))
return results
def Cleanup(unused_benchmark_spec):
pass
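# Typical invocation sketch via PerfKitBenchmarker's CLI (diskspd is a Windows
# benchmark, so a Windows --os_type is required; exact flags depend on setup):
#
#     ./pkb.py --benchmarks=diskspd --cloud=GCP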
|
import unittest
from unittest.mock import patch, Mock
import josepy as jose
from cryptography.x509 import DNSName
from flask import Flask
from lemur.plugins.lemur_acme import plugin
from lemur.plugins.lemur_acme.acme_handlers import AuthorizationRecord
from lemur.common.utils import generate_private_key
from mock import MagicMock
class TestAcmeDns(unittest.TestCase):
@patch("lemur.plugins.lemur_acme.acme_handlers.dns_provider_service")
def setUp(self, mock_dns_provider_service):
self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin()
self.acme = plugin.AcmeDnsHandler()
mock_dns_provider = Mock()
mock_dns_provider.name = "cloudflare"
mock_dns_provider.credentials = "{}"
mock_dns_provider.provider_type = "cloudflare"
self.acme.dns_providers_for_domain = {
"www.test.com": [mock_dns_provider],
"test.fakedomain.net": [mock_dns_provider],
}
# Creates a new Flask application for a test duration. In python 3.8, manual push of application context is
# needed to run tests in dev environment without getting error 'Working outside of application context'.
_app = Flask('lemur_test_acme')
self.ctx = _app.app_context()
assert self.ctx
self.ctx.push()
def tearDown(self):
self.ctx.pop()
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
def test_get_dns_challenges(self, mock_len):
assert mock_len
from acme import challenges
host = "example.com"
c = challenges.DNS01()
mock_authz = Mock()
mock_authz.body.resolved_combinations = []
mock_entry = Mock()
mock_entry.chall = c
mock_authz.body.resolved_combinations.append(mock_entry)
result = yield self.acme.get_dns_challenges(host, mock_authz)
self.assertEqual(result, mock_entry)
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.get_dns_challenges")
def test_start_dns_challenge(
self, mock_get_dns_challenges, mock_len, mock_acme
):
assert mock_len
mock_order = Mock()
mock_authz = Mock()
mock_authz.body.resolved_combinations = []
mock_entry = MagicMock()
mock_entry.chall = TestAcmeDns.test_complete_dns_challenge_fail
mock_authz.body.resolved_combinations.append(mock_entry)
mock_acme.request_domain_challenges = Mock(return_value=mock_authz)
mock_dns_provider = Mock()
mock_dns_provider.create_txt_record = Mock(return_value=1)
values = [mock_entry]
iterable = mock_get_dns_challenges.return_value
iterator = iter(values)
iterable.__iter__.return_value = iterator
result = self.acme.start_dns_challenge(
mock_acme, "accountid", "domain", "host", mock_dns_provider, mock_order, {}
)
self.assertEqual(type(result), AuthorizationRecord)
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
@patch("time.sleep")
def test_complete_dns_challenge_success(
self, mock_sleep, mock_wait_for_dns_change, mock_acme
):
mock_dns_provider = Mock()
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
mock_authz = Mock()
mock_sleep.return_value = False
mock_authz.dns_challenge.response = Mock()
mock_authz.dns_challenge.response.simple_verify = Mock(return_value=True)
mock_authz.authz = []
mock_authz.target_domain = "www.test.com"
mock_authz_record = Mock()
mock_authz_record.body.identifier.value = "test"
mock_authz.authz.append(mock_authz_record)
mock_authz.change_id = []
mock_authz.change_id.append("123")
mock_authz.dns_challenge = []
dns_challenge = Mock()
mock_authz.dns_challenge.append(dns_challenge)
self.acme.complete_dns_challenge(mock_acme, mock_authz)
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
def test_complete_dns_challenge_fail(
self, mock_wait_for_dns_change, mock_acme
):
mock_dns_provider = Mock()
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
mock_dns_challenge = Mock()
response = Mock()
response.simple_verify = Mock(return_value=False)
mock_dns_challenge.response = Mock(return_value=response)
mock_authz = Mock()
mock_authz.dns_challenge = []
mock_authz.dns_challenge.append(mock_dns_challenge)
mock_authz.target_domain = "www.test.com"
mock_authz_record = Mock()
mock_authz_record.body.identifier.value = "test"
mock_authz.authz = []
mock_authz.authz.append(mock_authz_record)
mock_authz.change_id = []
mock_authz.change_id.append("123")
with self.assertRaises(ValueError):
self.acme.complete_dns_challenge(mock_acme, mock_authz)
@patch("acme.client.Client")
@patch("OpenSSL.crypto", return_value="mock_cert")
@patch("josepy.util.ComparableX509")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.get_dns_challenges")
def test_request_certificate(
self,
mock_get_dns_challenges,
mock_jose,
mock_crypto,
mock_acme,
):
mock_cert_response = Mock()
mock_cert_response.body = "123"
mock_cert_response_full = [mock_cert_response, True]
mock_acme.poll_and_request_issuance = Mock(return_value=mock_cert_response_full)
mock_authz = []
mock_authz_record = MagicMock()
mock_authz_record.authz = Mock()
mock_authz.append(mock_authz_record)
mock_acme.fetch_chain = Mock(return_value="mock_chain")
mock_crypto.dump_certificate = Mock(return_value=b"chain")
mock_order = Mock()
self.acme.request_certificate(mock_acme, [], mock_order)
def test_setup_acme_client_fail(self):
mock_authority = Mock()
mock_authority.options = []
with self.assertRaises(Exception):
self.acme.setup_acme_client(mock_authority)
@patch("lemur.plugins.lemur_acme.acme_handlers.jose.JWK.json_loads")
@patch("lemur.plugins.lemur_acme.acme_handlers.BackwardsCompatibleClientV2")
def test_setup_acme_client_success_load_account_from_authority(self, mock_acme, mock_key_json_load):
mock_authority = Mock()
mock_authority.id = 2
mock_authority.options = '[{"name": "mock_name", "value": "mock_value"}, ' \
'{"name": "store_account", "value": true},' \
'{"name": "acme_private_key", "value": "{\\"n\\": \\"PwIOkViO\\", \\"kty\\": \\"RSA\\"}"}, ' \
'{"name": "acme_regr", "value": "{\\"body\\": {}, \\"uri\\": \\"http://test.com\\"}"}]'
mock_client = Mock()
mock_acme.return_value = mock_client
mock_key_json_load.return_value = jose.JWKRSA(key=generate_private_key("RSA2048"))
result_client, result_registration = self.acme.setup_acme_client(mock_authority)
mock_acme.new_account_and_tos.assert_not_called()
assert result_client
assert not result_registration
@patch("lemur.plugins.lemur_acme.acme_handlers.jose.JWKRSA.fields_to_partial_json")
@patch("lemur.plugins.lemur_acme.acme_handlers.authorities_service")
@patch("lemur.plugins.lemur_acme.acme_handlers.BackwardsCompatibleClientV2")
def test_setup_acme_client_success_store_new_account(self, mock_acme, mock_authorities_service,
mock_key_generation):
mock_authority = Mock()
mock_authority.id = 2
mock_authority.options = '[{"name": "mock_name", "value": "mock_value"}, ' \
'{"name": "store_account", "value": true}]'
mock_client = Mock()
mock_registration = Mock()
mock_registration.uri = "http://test.com"
mock_client.register = mock_registration
mock_client.agree_to_tos = Mock(return_value=True)
mock_client.new_account_and_tos.return_value = mock_registration
mock_acme.return_value = mock_client
mock_key_generation.return_value = {"n": "PwIOkViO"}
mock_authorities_service.update_options = Mock(return_value=True)
self.acme.setup_acme_client(mock_authority)
mock_authorities_service.update_options.assert_called_with(2, options='[{"name": "mock_name", "value": "mock_value"}, '
'{"name": "store_account", "value": true}, '
'{"name": "acme_private_key", "value": "{\\"n\\": \\"PwIOkViO\\", \\"kty\\": \\"RSA\\"}"}, '
'{"name": "acme_regr", "value": "{\\"body\\": {}, \\"uri\\": \\"http://test.com\\"}"}]')
@patch("lemur.plugins.lemur_acme.acme_handlers.authorities_service")
@patch("lemur.plugins.lemur_acme.acme_handlers.BackwardsCompatibleClientV2")
def test_setup_acme_client_success(self, mock_acme, mock_authorities_service):
mock_authority = Mock()
mock_authority.options = '[{"name": "mock_name", "value": "mock_value"}, ' \
'{"name": "store_account", "value": false}]'
mock_client = Mock()
mock_registration = Mock()
mock_registration.uri = "http://test.com"
mock_client.register = mock_registration
mock_client.agree_to_tos = Mock(return_value=True)
mock_acme.return_value = mock_client
result_client, result_registration = self.acme.setup_acme_client(mock_authority)
mock_authorities_service.update_options.assert_not_called()
assert result_client
assert result_registration
def test_get_domains_single(self):
options = {"common_name": "test.netflix.net"}
result = self.acme.get_domains(options)
self.assertEqual(result, [options["common_name"]])
def test_get_domains_multiple(self):
options = {
"common_name": "test.netflix.net",
"extensions": {
"sub_alt_names": {"names": [DNSName("test2.netflix.net"), DNSName("test3.netflix.net")]}
},
}
result = self.acme.get_domains(options)
self.assertEqual(
result, [options["common_name"], "test2.netflix.net", "test3.netflix.net"]
)
def test_get_domains_san(self):
options = {
"common_name": "test.netflix.net",
"extensions": {
"sub_alt_names": {"names": [DNSName("test.netflix.net"), DNSName("test2.netflix.net")]}
},
}
result = self.acme.get_domains(options)
self.assertEqual(
result, [options["common_name"], "test2.netflix.net"]
)
def test_create_authority(self):
options = {
"plugin": {"plugin_options": [{"name": "certificate", "value": "123"}]}
}
acme_root, b, role = self.ACMEIssuerPlugin.create_authority(options)
self.assertEqual(acme_root, "123")
self.assertEqual(b, "")
self.assertEqual(role, [{"username": "", "password": "", "name": "acme"}])
@patch("lemur.plugins.lemur_acme.acme_handlers.dns_provider_service")
def test_get_dns_provider(self, mock_dns_provider_service):
provider = plugin.AcmeDnsHandler()
route53 = provider.get_dns_provider("route53")
assert route53
cloudflare = provider.get_dns_provider("cloudflare")
assert cloudflare
dyn = provider.get_dns_provider("dyn")
assert dyn
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
@patch("lemur.plugins.lemur_acme.acme_handlers.dns_provider_service")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.get_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.finalize_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.request_certificate")
@patch("lemur.plugins.lemur_acme.challenge_types.authorization_service")
def test_create_certificate(
self,
mock_authorization_service,
mock_request_certificate,
mock_finalize_authorizations,
mock_get_authorizations,
mock_dns_provider_service,
mock_acme,
):
provider = plugin.ACMEIssuerPlugin()
mock_authority = Mock()
mock_client = Mock()
mock_acme.return_value = (mock_client, "")
mock_dns_provider = Mock()
mock_dns_provider.credentials = '{"account_id": 1}'
mock_dns_provider.provider_type = "route53"
mock_dns_provider_service.get.return_value = mock_dns_provider
issuer_options = {
"authority": mock_authority,
"dns_provider": mock_dns_provider,
"common_name": "test.netflix.net",
}
csr = "123"
mock_request_certificate.return_value = ("pem_certificate", "chain")
result = provider.create_certificate(csr, issuer_options)
assert result
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.start_dns_challenge", return_value="test")
def test_get_authorizations(self, mock_start_dns_challenge):
mock_order = Mock()
mock_order.body.identifiers = []
mock_domain = Mock()
mock_order.body.identifiers.append(mock_domain)
mock_order_info = Mock()
mock_order_info.account_number = 1
mock_order_info.domains = ["test.fakedomain.net"]
result = self.acme.get_authorizations(
"acme_client", mock_order, mock_order_info
)
self.assertEqual(result, ["test"])
@patch(
"lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.complete_dns_challenge",
return_value="test",
)
def test_finalize_authorizations(self, mock_complete_dns_challenge):
mock_authz = []
mock_authz_record = MagicMock()
mock_authz_record.authz = Mock()
mock_authz_record.change_id = 1
mock_authz_record.dns_challenge.validation_domain_name = Mock()
mock_authz_record.dns_challenge.validation = Mock()
mock_authz.append(mock_authz_record)
mock_dns_provider = Mock()
mock_dns_provider.delete_txt_record = Mock()
mock_acme_client = Mock()
result = self.acme.finalize_authorizations(mock_acme_client, mock_authz)
self.assertEqual(result, mock_authz)
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
@patch("lemur.plugins.lemur_acme.acme_handlers.dns_provider_service")
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.get_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.finalize_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.request_certificate")
def test_get_ordered_certificate(
self,
mock_request_certificate,
mock_finalize_authorizations,
mock_get_authorizations,
mock_dns_provider_service_p,
mock_dns_provider_service,
mock_authorization_service,
mock_acme,
):
mock_client = Mock()
mock_acme.return_value = (mock_client, "")
mock_request_certificate.return_value = ("pem_certificate", "chain")
mock_cert = Mock()
mock_cert.external_id = 1
provider = plugin.ACMEIssuerPlugin()
provider.get_dns_provider = Mock()
result = provider.get_ordered_certificate(mock_cert)
self.assertEqual(
result, {"body": "pem_certificate", "chain": "chain", "external_id": "1"}
)
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
@patch("lemur.plugins.lemur_acme.acme_handlers.dns_provider_service")
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.get_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.finalize_authorizations")
@patch("lemur.plugins.lemur_acme.plugin.AcmeDnsHandler.request_certificate")
def test_get_ordered_certificates(
self,
mock_request_certificate,
mock_finalize_authorizations,
mock_get_authorizations,
mock_dns_provider_service,
mock_dns_provider_service_p,
mock_authorization_service,
mock_acme,
):
mock_client = Mock()
mock_acme.return_value = (mock_client, "")
mock_request_certificate.return_value = ("pem_certificate", "chain")
mock_cert = Mock()
mock_cert.external_id = 1
mock_cert2 = Mock()
mock_cert2.external_id = 2
provider = plugin.ACMEIssuerPlugin()
provider.get_dns_provider = Mock()
result = provider.get_ordered_certificates([mock_cert, mock_cert2])
self.assertEqual(len(result), 2)
self.assertEqual(
result[0]["cert"],
{"body": "pem_certificate", "chain": "chain", "external_id": "1"},
)
self.assertEqual(
result[1]["cert"],
{"body": "pem_certificate", "chain": "chain", "external_id": "2"},
)
|
revision = "c87cb989af04"
down_revision = "9392b9f9a805"
from alembic import op
def upgrade():
op.create_index(op.f("ix_domains_name"), "domains", ["name"], unique=False)
def downgrade():
op.drop_index(op.f("ix_domains_name"), table_name="domains")
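# Apply with the standard Alembic CLI: `alembic upgrade c87cb989af04`;
# roll back with `alembic downgrade 9392b9f9a805`.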
|
import asyncio
import logging
import os
import async_timeout
from tellduslive import Session, supports_local_api
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST
from homeassistant.util.json import load_json
from .const import (
APPLICATION_NAME,
CLOUD_NAME,
DOMAIN,
KEY_SCAN_INTERVAL,
KEY_SESSION,
NOT_SO_PRIVATE_KEY,
PUBLIC_KEY,
SCAN_INTERVAL,
TELLDUS_CONFIG_FILE,
)
KEY_TOKEN = "token"
KEY_TOKEN_SECRET = "token_secret"
_LOGGER = logging.getLogger(__name__)
@config_entries.HANDLERS.register("tellduslive")
class FlowHandler(config_entries.ConfigFlow):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Init config flow."""
self._hosts = [CLOUD_NAME]
self._host = None
self._session = None
self._scan_interval = SCAN_INTERVAL
def _get_auth_url(self):
self._session = Session(
public_key=PUBLIC_KEY,
private_key=NOT_SO_PRIVATE_KEY,
host=self._host,
application=APPLICATION_NAME,
)
return self._session.authorize_url
async def async_step_user(self, user_input=None):
"""Let user select host or cloud."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
if user_input is not None or len(self._hosts) == 1:
if user_input is not None and user_input[CONF_HOST] != CLOUD_NAME:
self._host = user_input[CONF_HOST]
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{vol.Required(CONF_HOST): vol.In(list(self._hosts))}
),
)
async def async_step_auth(self, user_input=None):
"""Handle the submitted configuration."""
errors = {}
if user_input is not None:
if await self.hass.async_add_executor_job(self._session.authorize):
host = self._host or CLOUD_NAME
if self._host:
session = {CONF_HOST: host, KEY_TOKEN: self._session.access_token}
else:
session = {
KEY_TOKEN: self._session.access_token,
KEY_TOKEN_SECRET: self._session.access_token_secret,
}
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_SCAN_INTERVAL: self._scan_interval.seconds,
KEY_SESSION: session,
},
)
errors["base"] = "invalid_auth"
try:
with async_timeout.timeout(10):
auth_url = await self.hass.async_add_executor_job(self._get_auth_url)
if not auth_url:
return self.async_abort(reason="authorize_url_fail")
except asyncio.TimeoutError:
return self.async_abort(reason="authorize_url_timeout")
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error generating auth url")
return self.async_abort(reason="authorize_url_fail")
_LOGGER.debug("Got authorization URL %s", auth_url)
return self.async_show_form(
step_id="auth",
errors=errors,
description_placeholders={
"app_name": APPLICATION_NAME,
"auth_url": auth_url,
},
)
async def async_step_discovery(self, discovery_info):
"""Run when a Tellstick is discovered."""
await self._async_handle_discovery_without_unique_id()
_LOGGER.info("Discovered tellstick device: %s", discovery_info)
if supports_local_api(discovery_info[1]):
_LOGGER.info("%s support local API", discovery_info[1])
self._hosts.append(discovery_info[0])
return await self.async_step_user()
async def async_step_import(self, user_input):
"""Import a config entry."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_setup")
self._scan_interval = user_input[KEY_SCAN_INTERVAL]
if user_input[CONF_HOST] != DOMAIN:
self._hosts.append(user_input[CONF_HOST])
if not await self.hass.async_add_executor_job(
os.path.isfile, self.hass.config.path(TELLDUS_CONFIG_FILE)
):
return await self.async_step_user()
conf = await self.hass.async_add_executor_job(
load_json, self.hass.config.path(TELLDUS_CONFIG_FILE)
)
host = next(iter(conf))
if user_input[CONF_HOST] != host:
return await self.async_step_user()
host = CLOUD_NAME if host == "tellduslive" else host
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_SCAN_INTERVAL: self._scan_interval.seconds,
KEY_SESSION: next(iter(conf.values())),
},
)
|
import logging
from nest.nest import APIError
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_AUTO,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_ECO,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_SCAN_INTERVAL,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DATA_NEST, DOMAIN as NEST_DOMAIN
from .const import SIGNAL_NEST_UPDATE
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_SCAN_INTERVAL): vol.All(vol.Coerce(int), vol.Range(min=1))}
)
NEST_MODE_HEAT_COOL = "heat-cool"
NEST_MODE_ECO = "eco"
NEST_MODE_HEAT = "heat"
NEST_MODE_COOL = "cool"
NEST_MODE_OFF = "off"
MODE_HASS_TO_NEST = {
HVAC_MODE_AUTO: NEST_MODE_HEAT_COOL,
HVAC_MODE_HEAT: NEST_MODE_HEAT,
HVAC_MODE_COOL: NEST_MODE_COOL,
HVAC_MODE_OFF: NEST_MODE_OFF,
}
MODE_NEST_TO_HASS = {v: k for k, v in MODE_HASS_TO_NEST.items()}
ACTION_NEST_TO_HASS = {
"off": CURRENT_HVAC_IDLE,
"heating": CURRENT_HVAC_HEAT,
"cooling": CURRENT_HVAC_COOL,
}
PRESET_AWAY_AND_ECO = "Away and Eco"
PRESET_MODES = [PRESET_NONE, PRESET_AWAY, PRESET_ECO, PRESET_AWAY_AND_ECO]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Nest thermostat.
No longer in use.
"""
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Nest climate device based on a config entry."""
temp_unit = hass.config.units.temperature_unit
thermostats = await hass.async_add_executor_job(hass.data[DATA_NEST].thermostats)
all_devices = [
NestThermostat(structure, device, temp_unit)
for structure, device in thermostats
]
async_add_entities(all_devices, True)
class NestThermostat(ClimateEntity):
"""Representation of a Nest thermostat."""
def __init__(self, structure, device, temp_unit):
"""Initialize the thermostat."""
self._unit = temp_unit
self.structure = structure
self.device = device
self._fan_modes = [FAN_ON, FAN_AUTO]
# Set the default supported features
self._support_flags = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
        # Not all Nest devices support both cooling and heating; only add supported modes
self._operation_list = []
if self.device.can_heat and self.device.can_cool:
self._operation_list.append(HVAC_MODE_AUTO)
self._support_flags = self._support_flags | SUPPORT_TARGET_TEMPERATURE_RANGE
# Add supported nest thermostat features
if self.device.can_heat:
self._operation_list.append(HVAC_MODE_HEAT)
if self.device.can_cool:
self._operation_list.append(HVAC_MODE_COOL)
self._operation_list.append(HVAC_MODE_OFF)
# feature of device
self._has_fan = self.device.has_fan
if self._has_fan:
self._support_flags = self._support_flags | SUPPORT_FAN_MODE
# data attributes
self._away = None
self._location = None
self._name = None
self._humidity = None
self._target_temperature = None
self._temperature = None
self._temperature_scale = None
self._mode = None
self._action = None
self._fan = None
self._eco_temperature = None
self._is_locked = None
self._locked_temperature = None
self._min_temperature = None
self._max_temperature = None
@property
def should_poll(self):
"""Do not need poll thanks using Nest streaming API."""
return False
async def async_added_to_hass(self):
"""Register update signal handler."""
async def async_update_state():
"""Update device state."""
await self.async_update_ha_state(True)
self.async_on_remove(
async_dispatcher_connect(self.hass, SIGNAL_NEST_UPDATE, async_update_state)
)
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def unique_id(self):
"""Return unique ID for this device."""
return self.device.serial
@property
def device_info(self):
"""Return information about the device."""
return {
"identifiers": {(NEST_DOMAIN, self.device.device_id)},
"name": self.device.name_long,
"manufacturer": "Nest Labs",
"model": "Thermostat",
"sw_version": self.device.software_version,
}
@property
def name(self):
"""Return the name of the nest, if any."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._temperature_scale
@property
def current_temperature(self):
"""Return the current temperature."""
return self._temperature
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
if self._mode == NEST_MODE_ECO:
if self.device.previous_mode in MODE_NEST_TO_HASS:
return MODE_NEST_TO_HASS[self.device.previous_mode]
# previous_mode not supported so return the first compatible mode
return self._operation_list[0]
return MODE_NEST_TO_HASS[self._mode]
@property
def hvac_action(self):
"""Return the current hvac action."""
return ACTION_NEST_TO_HASS[self._action]
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self._mode not in (NEST_MODE_HEAT_COOL, NEST_MODE_ECO):
return self._target_temperature
return None
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
if self._mode == NEST_MODE_ECO:
return self._eco_temperature[0]
if self._mode == NEST_MODE_HEAT_COOL:
return self._target_temperature[0]
return None
@property
def target_temperature_high(self):
"""Return the upper bound temperature we try to reach."""
if self._mode == NEST_MODE_ECO:
return self._eco_temperature[1]
if self._mode == NEST_MODE_HEAT_COOL:
return self._target_temperature[1]
return None
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temp = None
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if self._mode == NEST_MODE_HEAT_COOL:
if target_temp_low is not None and target_temp_high is not None:
temp = (target_temp_low, target_temp_high)
_LOGGER.debug("Nest set_temperature-output-value=%s", temp)
else:
temp = kwargs.get(ATTR_TEMPERATURE)
_LOGGER.debug("Nest set_temperature-output-value=%s", temp)
try:
if temp is not None:
self.device.target = temp
except APIError as api_error:
_LOGGER.error("An error occurred while setting temperature: %s", api_error)
# restore target temperature
self.schedule_update_ha_state(True)
def set_hvac_mode(self, hvac_mode):
"""Set operation mode."""
self.device.mode = MODE_HASS_TO_NEST[hvac_mode]
@property
def hvac_modes(self):
"""List of available operation modes."""
return self._operation_list
@property
def preset_mode(self):
"""Return current preset mode."""
if self._away and self._mode == NEST_MODE_ECO:
return PRESET_AWAY_AND_ECO
if self._away:
return PRESET_AWAY
if self._mode == NEST_MODE_ECO:
return PRESET_ECO
return PRESET_NONE
@property
def preset_modes(self):
"""Return preset modes."""
return PRESET_MODES
def set_preset_mode(self, preset_mode):
"""Set preset mode."""
if preset_mode == self.preset_mode:
return
need_away = preset_mode in (PRESET_AWAY, PRESET_AWAY_AND_ECO)
need_eco = preset_mode in (PRESET_ECO, PRESET_AWAY_AND_ECO)
is_away = self._away
is_eco = self._mode == NEST_MODE_ECO
if is_away != need_away:
self.structure.away = need_away
if is_eco != need_eco:
if need_eco:
self.device.mode = NEST_MODE_ECO
else:
self.device.mode = self.device.previous_mode
@property
def fan_mode(self):
"""Return whether the fan is on."""
if self._has_fan:
# Return whether the fan is on
return FAN_ON if self._fan else FAN_AUTO
# No Fan available so disable slider
return None
@property
def fan_modes(self):
"""List of available fan modes."""
if self._has_fan:
return self._fan_modes
return None
def set_fan_mode(self, fan_mode):
"""Turn fan on/off."""
if self._has_fan:
self.device.fan = fan_mode.lower()
@property
def min_temp(self):
"""Identify min_temp in Nest API or defaults if not available."""
return self._min_temperature
@property
def max_temp(self):
"""Identify max_temp in Nest API or defaults if not available."""
return self._max_temperature
def update(self):
"""Cache value from Python-nest."""
self._location = self.device.where
self._name = self.device.name
self._humidity = self.device.humidity
self._temperature = self.device.temperature
self._mode = self.device.mode
self._action = self.device.hvac_state
self._target_temperature = self.device.target
self._fan = self.device.fan
self._away = self.structure.away == "away"
self._eco_temperature = self.device.eco_temperature
self._locked_temperature = self.device.locked_temperature
self._min_temperature = self.device.min_temperature
self._max_temperature = self.device.max_temperature
self._is_locked = self.device.is_locked
if self.device.temperature_scale == "C":
self._temperature_scale = TEMP_CELSIUS
else:
self._temperature_scale = TEMP_FAHRENHEIT
|
from datetime import timedelta
import stookalert
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_SAFETY,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME
from homeassistant.helpers import config_validation as cv
SCAN_INTERVAL = timedelta(minutes=60)
CONF_PROVINCE = "province"
DEFAULT_DEVICE_CLASS = DEVICE_CLASS_SAFETY
DEFAULT_NAME = "Stookalert"
ATTRIBUTION = "Data provided by rivm.nl"
PROVINCES = [
"Drenthe",
"Flevoland",
"Friesland",
"Gelderland",
"Groningen",
"Limburg",
"Noord-Brabant",
"Noord-Holland",
"Overijssel",
"Utrecht",
"Zeeland",
"Zuid-Holland",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PROVINCE): vol.In(PROVINCES),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Stookalert binary sensor platform."""
province = config[CONF_PROVINCE]
name = config[CONF_NAME]
api_handler = stookalert.stookalert(province)
add_entities([StookalertBinarySensor(name, api_handler)], update_before_add=True)
class StookalertBinarySensor(BinarySensorEntity):
"""An implementation of RIVM Stookalert."""
def __init__(self, name, api_handler):
"""Initialize a Stookalert device."""
self._name = name
self._api_handler = api_handler
@property
def device_state_attributes(self):
"""Return the attribute(s) of the sensor."""
state_attr = {ATTR_ATTRIBUTION: ATTRIBUTION}
if self._api_handler.last_updated is not None:
state_attr["last_updated"] = self._api_handler.last_updated.isoformat()
return state_attr
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return True if the Alert is active."""
return self._api_handler.state == 1
@property
def device_class(self):
"""Return the device class of this binary sensor."""
return DEFAULT_DEVICE_CLASS
def update(self):
"""Update the data from the Stookalert handler."""
self._api_handler.get_alerts()
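# Example configuration.yaml entry (a sketch; province must be one of the
# PROVINCES listed above):
#
#     binary_sensor:
#       - platform: stookalert
#         province: Utrecht
#         name: Stookalert Utrecht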
|
from pathlib import Path
from test.common import test_root
import pytest
from box import Box, BoxError, BoxList, box_from_file
class TestFromFile:
def test_from_all(self):
assert isinstance(box_from_file(Path(test_root, "data", "json_file.json")), Box)
assert isinstance(box_from_file(Path(test_root, "data", "toml_file.tml")), Box)
assert isinstance(box_from_file(Path(test_root, "data", "yaml_file.yaml")), Box)
assert isinstance(box_from_file(Path(test_root, "data", "json_file.json"), file_type="json"), Box)
assert isinstance(box_from_file(Path(test_root, "data", "toml_file.tml"), file_type="toml"), Box)
assert isinstance(box_from_file(Path(test_root, "data", "yaml_file.yaml"), file_type="yaml"), Box)
assert isinstance(box_from_file(Path(test_root, "data", "json_list.json")), BoxList)
assert isinstance(box_from_file(Path(test_root, "data", "yaml_list.yaml")), BoxList)
assert isinstance(box_from_file(Path(test_root, "data", "msgpack_file.msgpack")), Box)
assert isinstance(box_from_file(Path(test_root, "data", "msgpack_list.msgpack")), BoxList)
assert isinstance(box_from_file(Path(test_root, "data", "csv_file.csv")), BoxList)
def test_bad_file(self):
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"), file_type="json")
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"), file_type="toml")
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"), file_type="yaml")
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"), file_type="msgpack")
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"), file_type="unknown")
with pytest.raises(BoxError):
box_from_file(Path(test_root, "data", "bad_file.txt"))
with pytest.raises(BoxError):
box_from_file("does not exist")
|
import diamond.collector
from diamond.collector import str_to_bool
import re
try:
import pymongo
except ImportError:
pymongo = None
try:
from pymongo import ReadPreference
except ImportError:
ReadPreference = None
class TokuMXCollector(diamond.collector.Collector):
def __init__(self, *args, **kwargs):
self.__totals = {}
super(TokuMXCollector, self).__init__(*args, **kwargs)
def get_default_config_help(self):
config_help = super(TokuMXCollector, self).get_default_config_help()
config_help.update({
            'hosts': 'Array of hostname(:port) elements to get metrics from.'
                     ' Set an alias by prefixing host:port with alias@',
'host': 'A single hostname(:port) to get metrics from'
' (can be used instead of hosts and overrides it)',
'user': 'Username for authenticated login (optional)',
'passwd': 'Password for authenticated login (optional)',
'databases': 'A regex of which databases to gather metrics for.'
' Defaults to all databases.',
'ignore_collections': 'A regex of which collections to ignore.'
' MapReduce temporary collections (tmp.mr.*)'
' are ignored by default.',
'network_timeout': 'Timeout for mongodb connection (in seconds).'
' There is no timeout by default.',
'simple': 'Only collect the same metrics as mongostat.',
'translate_collections': 'Translate dot (.) to underscores (_)'
' in collection names.'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(TokuMXCollector, self).get_default_config()
config.update({
'path': 'mongo',
'hosts': ['localhost'],
'user': None,
'passwd': None,
'databases': '.*',
            'ignore_collections': r'^tmp\.mr\.',
'network_timeout': None,
'simple': 'False',
'translate_collections': 'False'
})
return config
def collect(self):
"""Collect number values from db.serverStatus() and db.engineStatus()"""
if pymongo is None:
self.log.error('Unable to import pymongo')
return
# we need this for backwards compatibility
if 'host' in self.config:
self.config['hosts'] = [self.config['host']]
# convert network_timeout to integer
if self.config['network_timeout']:
self.config['network_timeout'] = int(
self.config['network_timeout'])
# use auth if given
if 'user' in self.config:
user = self.config['user']
else:
user = None
if 'passwd' in self.config:
passwd = self.config['passwd']
else:
passwd = None
for host in self.config['hosts']:
if len(self.config['hosts']) == 1:
# one host only, no need to have a prefix
base_prefix = []
else:
                matches = re.search(r'((.+)@)?(.+)?', host)
alias = matches.group(2)
host = matches.group(3)
if alias is None:
                    base_prefix = [re.sub(r'[:.]', '_', host)]
else:
base_prefix = [alias]
try:
if ReadPreference is None:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
slave_okay=True
)
else:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
read_preference=ReadPreference.SECONDARY,
)
except Exception as e:
                self.log.error('Could not connect to mongodb: %s', e)
continue
# try auth
if user:
try:
conn.admin.authenticate(user, passwd)
except Exception as e:
                    self.log.error(
                        'User auth given, but could not authenticate'
                        ' with host: %s, err: %s' % (host, e))
                    return {}
serverStatus = conn.db.command('serverStatus')
engineStatus = conn.db.command('engineStatus')
data = dict(serverStatus.items() + engineStatus.items())
self._publish_transformed(data, base_prefix)
if str_to_bool(self.config['simple']):
data = self._extract_simple_data(data)
self._publish_dict_with_prefix(data, base_prefix)
db_name_filter = re.compile(self.config['databases'])
ignored_collections = re.compile(self.config['ignore_collections'])
for db_name in conn.database_names():
if not db_name_filter.search(db_name):
continue
db_stats = conn[db_name].command('dbStats')
db_prefix = base_prefix + ['databases', db_name]
self._publish_dict_with_prefix(db_stats, db_prefix)
for collection_name in conn[db_name].collection_names():
if ignored_collections.search(collection_name):
continue
collection_stats = conn[db_name].command('collstats',
collection_name)
if str_to_bool(self.config['translate_collections']):
collection_name = collection_name.replace('.', '_')
collection_prefix = db_prefix + [collection_name]
self._publish_dict_with_prefix(collection_stats,
collection_prefix)
def _publish_transformed(self, data, base_prefix):
""" Publish values of type: counter or percent """
self._publish_dict_with_prefix(data.get('opcounters', {}),
base_prefix + ['opcounters_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('opcountersRepl', {}),
base_prefix +
['opcountersRepl_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('network', {}),
base_prefix + ['network_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['extra_info_per_sec'],
'page_faults',
data.get('extra_info', {}),
self.publish_counter)
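        # Helper sketch: get_dotted_value walks nested dicts by a dotted key
        # ('globalLock.lockTime' -> data['globalLock']['lockTime']), returning
        # 0 when any segment is missing; compute_interval caches the previous
        # total per metric key so each call yields the delta since the last
        # collection cycle.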
def get_dotted_value(data, key_name):
key_name = key_name.split('.')
for i in key_name:
data = data.get(i, {})
if not data:
return 0
return data
def compute_interval(data, total_name):
current_total = get_dotted_value(data, total_name)
total_key = '.'.join(base_prefix + [total_name])
last_total = self.__totals.get(total_key, current_total)
interval = current_total - last_total
self.__totals[total_key] = current_total
return interval
def publish_percent(value_name, total_name, data):
value = float(get_dotted_value(data, value_name) * 100)
interval = compute_interval(data, total_name)
key = '.'.join(base_prefix + ['percent', value_name])
self.publish_counter(key, value, time_delta=bool(interval),
interval=interval)
publish_percent('globalLock.lockTime', 'globalLock.totalTime', data)
locks = data.get('locks')
if locks:
if '.' in locks:
locks['_global_'] = locks['.']
                del locks['.']
key_prefix = '.'.join(base_prefix + ['percent'])
db_name_filter = re.compile(self.config['databases'])
interval = compute_interval(data, 'uptimeMillis')
for db_name in locks:
if not db_name_filter.search(db_name):
continue
r = get_dotted_value(
locks,
'%s.timeLockedMicros.r' % db_name)
R = get_dotted_value(
locks,
'.%s.timeLockedMicros.R' % db_name)
value = float(r + R) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.read' % db_name,
value, time_delta=bool(interval),
interval=interval)
w = get_dotted_value(
locks,
'%s.timeLockedMicros.w' % db_name)
W = get_dotted_value(
locks,
'%s.timeLockedMicros.W' % db_name)
value = float(w + W) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.write' % db_name,
value, time_delta=bool(interval), interval=interval)
    def _publish_dict_with_prefix(self, data, prefix, publishfn=None):
        for key in data:
            self._publish_metrics(prefix, key, data, publishfn)
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
if key not in data:
return
value = data[key]
keys = prev_keys + [key]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(keys, new_key, value)
elif isinstance(value, int) or isinstance(value, float):
publishfn('.'.join(keys), value)
elif isinstance(value, long):
publishfn('.'.join(keys), float(value))
def _extract_simple_data(self, data):
return {
'connections': data.get('connections'),
'globalLock': data.get('globalLock'),
'indexCounters': data.get('indexCounters')
}
|
import datetime
import logging
from homeassistant.components.binary_sensor import DOMAIN, BinarySensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import track_point_in_time
import homeassistant.util.dt as dt_util
from . import ZWaveDeviceEntity, workaround
from .const import COMMAND_CLASS_SENSOR_BINARY
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave binary sensors from Config Entry."""
@callback
def async_add_binary_sensor(binary_sensor):
"""Add Z-Wave binary sensor."""
async_add_entities([binary_sensor])
async_dispatcher_connect(hass, "zwave_new_binary_sensor", async_add_binary_sensor)
def get_device(values, **kwargs):
"""Create Z-Wave entity device."""
device_mapping = workaround.get_device_mapping(values.primary)
if device_mapping == workaround.WORKAROUND_NO_OFF_EVENT:
return ZWaveTriggerSensor(values, "motion")
if workaround.get_device_component_mapping(values.primary) == DOMAIN:
return ZWaveBinarySensor(values, None)
if values.primary.command_class == COMMAND_CLASS_SENSOR_BINARY:
return ZWaveBinarySensor(values, None)
return None
class ZWaveBinarySensor(BinarySensorEntity, ZWaveDeviceEntity):
"""Representation of a binary sensor within Z-Wave."""
def __init__(self, values, device_class):
"""Initialize the sensor."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._sensor_type = device_class
self._state = self.values.primary.data
def update_properties(self):
"""Handle data changes for node values."""
self._state = self.values.primary.data
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._sensor_type
class ZWaveTriggerSensor(ZWaveBinarySensor):
"""Representation of a stateless sensor within Z-Wave."""
def __init__(self, values, device_class):
"""Initialize the sensor."""
super().__init__(values, device_class)
# Set default off delay to 60 sec
self.re_arm_sec = 60
self.invalidate_after = None
def update_properties(self):
"""Handle value changes for this entity's node."""
self._state = self.values.primary.data
_LOGGER.debug("off_delay=%s", self.values.off_delay)
# Set re_arm_sec if off_delay is provided from the sensor
if self.values.off_delay:
_LOGGER.debug("off_delay.data=%s", self.values.off_delay.data)
self.re_arm_sec = self.values.off_delay.data * 8
# only allow this value to be true for re_arm secs
if not self.hass:
return
self.invalidate_after = dt_util.utcnow() + datetime.timedelta(
seconds=self.re_arm_sec
)
track_point_in_time(
self.hass, self.async_update_ha_state, self.invalidate_after
)
@property
def is_on(self):
"""Return true if movement has happened within the rearm time."""
return self._state and (
self.invalidate_after is None or self.invalidate_after > dt_util.utcnow()
)
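# Behavior note: ZWaveTriggerSensor reports is_on for re_arm_sec after the last
# motion event. update_properties stamps invalidate_after into the future and
# schedules a state refresh at that moment, so is_on falls back to False even
# though the node never sends an explicit "off" frame.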
|
from homeassistant.auth.permissions import (
POLICY_SCHEMA,
PolicyPermissions,
system_policies,
)
def test_admin_policy():
"""Test admin policy works."""
# Make sure it's valid
POLICY_SCHEMA(system_policies.ADMIN_POLICY)
perms = PolicyPermissions(system_policies.ADMIN_POLICY, None)
assert perms.check_entity("light.kitchen", "read")
assert perms.check_entity("light.kitchen", "control")
assert perms.check_entity("light.kitchen", "edit")
def test_user_policy():
"""Test user policy works."""
# Make sure it's valid
POLICY_SCHEMA(system_policies.USER_POLICY)
perms = PolicyPermissions(system_policies.USER_POLICY, None)
assert perms.check_entity("light.kitchen", "read")
assert perms.check_entity("light.kitchen", "control")
assert perms.check_entity("light.kitchen", "edit")
def test_read_only_policy():
"""Test read only policy works."""
# Make sure it's valid
POLICY_SCHEMA(system_policies.READ_ONLY_POLICY)
perms = PolicyPermissions(system_policies.READ_ONLY_POLICY, None)
assert perms.check_entity("light.kitchen", "read")
assert not perms.check_entity("light.kitchen", "control")
assert not perms.check_entity("light.kitchen", "edit")
|
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components import binary_sensor, mqtt
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
CONF_DEVICE,
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_NAME,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.helpers.event as evt
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util import dt as dt_util
from . import (
ATTR_DISCOVERY_HASH,
CONF_QOS,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from .debug_info import log_messages
from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "MQTT Binary sensor"
CONF_OFF_DELAY = "off_delay"
DEFAULT_PAYLOAD_OFF = "OFF"
DEFAULT_PAYLOAD_ON = "ON"
DEFAULT_FORCE_UPDATE = False
CONF_EXPIRE_AFTER = "expire_after"
PLATFORM_SCHEMA = (
mqtt.MQTT_RO_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_EXPIRE_AFTER): cv.positive_int,
vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OFF_DELAY): cv.positive_int,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT binary sensor through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, config, async_add_entities)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT binary sensor dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add a MQTT binary sensor."""
discovery_data = discovery_payload.discovery_data
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(binary_sensor.DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT binary sensor."""
async_add_entities([MqttBinarySensor(hass, config, config_entry, discovery_data)])
class MqttBinarySensor(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
BinarySensorEntity,
):
"""Representation a binary sensor that is updated by MQTT."""
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the MQTT binary sensor."""
self.hass = hass
self._unique_id = config.get(CONF_UNIQUE_ID)
self._state = None
self._sub_state = None
self._expiration_trigger = None
self._delay_listener = None
expire_after = config.get(CONF_EXPIRE_AFTER)
if expire_after is not None and expire_after > 0:
self._expired = True
else:
self._expired = None
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe mqtt events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
self._config = config
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = self.hass
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
def off_delay_listener(now):
"""Switch device off after a delay."""
self._delay_listener = None
self._state = False
self.async_write_ha_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_message_received(msg):
"""Handle a new received MQTT state message."""
payload = msg.payload
# auto-expire enabled?
expire_after = self._config.get(CONF_EXPIRE_AFTER)
if expire_after is not None and expire_after > 0:
                # When expire_after is set and we receive a message, the device
                # cannot be expired: it had to be alive to send the message
self._expired = False
# Reset old trigger
if self._expiration_trigger:
self._expiration_trigger()
self._expiration_trigger = None
# Set new trigger
expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)
self._expiration_trigger = async_track_point_in_utc_time(
self.hass, self._value_is_expired, expiration_at
)
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
payload = value_template.async_render_with_possible_json_value(
payload, variables={"entity_id": self.entity_id}
)
if not payload.strip(): # No output from template, ignore
_LOGGER.debug(
"Empty template output for entity: %s with state topic: %s. Payload: '%s', with value template '%s'",
self._config[CONF_NAME],
self._config[CONF_STATE_TOPIC],
msg.payload,
value_template,
)
return
if payload == self._config[CONF_PAYLOAD_ON]:
self._state = True
elif payload == self._config[CONF_PAYLOAD_OFF]:
self._state = False
else: # Payload is not for this entity
template_info = ""
if value_template is not None:
template_info = f", template output: '{payload}', with value template '{str(value_template)}'"
_LOGGER.info(
"No matching payload found for entity: %s with state topic: %s. Payload: '%s'%s",
self._config[CONF_NAME],
self._config[CONF_STATE_TOPIC],
msg.payload,
template_info,
)
return
if self._delay_listener is not None:
self._delay_listener()
self._delay_listener = None
off_delay = self._config.get(CONF_OFF_DELAY)
if self._state and off_delay is not None:
self._delay_listener = evt.async_call_later(
self.hass, off_delay, off_delay_listener
)
self.async_write_ha_state()
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._config[CONF_STATE_TOPIC],
"msg_callback": state_message_received,
"qos": self._config[CONF_QOS],
}
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@callback
def _value_is_expired(self, *_):
"""Triggered when value is expired."""
self._expiration_trigger = None
self._expired = True
self.async_write_ha_state()
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the binary sensor."""
return self._config[CONF_NAME]
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this sensor."""
return self._config.get(CONF_DEVICE_CLASS)
@property
def force_update(self):
"""Force update."""
return self._config[CONF_FORCE_UPDATE]
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def available(self) -> bool:
"""Return true if the device is available and value has not expired."""
expire_after = self._config.get(CONF_EXPIRE_AFTER)
# pylint: disable=no-member
return MqttAvailability.available.fget(self) and (
expire_after is None or not self._expired
)
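# Hedged configuration sketch (YAML; topic, payloads, and timings below are
# illustrative placeholders, not defaults) showing how the options handled
# above fit together:
#
#   binary_sensor:
#     - platform: mqtt
#       name: "Front Door"
#       state_topic: "home/door"
#       payload_on: "ON"
#       payload_off: "OFF"
#       value_template: "{{ value_json.state }}"
#       off_delay: 5        # flip back to off 5 s after an "on" message
#       expire_after: 600   # mark the value expired 10 min after the last message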
|
from __future__ import division
import sys
import time
class ProgressHook(object):
"""A hook class reporting the progress of iteration.
This is a hook class designed for
:func:`~chainercv.utils.apply_prediction_to_iterator`.
Args:
n_total (int): The number of images. This argument is optional.
"""
def __init__(self, n_total=None):
self.n_total = n_total
self.start = time.time()
self.n_processed = 0
def __call__(self, in_values, out_values, rest_values):
self.n_processed += len(in_values[0])
fps = self.n_processed / (time.time() - self.start)
if self.n_total is not None and fps > 0:
eta = int((self.n_total - self.n_processed) / fps)
sys.stdout.write(
'\r{:d} of {:d} samples, {:.2f} samples/sec,'
' ETA {:4d}:{:02d}:{:02d}'.format(
self.n_processed, self.n_total, fps,
eta // 60 // 60, (eta // 60) % 60, eta % 60))
else:
sys.stdout.write(
'\r{:d} samples, {:.2f} samples/sec'.format(
self.n_processed, fps))
sys.stdout.flush()
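# Hedged usage sketch: the hook is normally passed to
# chainercv.utils.apply_prediction_to_iterator, but it can also be exercised
# directly with dummy batches (names below are illustrative):
#
#   hook = ProgressHook(n_total=100)
#   for batch in batches:                  # each batch: a (imgs,) tuple
#       hook((batch,), None, None)         # in_values, out_values, rest_values
#   sys.stdout.write('\n')                 # finish the \r progress line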
|
from functools import partial
from ...utils import verbose, get_config
from ..utils import (has_dataset, _data_path, _get_version, _version_doc,
_data_path_doc_accept)
has_brainstorm_data = partial(has_dataset, name='brainstorm.bst_raw')
_description = u"""
URL: http://neuroimage.usc.edu/brainstorm/DatasetMedianNerveCtf
- One subject, one acquisition run of 6 minutes
- Subject stimulated using Digitimer Constant Current Stimulator
(model DS7A)
- The run contains 200 electric stimulations randomly distributed between
left and right:
- 102 stimulations of the left hand
- 98 stimulations of the right hand
- Inter-stimulus interval: jittered between [1500, 2000]ms
- Stimuli generated using PsychToolBox on Windows PC (TTL pulse generated
with the parallel port connected to the Digitimer via the rear panel BNC)
"""
@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
*, accept=False, verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='brainstorm',
download=download, archive_name='bst_raw.tar.gz',
accept=accept)
_data_path_doc = _data_path_doc_accept.format(
name='brainstorm', conf='MNE_DATASETS_BRAINSTORM_DATA_PATH')
_data_path_doc = _data_path_doc.replace('brainstorm dataset',
'brainstorm (bst_raw) dataset')
data_path.__doc__ = _data_path_doc
def get_version(): # noqa: D103
return _get_version('brainstorm.bst_raw')
get_version.__doc__ = _version_doc.format(name='brainstorm')
def description(): # noqa: D103
"""Get description of brainstorm (bst_raw) dataset."""
for desc in _description.splitlines():
print(desc)
def _skip_bstraw_data():
skip_testing = (get_config('MNE_SKIP_TESTING_DATASET_TESTS', 'false') ==
'true')
skip = skip_testing or not has_brainstorm_data()
return skip
def requires_bstraw_data(func):
"""Skip testing data test."""
import pytest
return pytest.mark.skipif(_skip_bstraw_data(),
reason='Requires brainstorm dataset')(func)
|
from typing import Any, List, Optional, Union
from pyisy.constants import (
ISY_VALUE_UNKNOWN,
PROTO_GROUP,
PROTO_INSTEON,
PROTO_PROGRAM,
PROTO_ZWAVE,
TAG_FOLDER,
)
from pyisy.nodes import Group, Node, Nodes
from pyisy.programs import Programs
from pyisy.variables import Variables
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate.const import DOMAIN as CLIMATE
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
_LOGGER,
DEFAULT_PROGRAM_STRING,
DOMAIN,
FILTER_INSTEON_TYPE,
FILTER_NODE_DEF_ID,
FILTER_STATES,
FILTER_UOM,
FILTER_ZWAVE_CAT,
ISY994_NODES,
ISY994_PROGRAMS,
ISY994_VARIABLES,
ISY_GROUP_PLATFORM,
KEY_ACTIONS,
KEY_STATUS,
NODE_FILTERS,
SUBNODE_CLIMATE_COOL,
SUBNODE_CLIMATE_HEAT,
SUBNODE_EZIO2X4_SENSORS,
SUBNODE_FANLINC_LIGHT,
SUBNODE_IOLINC_RELAY,
SUPPORTED_PLATFORMS,
SUPPORTED_PROGRAM_PLATFORMS,
TYPE_CATEGORY_SENSOR_ACTUATORS,
TYPE_EZIO2X4,
UOM_DOUBLE_TEMP,
UOM_ISYV4_DEGREES,
)
BINARY_SENSOR_UOMS = ["2", "78"]
BINARY_SENSOR_ISY_STATES = ["on", "off"]
def _check_for_node_def(
hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None
) -> bool:
"""Check if the node matches the node_def_id for any platforms.
This is only present on the 5.0 ISY firmware, and is the most reliable
way to determine a device's type.
"""
if not hasattr(node, "node_def_id") or node.node_def_id is None:
# Node doesn't have a node_def (pre 5.0 firmware most likely)
return False
node_def_id = node.node_def_id
platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_def_id in NODE_FILTERS[platform][FILTER_NODE_DEF_ID]:
hass_isy_data[ISY994_NODES][platform].append(node)
return True
return False
def _check_for_insteon_type(
hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None
) -> bool:
"""Check if the node matches the Insteon type for any platforms.
    This is for (presumably) every version of the ISY firmware, but only
    works for Insteon devices. "Node Server" (v5+) nodes, Z-Wave devices,
    and others will not have a type.
"""
if not hasattr(node, "protocol") or node.protocol != PROTO_INSTEON:
return False
if not hasattr(node, "type") or node.type is None:
# Node doesn't have a type (non-Insteon device most likely)
return False
device_type = node.type
platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
        if any(
            device_type.startswith(t)
            for t in set(NODE_FILTERS[platform][FILTER_INSTEON_TYPE])
        ):
# Hacky special-cases for certain devices with different platforms
# included as subnodes. Note that special-cases are not necessary
# on ISY 5.x firmware as it uses the superior NodeDefs method
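            # (e.g. an Insteon address like "1A 2B 3C 4" yields subnode_id 4;
            # the trailing component is parsed as hexadecimal. Illustrative only.)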
subnode_id = int(node.address.split(" ")[-1], 16)
# FanLinc, which has a light module as one of its nodes.
if platform == FAN and subnode_id == SUBNODE_FANLINC_LIGHT:
hass_isy_data[ISY994_NODES][LIGHT].append(node)
return True
            # Thermostats, which have "Heat" and "Cool" sub-nodes on addresses 2 and 3
if platform == CLIMATE and subnode_id in [
SUBNODE_CLIMATE_COOL,
SUBNODE_CLIMATE_HEAT,
]:
hass_isy_data[ISY994_NODES][BINARY_SENSOR].append(node)
return True
# IOLincs which have a sensor and relay on 2 different nodes
if (
platform == BINARY_SENSOR
and device_type.startswith(TYPE_CATEGORY_SENSOR_ACTUATORS)
and subnode_id == SUBNODE_IOLINC_RELAY
):
hass_isy_data[ISY994_NODES][SWITCH].append(node)
return True
# Smartenit EZIO2X4
if (
platform == SWITCH
and device_type.startswith(TYPE_EZIO2X4)
and subnode_id in SUBNODE_EZIO2X4_SENSORS
):
hass_isy_data[ISY994_NODES][BINARY_SENSOR].append(node)
return True
hass_isy_data[ISY994_NODES][platform].append(node)
return True
return False
def _check_for_zwave_cat(
hass_isy_data: dict, node: Union[Group, Node], single_platform: str = None
) -> bool:
"""Check if the node matches the ISY Z-Wave Category for any platforms.
This is for (presumably) every version of the ISY firmware, but only
works for Z-Wave Devices with the devtype.cat property.
"""
if not hasattr(node, "protocol") or node.protocol != PROTO_ZWAVE:
return False
if not hasattr(node, "zwave_props") or node.zwave_props is None:
# Node doesn't have a device type category (non-Z-Wave device)
return False
device_type = node.zwave_props.category
platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
        if any(
            device_type.startswith(t)
            for t in set(NODE_FILTERS[platform][FILTER_ZWAVE_CAT])
        ):
hass_isy_data[ISY994_NODES][platform].append(node)
return True
return False
def _check_for_uom_id(
hass_isy_data: dict,
node: Union[Group, Node],
single_platform: str = None,
uom_list: list = None,
) -> bool:
"""Check if a node's uom matches any of the platforms uom filter.
This is used for versions of the ISY firmware that report uoms as a single
ID. We can often infer what type of device it is by that ID.
"""
if not hasattr(node, "uom") or node.uom in [None, ""]:
# Node doesn't have a uom (Scenes for example)
return False
# Backwards compatibility for ISYv4 Firmware:
node_uom = node.uom
if isinstance(node.uom, list):
node_uom = node.uom[0]
if uom_list:
if node_uom in uom_list:
hass_isy_data[ISY994_NODES][single_platform].append(node)
return True
return False
platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_uom in NODE_FILTERS[platform][FILTER_UOM]:
hass_isy_data[ISY994_NODES][platform].append(node)
return True
return False
def _check_for_states_in_uom(
hass_isy_data: dict,
node: Union[Group, Node],
single_platform: str = None,
states_list: list = None,
) -> bool:
"""Check if a list of uoms matches two possible filters.
This is for versions of the ISY firmware that report uoms as a list of all
possible "human readable" states. This filter passes if all of the possible
states fit inside the given filter.
"""
if not hasattr(node, "uom") or node.uom in [None, ""]:
# Node doesn't have a uom (Scenes for example)
return False
# This only works for ISYv4 Firmware where uom is a list of states:
if not isinstance(node.uom, list):
return False
node_uom = set(map(str.lower, node.uom))
if states_list:
if node_uom == set(states_list):
hass_isy_data[ISY994_NODES][single_platform].append(node)
return True
return False
platforms = SUPPORTED_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_uom == set(NODE_FILTERS[platform][FILTER_STATES]):
hass_isy_data[ISY994_NODES][platform].append(node)
return True
return False
def _is_sensor_a_binary_sensor(hass_isy_data: dict, node: Union[Group, Node]) -> bool:
"""Determine if the given sensor node should be a binary_sensor."""
if _check_for_node_def(hass_isy_data, node, single_platform=BINARY_SENSOR):
return True
if _check_for_insteon_type(hass_isy_data, node, single_platform=BINARY_SENSOR):
return True
# For the next two checks, we're providing our own set of uoms that
# represent on/off devices. This is because we can only depend on these
# checks in the context of already knowing that this is definitely a
# sensor device.
if _check_for_uom_id(
hass_isy_data, node, single_platform=BINARY_SENSOR, uom_list=BINARY_SENSOR_UOMS
):
return True
if _check_for_states_in_uom(
hass_isy_data,
node,
single_platform=BINARY_SENSOR,
states_list=BINARY_SENSOR_ISY_STATES,
):
return True
return False
def _categorize_nodes(
hass_isy_data: dict, nodes: Nodes, ignore_identifier: str, sensor_identifier: str
) -> None:
"""Sort the nodes to their proper platforms."""
for (path, node) in nodes:
ignored = ignore_identifier in path or ignore_identifier in node.name
if ignored:
# Don't import this node as a device at all
continue
if hasattr(node, "protocol") and node.protocol == PROTO_GROUP:
hass_isy_data[ISY994_NODES][ISY_GROUP_PLATFORM].append(node)
continue
if sensor_identifier in path or sensor_identifier in node.name:
# User has specified to treat this as a sensor. First we need to
# determine if it should be a binary_sensor.
if _is_sensor_a_binary_sensor(hass_isy_data, node):
continue
hass_isy_data[ISY994_NODES][SENSOR].append(node)
continue
# We have a bunch of different methods for determining the device type,
# each of which works with different ISY firmware versions or device
# family. The order here is important, from most reliable to least.
if _check_for_node_def(hass_isy_data, node):
continue
if _check_for_insteon_type(hass_isy_data, node):
continue
if _check_for_zwave_cat(hass_isy_data, node):
continue
if _check_for_uom_id(hass_isy_data, node):
continue
if _check_for_states_in_uom(hass_isy_data, node):
continue
        # Fall back to sensor, e.g. for un-sortable items like NodeServer nodes.
hass_isy_data[ISY994_NODES][SENSOR].append(node)
def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
"""Categorize the ISY994 programs."""
for platform in SUPPORTED_PROGRAM_PLATFORMS:
folder = programs.get_by_name(f"{DEFAULT_PROGRAM_STRING}{platform}")
if not folder:
continue
for dtype, _, node_id in folder.children:
if dtype != TAG_FOLDER:
continue
entity_folder = folder[node_id]
actions = None
status = entity_folder.get_by_name(KEY_STATUS)
            if not status or status.protocol != PROTO_PROGRAM:
_LOGGER.warning(
"Program %s entity '%s' not loaded, invalid/missing status program",
platform,
entity_folder.name,
)
continue
if platform != BINARY_SENSOR:
actions = entity_folder.get_by_name(KEY_ACTIONS)
                if not actions or actions.protocol != PROTO_PROGRAM:
_LOGGER.warning(
"Program %s entity '%s' not loaded, invalid/missing actions program",
platform,
entity_folder.name,
)
continue
entity = (entity_folder.name, status, actions)
hass_isy_data[ISY994_PROGRAMS][platform].append(entity)
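# Hedged sketch of the ISY program folder layout consumed by
# _categorize_programs above (folder and entity names are illustrative):
#
#   My Programs/
#     HA.switch/        <- f"{DEFAULT_PROGRAM_STRING}{platform}"
#       Desk Lamp/      <- one sub-folder per entity
#         status        <- required status program (KEY_STATUS)
#         actions       <- actions program (KEY_ACTIONS); not for binary_sensor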
def _categorize_variables(
hass_isy_data: dict, variables: Variables, identifier: str
) -> None:
"""Gather the ISY994 Variables to be added as sensors."""
try:
var_to_add = [
(vtype, vname, vid)
for (vtype, vname, vid) in variables.children
if identifier in vname
]
except KeyError as err:
_LOGGER.error("Error adding ISY Variables: %s", err)
return
for vtype, vname, vid in var_to_add:
hass_isy_data[ISY994_VARIABLES].append((vname, variables[vtype][vid]))
async def migrate_old_unique_ids(
hass: HomeAssistantType, platform: str, devices: Optional[List[Any]]
) -> None:
"""Migrate to new controller-specific unique ids."""
registry = await async_get_registry(hass)
for device in devices:
old_entity_id = registry.async_get_entity_id(
platform, DOMAIN, device.old_unique_id
)
if old_entity_id is not None:
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
device.old_unique_id,
device.unique_id,
)
registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id)
old_entity_id_2 = registry.async_get_entity_id(
platform, DOMAIN, device.unique_id.replace(":", "")
)
if old_entity_id_2 is not None:
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
device.unique_id.replace(":", ""),
device.unique_id,
)
registry.async_update_entity(
old_entity_id_2, new_unique_id=device.unique_id
)
def convert_isy_value_to_hass(
value: Union[int, float, None],
uom: str,
precision: Union[int, str],
fallback_precision: Optional[int] = None,
) -> Union[float, int, None]:
"""Fix ISY Reported Values.
ISY provides float values as an integer and precision component.
Correct by shifting the decimal place left by the value of precision.
(e.g. value=2345, prec="2" == 23.45)
Insteon Thermostats report temperature in 0.5-deg precision as an int
by sending a value of 2 times the Temp. Correct by dividing by 2 here.
"""
if value is None or value == ISY_VALUE_UNKNOWN:
return None
if uom in [UOM_DOUBLE_TEMP, UOM_ISYV4_DEGREES]:
return round(float(value) / 2.0, 1)
if precision not in ("0", 0):
return round(float(value) / 10 ** int(precision), int(precision))
if fallback_precision:
return round(float(value), fallback_precision)
return value
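# Hedged worked examples matching the docstring above (illustrative only):
#   convert_isy_value_to_hass(2345, "", "2")             -> 23.45
#   convert_isy_value_to_hass(47, UOM_DOUBLE_TEMP, "0")  -> 23.5
#   convert_isy_value_to_hass(230, "", "0", 1)           -> 230.0
#   convert_isy_value_to_hass(None, "", "0")             -> None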
|
from cerberus import Validator
def test_validated_schema_cache():
v = Validator({'foozifix': {'coerce': int}})
cache_size = len(v._valid_schemas)
v = Validator({'foozifix': {'type': 'integer'}})
cache_size += 1
assert len(v._valid_schemas) == cache_size
v = Validator({'foozifix': {'coerce': int}})
assert len(v._valid_schemas) == cache_size
max_cache_size = 427
    assert cache_size <= max_cache_size, (
        "There's an unexpectedly high number (%s) of cached valid definition "
        "schemas. Unless you added further tests, there is a good chance that "
        "something is wrong. If you added tests with new schemas, you can try "
        "to adjust the variable `max_cache_size` according to the added "
        "schemas." % cache_size
    )
|
from datetime import timedelta
import logging
import pysmarthab
from requests.exceptions import Timeout
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_WINDOW,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverEntity,
)
from . import DATA_HUB, DOMAIN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=60)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up SmartHab covers from a config entry."""
hub = hass.data[DOMAIN][config_entry.entry_id][DATA_HUB]
entities = (
SmartHabCover(cover)
for cover in await hub.async_get_device_list()
if isinstance(cover, pysmarthab.Shutter)
)
async_add_entities(entities, True)
class SmartHabCover(CoverEntity):
"""Representation a cover."""
def __init__(self, cover):
"""Initialize a SmartHabCover."""
self._cover = cover
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._cover.device_id
@property
def name(self) -> str:
"""Return the display name of this cover."""
return self._cover.label
@property
def current_cover_position(self) -> int:
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._cover.state
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
@property
def is_closed(self) -> bool:
"""Return if the cover is closed or not."""
return self._cover.state == 0
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_WINDOW
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self._cover.async_open()
async def async_close_cover(self, **kwargs):
"""Close cover."""
await self._cover.async_close()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
await self._cover.async_set_state(kwargs[ATTR_POSITION])
async def async_update(self):
"""Fetch new state data for this cover."""
try:
await self._cover.async_update()
except Timeout:
_LOGGER.error(
"Reached timeout while updating cover %s from API", self.entity_id
)
|
from numbers import Number
from typing import Optional
from homeassistant.const import PRECISION_HALVES, PRECISION_TENTHS
from homeassistant.core import HomeAssistant
from homeassistant.util.temperature import convert as convert_temperature
def display_temp(
hass: HomeAssistant, temperature: Optional[float], unit: str, precision: float
) -> Optional[float]:
"""Convert temperature into preferred units/precision for display."""
temperature_unit = unit
ha_unit = hass.config.units.temperature_unit
if temperature is None:
return temperature
# If the temperature is not a number this can cause issues
# with Polymer components, so bail early there.
if not isinstance(temperature, Number):
raise TypeError(f"Temperature is not a number: {temperature}")
if temperature_unit != ha_unit:
temperature = convert_temperature(temperature, temperature_unit, ha_unit)
# Round in the units appropriate
if precision == PRECISION_HALVES:
temperature = round(temperature * 2) / 2.0
elif precision == PRECISION_TENTHS:
temperature = round(temperature, 1)
# Integer as a fall back (PRECISION_WHOLE)
else:
temperature = round(temperature)
return temperature
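# Hedged worked example (assumes a metric HA instance; TEMP_FAHRENHEIT is the
# usual HA constant, not imported in this module):
#   display_temp(hass, 72.1, TEMP_FAHRENHEIT, PRECISION_HALVES)
#   -> converts 72.1 F to ~22.28 C, then rounds to halves: 22.5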
|
import logging
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import async_entries_for_device
_LOGGER = logging.getLogger(__name__)
class UniFiBase(Entity):
"""UniFi entity base class."""
DOMAIN = ""
TYPE = ""
def __init__(self, item, controller) -> None:
"""Set up UniFi entity base.
        Register the item's MAC with the controller's entities so that
        disabled entities are tracked as well.
"""
self._item = item
self.controller = controller
self.controller.entities[self.DOMAIN][self.TYPE].add(item.mac)
async def async_added_to_hass(self) -> None:
"""Entity created."""
_LOGGER.debug(
"New %s entity %s (%s)", self.TYPE, self.entity_id, self._item.mac
)
for signal, method in (
(self.controller.signal_reachable, self.async_update_callback),
(self.controller.signal_options_update, self.options_updated),
(self.controller.signal_remove, self.remove_item),
):
self.async_on_remove(async_dispatcher_connect(self.hass, signal, method))
self._item.register_callback(self.async_update_callback)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect object when removed."""
_LOGGER.debug(
"Removing %s entity %s (%s)", self.TYPE, self.entity_id, self._item.mac
)
self._item.remove_callback(self.async_update_callback)
self.controller.entities[self.DOMAIN][self.TYPE].remove(self._item.mac)
@callback
def async_update_callback(self) -> None:
"""Update the entity's state."""
_LOGGER.debug(
"Updating %s entity %s (%s)", self.TYPE, self.entity_id, self._item.mac
)
self.async_write_ha_state()
async def options_updated(self) -> None:
"""Config entry options are updated, remove entity if option is disabled."""
raise NotImplementedError
async def remove_item(self, mac_addresses: set) -> None:
"""Remove entity if MAC is part of set.
        Remove the entity if no entity registry entry exists.
        Remove the entity registry entry if no device registry entry exists.
        Remove the device registry entry if this is its only linked entity.
        Remove just the entity registry entry if more than one entity is
        linked to the device registry entry.
"""
if self._item.mac not in mac_addresses:
return
entity_registry = await self.hass.helpers.entity_registry.async_get_registry()
entity_entry = entity_registry.async_get(self.entity_id)
if not entity_entry:
await self.async_remove()
return
device_registry = await self.hass.helpers.device_registry.async_get_registry()
device_entry = device_registry.async_get(entity_entry.device_id)
if not device_entry:
entity_registry.async_remove(self.entity_id)
return
if len(async_entries_for_device(entity_registry, entity_entry.device_id)) == 1:
device_registry.async_remove_device(device_entry.id)
return
entity_registry.async_remove(self.entity_id)
@property
def should_poll(self) -> bool:
"""No polling needed."""
return False
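# Hedged subclass sketch (names are illustrative, not a real platform):
# concrete platforms set DOMAIN/TYPE and implement options_updated(), e.g.
#
#   class ExampleUniFiSwitch(UniFiBase):
#       DOMAIN = "switch"
#       TYPE = "example"
#
#       async def options_updated(self) -> None:
#           if not self.controller.option_example:  # hypothetical option
#               await self.remove_item({self._item.mac})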
|
import pytest
from vcr.request import Request, HeadersDict
@pytest.mark.parametrize(
"method, uri, expected_str",
[
("GET", "http://www.google.com/", "<Request (GET) http://www.google.com/>"),
("OPTIONS", "*", "<Request (OPTIONS) *>"),
("CONNECT", "host.some.where:1234", "<Request (CONNECT) host.some.where:1234>"),
],
)
def test_str(method, uri, expected_str):
assert str(Request(method, uri, "", {})) == expected_str
def test_headers():
headers = {"X-Header1": ["h1"], "X-Header2": "h2"}
req = Request("GET", "http://go.com/", "", headers)
assert req.headers == {"X-Header1": "h1", "X-Header2": "h2"}
req.headers["X-Header1"] = "h11"
assert req.headers == {"X-Header1": "h11", "X-Header2": "h2"}
def test_add_header_deprecated():
req = Request("GET", "http://go.com/", "", {})
pytest.deprecated_call(req.add_header, "foo", "bar")
assert req.headers == {"foo": "bar"}
@pytest.mark.parametrize(
"uri, expected_port",
[
("http://go.com/", 80),
("http://go.com:80/", 80),
("http://go.com:3000/", 3000),
("https://go.com/", 443),
("https://go.com:443/", 443),
("https://go.com:3000/", 3000),
("*", None),
],
)
def test_port(uri, expected_port):
req = Request("GET", uri, "", {})
assert req.port == expected_port
@pytest.mark.parametrize(
"method, uri",
[
("GET", "http://go.com/"),
("GET", "http://go.com:80/"),
("CONNECT", "localhost:1234"),
("OPTIONS", "*"),
],
)
def test_uri(method, uri):
assert Request(method, uri, "", {}).uri == uri
def test_HeadersDict():
# Simple test of CaseInsensitiveDict
h = HeadersDict()
assert h == {}
h["Content-Type"] = "application/json"
assert h == {"Content-Type": "application/json"}
assert h["content-type"] == "application/json"
assert h["CONTENT-TYPE"] == "application/json"
# Test feature of HeadersDict: devolve list to first element
h = HeadersDict()
assert h == {}
h["x"] = ["foo", "bar"]
assert h == {"x": "foo"}
# Test feature of HeadersDict: preserve original key case
h = HeadersDict()
assert h == {}
h["Content-Type"] = "application/json"
assert h == {"Content-Type": "application/json"}
h["content-type"] = "text/plain"
assert h == {"Content-Type": "text/plain"}
h["CONtent-tyPE"] = "whoa"
assert h == {"Content-Type": "whoa"}
|
import logging
import voluptuous as vol
import yeelightsunflower
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
SUPPORT_YEELIGHT_SUNFLOWER = SUPPORT_BRIGHTNESS | SUPPORT_COLOR
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Yeelight Sunflower Light platform."""
host = config.get(CONF_HOST)
hub = yeelightsunflower.Hub(host)
if not hub.available:
_LOGGER.error("Could not connect to Yeelight Sunflower hub")
return False
add_entities(SunflowerBulb(light) for light in hub.get_lights())
class SunflowerBulb(LightEntity):
"""Representation of a Yeelight Sunflower Light."""
def __init__(self, light):
"""Initialize a Yeelight Sunflower bulb."""
self._light = light
self._available = light.available
self._brightness = light.brightness
self._is_on = light.is_on
self._rgb_color = light.rgb_color
self._unique_id = light.zid
@property
def name(self):
"""Return the display name of this light."""
return f"sunflower_{self._light.zid}"
@property
def unique_id(self):
"""Return the unique ID of this light."""
return self._unique_id
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def is_on(self):
"""Return true if light is on."""
return self._is_on
@property
def brightness(self):
"""Return the brightness is 0-255; Yeelight's brightness is 0-100."""
return int(self._brightness / 100 * 255)
@property
def hs_color(self):
"""Return the color property."""
return color_util.color_RGB_to_hs(*self._rgb_color)
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_YEELIGHT_SUNFLOWER
def turn_on(self, **kwargs):
"""Instruct the light to turn on, optionally set colour/brightness."""
# when no arguments, just turn light on (full brightness)
if not kwargs:
self._light.turn_on()
else:
if ATTR_HS_COLOR in kwargs and ATTR_BRIGHTNESS in kwargs:
rgb = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR])
bright = int(kwargs[ATTR_BRIGHTNESS] / 255 * 100)
self._light.set_all(rgb[0], rgb[1], rgb[2], bright)
elif ATTR_HS_COLOR in kwargs:
rgb = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR])
self._light.set_rgb_color(rgb[0], rgb[1], rgb[2])
elif ATTR_BRIGHTNESS in kwargs:
bright = int(kwargs[ATTR_BRIGHTNESS] / 255 * 100)
self._light.set_brightness(bright)
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
self._light.turn_off()
def update(self):
"""Fetch new state data for this light and update local values."""
self._light.update()
self._available = self._light.available
self._brightness = self._light.brightness
self._is_on = self._light.is_on
self._rgb_color = self._light.rgb_color
|
import pytest
from homeassistant.components import media_source
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.components.media_source import const
from homeassistant.components.media_source.error import Unresolvable
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_is_media_source_id():
"""Test media source validation."""
assert media_source.is_media_source_id(const.URI_SCHEME)
assert media_source.is_media_source_id(f"{const.URI_SCHEME}domain")
assert media_source.is_media_source_id(f"{const.URI_SCHEME}domain/identifier")
assert not media_source.is_media_source_id("test")
async def test_generate_media_source_id():
"""Test identifier generation."""
tests = [
(None, None),
(None, ""),
("", ""),
("domain", None),
("domain", ""),
("domain", "identifier"),
]
for domain, identifier in tests:
assert media_source.is_media_source_id(
media_source.generate_media_source_id(domain, identifier)
)
async def test_async_browse_media(hass):
"""Test browse media."""
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
# Test non-media ignored (/media has test.mp3 and not_media.txt)
media = await media_source.async_browse_media(hass, "")
assert isinstance(media, media_source.models.BrowseMediaSource)
assert media.title == "media/"
assert len(media.children) == 1
# Test invalid media content
with pytest.raises(ValueError):
await media_source.async_browse_media(hass, "invalid")
# Test base URI returns all domains
media = await media_source.async_browse_media(hass, const.URI_SCHEME)
assert isinstance(media, media_source.models.BrowseMediaSource)
assert len(media.children) == 1
assert media.children[0].title == "Local Media"
async def test_async_resolve_media(hass):
"""Test browse media."""
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
media = await media_source.async_resolve_media(
hass,
media_source.generate_media_source_id(const.DOMAIN, "local/test.mp3"),
)
assert isinstance(media, media_source.models.PlayMedia)
async def test_async_unresolve_media(hass):
"""Test browse media."""
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
# Test no media content
with pytest.raises(Unresolvable):
await media_source.async_resolve_media(hass, "")
async def test_websocket_browse_media(hass, hass_ws_client):
"""Test browse media websocket."""
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
media = media_source.models.BrowseMediaSource(
domain=const.DOMAIN,
identifier="/media",
title="Local Media",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type="listing",
can_play=False,
can_expand=True,
)
with patch(
"homeassistant.components.media_source.async_browse_media",
return_value=media,
):
await client.send_json(
{
"id": 1,
"type": "media_source/browse_media",
}
)
msg = await client.receive_json()
assert msg["success"]
assert msg["id"] == 1
assert media.as_dict() == msg["result"]
with patch(
"homeassistant.components.media_source.async_browse_media",
side_effect=BrowseError("test"),
):
await client.send_json(
{
"id": 2,
"type": "media_source/browse_media",
"media_content_id": "invalid",
}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "browse_media_failed"
assert msg["error"]["message"] == "test"
async def test_websocket_resolve_media(hass, hass_ws_client):
"""Test browse media websocket."""
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
client = await hass_ws_client(hass)
media = media_source.models.PlayMedia("/media/local/test.mp3", "audio/mpeg")
with patch(
"homeassistant.components.media_source.async_resolve_media",
return_value=media,
):
await client.send_json(
{
"id": 1,
"type": "media_source/resolve_media",
"media_content_id": f"{const.URI_SCHEME}{const.DOMAIN}/local/test.mp3",
}
)
msg = await client.receive_json()
assert msg["success"]
assert msg["id"] == 1
assert msg["result"]["url"].startswith(media.url)
assert msg["result"]["mime_type"] == media.mime_type
with patch(
"homeassistant.components.media_source.async_resolve_media",
side_effect=media_source.Unresolvable("test"),
):
await client.send_json(
{
"id": 2,
"type": "media_source/resolve_media",
"media_content_id": "invalid",
}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "resolve_media_failed"
assert msg["error"]["message"] == "test"
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from netappDisk import netappDisk
###############################################################################
class TestnetappDisk(CollectorTestCase):
def setUp(self):
        config = get_collector_config('netappDisk', {})
self.collector = netappDisk(config, None)
def test_import(self):
self.assertTrue(netappDisk)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import subprocess
import sys
import tempfile
from absl import flags
from absl import logging
from absl.flags import argparse_flags
from absl.testing import _bazelize_command
from absl.testing import absltest
from absl.testing import parameterized
import mock
import six
FLAGS = flags.FLAGS
class ArgparseFlagsTest(parameterized.TestCase):
def setUp(self):
self._absl_flags = flags.FlagValues()
flags.DEFINE_bool(
'absl_bool', None, 'help for --absl_bool.',
short_name='b', flag_values=self._absl_flags)
# Add a boolean flag that starts with "no", to verify it can correctly
# handle the "no" prefixes in boolean flags.
flags.DEFINE_bool(
'notice', None, 'help for --notice.',
flag_values=self._absl_flags)
flags.DEFINE_string(
'absl_string', 'default', 'help for --absl_string=%.',
short_name='s', flag_values=self._absl_flags)
flags.DEFINE_integer(
'absl_integer', 1, 'help for --absl_integer.',
flag_values=self._absl_flags)
    flags.DEFINE_float(
        'absl_float', 1, 'help for --absl_float.',
        flag_values=self._absl_flags)
flags.DEFINE_enum(
'absl_enum', 'apple', ['apple', 'orange'], 'help for --absl_enum.',
flag_values=self._absl_flags)
def test_dash_as_prefix_char_only(self):
with self.assertRaises(ValueError):
argparse_flags.ArgumentParser(prefix_chars='/')
def test_default_inherited_absl_flags_value(self):
parser = argparse_flags.ArgumentParser()
self.assertIs(parser._inherited_absl_flags, flags.FLAGS)
def test_parse_absl_flags(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
self.assertFalse(self._absl_flags.is_parsed())
self.assertTrue(self._absl_flags['absl_string'].using_default_value)
self.assertTrue(self._absl_flags['absl_integer'].using_default_value)
self.assertTrue(self._absl_flags['absl_float'].using_default_value)
self.assertTrue(self._absl_flags['absl_enum'].using_default_value)
parser.parse_args(
['--absl_string=new_string', '--absl_integer', '2'])
self.assertEqual(self._absl_flags.absl_string, 'new_string')
self.assertEqual(self._absl_flags.absl_integer, 2)
self.assertTrue(self._absl_flags.is_parsed())
self.assertFalse(self._absl_flags['absl_string'].using_default_value)
self.assertFalse(self._absl_flags['absl_integer'].using_default_value)
self.assertTrue(self._absl_flags['absl_float'].using_default_value)
self.assertTrue(self._absl_flags['absl_enum'].using_default_value)
@parameterized.named_parameters(
('true', ['--absl_bool'], True),
('false', ['--noabsl_bool'], False),
('does_not_accept_equal_value', ['--absl_bool=true'], SystemExit),
('does_not_accept_space_value', ['--absl_bool', 'true'], SystemExit),
('long_name_single_dash', ['-absl_bool'], SystemExit),
('short_name', ['-b'], True),
('short_name_false', ['-nob'], SystemExit),
('short_name_double_dash', ['--b'], SystemExit),
('short_name_double_dash_false', ['--nob'], SystemExit),
)
def test_parse_boolean_flags(self, args, expected):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
self.assertIsNone(self._absl_flags['absl_bool'].value)
self.assertIsNone(self._absl_flags['b'].value)
if isinstance(expected, bool):
parser.parse_args(args)
self.assertEqual(expected, self._absl_flags.absl_bool)
self.assertEqual(expected, self._absl_flags.b)
else:
with self.assertRaises(expected):
parser.parse_args(args)
@parameterized.named_parameters(
('true', ['--notice'], True),
('false', ['--nonotice'], False),
)
def test_parse_boolean_existing_no_prefix(self, args, expected):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
self.assertIsNone(self._absl_flags['notice'].value)
parser.parse_args(args)
self.assertEqual(expected, self._absl_flags.notice)
def test_unrecognized_flag(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
with self.assertRaises(SystemExit):
parser.parse_args(['--unknown_flag=what'])
def test_absl_validators(self):
@flags.validator('absl_integer', flag_values=self._absl_flags)
def ensure_positive(value):
return value > 0
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
with self.assertRaises(SystemExit):
parser.parse_args(['--absl_integer', '-2'])
del ensure_positive
@parameterized.named_parameters(
('regular_name_double_dash', '--absl_string=new_string', 'new_string'),
('regular_name_single_dash', '-absl_string=new_string', SystemExit),
('short_name_double_dash', '--s=new_string', SystemExit),
('short_name_single_dash', '-s=new_string', 'new_string'),
)
def test_dashes(self, argument, expected):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
if isinstance(expected, six.string_types):
parser.parse_args([argument])
self.assertEqual(self._absl_flags.absl_string, expected)
else:
with self.assertRaises(expected):
parser.parse_args([argument])
def test_absl_flags_not_added_to_namespace(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
args = parser.parse_args(['--absl_string=new_string'])
self.assertIsNone(getattr(args, 'absl_string', None))
def test_mixed_flags_and_positional(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
parser.add_argument('--header', help='Header message to print.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
args = parser.parse_args(
['--absl_string=new_string', '--header=HEADER', '--absl_integer',
'2', '3', '4'])
self.assertEqual(self._absl_flags.absl_string, 'new_string')
self.assertEqual(self._absl_flags.absl_integer, 2)
self.assertEqual(args.header, 'HEADER')
self.assertListEqual(args.integers, [3, 4])
def test_subparsers(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
parser.add_argument('--header', help='Header message to print.')
subparsers = parser.add_subparsers(help='The command to execute.')
sub_parser = subparsers.add_parser(
'sub_cmd', help='Sub command.', inherited_absl_flags=self._absl_flags)
sub_parser.add_argument('--sub_flag', help='Sub command flag.')
def sub_command_func():
pass
sub_parser.set_defaults(command=sub_command_func)
args = parser.parse_args([
'--header=HEADER', '--absl_string=new_value', 'sub_cmd',
'--absl_integer=2', '--sub_flag=new_sub_flag_value'])
self.assertEqual(args.header, 'HEADER')
self.assertEqual(self._absl_flags.absl_string, 'new_value')
self.assertEqual(args.command, sub_command_func)
self.assertEqual(self._absl_flags.absl_integer, 2)
self.assertEqual(args.sub_flag, 'new_sub_flag_value')
def test_subparsers_no_inherit_in_subparser(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
subparsers = parser.add_subparsers(help='The command to execute.')
subparsers.add_parser(
'sub_cmd', help='Sub command.',
# Do not inherit absl flags in the subparser.
# This is the behavior that this test exercises.
inherited_absl_flags=None)
with self.assertRaises(SystemExit):
parser.parse_args(['sub_cmd', '--absl_string=new_value'])
def test_help_main_module_flags(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
help_message = parser.format_help()
# Only the short name is shown in the usage string.
self.assertIn('[-s ABSL_STRING]', help_message)
# Both names are included in the options section.
self.assertIn('-s ABSL_STRING, --absl_string ABSL_STRING', help_message)
# Verify help messages.
self.assertIn('help for --absl_string=%.', help_message)
self.assertIn('<apple|orange>: help for --absl_enum.', help_message)
def test_help_non_main_module_flags(self):
flags.DEFINE_string(
'non_main_module_flag', 'default', 'help',
module_name='other.module', flag_values=self._absl_flags)
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
help_message = parser.format_help()
# Non main module key flags are not printed in the help message.
self.assertNotIn('non_main_module_flag', help_message)
def test_help_non_main_module_key_flags(self):
flags.DEFINE_string(
'non_main_module_flag', 'default', 'help',
module_name='other.module', flag_values=self._absl_flags)
flags.declare_key_flag('non_main_module_flag', flag_values=self._absl_flags)
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
help_message = parser.format_help()
    # Main module key flags are printed in the help message, even if the flag
    # is defined in another module.
self.assertIn('non_main_module_flag', help_message)
@parameterized.named_parameters(
('h', ['-h']),
('help', ['--help']),
('helpshort', ['--helpshort']),
('helpfull', ['--helpfull']),
)
def test_help_flags(self, args):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
with self.assertRaises(SystemExit):
parser.parse_args(args)
@parameterized.named_parameters(
('h', ['-h']),
('help', ['--help']),
('helpshort', ['--helpshort']),
('helpfull', ['--helpfull']),
)
def test_no_help_flags(self, args):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags, add_help=False)
with mock.patch.object(parser, 'print_help'):
with self.assertRaises(SystemExit):
parser.parse_args(args)
parser.print_help.assert_not_called()
def test_helpfull_message(self):
flags.DEFINE_string(
'non_main_module_flag', 'default', 'help',
module_name='other.module', flag_values=self._absl_flags)
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
with self.assertRaises(SystemExit),\
mock.patch.object(sys, 'stdout', new=six.StringIO()) as mock_stdout:
parser.parse_args(['--helpfull'])
stdout_message = mock_stdout.getvalue()
logging.info('captured stdout message:\n%s', stdout_message)
self.assertIn('--non_main_module_flag', stdout_message)
self.assertIn('other.module', stdout_message)
# Make sure the main module is not included.
self.assertNotIn(sys.argv[0], stdout_message)
# Special flags defined in absl.flags.
self.assertIn('absl.flags:', stdout_message)
self.assertIn('--flagfile', stdout_message)
self.assertIn('--undefok', stdout_message)
@parameterized.named_parameters(
('at_end',
('1', '--absl_string=value_from_cmd', '--flagfile='),
'value_from_file'),
('at_beginning',
('--flagfile=', '1', '--absl_string=value_from_cmd'),
'value_from_cmd'),
)
def test_flagfile(self, cmd_args, expected_absl_string_value):
# Set gnu_getopt to False, to verify it's ignored by argparse_flags.
self._absl_flags.set_gnu_getopt(False)
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
parser.add_argument('--header', help='Header message to print.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
flagfile = tempfile.NamedTemporaryFile(dir=FLAGS.test_tmpdir, delete=False)
self.addCleanup(os.unlink, flagfile.name)
with flagfile:
flagfile.write(b'''
# The flag file.
--absl_string=value_from_file
--absl_integer=1
--header=header_from_file
''')
expand_flagfile = lambda x: x + flagfile.name if x == '--flagfile=' else x
cmd_args = [expand_flagfile(x) for x in cmd_args]
args = parser.parse_args(cmd_args)
self.assertEqual([1], args.integers)
self.assertEqual('header_from_file', args.header)
self.assertEqual(expected_absl_string_value, self._absl_flags.absl_string)
@parameterized.parameters(
('positional', {'positional'}, False),
('--not_existed', {'existed'}, False),
('--empty', set(), False),
('-single_dash', {'single_dash'}, True),
('--double_dash', {'double_dash'}, True),
('--with_value=value', {'with_value'}, True),
)
def test_is_undefok(self, arg, undefok_names, is_undefok):
self.assertEqual(is_undefok, argparse_flags._is_undefok(arg, undefok_names))
@parameterized.named_parameters(
('single', 'single', ['--single'], []),
('multiple', 'first,second', ['--first', '--second'], []),
('single_dash', 'dash', ['-dash'], []),
('mixed_dash', 'mixed', ['-mixed', '--mixed'], []),
('value', 'name', ['--name=value'], []),
('boolean_positive', 'bool', ['--bool'], []),
('boolean_negative', 'bool', ['--nobool'], []),
('left_over', 'strip', ['--first', '--strip', '--last'],
['--first', '--last']),
)
def test_strip_undefok_args(self, undefok, args, expected_args):
actual_args = argparse_flags._strip_undefok_args(undefok, args)
self.assertListEqual(expected_args, actual_args)
@parameterized.named_parameters(
('at_end', ['--unknown', '--undefok=unknown']),
('at_beginning', ['--undefok=unknown', '--unknown']),
('multiple', ['--unknown', '--undefok=unknown,another_unknown']),
('with_value', ['--unknown=value', '--undefok=unknown']),
('maybe_boolean', ['--nounknown', '--undefok=unknown']),
('with_space', ['--unknown', '--undefok', 'unknown']),
)
def test_undefok_flag_correct_use(self, cmd_args):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
args = parser.parse_args(cmd_args) # Make sure it doesn't raise.
# Make sure `undefok` is not exposed in namespace.
sentinel = object()
self.assertIs(sentinel, getattr(args, 'undefok', sentinel))
def test_undefok_flag_existing(self):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
parser.parse_args(
['--absl_string=new_value', '--undefok=absl_string'])
self.assertEqual('new_value', self._absl_flags.absl_string)
@parameterized.named_parameters(
('no_equal', ['--unknown', 'value', '--undefok=unknown']),
('single_dash', ['--unknown', '-undefok=unknown']),
)
def test_undefok_flag_incorrect_use(self, cmd_args):
parser = argparse_flags.ArgumentParser(
inherited_absl_flags=self._absl_flags)
with self.assertRaises(SystemExit):
parser.parse_args(cmd_args)
class ArgparseWithAppRunTest(parameterized.TestCase):
@parameterized.named_parameters(
('simple',
'main_simple', 'parse_flags_simple',
['--argparse_echo=I am argparse.', '--absl_echo=I am absl.'],
['I am argparse.', 'I am absl.']),
('subcommand_roll_dice',
'main_subcommands', 'parse_flags_subcommands',
['--argparse_echo=I am argparse.', '--absl_echo=I am absl.',
'roll_dice', '--num_faces=12'],
['I am argparse.', 'I am absl.', 'Rolled a dice: ']),
('subcommand_shuffle',
'main_subcommands', 'parse_flags_subcommands',
['--argparse_echo=I am argparse.', '--absl_echo=I am absl.',
'shuffle', 'a', 'b', 'c'],
['I am argparse.', 'I am absl.', 'Shuffled: ']),
)
def test_argparse_with_app_run(
self, main_func_name, flags_parser_func_name, args, output_strings):
env = os.environ.copy()
env['MAIN_FUNC'] = main_func_name
env['FLAGS_PARSER_FUNC'] = flags_parser_func_name
helper = _bazelize_command.get_executable_path(
'absl/flags/tests/argparse_flags_test_helper')
try:
stdout = subprocess.check_output(
[helper] + args, env=env, universal_newlines=True)
except subprocess.CalledProcessError as e:
error_info = ('ERROR: argparse_helper failed\n'
'Command: {}\n'
'Exit code: {}\n'
'----- output -----\n{}'
'------------------')
error_info = error_info.format(e.cmd, e.returncode,
e.output + '\n' if e.output else '<empty>')
print(error_info, file=sys.stderr)
raise
for output_string in output_strings:
self.assertIn(output_string, stdout)
if __name__ == '__main__':
absltest.main()
|
from homematicip.base.enums import EventType
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.homematicip_cloud.hap import HomematicipHAP
from homeassistant.const import STATE_ON, STATE_UNAVAILABLE
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .helper import (
HAPID,
HomeFactory,
async_manipulate_test_data,
get_and_check_entity_basics,
)
from tests.async_mock import patch
async def test_hmip_load_all_supported_devices(hass, default_mock_hap_factory):
"""Ensure that all supported devices could be loaded."""
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=None, test_groups=None
)
assert len(mock_hap.hmip_device_by_entity_id) == 231
async def test_hmip_remove_device(hass, default_mock_hap_factory):
"""Test Remove of hmip device."""
entity_id = "light.treppe_ch"
entity_name = "Treppe CH"
device_model = "HmIP-BSL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Treppe"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
assert hmip_device
device_registry = await dr.async_get_registry(hass)
entity_registry = await er.async_get_registry(hass)
pre_device_count = len(device_registry.devices)
pre_entity_count = len(entity_registry.entities)
pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)
hmip_device.fire_remove_event()
await hass.async_block_till_done()
assert len(device_registry.devices) == pre_device_count - 1
assert len(entity_registry.entities) == pre_entity_count - 3
assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 3
async def test_hmip_add_device(hass, default_mock_hap_factory, hmip_config_entry):
"""Test Remove of hmip device."""
entity_id = "light.treppe_ch"
entity_name = "Treppe CH"
device_model = "HmIP-BSL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Treppe"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
assert hmip_device
device_registry = await dr.async_get_registry(hass)
entity_registry = await er.async_get_registry(hass)
pre_device_count = len(device_registry.devices)
pre_entity_count = len(entity_registry.entities)
pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)
hmip_device.fire_remove_event()
await hass.async_block_till_done()
assert len(device_registry.devices) == pre_device_count - 1
assert len(entity_registry.entities) == pre_entity_count - 3
assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 3
reloaded_hap = HomematicipHAP(hass, hmip_config_entry)
with patch(
"homeassistant.components.homematicip_cloud.HomematicipHAP",
return_value=reloaded_hap,
), patch.object(reloaded_hap, "async_connect"), patch.object(
reloaded_hap, "get_hap", return_value=mock_hap.home
), patch(
"homeassistant.components.homematicip_cloud.hap.asyncio.sleep"
):
mock_hap.home.fire_create_event(event_type=EventType.DEVICE_ADDED)
await hass.async_block_till_done()
assert len(device_registry.devices) == pre_device_count
assert len(entity_registry.entities) == pre_entity_count
new_hap = hass.data[HMIPC_DOMAIN][HAPID]
assert len(new_hap.hmip_device_by_entity_id) == pre_mapping_count
async def test_hmip_remove_group(hass, default_mock_hap_factory):
"""Test Remove of hmip group."""
entity_id = "switch.strom_group"
entity_name = "Strom Group"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_groups=["Strom"])
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
assert hmip_device
device_registry = await dr.async_get_registry(hass)
entity_registry = await er.async_get_registry(hass)
pre_device_count = len(device_registry.devices)
pre_entity_count = len(entity_registry.entities)
pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)
hmip_device.fire_remove_event()
await hass.async_block_till_done()
assert len(device_registry.devices) == pre_device_count
assert len(entity_registry.entities) == pre_entity_count - 1
assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 1
async def test_all_devices_unavailable_when_hap_not_connected(
hass, default_mock_hap_factory
):
"""Test make all devices unavaulable when hap is not connected."""
entity_id = "light.treppe_ch"
entity_name = "Treppe CH"
device_model = "HmIP-BSL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Treppe"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
assert hmip_device
assert mock_hap.home.connected
await async_manipulate_test_data(hass, mock_hap.home, "connected", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_UNAVAILABLE
async def test_hap_reconnected(hass, default_mock_hap_factory):
"""Test reconnect hap."""
entity_id = "light.treppe_ch"
entity_name = "Treppe CH"
device_model = "HmIP-BSL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Treppe"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
assert hmip_device
assert mock_hap.home.connected
await async_manipulate_test_data(hass, mock_hap.home, "connected", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_UNAVAILABLE
mock_hap._accesspoint_connected = False # pylint: disable=protected-access
await async_manipulate_test_data(hass, mock_hap.home, "connected", True)
await hass.async_block_till_done()
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
async def test_hap_with_name(hass, mock_connection, hmip_config_entry):
"""Test hap with name."""
home_name = "TestName"
entity_id = f"light.{home_name.lower()}_treppe_ch"
entity_name = f"{home_name} Treppe CH"
device_model = "HmIP-BSL"
hmip_config_entry.data = {**hmip_config_entry.data, "name": home_name}
mock_hap = await HomeFactory(
hass, mock_connection, hmip_config_entry
).async_get_mock_hap(test_devices=["Treppe"])
assert mock_hap
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == STATE_ON
assert ha_state.attributes["friendly_name"] == entity_name
async def test_hmip_reset_energy_counter_services(hass, default_mock_hap_factory):
"""Test reset_energy_counter service."""
entity_id = "switch.pc"
entity_name = "Pc"
device_model = "HMIP-PSM"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state
await hass.services.async_call(
"homematicip_cloud",
"reset_energy_counter",
{"entity_id": "switch.pc"},
blocking=True,
)
assert hmip_device.mock_calls[-1][0] == "reset_energy_counter"
assert len(hmip_device._connection.mock_calls) == 2 # pylint: disable=W0212
await hass.services.async_call(
"homematicip_cloud", "reset_energy_counter", {"entity_id": "all"}, blocking=True
)
assert hmip_device.mock_calls[-1][0] == "reset_energy_counter"
assert len(hmip_device._connection.mock_calls) == 4 # pylint: disable=W0212
async def test_hmip_multi_area_device(hass, default_mock_hap_factory):
"""Test multi area device. Check if devices are created and referenced."""
entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5"
entity_name = "Wired Eingangsmodul – 32-fach Channel5"
device_model = "HmIPW-DRI32"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wired Eingangsmodul – 32-fach"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state
# get the entity
entity_registry = await er.async_get_registry(hass)
entity = entity_registry.async_get(ha_state.entity_id)
assert entity
# get the device
device_registry = await dr.async_get_registry(hass)
device = device_registry.async_get(entity.device_id)
assert device.name == "Wired Eingangsmodul – 32-fach"
# get the hap
hap_device = device_registry.async_get(device.via_device_id)
assert hap_device.name == "Access Point"
|
import sys
import types
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.decorators import (monkeypatch, cached, clear_cache,
copy_cache, cachedproperty)
class DecoratorsTC(TestCase):
def test_monkeypatch_instance_method(self):
class MyClass: pass
@monkeypatch(MyClass)
def meth1(self):
return 12
class XXX(object):
@monkeypatch(MyClass)
def meth2(self):
return 12
if sys.version_info < (3, 0):
self.assertIsInstance(MyClass.meth1, types.MethodType)
self.assertIsInstance(MyClass.meth2, types.MethodType)
else:
            # with python3, unbound methods are functions
self.assertIsInstance(MyClass.meth1, types.FunctionType)
self.assertIsInstance(MyClass.meth2, types.FunctionType)
self.assertEqual(MyClass().meth1(), 12)
self.assertEqual(MyClass().meth2(), 12)
def test_monkeypatch_property(self):
class MyClass: pass
@monkeypatch(MyClass, methodname='prop1')
@property
def meth1(self):
return 12
self.assertIsInstance(MyClass.prop1, property)
self.assertEqual(MyClass().prop1, 12)
def test_monkeypatch_arbitrary_callable(self):
class MyClass: pass
class ArbitraryCallable(object):
def __call__(self):
return 12
# ensure it complains about missing __name__
with self.assertRaises(AttributeError) as cm:
monkeypatch(MyClass)(ArbitraryCallable())
self.assertTrue(str(cm.exception).endswith('has no __name__ attribute: you should provide an explicit `methodname`'))
# ensure no black magic under the hood
monkeypatch(MyClass, 'foo')(ArbitraryCallable())
self.assertTrue(callable(MyClass.foo))
self.assertEqual(MyClass().foo(), 12)
def test_monkeypatch_with_same_name(self):
class MyClass: pass
@monkeypatch(MyClass)
def meth1(self):
return 12
self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'],
['meth1'])
inst = MyClass()
self.assertEqual(inst.meth1(), 12)
def test_monkeypatch_with_custom_name(self):
class MyClass: pass
@monkeypatch(MyClass, 'foo')
def meth2(self, param):
return param + 12
self.assertEqual([attr for attr in dir(MyClass) if attr[:2] != '__'],
['foo'])
inst = MyClass()
self.assertEqual(inst.foo(4), 16)
def test_cannot_cache_generator(self):
def foo():
yield 42
self.assertRaises(AssertionError, cached, foo)
def test_cached_preserves_docstrings_and_name(self):
class Foo(object):
@cached
def foo(self):
""" what's up doc ? """
def bar(self, zogzog):
""" what's up doc ? """
bar = cached(bar, 1)
@cached
def quux(self, zogzog):
""" what's up doc ? """
self.assertEqual(Foo.foo.__doc__, """ what's up doc ? """)
self.assertEqual(Foo.foo.__name__, 'foo')
self.assertEqual(Foo.bar.__doc__, """ what's up doc ? """)
self.assertEqual(Foo.bar.__name__, 'bar')
self.assertEqual(Foo.quux.__doc__, """ what's up doc ? """)
self.assertEqual(Foo.quux.__name__, 'quux')
def test_cached_single_cache(self):
class Foo(object):
@cached(cacheattr=u'_foo')
def foo(self):
""" what's up doc ? """
foo = Foo()
foo.foo()
self.assertTrue(hasattr(foo, '_foo'))
clear_cache(foo, 'foo')
self.assertFalse(hasattr(foo, '_foo'))
def test_cached_multi_cache(self):
class Foo(object):
@cached(cacheattr=u'_foo')
def foo(self, args):
""" what's up doc ? """
foo = Foo()
foo.foo(1)
self.assertEqual(foo._foo, {(1,): None})
clear_cache(foo, 'foo')
self.assertFalse(hasattr(foo, '_foo'))
def test_cached_keyarg_cache(self):
class Foo(object):
@cached(cacheattr=u'_foo', keyarg=1)
def foo(self, other, args):
""" what's up doc ? """
foo = Foo()
foo.foo(2, 1)
self.assertEqual(foo._foo, {2: None})
clear_cache(foo, 'foo')
self.assertFalse(hasattr(foo, '_foo'))
def test_cached_property(self):
class Foo(object):
@property
@cached(cacheattr=u'_foo')
def foo(self):
""" what's up doc ? """
foo = Foo()
foo.foo
self.assertEqual(foo._foo, None)
clear_cache(foo, 'foo')
self.assertFalse(hasattr(foo, '_foo'))
def test_copy_cache(self):
class Foo(object):
@cached(cacheattr=u'_foo')
def foo(self, args):
""" what's up doc ? """
foo = Foo()
foo.foo(1)
self.assertEqual(foo._foo, {(1,): None})
foo2 = Foo()
self.assertFalse(hasattr(foo2, '_foo'))
copy_cache(foo2, 'foo', foo)
self.assertEqual(foo2._foo, {(1,): None})
def test_cachedproperty(self):
class Foo(object):
x = 0
@cachedproperty
def bar(self):
self.__class__.x += 1
return self.__class__.x
@cachedproperty
def quux(self):
""" some prop """
return 42
foo = Foo()
self.assertEqual(Foo.x, 0)
self.assertFalse('bar' in foo.__dict__)
self.assertEqual(foo.bar, 1)
self.assertTrue('bar' in foo.__dict__)
self.assertEqual(foo.bar, 1)
self.assertEqual(foo.quux, 42)
self.assertEqual(Foo.bar.__doc__,
'<wrapped by the cachedproperty decorator>')
self.assertEqual(Foo.quux.__doc__,
'<wrapped by the cachedproperty decorator>\n some prop ')
foo2 = Foo()
self.assertEqual(foo2.bar, 2)
        # make sure foo.bar is still cached on the first instance
self.assertEqual(foo.bar, 1)
class Kallable(object):
def __call__(self):
return 42
self.assertRaises(TypeError, cachedproperty, Kallable())
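# Illustrative sketch (not part of the original suite): the behaviour exercised
# in test_cachedproperty can be implemented as a non-data descriptor whose
# __get__ stores the computed value in the instance __dict__; the cached value
# then shadows the descriptor on every later lookup.
class _CachedPropertySketch(object):
    def __init__(self, func):
        self.func = func
        self.__doc__ = getattr(func, '__doc__', None)
    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        # Compute once, then cache in the instance namespace.
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value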
if __name__ == '__main__':
unittest_main()
|
import asyncio
from datetime import timedelta
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_SCAN_INTERVAL, CONF_URL
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "freedns"
DEFAULT_INTERVAL = timedelta(minutes=10)
TIMEOUT = 10
UPDATE_URL = "https://freedns.afraid.org/dynamic/update.php"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Exclusive(CONF_URL, DOMAIN): cv.string,
vol.Exclusive(CONF_ACCESS_TOKEN, DOMAIN): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize the FreeDNS component."""
conf = config[DOMAIN]
url = conf.get(CONF_URL)
auth_token = conf.get(CONF_ACCESS_TOKEN)
update_interval = conf[CONF_SCAN_INTERVAL]
session = hass.helpers.aiohttp_client.async_get_clientsession()
result = await _update_freedns(hass, session, url, auth_token)
if result is False:
return False
async def update_domain_callback(now):
"""Update the FreeDNS entry."""
await _update_freedns(hass, session, url, auth_token)
hass.helpers.event.async_track_time_interval(
update_domain_callback, update_interval
)
return True
async def _update_freedns(hass, session, url, auth_token):
"""Update FreeDNS."""
params = None
if url is None:
url = UPDATE_URL
    if auth_token is not None:
        # FreeDNS expects the access token as the query string itself
        # (update.php?<token>), so it is sent as a parameter key with an
        # empty value.
        params = {auth_token: ""}
try:
with async_timeout.timeout(TIMEOUT):
resp = await session.get(url, params=params)
body = await resp.text()
if "has not changed" in body:
# IP has not changed.
_LOGGER.debug("FreeDNS update skipped: IP has not changed")
return True
if "ERROR" not in body:
_LOGGER.debug("Updating FreeDNS was successful: %s", body)
return True
if "Invalid update URL" in body:
_LOGGER.error("FreeDNS update token is invalid")
else:
_LOGGER.warning("Updating FreeDNS failed: %s", body)
except aiohttp.ClientError:
_LOGGER.warning("Can't connect to FreeDNS API")
except asyncio.TimeoutError:
_LOGGER.warning("Timeout from FreeDNS API at %s", url)
return False
|
from collections import deque
from datetime import timedelta
from functools import partial
from itertools import islice
import logging
from time import time
from typing import Deque, Dict, List
import pyatmo
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_time_interval
from .const import AUTH, DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
CAMERA_DATA_CLASS_NAME = "CameraData"
WEATHERSTATION_DATA_CLASS_NAME = "WeatherStationData"
HOMECOACH_DATA_CLASS_NAME = "HomeCoachData"
HOMEDATA_DATA_CLASS_NAME = "HomeData"
HOMESTATUS_DATA_CLASS_NAME = "HomeStatus"
PUBLICDATA_DATA_CLASS_NAME = "PublicData"
NEXT_SCAN = "next_scan"
DATA_CLASSES = {
WEATHERSTATION_DATA_CLASS_NAME: pyatmo.WeatherStationData,
HOMECOACH_DATA_CLASS_NAME: pyatmo.HomeCoachData,
CAMERA_DATA_CLASS_NAME: pyatmo.CameraData,
HOMEDATA_DATA_CLASS_NAME: pyatmo.HomeData,
HOMESTATUS_DATA_CLASS_NAME: pyatmo.HomeStatus,
PUBLICDATA_DATA_CLASS_NAME: pyatmo.PublicData,
}
BATCH_SIZE = 3
DEFAULT_INTERVALS = {
HOMEDATA_DATA_CLASS_NAME: 900,
HOMESTATUS_DATA_CLASS_NAME: 300,
CAMERA_DATA_CLASS_NAME: 900,
WEATHERSTATION_DATA_CLASS_NAME: 600,
HOMECOACH_DATA_CLASS_NAME: 300,
PUBLICDATA_DATA_CLASS_NAME: 600,
}
SCAN_INTERVAL = 60
class NetatmoDataHandler:
"""Manages the Netatmo data handling."""
def __init__(self, hass: HomeAssistant, entry: ConfigEntry):
"""Initialize self."""
self.hass = hass
self._auth = hass.data[DOMAIN][entry.entry_id][AUTH]
self.listeners: List[CALLBACK_TYPE] = []
self._data_classes: Dict = {}
self.data = {}
self._queue: Deque = deque()
self._webhook: bool = False
async def async_setup(self):
"""Set up the Netatmo data handler."""
async_track_time_interval(
self.hass, self.async_update, timedelta(seconds=SCAN_INTERVAL)
)
self.listeners.append(
async_dispatcher_connect(
self.hass,
f"signal-{DOMAIN}-webhook-None",
self.handle_event,
)
)
async def async_update(self, event_time):
"""
        Update the registered data classes.
        We do up to BATCH_SIZE calls in one update in order
        to minimize the load on the API service.
"""
for data_class in islice(self._queue, 0, BATCH_SIZE):
if data_class[NEXT_SCAN] > time():
continue
self._data_classes[data_class["name"]][NEXT_SCAN] = (
time() + data_class["interval"]
)
await self.async_fetch_data(
data_class["class"], data_class["name"], **data_class["kwargs"]
)
self._queue.rotate(BATCH_SIZE)
async def async_cleanup(self):
"""Clean up the Netatmo data handler."""
for listener in self.listeners:
listener()
async def handle_event(self, event):
"""Handle webhook events."""
if event["data"]["push_type"] == "webhook_activation":
_LOGGER.info("%s webhook successfully registered", MANUFACTURER)
self._webhook = True
elif event["data"]["push_type"] == "NACamera-connection":
_LOGGER.debug("%s camera reconnected", MANUFACTURER)
self._data_classes[CAMERA_DATA_CLASS_NAME][NEXT_SCAN] = time()
async def async_fetch_data(self, data_class, data_class_entry, **kwargs):
"""Fetch data and notify."""
try:
self.data[data_class_entry] = await self.hass.async_add_executor_job(
partial(data_class, **kwargs),
self._auth,
)
for update_callback in self._data_classes[data_class_entry][
"subscriptions"
]:
if update_callback:
update_callback()
except (pyatmo.NoDevice, pyatmo.ApiError) as err:
_LOGGER.debug(err)
async def register_data_class(
self, data_class_name, data_class_entry, update_callback, **kwargs
):
"""Register data class."""
if data_class_entry in self._data_classes:
self._data_classes[data_class_entry]["subscriptions"].append(
update_callback
)
return
self._data_classes[data_class_entry] = {
"class": DATA_CLASSES[data_class_name],
"name": data_class_entry,
"interval": DEFAULT_INTERVALS[data_class_name],
NEXT_SCAN: time() + DEFAULT_INTERVALS[data_class_name],
"kwargs": kwargs,
"subscriptions": [update_callback],
}
await self.async_fetch_data(
DATA_CLASSES[data_class_name], data_class_entry, **kwargs
)
self._queue.append(self._data_classes[data_class_entry])
_LOGGER.debug("Data class %s added", data_class_entry)
async def unregister_data_class(self, data_class_entry, update_callback):
"""Unregister data class."""
if update_callback not in self._data_classes[data_class_entry]["subscriptions"]:
return
self._data_classes[data_class_entry]["subscriptions"].remove(update_callback)
if not self._data_classes[data_class_entry].get("subscriptions"):
self._queue.remove(self._data_classes[data_class_entry])
self._data_classes.pop(data_class_entry)
_LOGGER.debug("Data class %s removed", data_class_entry)
@property
def webhook(self) -> bool:
"""Return the webhook state."""
return self._webhook
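# Illustrative sketch (not part of the integration): async_update above acts as
# a round-robin scheduler over a deque. Each tick it inspects the first
# BATCH_SIZE entries, fetches the ones whose next scan time is due, then
# rotates the deque so a different slice is inspected on the next tick.
def _round_robin_tick(queue: Deque, batch_size: int, is_due, fetch) -> None:
    """Sketch of one scheduler tick; ``is_due`` and ``fetch`` are callables."""
    for entry in islice(queue, 0, batch_size):
        if is_due(entry):
            fetch(entry)
    queue.rotate(batch_size)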
|
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from weblate.screenshots.fields import ScreenshotField
from weblate.trans.mixins import UserDisplayMixin
from weblate.trans.models import Translation, Unit
from weblate.trans.tasks import component_alerts
from weblate.utils.decorators import disable_for_loaddata
class ScreenshotQuerySet(models.QuerySet):
def order(self):
return self.order_by("name")
def filter_access(self, user):
if user.is_superuser:
return self
return self.filter(
Q(translation__component__project_id__in=user.allowed_project_ids)
& (
Q(translation__component__restricted=False)
| Q(translation__component_id__in=user.component_permissions)
)
)
class Screenshot(models.Model, UserDisplayMixin):
name = models.CharField(verbose_name=_("Screenshot name"), max_length=200)
image = ScreenshotField(
verbose_name=_("Image"),
help_text=_("Upload JPEG or PNG images up to 2000x2000 pixels."),
upload_to="screenshots/",
)
translation = models.ForeignKey(Translation, on_delete=models.deletion.CASCADE)
units = models.ManyToManyField(Unit, blank=True, related_name="screenshots")
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True,
blank=True,
on_delete=models.deletion.SET_NULL,
)
objects = ScreenshotQuerySet.as_manager()
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse("screenshot", kwargs={"pk": self.pk})
@receiver(m2m_changed, sender=Screenshot.units.through)
@disable_for_loaddata
def change_screenshot_assignment(sender, instance, action, **kwargs):
# Update alerts in case there is change in string assignment
if instance.translation.component.alert_set.filter(
name="UnusedScreenshot"
).exists():
component_alerts.delay([instance.pk])
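# Illustrative sketch (assumed objects, not part of the model): the receiver
# above fires whenever the unit assignment changes through the m2m field, e.g.
#
#     screenshot.units.add(unit)     # m2m_changed with action="post_add"
#     screenshot.units.remove(unit)  # m2m_changed with action="post_remove"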
|
import functools
import re
from datetime import timedelta
from typing import (
TYPE_CHECKING,
Generic,
Optional,
Optional as NoParseOptional,
Tuple,
List,
Dict,
Type,
TypeVar,
Literal as Literal,
Any,
Union as UserInputOptional,
)
import discord
from discord.ext import commands as dpy_commands
from discord.ext.commands import BadArgument
from ..i18n import Translator
from ..utils.chat_formatting import humanize_timedelta, humanize_list
if TYPE_CHECKING:
from .context import Context
__all__ = [
"DictConverter",
"GuildConverter",
"UserInputOptional",
"NoParseOptional",
"TimedeltaConverter",
"get_dict_converter",
"get_timedelta_converter",
"parse_timedelta",
"Literal",
]
_ = Translator("commands.converter", __file__)
ID_REGEX = re.compile(r"([0-9]{15,21})")
# Taken with permission from
# https://github.com/mikeshardmind/SinbadCogs/blob/816f3bc2ba860243f75112904b82009a8a9e1f99/scheduler/time_utils.py#L9-L19
TIME_RE_STRING = r"\s?".join(
[
r"((?P<weeks>\d+?)\s?(weeks?|w))?",
r"((?P<days>\d+?)\s?(days?|d))?",
r"((?P<hours>\d+?)\s?(hours?|hrs|hr?))?",
r"((?P<minutes>\d+?)\s?(minutes?|mins?|m(?!o)))?", # prevent matching "months"
r"((?P<seconds>\d+?)\s?(seconds?|secs?|s))?",
]
)
TIME_RE = re.compile(TIME_RE_STRING, re.I)
def parse_timedelta(
argument: str,
*,
maximum: Optional[timedelta] = None,
minimum: Optional[timedelta] = None,
allowed_units: Optional[List[str]] = None,
) -> Optional[timedelta]:
"""
    This converts a user-provided string into a timedelta.
The units should be in order from largest to smallest.
This works with or without whitespace.
Parameters
----------
argument : str
The user provided input
maximum : Optional[timedelta]
If provided, any parsed value higher than this will raise an exception
minimum : Optional[timedelta]
If provided, any parsed value lower than this will raise an exception
allowed_units : Optional[List[str]]
If provided, you can constrain a user to expressing the amount of time
        in specific units. The units you can choose to provide are the same as the
        parser understands: (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)
Returns
-------
Optional[timedelta]
        If matched, the timedelta which was parsed. This can return `None`.
Raises
------
BadArgument
If the argument passed uses a unit not allowed, but understood
or if the value is out of bounds.
"""
matches = TIME_RE.match(argument)
allowed_units = allowed_units or ["weeks", "days", "hours", "minutes", "seconds"]
if matches:
params = {k: int(v) for k, v in matches.groupdict().items() if v is not None}
for k in params.keys():
if k not in allowed_units:
raise BadArgument(
_("`{unit}` is not a valid unit of time for this command").format(unit=k)
)
if params:
try:
delta = timedelta(**params)
except OverflowError:
raise BadArgument(
_("The time set is way too high, consider setting something reasonable.")
)
if maximum and maximum < delta:
raise BadArgument(
_(
"This amount of time is too large for this command. (Maximum: {maximum})"
).format(maximum=humanize_timedelta(timedelta=maximum))
)
if minimum and delta < minimum:
raise BadArgument(
_(
"This amount of time is too small for this command. (Minimum: {minimum})"
).format(minimum=humanize_timedelta(timedelta=minimum))
)
return delta
return None
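# Illustrative sketch (not part of the library): a few concrete
# parse_timedelta calls showing the matching rules documented above.
def _parse_timedelta_examples():  # pragma: no cover - documentation only
    # Units may be abbreviated and whitespace between them is optional.
    assert parse_timedelta("1h 30m") == timedelta(hours=1, minutes=30)
    assert parse_timedelta("2w1d") == timedelta(weeks=2, days=1)
    # Input matching no unit at all yields None rather than raising.
    assert parse_timedelta("soon") is None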
class GuildConverter(discord.Guild):
"""Converts to a `discord.Guild` object.
The lookup strategy is as follows (in order):
1. Lookup by ID.
2. Lookup by name.
"""
@classmethod
async def convert(cls, ctx: "Context", argument: str) -> discord.Guild:
match = ID_REGEX.fullmatch(argument)
if match is None:
ret = discord.utils.get(ctx.bot.guilds, name=argument)
else:
guild_id = int(match.group(1))
ret = ctx.bot.get_guild(guild_id)
if ret is None:
raise BadArgument(_('Server "{name}" not found.').format(name=argument))
return ret
# Below this line are a lot of lies for mypy about things that *end up* correct
# when these are used for command conversion purposes. Please refer to the
# portion which is *not* for type checking for the actual implementation,
# and ensure the lies stay correct for how the object should look as a typehint.
if TYPE_CHECKING:
DictConverter = Dict[str, str]
else:
class DictConverter(dpy_commands.Converter):
"""
        Converts pairs of space-separated values to a dict.
"""
def __init__(self, *expected_keys: str, delims: Optional[List[str]] = None):
self.expected_keys = expected_keys
self.delims = delims or [" "]
self.pattern = re.compile(r"|".join(re.escape(d) for d in self.delims))
async def convert(self, ctx: "Context", argument: str) -> Dict[str, str]:
ret: Dict[str, str] = {}
args = self.pattern.split(argument)
if len(args) % 2 != 0:
raise BadArgument()
iterator = iter(args)
for key in iterator:
if self.expected_keys and key not in self.expected_keys:
raise BadArgument(_("Unexpected key {key}").format(key=key))
ret[key] = next(iterator)
return ret
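# Illustrative sketch (assumed input, not part of this module): with
# DictConverter("name", "color") and the default space delimiter, the argument
# "name Fluffy color white" converts to {"name": "Fluffy", "color": "white"}.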
if TYPE_CHECKING:
def get_dict_converter(*expected_keys: str, delims: Optional[List[str]] = None) -> Type[dict]:
...
else:
def get_dict_converter(*expected_keys: str, delims: Optional[List[str]] = None) -> Type[dict]:
"""
        Returns a typechecking-safe `DictConverter` suitable for use with discord.py.
"""
class PartialMeta(type):
__call__ = functools.partialmethod(
type(DictConverter).__call__, *expected_keys, delims=delims
)
class ValidatedConverter(DictConverter, metaclass=PartialMeta):
pass
return ValidatedConverter
if TYPE_CHECKING:
TimedeltaConverter = timedelta
else:
class TimedeltaConverter(dpy_commands.Converter):
"""
This is a converter for timedeltas.
The units should be in order from largest to smallest.
This works with or without whitespace.
See `parse_timedelta` for more information about how this functions.
Attributes
----------
maximum : Optional[timedelta]
If provided, any parsed value higher than this will raise an exception
minimum : Optional[timedelta]
If provided, any parsed value lower than this will raise an exception
allowed_units : Optional[List[str]]
If provided, you can constrain a user to expressing the amount of time
in specific units. The units you can choose to provide are the same as the
parser understands: (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)
default_unit : Optional[str]
If provided, it will additionally try to match integer-only input into
a timedelta, using the unit specified. Same units as in ``allowed_units``
apply.
"""
def __init__(self, *, minimum=None, maximum=None, allowed_units=None, default_unit=None):
self.allowed_units = allowed_units
self.default_unit = default_unit
self.minimum = minimum
self.maximum = maximum
async def convert(self, ctx: "Context", argument: str) -> timedelta:
if self.default_unit and argument.isdecimal():
argument = argument + self.default_unit
delta = parse_timedelta(
argument,
minimum=self.minimum,
maximum=self.maximum,
allowed_units=self.allowed_units,
)
if delta is not None:
return delta
raise BadArgument() # This allows this to be a required argument.
if TYPE_CHECKING:
def get_timedelta_converter(
*,
default_unit: Optional[str] = None,
maximum: Optional[timedelta] = None,
minimum: Optional[timedelta] = None,
allowed_units: Optional[List[str]] = None,
) -> Type[timedelta]:
...
else:
def get_timedelta_converter(
*,
default_unit: Optional[str] = None,
maximum: Optional[timedelta] = None,
minimum: Optional[timedelta] = None,
allowed_units: Optional[List[str]] = None,
) -> Type[timedelta]:
"""
This creates a type suitable for typechecking which works with discord.py's
commands.
See `parse_timedelta` for more information about how this functions.
Parameters
----------
maximum : Optional[timedelta]
If provided, any parsed value higher than this will raise an exception
minimum : Optional[timedelta]
If provided, any parsed value lower than this will raise an exception
allowed_units : Optional[List[str]]
If provided, you can constrain a user to expressing the amount of time
in specific units. The units you can choose to provide are the same as the
parser understands: (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)
default_unit : Optional[str]
If provided, it will additionally try to match integer-only input into
a timedelta, using the unit specified. Same units as in ``allowed_units``
apply.
Returns
-------
type
The converter class, which will be a subclass of `TimedeltaConverter`
"""
class PartialMeta(type):
__call__ = functools.partialmethod(
                type(TimedeltaConverter).__call__,
allowed_units=allowed_units,
default_unit=default_unit,
minimum=minimum,
maximum=maximum,
)
class ValidatedConverter(TimedeltaConverter, metaclass=PartialMeta):
pass
return ValidatedConverter
if not TYPE_CHECKING:
class NoParseOptional:
"""
This can be used instead of `typing.Optional`
to avoid discord.py special casing the conversion behavior.
.. seealso::
The `ignore_optional_for_conversion` option of commands.
"""
def __class_getitem__(cls, key):
if isinstance(key, tuple):
raise TypeError("Must only provide a single type to Optional")
return key
_T = TypeVar("_T")
if not TYPE_CHECKING:
#: This can be used when user input should be converted as discord.py
#: treats `typing.Optional`, but the type should not be equivalent to
#: ``typing.Union[DesiredType, None]`` for type checking.
#:
#: Note: In type checking context, this type hint can be passed
#: multiple types, but such usage is not supported and will fail at runtime
#:
#: .. warning::
#: This converter class is still provisional.
UserInputOptional = Optional
if not TYPE_CHECKING:
class Literal(dpy_commands.Converter):
"""
This can be used as a converter for `typing.Literal`.
In a type checking context it is `typing.Literal`.
In a runtime context, it's a converter which only matches the literals it was given.
.. warning::
This converter class is still provisional.
"""
        def __init__(self, valid_names: Tuple[str, ...]):
self.valid_names = valid_names
def __call__(self, ctx, arg):
            # Callables are treated as valid types:
# https://github.com/python/cpython/blob/3.8/Lib/typing.py#L148
# Without this, ``typing.Union[Literal["clear"], bool]`` would fail
return self.convert(ctx, arg)
async def convert(self, ctx, arg):
if arg in self.valid_names:
return arg
raise BadArgument(_("Expected one of: {}").format(humanize_list(self.valid_names)))
def __class_getitem__(cls, k):
if not k:
raise ValueError("Need at least one value for Literal")
if isinstance(k, tuple):
return cls(k)
else:
return cls((k,))
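# Illustrative usage sketch (assumed command, not part of this module): used as
# a converter annotation, Literal restricts user input to the given strings.
#
#     @dpy_commands.command()
#     async def loglevel(ctx, level: Literal["debug", "info", "warning"]):
#         ...  # ``level`` is guaranteed to be one of the three strings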
|
import os
import six
from babelfish import Language
from subliminal.subtitle import Subtitle, fix_line_ending, get_subtitle_path
def test_subtitle_text():
subtitle = Subtitle(Language('eng'))
subtitle.content = b'Some ascii text'
assert subtitle.text == 'Some ascii text'
def test_subtitle_text_no_content():
subtitle = Subtitle(Language('eng'))
assert subtitle.text is None
def test_subtitle_is_valid_no_content():
subtitle = Subtitle(Language('fra'))
assert subtitle.is_valid() is False
def test_subtitle_is_valid_valid(monkeypatch):
subtitle = Subtitle(Language('fra'))
text = (u'1\n'
u'00:00:20,000 --> 00:00:24,400\n'
u'En réponse à l\'augmentation de la criminalité\n'
u'dans certains quartiers,\n')
monkeypatch.setattr(Subtitle, 'text', text)
assert subtitle.is_valid() is True
def test_subtitle_is_valid_invalid(monkeypatch):
subtitle = Subtitle(Language('fra'))
text = (u'1\n'
u'00:00:20,000 --> 00:00:24,400\n'
u'En réponse à l\'augmentation de la criminalité\n'
u'dans certains quartiers,\n\n')
text += u'This line shouldn\'t be here'
monkeypatch.setattr(Subtitle, 'text', text)
assert subtitle.is_valid() is False
def test_subtitle_is_valid_valid_begin(monkeypatch):
subtitle = Subtitle(Language('fra'))
text = (u'1\n'
u'00:00:20,000 --> 00:00:24,400\n'
u'En réponse à l\'augmentation de la criminalité\n'
u'dans certains quartiers,\n\n')*20
text += u'This line shouldn\'t be here'
monkeypatch.setattr(Subtitle, 'text', text)
assert subtitle.is_valid() is True
def test_get_subtitle_path(movies):
video = movies['man_of_steel']
assert get_subtitle_path(video.name, extension='.sub') == os.path.splitext(video.name)[0] + '.sub'
def test_get_subtitle_path_language(movies):
video = movies['man_of_steel']
assert get_subtitle_path(video.name, Language('por', 'BR')) == os.path.splitext(video.name)[0] + '.pt-BR.srt'
def test_get_subtitle_path_language_undefined(movies):
video = movies['man_of_steel']
assert get_subtitle_path(video.name, Language('und')) == os.path.splitext(video.name)[0] + '.srt'
def test_fix_line_ending():
content = b'Text\r\nwith\rweird\nline ending\r\ncharacters'
assert fix_line_ending(content) == b'Text\nwith\nweird\nline ending\ncharacters'
def test_subtitle_valid_encoding():
subtitle = Subtitle(Language('deu'), False, None, 'windows-1252')
assert subtitle.encoding == 'cp1252'
def test_subtitle_empty_encoding():
subtitle = Subtitle(Language('deu'), False, None, None)
assert subtitle.encoding is None
def test_subtitle_invalid_encoding():
subtitle = Subtitle(Language('deu'), False, None, 'rubbish')
assert subtitle.encoding is None
def test_subtitle_guess_encoding_utf8():
subtitle = Subtitle(Language('zho'), False, None, None)
subtitle.content = b'Something here'
assert subtitle.guess_encoding() == 'utf-8'
assert isinstance(subtitle.text, six.text_type)
# regression for #921
def test_subtitle_text_guess_encoding_none():
content = b'\x00d\x00\x80\x00\x00\xff\xff\xff\xff\xff\xff,\x00\x00\x00\x00d\x00d\x00\x00\x02s\x84\x8f\xa9'
subtitle = Subtitle(Language('zho'), False, None, None)
subtitle.content = content
assert subtitle.guess_encoding() is None
assert not subtitle.is_valid()
assert not isinstance(subtitle.text, six.text_type)
|
from itertools import combinations
import numpy as np
from pyparsing import alphas, Combine, Literal, Optional, nums, Word
from pgmpy.models import BayesianModel, MarkovModel
from pgmpy.factors.discrete import TabularCPD, DiscreteFactor
class UAIReader(object):
"""
Class for reading UAI file format from files or strings.
"""
def __init__(self, path=None, string=None):
"""
Initialize an instance of UAI reader class
Parameters
----------
path : file or str
Path of the file containing UAI information.
string : str
String containing UAI information.
Example
-------
>>> reader = UAIReader('TestUai.uai')
Reference
---------
http://graphmod.ics.uci.edu/uai08/FileFormat
"""
        if path:
            with open(path) as data:
                self.network = data.read()
elif string:
self.network = string
else:
raise ValueError("Must specify either path or string.")
self.grammar = self.get_grammar()
self.network_type = self.get_network_type()
self.variables = self.get_variables()
self.domain = self.get_domain()
self.edges = self.get_edges()
self.tables = self.get_tables()
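    # Illustrative sketch (an assumption based on the UAI'08 format reference
    # above, using the values from the doctests below): a minimal MARKOV file
    # lists the network type, the variable count, the variable domains, the
    # function count, one scope line per function, then the value tables:
    #
    #     MARKOV
    #     3
    #     2 2 3
    #     2
    #     2 0 1
    #     3 0 1 2
    #     4
    #     4.000 2.400 1.000 0.000
    #     12
    #     2.2500 3.2500 3.7500 0.0000 0.0000 10.0000
    #     1.8750 4.0000 3.3330 2.0000 2.0000 3.4000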
def get_grammar(self):
"""
Returns the grammar of the UAI file.
"""
network_name = Word(alphas).setResultsName("network_name")
no_variables = Word(nums).setResultsName("no_variables")
grammar = network_name + no_variables
self.no_variables = int(grammar.parseString(self.network)["no_variables"])
domain_variables = (Word(nums) * self.no_variables).setResultsName(
"domain_variables"
)
grammar += domain_variables
no_functions = Word(nums).setResultsName("no_functions")
grammar += no_functions
self.no_functions = int(grammar.parseString(self.network)["no_functions"])
integer = Word(nums).setParseAction(lambda t: int(t[0]))
for function in range(0, self.no_functions):
scope_grammar = Word(nums).setResultsName("fun_scope_" + str(function))
grammar += scope_grammar
function_scope = grammar.parseString(self.network)[
"fun_scope_" + str(function)
]
function_grammar = ((integer) * int(function_scope)).setResultsName(
"fun_" + str(function)
)
grammar += function_grammar
floatnumber = Combine(
Word(nums) + Optional(Literal(".") + Optional(Word(nums)))
)
for function in range(0, self.no_functions):
no_values_grammar = Word(nums).setResultsName(
"fun_no_values_" + str(function)
)
grammar += no_values_grammar
no_values = grammar.parseString(self.network)[
"fun_no_values_" + str(function)
]
values_grammar = ((floatnumber) * int(no_values)).setResultsName(
"fun_values_" + str(function)
)
grammar += values_grammar
return grammar
def get_network_type(self):
"""
Returns the type of network defined by the file.
Returns
-------
string : str
String containing network type.
Example
-------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_network_type()
'MARKOV'
"""
network_type = self.grammar.parseString(self.network)
return network_type["network_name"]
def get_variables(self):
"""
Returns a list of variables.
Each variable is represented by an index of list.
        For example, if the number of variables is 4, the list will be
        [var_0, var_1, var_2, var_3].
Returns
-------
list: list of variables
Example
-------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_variables()
['var_0', 'var_1', 'var_2']
"""
variables = []
for var in range(0, self.no_variables):
var_name = "var_" + str(var)
variables.append(var_name)
return variables
def get_domain(self):
"""
Returns the dictionary of variables with keys as variable name
and values as domain of the variables.
Returns
-------
dict: dictionary containing variables and their domains
Example
-------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_domain()
{'var_0': '2', 'var_1': '2', 'var_2': '3'}
"""
domain = {}
var_domain = self.grammar.parseString(self.network)["domain_variables"]
for var in range(0, len(var_domain)):
domain["var_" + str(var)] = var_domain[var]
return domain
def get_edges(self):
"""
Returns the edges of the network.
Returns
-------
set: set containing the edges of the network
Example
-------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_edges()
{('var_0', 'var_1'), ('var_0', 'var_2'), ('var_1', 'var_2')}
"""
edges = []
for function in range(0, self.no_functions):
function_variables = self.grammar.parseString(self.network)[
"fun_" + str(function)
]
if isinstance(function_variables, int):
function_variables = [function_variables]
if self.network_type == "BAYES":
child_var = "var_" + str(function_variables[-1])
function_variables = function_variables[:-1]
for var in function_variables:
edges.append((child_var, "var_" + str(var)))
elif self.network_type == "MARKOV":
function_variables = ["var_" + str(var) for var in function_variables]
edges.extend(list(combinations(function_variables, 2)))
return set(edges)
def get_tables(self):
"""
Returns list of tuple of child variable and CPD in case of Bayesian
and list of tuple of scope of variables and values in case of Markov.
Returns
-------
list : list of tuples of child variable and values in Bayesian
list of tuples of scope of variables and values in case of Markov.
Example
-------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_tables()
[(['var_0', 'var_1'], ['4.000', '2.400', '1.000', '0.000']),
(['var_0', 'var_1', 'var_2'],
['2.2500', '3.2500', '3.7500', '0.0000', '0.0000', '10.0000',
'1.8750', '4.0000', '3.3330', '2.0000', '2.0000', '3.4000'])]
"""
tables = []
for function in range(0, self.no_functions):
function_variables = self.grammar.parseString(self.network)[
"fun_" + str(function)
]
if isinstance(function_variables, int):
function_variables = [function_variables]
if self.network_type == "BAYES":
child_var = "var_" + str(function_variables[-1])
values = self.grammar.parseString(self.network)[
"fun_values_" + str(function)
]
tables.append((child_var, list(values)))
elif self.network_type == "MARKOV":
function_variables = ["var_" + str(var) for var in function_variables]
values = self.grammar.parseString(self.network)[
"fun_values_" + str(function)
]
tables.append((function_variables, list(values)))
return tables
def get_model(self):
"""
Returns an instance of Bayesian Model or Markov Model.
        Variables follow the pattern var_0, var_1, var_2, where var_0 is
        the variable at index 0, var_1 the variable at index 1, and so on.
        Returns
        -------
model: an instance of Bayesian or Markov Model.
Examples
--------
>>> reader = UAIReader('TestUAI.uai')
>>> reader.get_model()
"""
if self.network_type == "BAYES":
model = BayesianModel()
model.add_nodes_from(self.variables)
model.add_edges_from(self.edges)
tabular_cpds = []
for cpd in self.tables:
child_var = cpd[0]
states = int(self.domain[child_var])
arr = list(map(float, cpd[1]))
values = np.array(arr)
values = values.reshape(states, values.size // states)
tabular_cpds.append(TabularCPD(child_var, states, values))
model.add_cpds(*tabular_cpds)
return model
elif self.network_type == "MARKOV":
model = MarkovModel(self.edges)
factors = []
for table in self.tables:
variables = table[0]
cardinality = [int(self.domain[var]) for var in variables]
value = list(map(float, table[1]))
factor = DiscreteFactor(
variables=variables, cardinality=cardinality, values=value
)
factors.append(factor)
model.add_factors(*factors)
return model
class UAIWriter(object):
"""
Class for writing models in UAI.
"""
def __init__(self, model):
"""
Initialize an instance of UAI writer class
Parameters
----------
model: A Bayesian or Markov model
The model to write
"""
if isinstance(model, BayesianModel):
self.network = "BAYES\n"
elif isinstance(model, MarkovModel):
self.network = "MARKOV\n"
else:
raise TypeError("Model must be an instance of Bayesian or Markov model.")
self.model = model
self.no_nodes = self.get_nodes()
self.domain = self.get_domain()
self.functions = self.get_functions()
self.tables = self.get_tables()
def __str__(self):
"""
Returns the UAI file as a string.
"""
self.network += self.no_nodes + "\n"
domain = sorted(self.domain.items(), key=lambda x: (x[1], x[0]))
self.network += " ".join([var[1] for var in domain]) + "\n"
self.network += str(len(self.functions)) + "\n"
for fun in self.functions:
self.network += str(len(fun)) + " "
self.network += " ".join(fun) + "\n"
self.network += "\n"
for table in self.tables:
self.network += str(len(table)) + "\n"
self.network += " ".join(table) + "\n"
return self.network[:-1]
def get_nodes(self):
"""
Adds variables to the network.
Example
-------
>>> writer = UAIWriter(model)
>>> writer.get_nodes()
"""
no_nodes = len(self.model.nodes())
return str(no_nodes)
def get_domain(self):
"""
Adds domain of each variable to the network.
Example
-------
>>> writer = UAIWriter(model)
>>> writer.get_domain()
"""
if isinstance(self.model, BayesianModel):
cpds = self.model.get_cpds()
cpds.sort(key=lambda x: x.variable)
domain = {}
for cpd in cpds:
domain[cpd.variable] = str(cpd.variable_card)
return domain
elif isinstance(self.model, MarkovModel):
factors = self.model.get_factors()
domain = {}
for factor in factors:
variables = factor.variables
for var in variables:
if var not in domain:
domain[var] = str(factor.get_cardinality([var])[var])
return domain
else:
raise TypeError("Model must be an instance of Markov or Bayesian model.")
def get_functions(self):
"""
Adds functions to the network.
Example
-------
>>> writer = UAIWriter(model)
>>> writer.get_functions()
"""
if isinstance(self.model, BayesianModel):
cpds = self.model.get_cpds()
cpds.sort(key=lambda x: x.variable)
variables = sorted(self.domain.items(), key=lambda x: (x[1], x[0]))
functions = []
for cpd in cpds:
child_var = cpd.variable
evidence = cpd.variables[:0:-1]
function = [
str(variables.index((var, self.domain[var]))) for var in evidence
]
function.append(
str(variables.index((child_var, self.domain[child_var])))
)
functions.append(function)
return functions
elif isinstance(self.model, MarkovModel):
factors = self.model.get_factors()
functions = []
variables = sorted(self.domain.items(), key=lambda x: (x[1], x[0]))
for factor in factors:
scope = factor.scope()
function = [
str(variables.index((var, self.domain[var]))) for var in scope
]
functions.append(function)
return functions
else:
raise TypeError("Model must be an instance of Markov or Bayesian model.")
def get_tables(self):
"""
Adds tables to the network.
Example
-------
>>> writer = UAIWriter(model)
>>> writer.get_tables()
"""
if isinstance(self.model, BayesianModel):
cpds = self.model.get_cpds()
cpds.sort(key=lambda x: x.variable)
tables = []
for cpd in cpds:
values = list(map(str, cpd.values.ravel()))
tables.append(values)
return tables
elif isinstance(self.model, MarkovModel):
factors = self.model.get_factors()
tables = []
for factor in factors:
values = list(map(str, factor.values.ravel()))
tables.append(values)
return tables
else:
raise TypeError("Model must be an instance of Markov or Bayesian model.")
def write_uai(self, filename):
"""
        Write the UAI data into the file.
Parameters
----------
filename: Name of the file.
        Examples
        --------
        >>> writer = UAIWriter(model)
        >>> writer.write_uai(test_file)
"""
writer = self.__str__()
with open(filename, "w") as fout:
fout.write(writer)
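# Illustrative round-trip sketch (assumed file names, not part of the library):
#
#     reader = UAIReader('TestUAI.uai')
#     model = reader.get_model()
#     UAIWriter(model).write_uai('TestUAI.copy.uai')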
|
import pytest
import warnings
def has_pyvista():
"""Check that pyvista is installed."""
try:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
import pyvista # noqa: F401
return True
except ImportError:
return False
def has_mayavi():
"""Check that mayavi is installed."""
try:
with warnings.catch_warnings(record=True): # traits
from mayavi import mlab # noqa: F401
return True
except ImportError:
return False
def has_pyqt5():
"""Check if PyQt5 is installed."""
try:
import PyQt5 # noqa: F401
return True
except ImportError:
return False
def has_imageio_ffmpeg():
"""Check if imageio-ffmpeg is installed."""
try:
import imageio_ffmpeg # noqa: F401
return True
except ImportError:
return False
skips_if_not_mayavi = pytest.mark.skipif(
not has_mayavi(), reason='requires mayavi')
skips_if_not_pyvista = pytest.mark.skipif(
not has_pyvista(), reason='requires pyvista')
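# Illustrative usage sketch (assumed test, not part of this conftest): the
# marks above let individual tests opt out when a backend is missing.
#
#     @skips_if_not_pyvista
#     def test_render_with_pyvista():
#         import pyvista
#         ...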
|
import datetime
import glob
import logging
import os
import time
from RestrictedPython import (
compile_restricted_exec,
limited_builtins,
safe_builtins,
utility_builtins,
)
from RestrictedPython.Eval import default_guarded_getitem
from RestrictedPython.Guards import (
full_write_guard,
guarded_iter_unpack_sequence,
guarded_unpack_sequence,
)
import voluptuous as vol
from homeassistant.const import SERVICE_RELOAD
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service import async_set_service_schema
from homeassistant.loader import bind_hass
from homeassistant.util import sanitize_filename
import homeassistant.util.dt as dt_util
from homeassistant.util.yaml.loader import load_yaml
_LOGGER = logging.getLogger(__name__)
DOMAIN = "python_script"
FOLDER = "python_scripts"
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema(dict)}, extra=vol.ALLOW_EXTRA)
ALLOWED_HASS = {"bus", "services", "states"}
ALLOWED_EVENTBUS = {"fire"}
ALLOWED_STATEMACHINE = {
"entity_ids",
"all",
"get",
"is_state",
"is_state_attr",
"remove",
"set",
}
ALLOWED_SERVICEREGISTRY = {"services", "has_service", "call"}
ALLOWED_TIME = {
"sleep",
"strftime",
"strptime",
"gmtime",
"localtime",
"ctime",
"time",
"mktime",
}
ALLOWED_DATETIME = {"date", "time", "datetime", "timedelta", "tzinfo"}
ALLOWED_DT_UTIL = {
"utcnow",
"now",
"as_utc",
"as_timestamp",
"as_local",
"utc_from_timestamp",
"start_of_local_day",
"parse_datetime",
"parse_date",
"get_age",
}
class ScriptError(HomeAssistantError):
"""When a script error occurs."""
def setup(hass, config):
"""Initialize the Python script component."""
path = hass.config.path(FOLDER)
if not os.path.isdir(path):
_LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
return False
discover_scripts(hass)
def reload_scripts_handler(call):
"""Handle reload service calls."""
discover_scripts(hass)
hass.services.register(DOMAIN, SERVICE_RELOAD, reload_scripts_handler)
return True
def discover_scripts(hass):
"""Discover python scripts in folder."""
path = hass.config.path(FOLDER)
if not os.path.isdir(path):
_LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
return False
def python_script_service_handler(call):
"""Handle python script service calls."""
execute_script(hass, call.service, call.data)
existing = hass.services.services.get(DOMAIN, {}).keys()
for existing_service in existing:
if existing_service == SERVICE_RELOAD:
continue
hass.services.remove(DOMAIN, existing_service)
# Load user-provided service descriptions from python_scripts/services.yaml
services_yaml = os.path.join(path, "services.yaml")
if os.path.exists(services_yaml):
services_dict = load_yaml(services_yaml)
else:
services_dict = {}
for fil in glob.iglob(os.path.join(path, "*.py")):
name = os.path.splitext(os.path.basename(fil))[0]
hass.services.register(DOMAIN, name, python_script_service_handler)
service_desc = {
"description": services_dict.get(name, {}).get("description", ""),
"fields": services_dict.get(name, {}).get("fields", {}),
}
async_set_service_schema(hass, DOMAIN, name, service_desc)
@bind_hass
def execute_script(hass, name, data=None):
"""Execute a script."""
filename = f"{name}.py"
with open(hass.config.path(FOLDER, sanitize_filename(filename))) as fil:
source = fil.read()
execute(hass, filename, source, data)
@bind_hass
def execute(hass, filename, source, data=None):
"""Execute Python source."""
compiled = compile_restricted_exec(source, filename=filename)
if compiled.errors:
_LOGGER.error(
"Error loading script %s: %s", filename, ", ".join(compiled.errors)
)
return
if compiled.warnings:
_LOGGER.warning(
"Warning loading script %s: %s", filename, ", ".join(compiled.warnings)
)
def protected_getattr(obj, name, default=None):
"""Restricted method to get attributes."""
if name.startswith("async_"):
raise ScriptError("Not allowed to access async methods")
if (
obj is hass
and name not in ALLOWED_HASS
or obj is hass.bus
and name not in ALLOWED_EVENTBUS
or obj is hass.states
and name not in ALLOWED_STATEMACHINE
or obj is hass.services
and name not in ALLOWED_SERVICEREGISTRY
or obj is dt_util
and name not in ALLOWED_DT_UTIL
or obj is datetime
and name not in ALLOWED_DATETIME
or isinstance(obj, TimeWrapper)
and name not in ALLOWED_TIME
):
raise ScriptError(f"Not allowed to access {obj.__class__.__name__}.{name}")
return getattr(obj, name, default)
extra_builtins = {
"datetime": datetime,
"sorted": sorted,
"time": TimeWrapper(),
"dt_util": dt_util,
"min": min,
"max": max,
"sum": sum,
"any": any,
"all": all,
}
builtins = safe_builtins.copy()
builtins.update(utility_builtins)
builtins.update(limited_builtins)
builtins.update(extra_builtins)
logger = logging.getLogger(f"{__name__}.{filename}")
restricted_globals = {
"__builtins__": builtins,
"_print_": StubPrinter,
"_getattr_": protected_getattr,
"_write_": full_write_guard,
"_getiter_": iter,
"_getitem_": default_guarded_getitem,
"_iter_unpack_sequence_": guarded_iter_unpack_sequence,
"_unpack_sequence_": guarded_unpack_sequence,
"hass": hass,
"data": data or {},
"logger": logger,
}
try:
_LOGGER.info("Executing %s: %s", filename, data)
# pylint: disable=exec-used
exec(compiled.code, restricted_globals)
except ScriptError as err:
logger.error("Error executing script: %s", err)
except Exception as err: # pylint: disable=broad-except
logger.exception("Error executing script: %s", err)
class StubPrinter:
"""Class to handle printing inside scripts."""
def __init__(self, _getattr_):
"""Initialize our printer."""
def _call_print(self, *objects, **kwargs):
"""Print text."""
# pylint: disable=no-self-use
_LOGGER.warning("Don't use print() inside scripts. Use logger.info() instead")
class TimeWrapper:
"""Wrap the time module."""
# Class variable, only going to warn once per Home Assistant run
warned = False
# pylint: disable=no-self-use
def sleep(self, *args, **kwargs):
"""Sleep method that warns once."""
if not TimeWrapper.warned:
TimeWrapper.warned = True
_LOGGER.warning(
"Using time.sleep can reduce the performance of Home Assistant"
)
time.sleep(*args, **kwargs)
def __getattr__(self, attr):
"""Fetch an attribute from Time module."""
attribute = getattr(time, attr)
if callable(attribute):
def wrapper(*args, **kw):
"""Wrap to return callable method if callable."""
return attribute(*args, **kw)
return wrapper
return attribute
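# Illustrative sketch (not shipped with the integration): a script placed at
# <config>/python_scripts/turn_on.py can use the names injected into
# restricted_globals above (``hass``, ``data`` and ``logger``):
#
#     entity_id = data.get("entity_id", "light.kitchen")
#     state = hass.states.get(entity_id)
#     logger.info("%s is %s", entity_id, state.state if state else "unknown")
#     hass.services.call("homeassistant", "turn_on", {"entity_id": entity_id})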
|
import datetime
import os
import random
import sys
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
sys.path = [os.path.abspath(os.path.dirname(os.path.dirname(__file__)))] + sys.path
os.environ['is_test_suite'] = 'True'
from auto_ml import Predictor
from auto_ml.utils_models import load_ml_model
from nose.tools import assert_equal, assert_not_equal, with_setup
from sklearn.metrics import accuracy_score
import dill
import numpy as np
import utils_testing as utils
def test_linear_model_analytics_classification(model_name=None):
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train, model_names='RidgeClassifier')
test_score = ml_predictor.score(df_titanic_test, df_titanic_test.survived)
print('test_score')
print(test_score)
assert -0.21 < test_score < -0.131
def test_input_df_unmodified():
np.random.seed(42)
df_boston_train, df_boston_test = utils.get_boston_regression_dataset()
column_descriptions = {
'MEDV': 'output'
, 'CHAS': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
df_shape = df_boston_train.shape
ml_predictor.train(df_boston_train)
training_shape = df_boston_train.shape
assert training_shape[0] == df_shape[0]
assert training_shape[1] == df_shape[1]
test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV)
print('test_score')
print(test_score)
assert -3.35 < test_score < -2.8
def test_model_uses_user_provided_training_params(model_name=None):
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
    try:
        ml_predictor.train(df_titanic_train, model_names='RidgeClassifier', training_params={'this_param_is_not_valid': True})
        assert False, 'Expected ValueError for an invalid training_params key'
    except ValueError:
        assert True
def test_ignores_new_invalid_features():
    # One of the great unintentional features of auto_ml is that you can pass in new features
    # at prediction time that weren't present at training time, and they're silently ignored!
    # One edge case here is new features that are strange objects (lists, datetimes, intervals,
    # or anything else we can't process in our default data processing pipeline). Initially we
    # just ignored them in dict_vectorizer, but we need to ignore them earlier.
np.random.seed(0)
df_boston_train, df_boston_test = utils.get_boston_regression_dataset()
column_descriptions = {
'MEDV': 'output'
, 'CHAS': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
ml_predictor.train(df_boston_train)
file_name = ml_predictor.save(str(random.random()))
saved_ml_pipeline = load_ml_model(file_name)
os.remove(file_name)
try:
keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
os.remove(keras_file_name)
except:
pass
df_boston_test_dictionaries = df_boston_test.to_dict('records')
# 1. make sure the accuracy is the same
predictions = []
for row in df_boston_test_dictionaries:
if random.random() > 0.9:
row['totally_new_feature'] = datetime.datetime.now()
row['really_strange_feature'] = random.random
row['we_should_really_ignore_this'] = Predictor
row['pretty_vanilla_ignored_field'] = 8
row['potentially_confusing_things_here'] = float('nan')
row['potentially_confusing_things_again'] = float('inf')
row['this_is_a_list'] = [1,2,3,4,5]
predictions.append(saved_ml_pipeline.predict(row))
print('predictions')
print(predictions)
print('predictions[0]')
print(predictions[0])
print('type(predictions)')
print(type(predictions))
first_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
print('first_score')
print(first_score)
# Make sure our score is good, but not unreasonably good
lower_bound = -3.0
assert lower_bound < first_score < -2.7
# 2. make sure the speed is reasonable (do it a few extra times)
data_length = len(df_boston_test_dictionaries)
start_time = datetime.datetime.now()
for idx in range(1000):
row_num = idx % data_length
saved_ml_pipeline.predict(df_boston_test_dictionaries[row_num])
end_time = datetime.datetime.now()
duration = end_time - start_time
print('duration.total_seconds()')
print(duration.total_seconds())
# It's very difficult to set a benchmark for speed that will work across all machines.
# On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
# That's about 1 millisecond per prediction
# Assuming we might be running on a test box that's pretty weak, multiply by 3
# Also make sure we're not running unreasonably quickly
    assert 0.1 < duration.total_seconds() < 15
# 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
predictions = []
for row in df_boston_test_dictionaries:
predictions.append(saved_ml_pipeline.predict(row))
second_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
print('second_score')
print(second_score)
# Make sure our score is good, but not unreasonably good
assert lower_bound < second_score < -2.7
|
import contextlib
import os
import posixpath
from radicale import pathutils
from radicale.log import logger
class StorageDiscoverMixin:
def discover(self, path, depth="0", child_context_manager=(
lambda path, href=None: contextlib.ExitStack())):
# Path should already be sanitized
sane_path = pathutils.strip_path(path)
attributes = sane_path.split("/") if sane_path else []
folder = self._get_collection_root_folder()
# Create the root collection
self._makedirs_synced(folder)
try:
filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
except ValueError as e:
# Path is unsafe
logger.debug("Unsafe path %r requested from storage: %s",
sane_path, e, exc_info=True)
return
# Check if the path exists and if it leads to a collection or an item
if not os.path.isdir(filesystem_path):
if attributes and os.path.isfile(filesystem_path):
href = attributes.pop()
else:
return
else:
href = None
sane_path = "/".join(attributes)
collection = self._collection_class(
self, pathutils.unstrip_path(sane_path, True))
if href:
yield collection._get(href)
return
yield collection
if depth == "0":
return
for href in collection._list():
with child_context_manager(sane_path, href):
yield collection._get(href)
for entry in os.scandir(filesystem_path):
if not entry.is_dir():
continue
href = entry.name
if not pathutils.is_safe_filesystem_path_component(href):
if not href.startswith(".Radicale"):
logger.debug("Skipping collection %r in %r",
href, sane_path)
continue
sane_child_path = posixpath.join(sane_path, href)
child_path = pathutils.unstrip_path(sane_child_path, True)
with child_context_manager(sane_child_path):
yield self._collection_class(self, child_path)
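# Illustrative sketch (assumed path, not part of the module): discover is a
# generator that yields the collection itself first and, for depth "1", its
# items followed by its direct subcollections.
#
#     for member in storage.discover("/user/calendar/", depth="1"):
#         ...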
|
import mock
import paasta_tools.paastaapi.models as paastamodels
from paasta_tools.autoscaling.pause_service_autoscaler import (
delete_service_autoscale_pause_time,
)
from paasta_tools.autoscaling.pause_service_autoscaler import (
get_service_autoscale_pause_time,
)
from paasta_tools.autoscaling.pause_service_autoscaler import (
update_service_autoscale_pause_time,
)
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.client", autospec=True)
def test_get_service_autoscale_pause_time_error(mock_client):
mock_client.get_paasta_oapi_client.return_value = None
return_code = get_service_autoscale_pause_time("cluster1")
assert return_code == 1
mock_client.get_paasta_oapi_client.assert_called_with(
cluster="cluster1", http_res=True
)
mock_api = mock.Mock()
mock_client.get_paasta_oapi_client.return_value = mock.Mock(default=mock_api)
mock_api.get_service_autoscaler_pause.return_value = (
None,
500,
None,
)
return_code = get_service_autoscale_pause_time("cluster1")
assert return_code == 2
@mock.patch("builtins.print", autospec=True)
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.time", autospec=True)
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.client", autospec=True)
def test_get_service_autoscale_pause_time_not(mock_client, mock_time, mock_print):
mock_api = mock.Mock()
mock_client.get_paasta_oapi_client.return_value = mock.Mock(default=mock_api)
mock_api.get_service_autoscaler_pause.return_value = ("3", 200, None)
mock_time.time.return_value = 4
return_code = get_service_autoscale_pause_time("cluster1")
mock_print.assert_called_with("Service autoscaler is not paused")
assert return_code == 0
@mock.patch(
"paasta_tools.autoscaling.pause_service_autoscaler.print_paused_message",
autospec=True,
)
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.time", autospec=True)
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.client", autospec=True)
def test_get_service_autoscale_pause_time_paused(
mock_client, mock_time, mock_print_paused_message
):
mock_api = mock.Mock()
mock_client.get_paasta_oapi_client.return_value = mock.Mock(default=mock_api)
mock_api.get_service_autoscaler_pause.return_value = ("3", 200, None)
mock_time.time.return_value = 2
return_code = get_service_autoscale_pause_time("cluster1")
mock_print_paused_message.assert_called_with(3.0)
assert return_code == 0
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.client", autospec=True)
def test_update_service_autoscale_pause_time(mock_client):
mock_client.get_paasta_oapi_client.return_value = None
return_code = update_service_autoscale_pause_time("cluster1", "2")
assert return_code == 1
mock_client.get_paasta_oapi_client.assert_called_with(
cluster="cluster1", http_res=True
)
mock_api = mock.Mock()
mock_client.get_paasta_oapi_client.return_value = mock.Mock(default=mock_api)
mock_api.update_service_autoscaler_pause = mock_update = mock.Mock()
mock_update.return_value = (None, 500, None)
return_code = update_service_autoscale_pause_time("cluster1", "3")
mock_update.assert_called_once_with(
paastamodels.InlineObject(minutes=3), _return_http_data_only=False
)
assert return_code == 2
mock_update.return_value = (None, 200, None)
return_code = update_service_autoscale_pause_time("cluster1", "2")
assert return_code == 0
@mock.patch("paasta_tools.autoscaling.pause_service_autoscaler.client", autospec=True)
@mock.patch("paasta_tools.paastaapi.apis.DefaultApi", autospec=True)
def test_delete_service_autoscale_pause_time(mock_default_api, mock_client):
mock_client.get_paasta_oapi_client.return_value = None
return_code = delete_service_autoscale_pause_time("cluster1")
assert return_code == 1
mock_client.get_paasta_oapi_client.assert_called_with(
cluster="cluster1", http_res=True
)
mock_api = mock.Mock()
mock_client.get_paasta_oapi_client.return_value = mock.Mock(default=mock_api)
mock_api.delete_service_autoscaler_pause = mock_delete = mock.Mock()
mock_delete.return_value = (None, 500, None)
return_code = delete_service_autoscale_pause_time("cluster1")
mock_delete.assert_called_once_with(_return_http_data_only=False)
assert return_code == 2
mock_delete.return_value = (None, 200, None)
return_code = delete_service_autoscale_pause_time("cluster1")
assert return_code == 0
|
from PyQt5.QtCore import pyqtSignal, QUrl
from PyQt5.QtGui import QPalette
from PyQt5.QtWebEngineWidgets import QWebEngineView, QWebEnginePage
from qutebrowser.browser import shared
from qutebrowser.browser.webengine import webenginesettings, certificateerror
from qutebrowser.config import config
from qutebrowser.utils import log, debug, usertypes
class WebEngineView(QWebEngineView):
"""Custom QWebEngineView subclass with qutebrowser-specific features."""
def __init__(self, *, tabdata, win_id, private, parent=None):
super().__init__(parent)
self._win_id = win_id
self._tabdata = tabdata
theme_color = self.style().standardPalette().color(QPalette.Base)
if private:
assert webenginesettings.private_profile is not None
profile = webenginesettings.private_profile
assert profile.isOffTheRecord()
else:
profile = webenginesettings.default_profile
page = WebEnginePage(theme_color=theme_color, profile=profile,
parent=self)
self.setPage(page)
def render_widget(self):
"""Get the RenderWidgetHostViewQt for this view."""
return self.focusProxy()
    def shutdown(self):
        """Shut down the underlying page."""
        self.page().shutdown()
def createWindow(self, wintype):
"""Called by Qt when a page wants to create a new window.
This function is called from the createWindow() method of the
associated QWebEnginePage, each time the page wants to create a new
window of the given type. This might be the result, for example, of a
JavaScript request to open a document in a new window.
Args:
wintype: This enum describes the types of window that can be
created by the createWindow() function.
QWebEnginePage::WebBrowserWindow:
A complete web browser window.
QWebEnginePage::WebBrowserTab:
A web browser tab.
QWebEnginePage::WebDialog:
A window without decoration.
QWebEnginePage::WebBrowserBackgroundTab:
A web browser tab without hiding the current visible
WebEngineView.
Return:
The new QWebEngineView object.
"""
debug_type = debug.qenum_key(QWebEnginePage, wintype)
background = config.val.tabs.background
log.webview.debug("createWindow with type {}, background {}".format(
debug_type, background))
if wintype == QWebEnginePage.WebBrowserWindow:
# Shift-Alt-Click
target = usertypes.ClickTarget.window
elif wintype == QWebEnginePage.WebDialog:
log.webview.warning("{} requested, but we don't support "
"that!".format(debug_type))
target = usertypes.ClickTarget.tab
elif wintype == QWebEnginePage.WebBrowserTab:
# Middle-click / Ctrl-Click with Shift
# FIXME:qtwebengine this also affects target=_blank links...
if background:
target = usertypes.ClickTarget.tab
else:
target = usertypes.ClickTarget.tab_bg
elif wintype == QWebEnginePage.WebBrowserBackgroundTab:
# Middle-click / Ctrl-Click
if background:
target = usertypes.ClickTarget.tab_bg
else:
target = usertypes.ClickTarget.tab
else:
raise ValueError("Invalid wintype {}".format(debug_type))
tab = shared.get_tab(self._win_id, target)
return tab._widget # pylint: disable=protected-access
def contextMenuEvent(self, ev):
"""Prevent context menus when rocker gestures are enabled."""
if config.val.input.mouse.rocker_gestures:
ev.ignore()
return
super().contextMenuEvent(ev)
class WebEnginePage(QWebEnginePage):
"""Custom QWebEnginePage subclass with qutebrowser-specific features.
Attributes:
_is_shutting_down: Whether the page is currently shutting down.
_theme_color: The theme background color.
Signals:
certificate_error: Emitted on certificate errors.
Needs to be directly connected to a slot setting the
'ignore' attribute.
shutting_down: Emitted when the page is shutting down.
navigation_request: Emitted on acceptNavigationRequest.
"""
certificate_error = pyqtSignal(certificateerror.CertificateErrorWrapper)
shutting_down = pyqtSignal()
navigation_request = pyqtSignal(usertypes.NavigationRequest)
def __init__(self, *, theme_color, profile, parent=None):
super().__init__(profile, parent)
self._is_shutting_down = False
self._theme_color = theme_color
self._set_bg_color()
config.instance.changed.connect(self._set_bg_color)
@config.change_filter('colors.webpage.bg')
def _set_bg_color(self):
col = config.val.colors.webpage.bg
if col is None:
col = self._theme_color
self.setBackgroundColor(col)
def shutdown(self):
self._is_shutting_down = True
self.shutting_down.emit()
def certificateError(self, error):
"""Handle certificate errors coming from Qt."""
error = certificateerror.CertificateErrorWrapper(error)
self.certificate_error.emit(error)
return error.ignore
def javaScriptConfirm(self, url, js_msg):
"""Override javaScriptConfirm to use qutebrowser prompts."""
if self._is_shutting_down:
return False
try:
return shared.javascript_confirm(
url, js_msg, abort_on=[self.loadStarted, self.shutting_down])
except shared.CallSuper:
return super().javaScriptConfirm(url, js_msg)
def javaScriptPrompt(self, url, js_msg, default):
"""Override javaScriptPrompt to use qutebrowser prompts."""
if self._is_shutting_down:
return (False, "")
try:
return shared.javascript_prompt(
url, js_msg, default, abort_on=[self.loadStarted, self.shutting_down])
except shared.CallSuper:
return super().javaScriptPrompt(url, js_msg, default)
def javaScriptAlert(self, url, js_msg):
"""Override javaScriptAlert to use qutebrowser prompts."""
if self._is_shutting_down:
return
try:
shared.javascript_alert(
url, js_msg, abort_on=[self.loadStarted, self.shutting_down])
except shared.CallSuper:
super().javaScriptAlert(url, js_msg)
def javaScriptConsoleMessage(self, level, msg, line, source):
"""Log javascript messages to qutebrowser's log."""
level_map = {
QWebEnginePage.InfoMessageLevel: usertypes.JsLogLevel.info,
QWebEnginePage.WarningMessageLevel: usertypes.JsLogLevel.warning,
QWebEnginePage.ErrorMessageLevel: usertypes.JsLogLevel.error,
}
shared.javascript_log_message(level_map[level], source, line, msg)
def acceptNavigationRequest(self,
url: QUrl,
typ: QWebEnginePage.NavigationType,
is_main_frame: bool) -> bool:
"""Override acceptNavigationRequest to forward it to the tab API."""
type_map = {
QWebEnginePage.NavigationTypeLinkClicked:
usertypes.NavigationRequest.Type.link_clicked,
QWebEnginePage.NavigationTypeTyped:
usertypes.NavigationRequest.Type.typed,
QWebEnginePage.NavigationTypeFormSubmitted:
usertypes.NavigationRequest.Type.form_submitted,
QWebEnginePage.NavigationTypeBackForward:
usertypes.NavigationRequest.Type.back_forward,
QWebEnginePage.NavigationTypeReload:
usertypes.NavigationRequest.Type.reloaded,
QWebEnginePage.NavigationTypeOther:
usertypes.NavigationRequest.Type.other,
}
try:
type_map[QWebEnginePage.NavigationTypeRedirect] = (
usertypes.NavigationRequest.Type.redirect)
except AttributeError:
# Added in Qt 5.14
pass
navigation = usertypes.NavigationRequest(
url=url,
navigation_type=type_map.get(
typ, usertypes.NavigationRequest.Type.other),
is_main_frame=is_main_frame)
self.navigation_request.emit(navigation)
return navigation.accepted
|
import logging
import pyversasense as pyv
import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from .const import (
KEY_CONSUMER,
KEY_IDENTIFIER,
KEY_MEASUREMENT,
KEY_PARENT_MAC,
KEY_PARENT_NAME,
KEY_UNIT,
PERIPHERAL_CLASS_SENSOR,
PERIPHERAL_CLASS_SENSOR_ACTUATOR,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "versasense"
# Validation of the user's configuration
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass, config):
"""Set up the versasense component."""
session = aiohttp_client.async_get_clientsession(hass)
    consumer = pyv.Consumer(config[DOMAIN][CONF_HOST], session)
hass.data[DOMAIN] = {KEY_CONSUMER: consumer}
await _configure_entities(hass, config, consumer)
# Return boolean to indicate that initialization was successful.
return True
async def _configure_entities(hass, config, consumer):
"""Fetch all devices with their peripherals for representation."""
devices = await consumer.fetchDevices()
_LOGGER.debug(devices)
sensor_info_list = []
switch_info_list = []
for mac, device in devices.items():
_LOGGER.info("Device connected: %s %s", device.name, mac)
hass.data[DOMAIN][mac] = {}
for peripheral_id, peripheral in device.peripherals.items():
hass.data[DOMAIN][mac][peripheral_id] = peripheral
if peripheral.classification == PERIPHERAL_CLASS_SENSOR:
sensor_info_list = _add_entity_info_to_list(
peripheral, device, sensor_info_list
)
elif peripheral.classification == PERIPHERAL_CLASS_SENSOR_ACTUATOR:
switch_info_list = _add_entity_info_to_list(
peripheral, device, switch_info_list
)
if sensor_info_list:
_load_platform(hass, config, "sensor", sensor_info_list)
if switch_info_list:
_load_platform(hass, config, "switch", switch_info_list)
def _add_entity_info_to_list(peripheral, device, entity_info_list):
"""Add info from a peripheral to specified list."""
for measurement in peripheral.measurements:
entity_info = {
KEY_IDENTIFIER: peripheral.identifier,
KEY_UNIT: measurement.unit,
KEY_MEASUREMENT: measurement.name,
KEY_PARENT_NAME: device.name,
KEY_PARENT_MAC: device.mac,
}
entity_info_list.append(entity_info)
return entity_info_list
def _load_platform(hass, config, entity_type, entity_info_list):
"""Load platform with list of entity info."""
hass.async_create_task(
async_load_platform(hass, entity_type, DOMAIN, entity_info_list, config)
)
|
from collections import Counter
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class SpacyEntities(FeatsFromSpacyDoc):
def __init__(self,
use_lemmas=False,
entity_types_to_censor=set(),
entity_types_to_use=None,
tag_types_to_censor=set(),
strip_final_period=False):
self._entity_types_to_use = entity_types_to_use
FeatsFromSpacyDoc.__init__(self, use_lemmas, entity_types_to_censor,
tag_types_to_censor, strip_final_period)
def get_feats(self, doc):
return Counter([
' '.join(str(ent).split()).lower()
for ent
in doc.ents
if ((self._entity_types_to_use is None
or ent.label_ in self._entity_types_to_use)
and (ent.label_ not in self._entity_types_to_censor))
])
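# Illustrative usage (a sketch; assumes a spaCy pipeline with NER such as
# 'en_core_web_sm', which is not part of this module):
#   nlp = spacy.load('en_core_web_sm')
#   feats = SpacyEntities(entity_types_to_use={'PERSON'}).get_feats(
#       nlp('Ada Lovelace corresponded with Charles Babbage.'))
#   # feats might be Counter({'ada lovelace': 1, 'charles babbage': 1}),
#   # depending on which spans the model tags as PERSON.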
|
import asyncio
import aiohue
from homeassistant.components.hue.hue_event import CONF_HUE_EVENT
from .conftest import create_mock_bridge, setup_bridge_for_sensors as setup_bridge
from tests.async_mock import Mock
PRESENCE_SENSOR_1_PRESENT = {
"state": {"presence": True, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Living room sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_1 = {
"state": {
"lightlevel": 1,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 1",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_1 = {
"state": {"temperature": 1775, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 1",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0402",
"capabilities": {"certified": True},
}
PRESENCE_SENSOR_2_NOT_PRESENT = {
"state": {"presence": False, "lastupdated": "2019-01-01T00:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Kitchen sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_2 = {
"state": {
"lightlevel": 10001,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 2",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_2 = {
"state": {"temperature": 1875, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 2",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0402",
"capabilities": {"certified": True},
}
PRESENCE_SENSOR_3_PRESENT = {
"state": {"presence": True, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Bedroom sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_3 = {
"state": {
"lightlevel": 1,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 3",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_3 = {
"state": {"temperature": 1775, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 3",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0402",
"capabilities": {"certified": True},
}
UNSUPPORTED_SENSOR = {
"state": {"status": 0, "lastupdated": "2019-01-01T01:00:00"},
"config": {"on": True, "reachable": True},
"name": "Unsupported sensor",
"type": "CLIPGenericStatus",
"modelid": "PHWA01",
"manufacturername": "Philips",
"swversion": "1.0",
"uniqueid": "arbitrary",
"recycle": True,
}
HUE_TAP_REMOTE_1 = {
"state": {"buttonevent": 17, "lastupdated": "2019-06-22T14:43:50"},
"swupdate": {"state": "notupdatable", "lastinstall": None},
"config": {"on": True},
"name": "Hue Tap",
"type": "ZGPSwitch",
"modelid": "ZGPSWITCH",
"manufacturername": "Philips",
"productname": "Hue tap switch",
"diversityid": "d8cde5d5-0eef-4b95-b0f0-71ddd2952af4",
"uniqueid": "00:00:00:00:00:44:23:08-f2",
"capabilities": {"certified": True, "primary": True, "inputs": []},
}
HUE_DIMMER_REMOTE_1 = {
"state": {"buttonevent": 4002, "lastupdated": "2019-12-28T21:58:02"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-10-13T13:16:15"},
"config": {"on": True, "battery": 100, "reachable": True, "pending": []},
"name": "Hue dimmer switch 1",
"type": "ZLLSwitch",
"modelid": "RWL021",
"manufacturername": "Philips",
"productname": "Hue dimmer switch",
"diversityid": "73bbabea-3420-499a-9856-46bf437e119b",
"swversion": "6.1.1.28573",
"uniqueid": "00:17:88:01:10:3e:3a:dc-02-fc00",
"capabilities": {"certified": True, "primary": True, "inputs": []},
}
SENSOR_RESPONSE = {
"1": PRESENCE_SENSOR_1_PRESENT,
"2": LIGHT_LEVEL_SENSOR_1,
"3": TEMPERATURE_SENSOR_1,
"4": PRESENCE_SENSOR_2_NOT_PRESENT,
"5": LIGHT_LEVEL_SENSOR_2,
"6": TEMPERATURE_SENSOR_2,
"7": HUE_TAP_REMOTE_1,
"8": HUE_DIMMER_REMOTE_1,
}
async def test_no_sensors(hass, mock_bridge):
"""Test the update_items function when no sensors are found."""
mock_bridge.allow_groups = True
mock_bridge.mock_sensor_responses.append({})
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
assert len(hass.states.async_all()) == 0
async def test_sensors_with_multiple_bridges(hass, mock_bridge):
"""Test the update_items function with some sensors."""
mock_bridge_2 = create_mock_bridge(hass)
mock_bridge_2.mock_sensor_responses.append(
{
"1": PRESENCE_SENSOR_3_PRESENT,
"2": LIGHT_LEVEL_SENSOR_3,
"3": TEMPERATURE_SENSOR_3,
}
)
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
await setup_bridge(hass, mock_bridge)
await setup_bridge(hass, mock_bridge_2, hostname="mock-bridge-2")
assert len(mock_bridge.mock_requests) == 1
assert len(mock_bridge_2.mock_requests) == 1
# 3 "physical" sensors with 3 virtual sensors each + 1 battery sensor
assert len(hass.states.async_all()) == 10
async def test_sensors(hass, mock_bridge):
"""Test the update_items function with some sensors."""
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
# 2 "physical" sensors with 3 virtual sensors each
assert len(hass.states.async_all()) == 7
presence_sensor_1 = hass.states.get("binary_sensor.living_room_sensor_motion")
light_level_sensor_1 = hass.states.get("sensor.living_room_sensor_light_level")
temperature_sensor_1 = hass.states.get("sensor.living_room_sensor_temperature")
assert presence_sensor_1 is not None
assert presence_sensor_1.state == "on"
assert light_level_sensor_1 is not None
assert light_level_sensor_1.state == "1.0"
assert light_level_sensor_1.name == "Living room sensor light level"
assert temperature_sensor_1 is not None
assert temperature_sensor_1.state == "17.75"
assert temperature_sensor_1.name == "Living room sensor temperature"
presence_sensor_2 = hass.states.get("binary_sensor.kitchen_sensor_motion")
light_level_sensor_2 = hass.states.get("sensor.kitchen_sensor_light_level")
temperature_sensor_2 = hass.states.get("sensor.kitchen_sensor_temperature")
assert presence_sensor_2 is not None
assert presence_sensor_2.state == "off"
assert light_level_sensor_2 is not None
assert light_level_sensor_2.state == "10.0"
assert light_level_sensor_2.name == "Kitchen sensor light level"
assert temperature_sensor_2 is not None
assert temperature_sensor_2.state == "18.75"
assert temperature_sensor_2.name == "Kitchen sensor temperature"
battery_remote_1 = hass.states.get("sensor.hue_dimmer_switch_1_battery_level")
assert battery_remote_1 is not None
assert battery_remote_1.state == "100"
assert battery_remote_1.name == "Hue dimmer switch 1 battery level"
async def test_unsupported_sensors(hass, mock_bridge):
"""Test that unsupported sensors don't get added and don't fail."""
response_with_unsupported = dict(SENSOR_RESPONSE)
response_with_unsupported["7"] = UNSUPPORTED_SENSOR
mock_bridge.mock_sensor_responses.append(response_with_unsupported)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
# 2 "physical" sensors with 3 virtual sensors each + 1 battery sensor
assert len(hass.states.async_all()) == 7
async def test_new_sensor_discovered(hass, mock_bridge):
"""Test if 2nd update has a new sensor."""
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
assert len(hass.states.async_all()) == 7
new_sensor_response = dict(SENSOR_RESPONSE)
new_sensor_response.update(
{
"9": PRESENCE_SENSOR_3_PRESENT,
"10": LIGHT_LEVEL_SENSOR_3,
"11": TEMPERATURE_SENSOR_3,
}
)
mock_bridge.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 2
assert len(hass.states.async_all()) == 10
presence = hass.states.get("binary_sensor.bedroom_sensor_motion")
assert presence is not None
assert presence.state == "on"
temperature = hass.states.get("sensor.bedroom_sensor_temperature")
assert temperature is not None
assert temperature.state == "17.75"
async def test_sensor_removed(hass, mock_bridge):
"""Test if 2nd update has removed sensor."""
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
assert len(hass.states.async_all()) == 7
mock_bridge.mock_sensor_responses.clear()
keys = ("1", "2", "3")
mock_bridge.mock_sensor_responses.append({k: SENSOR_RESPONSE[k] for k in keys})
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
# To flush out the service call to update the group
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 2
assert len(hass.states.async_all()) == 3
sensor = hass.states.get("binary_sensor.living_room_sensor_motion")
assert sensor is not None
removed_sensor = hass.states.get("binary_sensor.kitchen_sensor_motion")
assert removed_sensor is None
async def test_update_timeout(hass, mock_bridge):
"""Test bridge marked as not available if timeout error during update."""
mock_bridge.api.sensors.update = Mock(side_effect=asyncio.TimeoutError)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 0
assert len(hass.states.async_all()) == 0
async def test_update_unauthorized(hass, mock_bridge):
"""Test bridge marked as not authorized if unauthorized during update."""
mock_bridge.api.sensors.update = Mock(side_effect=aiohue.Unauthorized)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 0
assert len(hass.states.async_all()) == 0
assert len(mock_bridge.handle_unauthorized_error.mock_calls) == 1
async def test_hue_events(hass, mock_bridge):
"""Test that hue remotes fire events when pressed."""
mock_bridge.mock_sensor_responses.append(SENSOR_RESPONSE)
mock_listener = Mock()
unsub = hass.bus.async_listen(CONF_HUE_EVENT, mock_listener)
await setup_bridge(hass, mock_bridge)
assert len(mock_bridge.mock_requests) == 1
assert len(hass.states.async_all()) == 7
assert len(mock_listener.mock_calls) == 0
new_sensor_response = dict(SENSOR_RESPONSE)
new_sensor_response["7"]["state"] = {
"buttonevent": 18,
"lastupdated": "2019-12-28T22:58:02",
}
mock_bridge.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 2
assert len(hass.states.async_all()) == 7
assert len(mock_listener.mock_calls) == 1
assert mock_listener.mock_calls[0][1][0].data == {
"id": "hue_tap",
"unique_id": "00:00:00:00:00:44:23:08-f2",
"event": 18,
"last_updated": "2019-12-28T22:58:02",
}
new_sensor_response = dict(new_sensor_response)
new_sensor_response["8"]["state"] = {
"buttonevent": 3002,
"lastupdated": "2019-12-28T22:58:01",
}
mock_bridge.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 3
assert len(hass.states.async_all()) == 7
assert len(mock_listener.mock_calls) == 2
assert mock_listener.mock_calls[1][1][0].data == {
"id": "hue_dimmer_switch_1",
"unique_id": "00:17:88:01:10:3e:3a:dc-02-fc00",
"event": 3002,
"last_updated": "2019-12-28T22:58:01",
}
# Add a new remote. In discovery the new event is registered **but not fired**
new_sensor_response = dict(new_sensor_response)
new_sensor_response["21"] = {
"state": {
"rotaryevent": 2,
"expectedrotation": 208,
"expectedeventduration": 400,
"lastupdated": "2020-01-31T15:56:19",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-11-26T03:35:21"},
"config": {"on": True, "battery": 100, "reachable": True, "pending": []},
"name": "Lutron Aurora 1",
"type": "ZLLRelativeRotary",
"modelid": "Z3-1BRL",
"manufacturername": "Lutron",
"productname": "Lutron Aurora",
"diversityid": "2c3a75ff-55c4-4e4d-8c44-82d330b8eb9b",
"swversion": "3.4",
"uniqueid": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
"capabilities": {
"certified": True,
"primary": True,
"inputs": [
{
"repeatintervals": [400],
"events": [
{"rotaryevent": 1, "eventtype": "start"},
{"rotaryevent": 2, "eventtype": "repeat"},
],
}
],
},
}
mock_bridge.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 4
assert len(hass.states.async_all()) == 8
assert len(mock_listener.mock_calls) == 2
# A new press fires the event
new_sensor_response["21"]["state"]["lastupdated"] = "2020-01-31T15:57:19"
mock_bridge.mock_sensor_responses.append(new_sensor_response)
# Force updates to run again
await mock_bridge.sensor_manager.coordinator.async_refresh()
await hass.async_block_till_done()
assert len(mock_bridge.mock_requests) == 5
assert len(hass.states.async_all()) == 8
assert len(mock_listener.mock_calls) == 3
assert mock_listener.mock_calls[2][1][0].data == {
"id": "lutron_aurora_1",
"unique_id": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
"event": 2,
"last_updated": "2020-01-31T15:57:19",
}
unsub()
|
from datetime import timedelta
import logging
import speedtest
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import CoreState, callback
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
CONF_MANUAL,
CONF_SERVER_ID,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SERVER,
DOMAIN,
SENSOR_TYPES,
SPEED_TEST_SERVICE,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SERVER_ID): cv.positive_int,
vol.Optional(
CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL)
): cv.positive_time_period,
vol.Optional(CONF_MANUAL, default=False): cv.boolean,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)
): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def server_id_valid(server_id):
"""Check if server_id is valid."""
try:
api = speedtest.Speedtest()
api.get_servers([int(server_id)])
except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers):
return False
return True
async def async_setup(hass, config):
"""Import integration from config."""
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up the Speedtest.net component."""
coordinator = SpeedTestDataCoordinator(hass, config_entry)
await coordinator.async_setup()
async def _enable_scheduled_speedtests(*_):
"""Activate the data update coordinator."""
coordinator.update_interval = timedelta(
minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
)
await coordinator.async_refresh()
if not config_entry.options[CONF_MANUAL]:
if hass.state == CoreState.running:
await _enable_scheduled_speedtests()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
else:
# Running a speed test during startup can prevent
# integrations from being able to setup because it
# can saturate the network interface.
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests
)
hass.data[DOMAIN] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload SpeedTest Entry from config_entry."""
hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE)
hass.data[DOMAIN].async_unload()
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
hass.data.pop(DOMAIN)
return True
class SpeedTestDataCoordinator(DataUpdateCoordinator):
"""Get the latest data from speedtest.net."""
def __init__(self, hass, config_entry):
"""Initialize the data object."""
self.hass = hass
self.config_entry = config_entry
self.api = None
self.servers = {}
self._unsub_update_listener = None
super().__init__(
self.hass,
_LOGGER,
name=DOMAIN,
update_method=self.async_update,
)
def update_servers(self):
"""Update list of test servers."""
try:
server_list = self.api.get_servers()
except speedtest.ConfigRetrievalError:
_LOGGER.debug("Error retrieving server list")
return
self.servers[DEFAULT_SERVER] = {}
for server in sorted(
server_list.values(),
key=lambda server: server[0]["country"] + server[0]["sponsor"],
):
self.servers[
f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}"
] = server[0]
def update_data(self):
"""Get the latest data from speedtest.net."""
self.update_servers()
self.api.closest.clear()
if self.config_entry.options.get(CONF_SERVER_ID):
server_id = self.config_entry.options.get(CONF_SERVER_ID)
self.api.get_servers(servers=[server_id])
self.api.get_best_server()
_LOGGER.debug(
"Executing speedtest.net speed test with server_id: %s", self.api.best["id"]
)
self.api.download()
self.api.upload()
return self.api.results.dict()
async def async_update(self, *_):
"""Update Speedtest data."""
try:
return await self.hass.async_add_executor_job(self.update_data)
except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err:
raise UpdateFailed from err
async def async_set_options(self):
"""Set options for entry."""
if not self.config_entry.options:
data = {**self.config_entry.data}
options = {
CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL),
CONF_MANUAL: data.pop(CONF_MANUAL, False),
CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")),
}
self.hass.config_entries.async_update_entry(
self.config_entry, data=data, options=options
)
async def async_setup(self):
"""Set up SpeedTest."""
try:
self.api = await self.hass.async_add_executor_job(speedtest.Speedtest)
except speedtest.ConfigRetrievalError as err:
raise ConfigEntryNotReady from err
async def request_update(call):
"""Request update."""
await self.async_request_refresh()
await self.async_set_options()
await self.hass.async_add_executor_job(self.update_servers)
self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update)
self._unsub_update_listener = self.config_entry.add_update_listener(
options_updated_listener
)
@callback
def async_unload(self):
"""Unload the coordinator."""
if not self._unsub_update_listener:
return
self._unsub_update_listener()
self._unsub_update_listener = None
async def options_updated_listener(hass, entry):
"""Handle options update."""
if entry.options[CONF_MANUAL]:
hass.data[DOMAIN].update_interval = None
return
hass.data[DOMAIN].update_interval = timedelta(
minutes=entry.options[CONF_SCAN_INTERVAL]
)
await hass.data[DOMAIN].async_request_refresh()
|
import sys
from twisted.internet import reactor
from twisted.python import log
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
from autobahn.websocket.compress import PerMessageDeflateOffer, \
PerMessageDeflateResponse, \
PerMessageDeflateResponseAccept
class EchoClientProtocol(WebSocketClientProtocol):
def onConnect(self, response):
print("WebSocket extensions in use: {}".format(response.extensions))
def sendHello(self):
msg = "Hello, world!" * 100
self.sendMessage(msg.encode('utf8'))
def onOpen(self):
self.sendHello()
def onMessage(self, payload, isBinary):
if not isBinary:
print("Text message received: {}".format(payload.decode('utf8')))
reactor.callLater(1, self.sendHello)
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Need the WebSocket server address, i.e. ws://127.0.0.1:9000")
sys.exit(1)
log.startLogging(sys.stdout)
factory = WebSocketClientFactory(sys.argv[1])
factory.protocol = EchoClientProtocol
# Enable WebSocket extension "permessage-deflate".
# The extensions offered to the server ..
offers = [PerMessageDeflateOffer()]
factory.setProtocolOptions(perMessageCompressionOffers=offers)
# Function to accept responses from the server ..
def accept(response):
if isinstance(response, PerMessageDeflateResponse):
return PerMessageDeflateResponseAccept(response)
factory.setProtocolOptions(perMessageCompressionAccept=accept)
# run client
connectWS(factory)
reactor.run()
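# To try this client (a sketch; assumes a compatible WebSocket echo server
# with permessage-deflate support is listening on the given address):
#   python echo_client.py ws://127.0.0.1:9000   # script name hypothetical
# If the server accepted the offer, onConnect() prints the negotiated
# PerMessageDeflate extension; otherwise response.extensions is empty.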
|
import diamond.collector
from diamond.collector import str_to_bool
try:
from xml.etree import ElementTree
except ImportError:
import cElementTree as ElementTree
try:
import libvirt
except ImportError:
libvirt = None
class LibvirtKVMCollector(diamond.collector.Collector):
blockStats = {
'read_reqs': 0,
'read_bytes': 1,
'write_reqs': 2,
'write_bytes': 3
}
vifStats = {
'rx_bytes': 0,
'rx_packets': 1,
'rx_errors': 2,
'rx_drops': 3,
'tx_bytes': 4,
'tx_packets': 5,
'tx_errors': 6,
'tx_drops': 7
}
def get_default_config_help(self):
config_help = super(LibvirtKVMCollector,
self).get_default_config_help()
config_help.update({
'uri': """The libvirt connection URI. By default it's
'qemu:///system'. One decent option is
'qemu+unix:///system?socket=/var/run/libvirt/libvirt-sock-ro'.""",
'sort_by_uuid': """Use the <uuid> of the instance instead of the
default <name>, useful in OpenStack deployments where <name> is only
specific to the compute node""",
'cpu_absolute': """CPU stats reported as percentage by default, or
as cumulative nanoseconds since VM creation if this is True."""
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(LibvirtKVMCollector, self).get_default_config()
config.update({
'path': 'libvirt-kvm',
'sort_by_uuid': False,
'uri': 'qemu:///system',
'cpu_absolute': False
})
return config
def get_devices(self, dom, type):
devices = []
# Create a XML tree from the domain XML description.
tree = ElementTree.fromstring(dom.XMLDesc(0))
for target in tree.findall("devices/%s/target" % type):
dev = target.get("dev")
if dev not in devices:
devices.append(dev)
return devices
def get_disk_devices(self, dom):
return self.get_devices(dom, 'disk')
def get_network_devices(self, dom):
return self.get_devices(dom, 'interface')
def report_cpu_metric(self, statname, value, instance):
        # Value is a cumulative nanosecond counter
if str_to_bool(self.config['cpu_absolute']):
metric = value
else:
            # cpu_time is a cumulative nanosecond counter; dividing by 1e7
            # turns the per-second derivative into a percentage
            # (1e9 ns of CPU time per second == 100%).
metric = self.derivative(statname, float(value) / 10000000.0,
max_value=diamond.collector.MAX_COUNTER,
instance=instance)
self.publish(statname, metric, instance=instance)
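    # A sketch of the arithmetic for the default (percentage) mode: if a
    # vCPU's cumulative counter grew by 1e9 ns between two collections one
    # second apart, the per-second derivative of value / 1e7 is
    # (1e9 / 1e7) / 1 = 100, i.e. 100% of one core (assuming diamond's
    # derivative() normalizes by the elapsed interval). With
    # cpu_absolute=True the raw counter is published instead.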
def collect(self):
if libvirt is None:
self.log.error('Unable to import libvirt')
return {}
conn = libvirt.openReadOnly(self.config['uri'])
for dom in [conn.lookupByID(n) for n in conn.listDomainsID()]:
if str_to_bool(self.config['sort_by_uuid']):
name = dom.UUIDString()
else:
name = dom.name()
# CPU stats
vcpus = dom.getCPUStats(True, 0)
totalcpu = 0
idx = 0
for vcpu in vcpus:
cputime = vcpu['cpu_time']
self.report_cpu_metric('cpu.%s.time' % idx, cputime, name)
idx += 1
totalcpu += cputime
self.report_cpu_metric('cpu.total.time', totalcpu, name)
# Disk stats
disks = self.get_disk_devices(dom)
accum = {}
for stat in self.blockStats.keys():
accum[stat] = 0
for disk in disks:
stats = dom.blockStats(disk)
for stat in self.blockStats.keys():
idx = self.blockStats[stat]
val = stats[idx]
accum[stat] += val
self.publish('block.%s.%s' % (disk, stat), val,
instance=name)
for stat in self.blockStats.keys():
self.publish('block.total.%s' % stat, accum[stat],
instance=name)
# Network stats
vifs = self.get_network_devices(dom)
accum = {}
for stat in self.vifStats.keys():
accum[stat] = 0
for vif in vifs:
stats = dom.interfaceStats(vif)
for stat in self.vifStats.keys():
idx = self.vifStats[stat]
val = stats[idx]
accum[stat] += val
self.publish('net.%s.%s' % (vif, stat), val,
instance=name)
for stat in self.vifStats.keys():
self.publish('net.total.%s' % stat, accum[stat],
instance=name)
# Memory stats
mem = dom.memoryStats()
self.publish('memory.nominal', mem['actual'] * 1024,
instance=name)
self.publish('memory.rss', mem['rss'] * 1024, instance=name)
|
import unittest
from common import gpu_test
class TestRapids(unittest.TestCase):
@gpu_test
def test_dbscan(self):
import cudf
from cuml.cluster import DBSCAN
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
import numpy as np
from tensornetwork.block_sparse.sizetypes import SIZE_T
from tensornetwork.block_sparse.caching import get_cacher
from typing import List, Union, Any, Type, Tuple
def _randn(size: int, dtype: Type[np.number] = np.float64) -> np.ndarray:
"""
Initialize a 1d np.ndarray of length `size` of dtype `dtype`
with random gaussian values.
Args:
size: The length of the array.
dtype: The desired dtype.
Returns:
np.ndarray: The data array.
"""
data = np.random.randn(size).astype(dtype)
if ((np.dtype(dtype) is np.dtype(np.complex128)) or
(np.dtype(dtype) is np.dtype(np.complex64))):
data += 1j * np.random.randn(size).astype(dtype)
return data
def _random(size: int,
dtype: Type[np.number] = np.float64,
boundaries: Tuple = (0, 1)) -> np.ndarray:
"""
Initialize a 1d np.ndarray of length `size` of dtype `dtype`
with random uniform values.
Args:
size: The length of the array.
dtype: The desired dtype.
boundaries: The boundaries of the interval where numbers are
drawn from.
Returns:
np.ndarray: The data array.
"""
data = np.random.uniform(boundaries[0], boundaries[1], size).astype(dtype)
if ((np.dtype(dtype) is np.dtype(np.complex128)) or
(np.dtype(dtype) is np.dtype(np.complex64))):
data += 1j * np.random.uniform(boundaries[0], boundaries[1],
size).astype(dtype)
return data
def get_real_dtype(dtype: Type[np.number]) -> Type[np.number]:
if dtype == np.complex128:
return np.float64
if dtype == np.complex64:
return np.float32
return dtype
def flatten(list_of_list: List[List]) -> np.ndarray:
"""
Flatten a list of lists into a single list.
Args:
    list_of_list: A list of lists.
  Returns:
    np.ndarray: The flattened input.
"""
return np.array([l for sublist in list_of_list for l in sublist])
def fuse_stride_arrays(dims: Union[List[int], np.ndarray],
strides: Union[List[int], np.ndarray]) -> np.ndarray:
"""
Compute linear positions of tensor elements
of a tensor with dimensions `dims` according to `strides`.
Args:
dims: An np.ndarray of (original) tensor dimensions.
strides: An np.ndarray of (possibly permuted) strides.
Returns:
np.ndarray: Linear positions of tensor elements according to `strides`.
"""
return fuse_ndarrays([
np.arange(0, strides[n] * dims[n], strides[n], dtype=SIZE_T)
for n in range(len(dims))
])
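# Worked example (illustrative): a row-major tensor with dims [2, 3] has
# strides [3, 1], so
#   fuse_stride_arrays([2, 3], [3, 1])
# fuses arange(0, 6, 3) = [0, 3] with arange(0, 3, 1) = [0, 1, 2] into
# [0, 1, 2, 3, 4, 5] -- the linear positions of all tensor elements.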
def fuse_ndarrays(arrays: List[Union[List, np.ndarray]]) -> np.ndarray:
"""
Fuse all `arrays` by simple kronecker addition.
  Arrays are fused from "right to left".
Args:
arrays: A list of arrays to be fused.
Returns:
np.ndarray: The result of fusing `arrays`.
"""
if len(arrays) == 1:
return np.array(arrays[0])
fused_arrays = np.asarray(arrays[0])
for n in range(1, len(arrays)):
fused_arrays = np.ravel(np.add.outer(fused_arrays, arrays[n]))
return fused_arrays
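# Worked example (illustrative):
#   fuse_ndarrays([np.array([0, 10]), np.array([0, 1, 2])])
# computes np.add.outer([0, 10], [0, 1, 2]).ravel()
# = [0, 1, 2, 10, 11, 12].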
def fuse_degeneracies(degen1: Union[List, np.ndarray],
degen2: Union[List, np.ndarray]) -> np.ndarray:
"""
Fuse degeneracies `degen1` and `degen2` of two leg-charges
by simple kronecker product. `degen1` and `degen2` typically belong to two
consecutive legs of `BlockSparseTensor`.
Given `degen1 = [1, 2, 3]` and `degen2 = [10, 100]`, this returns
`[10, 100, 20, 200, 30, 300]`.
When using row-major ordering of indices in `BlockSparseTensor`,
the position of `degen1` should be "to the left" of the position of `degen2`.
Args:
degen1: Iterable of integers
degen2: Iterable of integers
Returns:
    np.ndarray: The result of fusing `degen1` with `degen2`.
"""
return np.reshape(
np.array(degen1)[:, None] * np.array(degen2)[None, :],
len(degen1) * len(degen2))
def _get_strides(dims: Union[List[int], np.ndarray]) -> np.ndarray:
"""
  Compute strides of `dims`.
"""
return np.flip(np.append(1, np.cumprod(np.flip(dims[1::]))))
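# Worked example (illustrative): _get_strides([2, 3, 4]) -> [12, 4, 1],
# the row-major strides of a tensor of shape (2, 3, 4).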
def _find_best_partition(dims: Union[List[int], np.ndarray]) -> int:
"""
Find the most-levelled partition of `dims`.
A levelled partitioning is a partitioning such that
np.prod(dim[:partition]) and np.prod(dim[partition:])
are as close as possible.
Args:
dims: A list or np.ndarray of integers.
Returns:
int: The best partitioning.
"""
if len(dims) == 1:
raise ValueError(
        'expecting dims with a length of at least 2, got len(dims) = 1')
diffs = [
np.abs(np.prod(dims[0:n]) - np.prod(dims[n::]))
for n in range(1, len(dims))
]
min_inds = np.nonzero(diffs == np.min(diffs))[0]
if len(min_inds) > 1:
right_dims = [np.prod(dims[min_ind + 1:]) for min_ind in min_inds]
min_ind = min_inds[np.argmax(right_dims)]
else:
min_ind = min_inds[0]
return min_ind + 1
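# Worked example (illustrative): for dims = [2, 3, 4, 5] the candidate
# splits give |2 - 60| = 58, |6 - 20| = 14 and |24 - 5| = 19, so
# _find_best_partition([2, 3, 4, 5]) returns 2 (i.e. a 6 x 20 split).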
def get_dtype(itemsize: int) -> Type[np.number]:
"""
Return the `numpy.dtype` needed to store an
element of `itemsize` bytes.
"""
final_dtype = np.int8
if itemsize > 1:
final_dtype = np.int16
if itemsize > 2:
final_dtype = np.int32
if itemsize > 4:
final_dtype = np.int64
return final_dtype
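# Worked example (illustrative): get_dtype(3) -> np.int32, the smallest
# standard integer type whose itemsize (4 bytes) holds 3 bytes.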
def collapse(array: np.ndarray) -> np.ndarray:
"""
If possible, collapse a 2d numpy array
`array` along the rows into a 1d array of larger
dtype.
Args:
array: np.ndarray
Returns:
np.ndarray: The collapsed array.
"""
if array.ndim <= 1 or array.dtype.itemsize * array.shape[1] > 8:
return array
array = np.ascontiguousarray(array)
newdtype = get_dtype(array.dtype.itemsize * array.shape[1])
if array.shape[1] in (1, 2, 4, 8):
tmparray = array.view(newdtype)
else:
if array.shape[1] == 3:
width = 1
else:
width = 8 - array.shape[1]
tmparray = np.squeeze(
np.concatenate(
[array, np.zeros((array.shape[0], width), dtype=array.dtype)],
axis=1).view(newdtype))
return np.squeeze(tmparray)
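# Worked example (illustrative, assuming little-endian byte order): two
# int8 columns fit into one int16, so
#   collapse(np.array([[1, 2], [3, 4]], dtype=np.int8))
# -> array([ 513, 1027], dtype=int16), since 1 + 2 * 256 = 513 and
# 3 + 4 * 256 = 1027; expand() reverses this by view-casting back.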
def expand(array: np.ndarray, original_dtype: Type[np.number],
original_width: int, original_ndim: int) -> np.ndarray:
"""
Reverse operation to `collapse`.
Expand a 1d numpy array `array` into a 2d array
of dtype `original_dtype` by view-casting.
Args:
array: The collapsed array.
original_dtype: The dtype of the original (uncollapsed) array
original_width: The width (the length of the second dimension)
of the original (uncollapsed) array.
original_ndim: Number of dimensions of the original (uncollapsed)
array.
Returns:
np.ndarray: The expanded array.
"""
if original_ndim <= 1:
    # nothing to expand
return np.squeeze(array)
if array.ndim == 1:
    # the array has been collapsed; now we uncollapse it
result = array[:, None].view(original_dtype)
if original_width in (3, 5, 6, 7):
result = np.ascontiguousarray(result[:, :original_width])
return result
return array
def unique(array: np.ndarray,
return_index: bool = False,
return_inverse: bool = False,
return_counts: bool = False,
label_dtype: Type[np.number] = np.int16) -> Any:
"""
Compute the unique elements of 1d or 2d `array` along the
zero axis of the array.
  This function performs a similar
  task to `numpy.unique` with the `axis=0` argument,
but is substantially faster for 2d arrays.
Note that for the case of 2d arrays, the ordering of the array of unique
elements differs from the ordering of `numpy.unique`.
Args:
array: An input array of integers.
return_index: If `True`, also return the indices of `array`
that result in the unique array.
return_inverse: If `True`, also return the indices of the unique array
that can be used to reconstruct `array`.
return_counts: If `True`, also return the number of times
each unique item appears in `array`.
Returns:
np.ndarray: An array of unique elements.
np.ndarray (optional): The indices of array that result
in the unique array.
np.ndarray: (optional): The indices of the unique array
from which `array` can be reconstructed.
np.ndarray (optional): The number of times each element of the
unique array appears in `array`.
"""
array = np.asarray(array)
original_width = array.shape[1] if array.ndim == 2 else 0
original_ndim = array.ndim
collapsed_array = collapse(array)
if collapsed_array.ndim <= 1:
axis = None
else:
axis = 0
_return_index = (collapsed_array.dtype in (np.int8, np.int16)) or return_index
res = np.unique(
collapsed_array,
return_index=_return_index,
return_inverse=return_inverse,
return_counts=return_counts,
axis=axis)
if any([return_index, return_inverse, return_counts]):
out = list(res)
if _return_index and not return_index:
del out[1]
out[0] = expand(out[0], array.dtype, original_width, original_ndim)
if return_inverse and not return_index:
out[1] = out[1].astype(label_dtype)
elif return_inverse and return_index:
out[2] = out[2].astype(label_dtype)
out[0] = np.ascontiguousarray(out[0])
else:
if _return_index:
out = expand(res[0], array.dtype, original_width, original_ndim)
else:
out = expand(res, array.dtype, original_width, original_ndim)
return out
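# Worked example (illustrative): rows are collapsed to scalars before
# calling np.unique, so
#   unique(np.array([[1, 2], [1, 2], [3, 4]], dtype=np.int16),
#          return_counts=True)
# returns [array([[1, 2], [3, 4]], dtype=int16), array([2, 1])].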
def intersect(A: np.ndarray,
B: np.ndarray,
axis=0,
assume_unique=False,
return_indices=False) -> Any:
"""
Extends numpy's intersect1d to find the row or column-wise intersection of
two 2d arrays. Takes identical input to numpy intersect1d.
Args:
A, B (np.ndarray): arrays of matching widths and datatypes
Returns:
ndarray: sorted 1D array of common rows/cols between the input arrays
ndarray: the indices of the first occurrences of the common values in A.
Only provided if return_indices is True.
ndarray: the indices of the first occurrences of the common values in B.
Only provided if return_indices is True.
"""
if A.dtype != B.dtype:
raise ValueError(f"array dtypes must macht to intersect, "
f"found A.dtype = {A.dtype}, B.dtype = {B.dtype}")
if axis not in (0, 1):
raise NotImplementedError(
"intersection can only be performed on first or second axis")
if A.ndim != B.ndim:
raise ValueError("array ndims must match to intersect")
if axis == 1:
if A.shape[0] != B.shape[0]:
raise ValueError("array heights must match to intersect on second axis")
out = intersect(
A.T,
B.T,
axis=0,
assume_unique=assume_unique,
return_indices=return_indices)
if return_indices:
return np.ascontiguousarray(out[0].T), out[1], out[2]
return np.ascontiguousarray(out.T)
if A.ndim > 1 and A.shape[1] != B.shape[1]:
raise ValueError("array widths must match to intersect on first axis")
original_width = A.shape[1] if A.ndim == 2 else 0
original_ndim = A.ndim
collapsed_A = collapse(A)
collapsed_B = collapse(B)
if collapsed_A.ndim > 1:
    # arrays were not collapsible, fall back to slower implementation
return _intersect_ndarray(collapsed_A, collapsed_B, axis, assume_unique,
return_indices)
if collapsed_A.dtype in (np.int8,
np.int16) and collapsed_B.dtype in (np.int8,
np.int16):
    # special case of dtype = np.int8 or np.int16:
    # original charges were unpadded in this case
C, A_locs, B_locs = np.intersect1d(
collapsed_A,
collapsed_B,
assume_unique=assume_unique,
return_indices=True)
C = expand(C, A.dtype, original_width, original_ndim)
if return_indices:
result = C, A_locs, B_locs
else:
result = C
else:
result = np.intersect1d(
collapsed_A,
collapsed_B,
assume_unique=assume_unique,
return_indices=return_indices)
if return_indices:
result = list(result)
result[0] = expand(result[0], A.dtype, original_width, original_ndim)
else:
result = expand(result, A.dtype, original_width, original_ndim)
return result
def _intersect_ndarray(A: np.ndarray,
B: np.ndarray,
axis=0,
assume_unique=False,
return_indices=False) -> Any:
"""
Extends numpy's intersect1d to find the row or column-wise intersection of
two 2d arrays. Takes identical input to numpy intersect1d.
Args:
A, B (np.ndarray): arrays of matching widths and datatypes
Returns:
ndarray: sorted 1D array of common rows/cols between the input arrays
ndarray: the indices of the first occurrences of the common values in A.
Only provided if return_indices is True.
ndarray: the indices of the first occurrences of the common values in B.
Only provided if return_indices is True.
"""
# pylint: disable=line-too-long
# see
  # https://stackoverflow.com/questions/8317022/get-intersecting-rows-across-two-2d-numpy-arrays
#pylint: disable=no-else-return
A = np.ascontiguousarray(A)
B = np.ascontiguousarray(B)
if A.ndim != B.ndim:
raise ValueError("array ndims must match to intersect")
if A.ndim == 1:
return np.intersect1d(
A, B, assume_unique=assume_unique, return_indices=return_indices)
elif A.ndim == 2:
if axis == 0:
ncols = A.shape[1]
if A.shape[1] != B.shape[1]:
raise ValueError("array widths must match to intersect")
dtype = {
'names': ['f{}'.format(i) for i in range(ncols)],
'formats': ncols * [A.dtype]
}
if return_indices:
C, A_locs, B_locs = np.intersect1d(
A.view(dtype),
B.view(dtype),
assume_unique=assume_unique,
return_indices=return_indices)
return C.view(A.dtype).reshape(-1, ncols), A_locs, B_locs
C = np.intersect1d(
A.view(dtype), B.view(dtype), assume_unique=assume_unique)
return C.view(A.dtype).reshape(-1, ncols)
elif axis == 1:
out = _intersect_ndarray(
A.T.copy(),
B.T.copy(),
axis=0,
assume_unique=assume_unique,
return_indices=return_indices)
if return_indices:
return np.ascontiguousarray(out[0].T), out[1], out[2]
return np.ascontiguousarray(out.T)
raise NotImplementedError(
"intersection can only be performed on first or second axis")
raise NotImplementedError("_intersect_ndarray is only implemented for 1d or 2d arrays")
|
import pandas as pd
import pytest
import pytz
from qstrader.simulation.daily_bday import DailyBusinessDaySimulationEngine
from qstrader.simulation.event import SimulationEvent
@pytest.mark.parametrize(
"starting_day,ending_day,pre_market,post_market,expected_events",
[
(
'2020-01-01', '2020-01-07', True, True,
[
('2020-01-01 00:00:00', 'pre_market'),
('2020-01-01 14:30:00', 'market_open'),
('2020-01-01 21:00:00', 'market_close'),
('2020-01-01 23:59:00', 'post_market'),
('2020-01-02 00:00:00', 'pre_market'),
('2020-01-02 14:30:00', 'market_open'),
('2020-01-02 21:00:00', 'market_close'),
('2020-01-02 23:59:00', 'post_market'),
('2020-01-03 00:00:00', 'pre_market'),
('2020-01-03 14:30:00', 'market_open'),
('2020-01-03 21:00:00', 'market_close'),
('2020-01-03 23:59:00', 'post_market'),
('2020-01-06 00:00:00', 'pre_market'),
('2020-01-06 14:30:00', 'market_open'),
('2020-01-06 21:00:00', 'market_close'),
('2020-01-06 23:59:00', 'post_market'),
('2020-01-07 00:00:00', 'pre_market'),
('2020-01-07 14:30:00', 'market_open'),
('2020-01-07 21:00:00', 'market_close'),
('2020-01-07 23:59:00', 'post_market'),
]
),
(
'2020-01-01', '2020-01-07', False, False,
[
('2020-01-01 14:30:00', 'market_open'),
('2020-01-01 21:00:00', 'market_close'),
('2020-01-02 14:30:00', 'market_open'),
('2020-01-02 21:00:00', 'market_close'),
('2020-01-03 14:30:00', 'market_open'),
('2020-01-03 21:00:00', 'market_close'),
('2020-01-06 14:30:00', 'market_open'),
('2020-01-06 21:00:00', 'market_close'),
('2020-01-07 14:30:00', 'market_open'),
('2020-01-07 21:00:00', 'market_close'),
]
)
]
)
def test_daily_rebalance(
starting_day, ending_day, pre_market, post_market, expected_events
):
"""
Checks that the daily business day event generation provides
the correct SimulationEvents for the given parameters.
"""
sd = pd.Timestamp(starting_day, tz=pytz.UTC)
ed = pd.Timestamp(ending_day, tz=pytz.UTC)
sim_engine = DailyBusinessDaySimulationEngine(sd, ed, pre_market, post_market)
for sim_events in zip(sim_engine, expected_events):
calculated_event = sim_events[0]
expected_event = SimulationEvent(pd.Timestamp(sim_events[1][0], tz=pytz.UTC), sim_events[1][1])
assert calculated_event == expected_event
|
import asyncio
import pytest
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.auth import auth_manager_from_config, auth_store
from homeassistant.auth.providers import (
auth_provider_from_config,
homeassistant as hass_auth,
)
from tests.async_mock import Mock, patch
@pytest.fixture
def data(hass):
"""Create a loaded data class."""
data = hass_auth.Data(hass)
hass.loop.run_until_complete(data.async_load())
return data
@pytest.fixture
def legacy_data(hass):
"""Create a loaded legacy data class."""
data = hass_auth.Data(hass)
hass.loop.run_until_complete(data.async_load())
data.is_legacy = True
return data
async def test_validating_password_invalid_user(data, hass):
"""Test validating an invalid user."""
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("non-existing", "pw")
async def test_not_allow_set_id():
"""Test we are not allowed to set an ID in config."""
hass = Mock()
with pytest.raises(vol.Invalid):
await auth_provider_from_config(
hass, None, {"type": "homeassistant", "id": "invalid"}
)
async def test_new_users_populate_values(hass, data):
"""Test that we populate data for new users."""
data.add_auth("hello", "test-pass")
await data.async_save()
manager = await auth_manager_from_config(hass, [{"type": "homeassistant"}], [])
provider = manager.auth_providers[0]
credentials = await provider.async_get_or_create_credentials({"username": "hello"})
user = await manager.async_get_or_create_user(credentials)
assert user.name == "hello"
assert user.is_active
async def test_changing_password_raises_invalid_user(data, hass):
"""Test that changing password raises invalid user."""
with pytest.raises(hass_auth.InvalidUser):
data.change_password("non-existing", "pw")
# Modern mode
async def test_adding_user(data, hass):
"""Test adding a user."""
data.add_auth("test-user", "test-pass")
data.validate_login(" test-user ", "test-pass")
async def test_adding_user_duplicate_username(data, hass):
"""Test adding a user with duplicate username."""
data.add_auth("test-user", "test-pass")
with pytest.raises(hass_auth.InvalidUser):
data.add_auth("TEST-user ", "other-pass")
async def test_validating_password_invalid_password(data, hass):
"""Test validating an invalid password."""
data.add_auth("test-user", "test-pass")
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login(" test-user ", "invalid-pass")
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("test-user", "test-pass ")
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("test-user", "Test-pass")
async def test_changing_password(data, hass):
"""Test adding a user."""
data.add_auth("test-user", "test-pass")
data.change_password("TEST-USER ", "new-pass")
with pytest.raises(hass_auth.InvalidAuth):
data.validate_login("test-user", "test-pass")
data.validate_login("test-UsEr", "new-pass")
async def test_login_flow_validates(data, hass):
"""Test login flow."""
data.add_auth("test-user", "test-pass")
await data.async_save()
provider = hass_auth.HassAuthProvider(
hass, auth_store.AuthStore(hass), {"type": "homeassistant"}
)
flow = await provider.async_login_flow({})
result = await flow.async_step_init()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await flow.async_step_init(
{"username": "incorrect-user", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await flow.async_step_init(
{"username": "TEST-user ", "password": "incorrect-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await flow.async_step_init(
{"username": "test-USER", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"]["username"] == "test-USER"
async def test_saving_loading(data, hass):
"""Test saving and loading JSON."""
data.add_auth("test-user", "test-pass")
data.add_auth("second-user", "second-pass")
await data.async_save()
data = hass_auth.Data(hass)
await data.async_load()
data.validate_login("test-user ", "test-pass")
data.validate_login("second-user ", "second-pass")
async def test_get_or_create_credentials(hass, data):
"""Test that we can get or create credentials."""
manager = await auth_manager_from_config(hass, [{"type": "homeassistant"}], [])
provider = manager.auth_providers[0]
provider.data = data
credentials1 = await provider.async_get_or_create_credentials({"username": "hello"})
with patch.object(provider, "async_credentials", return_value=[credentials1]):
credentials2 = await provider.async_get_or_create_credentials(
{"username": "hello "}
)
assert credentials1 is credentials2
# Legacy mode
async def test_legacy_adding_user(legacy_data, hass):
"""Test in legacy mode adding a user."""
legacy_data.add_auth("test-user", "test-pass")
legacy_data.validate_login("test-user", "test-pass")
async def test_legacy_adding_user_duplicate_username(legacy_data, hass):
"""Test in legacy mode adding a user with duplicate username."""
legacy_data.add_auth("test-user", "test-pass")
with pytest.raises(hass_auth.InvalidUser):
legacy_data.add_auth("test-user", "other-pass")
# Not considered duplicate
legacy_data.add_auth("test-user ", "test-pass")
legacy_data.add_auth("Test-user", "test-pass")
async def test_legacy_validating_password_invalid_password(legacy_data, hass):
"""Test in legacy mode validating an invalid password."""
legacy_data.add_auth("test-user", "test-pass")
with pytest.raises(hass_auth.InvalidAuth):
legacy_data.validate_login("test-user", "invalid-pass")
async def test_legacy_changing_password(legacy_data, hass):
"""Test in legacy mode adding a user."""
user = "test-user"
legacy_data.add_auth(user, "test-pass")
legacy_data.change_password(user, "new-pass")
with pytest.raises(hass_auth.InvalidAuth):
legacy_data.validate_login(user, "test-pass")
legacy_data.validate_login(user, "new-pass")
async def test_legacy_changing_password_raises_invalid_user(legacy_data, hass):
"""Test in legacy mode that we initialize an empty config."""
with pytest.raises(hass_auth.InvalidUser):
legacy_data.change_password("non-existing", "pw")
async def test_legacy_login_flow_validates(legacy_data, hass):
"""Test in legacy mode login flow."""
legacy_data.add_auth("test-user", "test-pass")
await legacy_data.async_save()
provider = hass_auth.HassAuthProvider(
hass, auth_store.AuthStore(hass), {"type": "homeassistant"}
)
flow = await provider.async_login_flow({})
result = await flow.async_step_init()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await flow.async_step_init(
{"username": "incorrect-user", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await flow.async_step_init(
{"username": "test-user", "password": "incorrect-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await flow.async_step_init(
{"username": "test-user", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"]["username"] == "test-user"
async def test_legacy_saving_loading(legacy_data, hass):
"""Test in legacy mode saving and loading JSON."""
legacy_data.add_auth("test-user", "test-pass")
legacy_data.add_auth("second-user", "second-pass")
await legacy_data.async_save()
legacy_data = hass_auth.Data(hass)
await legacy_data.async_load()
legacy_data.is_legacy = True
legacy_data.validate_login("test-user", "test-pass")
legacy_data.validate_login("second-user", "second-pass")
with pytest.raises(hass_auth.InvalidAuth):
legacy_data.validate_login("test-user ", "test-pass")
async def test_legacy_get_or_create_credentials(hass, legacy_data):
"""Test in legacy mode that we can get or create credentials."""
manager = await auth_manager_from_config(hass, [{"type": "homeassistant"}], [])
provider = manager.auth_providers[0]
provider.data = legacy_data
credentials1 = await provider.async_get_or_create_credentials({"username": "hello"})
with patch.object(provider, "async_credentials", return_value=[credentials1]):
credentials2 = await provider.async_get_or_create_credentials(
{"username": "hello"}
)
assert credentials1 is credentials2
with patch.object(provider, "async_credentials", return_value=[credentials1]):
credentials3 = await provider.async_get_or_create_credentials(
{"username": "hello "}
)
assert credentials1 is not credentials3
async def test_race_condition_in_data_loading(hass):
"""Test race condition in the hass_auth.Data loading.
Ref issue: https://github.com/home-assistant/core/issues/21569
"""
counter = 0
async def mock_load(_):
"""Mock of homeassistant.helpers.storage.Store.async_load."""
nonlocal counter
counter += 1
await asyncio.sleep(0)
provider = hass_auth.HassAuthProvider(
hass, auth_store.AuthStore(hass), {"type": "homeassistant"}
)
with patch("homeassistant.helpers.storage.Store.async_load", new=mock_load):
task1 = provider.async_validate_login("user", "pass")
task2 = provider.async_validate_login("user", "pass")
results = await asyncio.gather(task1, task2, return_exceptions=True)
assert counter == 1
assert isinstance(results[0], hass_auth.InvalidAuth)
# results[1] will be a TypeError if race condition occurred
assert isinstance(results[1], hass_auth.InvalidAuth)
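# A minimal sketch (an assumption, not Home Assistant's actual
# implementation) of the load-once guard that the race-condition test
# above relies on: serializing initialization behind an asyncio.Lock
# guarantees concurrent callers trigger a single backing-store load.
# The names LazyData and _load_from_store are hypothetical.
class LazyData:
    """Illustrative lazily-loaded data container."""

    def __init__(self):
        self._data = None
        self._init_lock = asyncio.Lock()

    async def async_initialize(self):
        """Load data exactly once, even with concurrent callers."""
        async with self._init_lock:
            # Re-check inside the lock: another task may have finished
            # loading while this one was waiting to acquire it.
            if self._data is None:
                self._data = await self._load_from_store()

    async def _load_from_store(self):
        await asyncio.sleep(0)  # stand-in for storage.Store.async_load()
        return {"users": []}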
|
import json
import pytest_bdd as bdd
bdd.scenarios('private.feature')
@bdd.then(bdd.parsers.parse('the cookie {name} should be set to {value}'))
def check_cookie(quteproc, name, value):
"""Check if a given cookie is set correctly.
This assumes we're on the server cookies page.
"""
content = quteproc.get_content()
data = json.loads(content)
print(data)
assert data['cookies'][name] == value
@bdd.then(bdd.parsers.parse('the cookie {name} should not be set'))
def check_cookie_not_set(quteproc, name):
"""Check if a given cookie is not set."""
content = quteproc.get_content()
data = json.loads(content)
print(data)
assert name not in data['cookies']
@bdd.then(bdd.parsers.parse('the file {name} should not contain "{text}"'))
def check_not_contain(tmpdir, name, text):
    """Check that the given file does not contain the given text."""
    path = tmpdir / name
    assert text not in path.read()
|
from behave import then
from paasta_tools.cli.cmds.status import paasta_status_on_api_endpoint
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import load_system_paasta_config
@then(
'instance GET should return app_count "{app_count}" and an expected number of running instances for "{job_id}"'
)
def service_instance_status(context, app_count, job_id):
(service, instance, _, __) = decompose_job_id(job_id)
response = context.paasta_api_client.service.status_instance(
instance=instance, service=service
)
assert response.marathon.app_count == int(app_count), response
@then('instance GET should return error code "{error_code}" for "{job_id}"')
def service_instance_status_error(context, error_code, job_id):
(service, instance, _, __) = decompose_job_id(job_id)
response = None
try:
response = context.paasta_api_client.service.status_instance(
instance=instance, service=service
)
except context.paasta_api_client.api_error as exc:
assert exc.status == int(error_code)
assert not response
@then('resources GET should show "{resource}" has {used} used')
def resources_resource_used(context, resource, used):
used = float(used)
response = context.paasta_api_client.resources.resources().value
assert response[0].to_dict()[resource].get("used") == used, response
@then(
'resources GET with groupings "{groupings}" and filters "{filters}" should return {num:d} groups'
)
def resources_groupings_filters(context, groupings, filters, num):
groupings = groupings.split(",")
if len(filters) > 0:
filters = filters.split("|")
response = context.paasta_api_client.resources.resources(
groupings=groupings, filter=filters
)
assert len(response.value) == num, response
@then('resources GET with groupings "{groupings}" should return {num:d} groups')
def resources_groupings(context, groupings, num):
return resources_groupings_filters(context, groupings, [], num)
@then(
'marathon_dashboard GET should return "{service}.{instance}" in cluster "{cluster}" with shard {shard:d}'
)
def marathon_dashboard(context, service, instance, cluster, shard):
response = context.paasta_api_client.marathon_dashboard.marathon_dashboard()
dashboard = response[cluster]
shard_url = context.system_paasta_config.get_dashboard_links()[cluster][
"Marathon RO"
][shard]
for marathon_dashboard_item in dashboard.value:
if (
marathon_dashboard_item["service"] == service
and marathon_dashboard_item["instance"] == instance
):
assert marathon_dashboard_item["shard_url"] == shard_url
@then('paasta status via the API for "{service}.{instance}" should run successfully')
def paasta_status_via_api(context, service, instance):
output = []
system_paasta_config = load_system_paasta_config()
exit_code = paasta_status_on_api_endpoint(
cluster=system_paasta_config.get_cluster(),
service=service,
instance=instance,
output=output,
system_paasta_config=system_paasta_config,
verbose=0,
)
print(f"Got exitcode {exit_code} with output:\n{output}")
print() # sacrificial line for behave to eat instead of our output
assert exit_code == 0
assert len(output) > 0
|
import logging
from sqlalchemy import Table, text
from sqlalchemy.engine import reflection
from sqlalchemy.exc import InternalError, OperationalError, SQLAlchemyError
from .const import DOMAIN
from .models import SCHEMA_VERSION, Base, SchemaChanges
from .util import session_scope
_LOGGER = logging.getLogger(__name__)
def migrate_schema(instance):
"""Check if the schema needs to be upgraded."""
with session_scope(session=instance.get_session()) as session:
res = (
session.query(SchemaChanges)
.order_by(SchemaChanges.change_id.desc())
.first()
)
current_version = getattr(res, "schema_version", None)
if current_version is None:
current_version = _inspect_schema_version(instance.engine, session)
_LOGGER.debug(
"No schema version found. Inspected version: %s", current_version
)
if current_version == SCHEMA_VERSION:
return
_LOGGER.warning(
"Database is about to upgrade. Schema version: %s", current_version
)
with instance.hass.timeout.freeze(DOMAIN):
for version in range(current_version, SCHEMA_VERSION):
new_version = version + 1
_LOGGER.info("Upgrading recorder db schema to version %s", new_version)
_apply_update(instance.engine, new_version, current_version)
session.add(SchemaChanges(schema_version=new_version))
_LOGGER.info("Upgrade to version %s done", new_version)
def _create_index(engine, table_name, index_name):
"""Create an index for the specified table.
The index name should match the name given for the index
within the table definition described in the models
"""
table = Table(table_name, Base.metadata)
_LOGGER.debug("Looking up index %s for table %s", index_name, table_name)
    # Look up the index object by name from the table in the models
index_list = [idx for idx in table.indexes if idx.name == index_name]
if not index_list:
_LOGGER.debug("The index %s no longer exists", index_name)
return
index = index_list[0]
_LOGGER.debug("Creating %s index", index_name)
_LOGGER.warning(
"Adding index `%s` to database. Note: this can take several "
"minutes on large databases and slow computers. Please "
"be patient!",
index_name,
)
try:
index.create(engine)
except OperationalError as err:
lower_err_str = str(err).lower()
if "already exists" not in lower_err_str and "duplicate" not in lower_err_str:
raise
_LOGGER.warning(
"Index %s already exists on %s, continuing", index_name, table_name
)
except InternalError as err:
if "duplicate" not in str(err).lower():
raise
_LOGGER.warning(
"Index %s already exists on %s, continuing", index_name, table_name
)
_LOGGER.debug("Finished creating %s", index_name)
def _drop_index(engine, table_name, index_name):
"""Drop an index from a specified table.
There is no universal way to do something like `DROP INDEX IF EXISTS`
so we will simply execute the DROP command and ignore any exceptions
WARNING: Due to some engines (MySQL at least) being unable to use bind
parameters in a DROP INDEX statement (at least via SQLAlchemy), the query
string here is generated from the method parameters without sanitizing.
DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT.
"""
_LOGGER.debug("Dropping index %s from table %s", index_name, table_name)
success = False
# Engines like DB2/Oracle
try:
engine.execute(text(f"DROP INDEX {index_name}"))
except SQLAlchemyError:
pass
else:
success = True
# Engines like SQLite, SQL Server
if not success:
try:
engine.execute(
text(
"DROP INDEX {table}.{index}".format(
index=index_name, table=table_name
)
)
)
except SQLAlchemyError:
pass
else:
success = True
if not success:
# Engines like MySQL, MS Access
try:
engine.execute(
text(
"DROP INDEX {index} ON {table}".format(
index=index_name, table=table_name
)
)
)
except SQLAlchemyError:
pass
else:
success = True
if success:
_LOGGER.debug(
"Finished dropping index %s from table %s", index_name, table_name
)
else:
if index_name == "ix_states_context_parent_id":
# Was only there on nightly so we do not want
# to generate log noise or issues about it.
return
_LOGGER.warning(
"Failed to drop index %s from table %s. Schema "
"Migration will continue; this is not a "
"critical operation",
index_name,
table_name,
)
def _add_columns(engine, table_name, columns_def):
"""Add columns to a table."""
_LOGGER.warning(
"Adding columns %s to table %s. Note: this can take several "
"minutes on large databases and slow computers. Please "
"be patient!",
", ".join(column.split(" ")[0] for column in columns_def),
table_name,
)
columns_def = [f"ADD {col_def}" for col_def in columns_def]
try:
engine.execute(
text(
"ALTER TABLE {table} {columns_def}".format(
table=table_name, columns_def=", ".join(columns_def)
)
)
)
return
except (InternalError, OperationalError):
# Some engines support adding all columns at once,
# this error is when they don't
_LOGGER.info("Unable to use quick column add. Adding 1 by 1")
for column_def in columns_def:
try:
engine.execute(
text(
"ALTER TABLE {table} {column_def}".format(
table=table_name, column_def=column_def
)
)
)
except (InternalError, OperationalError) as err:
if "duplicate" not in str(err).lower():
raise
_LOGGER.warning(
"Column %s already exists on %s, continuing",
column_def.split(" ")[1],
table_name,
)
def _apply_update(engine, new_version, old_version):
"""Perform operations to bring schema up to date."""
if new_version == 1:
_create_index(engine, "events", "ix_events_time_fired")
elif new_version == 2:
# Create compound start/end index for recorder_runs
_create_index(engine, "recorder_runs", "ix_recorder_runs_start_end")
# Create indexes for states
_create_index(engine, "states", "ix_states_last_updated")
elif new_version == 3:
# There used to be a new index here, but it was removed in version 4.
pass
elif new_version == 4:
# Queries were rewritten in this schema release. Most indexes from
# earlier versions of the schema are no longer needed.
if old_version == 3:
# Remove index that was added in version 3
_drop_index(engine, "states", "ix_states_created_domain")
if old_version == 2:
# Remove index that was added in version 2
_drop_index(engine, "states", "ix_states_entity_id_created")
# Remove indexes that were added in version 0
_drop_index(engine, "states", "states__state_changes")
_drop_index(engine, "states", "states__significant_changes")
_drop_index(engine, "states", "ix_states_entity_id_created")
_create_index(engine, "states", "ix_states_entity_id_last_updated")
elif new_version == 5:
# Create supporting index for States.event_id foreign key
_create_index(engine, "states", "ix_states_event_id")
elif new_version == 6:
_add_columns(
engine,
"events",
["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
)
_create_index(engine, "events", "ix_events_context_id")
_create_index(engine, "events", "ix_events_context_user_id")
_add_columns(
engine,
"states",
["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
)
_create_index(engine, "states", "ix_states_context_id")
_create_index(engine, "states", "ix_states_context_user_id")
elif new_version == 7:
_create_index(engine, "states", "ix_states_entity_id")
elif new_version == 8:
_add_columns(engine, "events", ["context_parent_id CHARACTER(36)"])
_add_columns(engine, "states", ["old_state_id INTEGER"])
_create_index(engine, "events", "ix_events_context_parent_id")
elif new_version == 9:
# We now get the context from events with a join
# since its always there on state_changed events
#
# Ideally we would drop the columns from the states
# table as well but sqlite doesn't support that
# and we would have to move to something like
# sqlalchemy alembic to make that work
#
_drop_index(engine, "states", "ix_states_context_id")
_drop_index(engine, "states", "ix_states_context_user_id")
# This index won't be there if they were not running
# nightly but we don't treat that as a critical issue
_drop_index(engine, "states", "ix_states_context_parent_id")
# Redundant keys on composite index:
# We already have ix_states_entity_id_last_updated
_drop_index(engine, "states", "ix_states_entity_id")
_create_index(engine, "events", "ix_events_event_type_time_fired")
_drop_index(engine, "events", "ix_events_event_type")
else:
raise ValueError(f"No schema migration defined for version {new_version}")
def _inspect_schema_version(engine, session):
"""Determine the schema version by inspecting the db structure.
When the schema version is not present in the db, either db was just
created with the correct schema, or this is a db created before schema
versions were tracked. For now, we'll test if the changes for schema
version 1 are present to make the determination. Eventually this logic
can be removed and we can assume a new db is being created.
"""
inspector = reflection.Inspector.from_engine(engine)
indexes = inspector.get_indexes("events")
for index in indexes:
if index["column_names"] == ["time_fired"]:
# Schema addition from version 1 detected. New DB.
session.add(SchemaChanges(schema_version=SCHEMA_VERSION))
return SCHEMA_VERSION
# Version 1 schema changes not found, this db needs to be migrated.
current_version = SchemaChanges(schema_version=0)
session.add(current_version)
return current_version.schema_version
|
import os
import sys
import re
import signal
import socket
import webbrowser
from http.server import HTTPServer
from http.server import SimpleHTTPRequestHandler
from io import BytesIO as StringIO
from nikola.plugin_categories import Command
from nikola.utils import dns_sd
class IPv6Server(HTTPServer):
"""An IPv6 HTTPServer."""
address_family = socket.AF_INET6
class CommandServe(Command):
"""Start test server."""
name = "serve"
doc_usage = "[options]"
doc_purpose = "start the test webserver"
dns_sd = None
cmd_options = (
{
'name': 'port',
'short': 'p',
'long': 'port',
'default': 8000,
'type': int,
'help': 'Port number',
},
{
'name': 'address',
'short': 'a',
'long': 'address',
'type': str,
'default': '',
'help': 'Address to bind, defaults to all local IPv4 interfaces',
},
{
'name': 'detach',
'short': 'd',
'long': 'detach',
'type': bool,
'default': False,
'help': 'Detach from TTY (work in the background)',
},
{
'name': 'browser',
'short': 'b',
'long': 'browser',
'type': bool,
'default': False,
'help': 'Open the test server in a web browser',
},
{
'name': 'ipv6',
'short': '6',
'long': 'ipv6',
'type': bool,
'default': False,
'help': 'Use IPv6',
},
)
def shutdown(self, signum=None, _frame=None):
"""Shut down the server that is running detached."""
if self.dns_sd:
self.dns_sd.Reset()
if os.path.exists(self.serve_pidfile):
os.remove(self.serve_pidfile)
if not self.detached:
self.logger.info("Server is shutting down.")
if signum:
sys.exit(0)
def _execute(self, options, args):
"""Start test server."""
out_dir = self.site.config['OUTPUT_FOLDER']
if not os.path.isdir(out_dir):
self.logger.error("Missing '{0}' folder?".format(out_dir))
else:
self.serve_pidfile = os.path.abspath('nikolaserve.pid')
os.chdir(out_dir)
if '[' in options['address']:
options['address'] = options['address'].strip('[').strip(']')
ipv6 = True
OurHTTP = IPv6Server
elif options['ipv6']:
ipv6 = True
OurHTTP = IPv6Server
else:
ipv6 = False
OurHTTP = HTTPServer
httpd = OurHTTP((options['address'], options['port']),
OurHTTPRequestHandler)
sa = httpd.socket.getsockname()
if ipv6:
server_url = "http://[{0}]:{1}/".format(*sa)
else:
server_url = "http://{0}:{1}/".format(*sa)
self.logger.info("Serving on {0} ...".format(server_url))
if options['browser']:
# Some browsers fail to load 0.0.0.0 (Issue #2755)
if sa[0] == '0.0.0.0':
server_url = "http://127.0.0.1:{1}/".format(*sa)
self.logger.info("Opening {0} in the default web browser...".format(server_url))
webbrowser.open(server_url)
if options['detach']:
self.detached = True
OurHTTPRequestHandler.quiet = True
try:
pid = os.fork()
if pid == 0:
signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
else:
with open(self.serve_pidfile, 'w') as fh:
fh.write('{0}\n'.format(pid))
self.logger.info("Detached with PID {0}. Run `kill {0}` or `kill $(cat nikolaserve.pid)` to stop the server.".format(pid))
except AttributeError:
if os.name == 'nt':
self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
else:
raise
else:
self.detached = False
try:
self.dns_sd = dns_sd(options['port'], (options['ipv6'] or '::' in options['address']))
signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
except KeyboardInterrupt:
self.shutdown()
return 130
class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
"""A request handler, modified for Nikola."""
extensions_map = dict(SimpleHTTPRequestHandler.extensions_map)
extensions_map[""] = "text/plain"
quiet = False
def log_message(self, *args):
"""Log messages. Or not, depending on a setting."""
if self.quiet:
return
else:
return super().log_message(*args)
# NOTICE: this is a patched version of send_head() to disable all sorts of
# caching. `nikola serve` is a development server, hence caching should
# not happen to have access to the newest resources.
#
# The original code was copy-pasted from Python 2.7. Python 3.3 contains
# the same code, missing the binary mode comment.
#
# Note that it might break in future versions of Python, in which case we
# would need to do even more magic.
def send_head(self):
"""Send response code and MIME header.
This is common code for GET and HEAD commands.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
and must be closed by the caller under all circumstances), or
None, in which case the caller has nothing further to do.
"""
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
path_parts = list(self.path.partition('?'))
if not path_parts[0].endswith('/'):
# redirect browser - doing basically what apache does
path_parts[0] += '/'
self.send_response(301)
self.send_header("Location", ''.join(path_parts))
# begin no-cache patch
# For redirects. With redirects, caching is even worse and can
# break more. Especially with 301 Moved Permanently redirects,
# like this one.
self.send_header("Cache-Control", "no-cache, no-store, "
"must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
# end no-cache patch
self.end_headers()
return None
for index in "index.html", "index.htm":
index = os.path.join(path, index)
if os.path.exists(index):
path = index
break
else:
return self.list_directory(path)
ctype = self.guess_type(path)
try:
# Always read in binary mode. Opening files in text mode may cause
# newline translations, making the actual size of the content
# transmitted *less* than the content-length!
f = open(path, 'rb')
except IOError:
self.send_error(404, "File not found: {}".format(path))
return None
filtered_bytes = None
if ctype == 'text/html':
# Comment out any <base> to allow local resolution of relative URLs.
data = f.read().decode('utf8')
f.close()
data = re.sub(r'<base\s([^>]*)>', r'<!--base \g<1>-->', data, flags=re.IGNORECASE)
data = data.encode('utf8')
f = StringIO()
f.write(data)
filtered_bytes = len(data)
f.seek(0)
self.send_response(200)
if ctype.startswith('text/') or ctype.endswith('+xml'):
self.send_header("Content-Type", "{0}; charset=UTF-8".format(ctype))
else:
self.send_header("Content-Type", ctype)
if os.path.splitext(path)[1] == '.svgz':
# Special handling for svgz to make it work nice with browsers.
self.send_header("Content-Encoding", 'gzip')
if filtered_bytes is None:
fs = os.fstat(f.fileno())
self.send_header('Content-Length', str(fs[6]))
else:
self.send_header('Content-Length', filtered_bytes)
# begin no-cache patch
# For standard requests.
self.send_header("Cache-Control", "no-cache, no-store, "
"must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
# end no-cache patch
self.end_headers()
return f
|
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
def test_regex(validator):
field = 'a_regex_email'
assert_success({field: '[email protected]'}, validator=validator)
assert_fail(
{field: 'invalid'},
update=True,
error=(
field,
(field, 'regex'),
errors.REGEX_MISMATCH,
r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
),
)
|
import os
import os.path as op
from ...utils import (verbose, get_subjects_dir, set_config)
FSAVERAGE_MANIFEST_PATH = op.dirname(__file__)
@verbose
def fetch_fsaverage(subjects_dir=None, verbose=None):
"""Fetch and update fsaverage.
Parameters
----------
subjects_dir : str | None
The path to use as the subjects directory in the MNE-Python
config file. None will use the existing config variable (i.e.,
will not change anything), and if it does not exist, will use
``~/mne_data/MNE-fsaverage-data``.
%(verbose)s
Returns
-------
fs_dir : str
The fsaverage directory.
(essentially ``subjects_dir + '/fsaverage'``).
Notes
-----
This function is designed to provide
1. All modern (Freesurfer 6) fsaverage subject files
2. All MNE fsaverage parcellations
3. fsaverage head surface, fiducials, head<->MRI trans, 1- and 3-layer
BEMs (and surfaces)
This function will compare the contents of ``subjects_dir/fsaverage``
to the ones provided in the remote zip file. If any are missing,
the zip file is downloaded and files are updated. No files will
be overwritten.
.. versionadded:: 0.18
"""
# Code used to create the BEM (other files taken from MNE-sample-data):
#
# $ mne watershed_bem -s fsaverage -d $PWD --verbose info --copy
# $ python
# >>> bem = mne.make_bem_model('fsaverage', subjects_dir='.', verbose=True)
# >>> mne.write_bem_surfaces(
# ... 'fsaverage/bem/fsaverage-5120-5120-5120-bem.fif', bem)
# >>> sol = mne.make_bem_solution(bem, verbose=True)
# >>> mne.write_bem_solution(
# ... 'fsaverage/bem/fsaverage-5120-5120-5120-bem-sol.fif', sol)
# >>> import os
# >>> import os.path as op
# >>> names = sorted(op.join(r, f)
# ... for r, d, files in os.walk('fsaverage')
# ... for f in files)
# with open('fsaverage.txt', 'w') as fid:
# fid.write('\n'.join(names))
#
from ..utils import _manifest_check_download
subjects_dir = _set_montage_coreg_path(subjects_dir)
subjects_dir = op.abspath(subjects_dir)
fs_dir = op.join(subjects_dir, 'fsaverage')
os.makedirs(fs_dir, exist_ok=True)
fsaverage_data_parts = {
'root.zip': dict(
url='https://osf.io/3bxqt/download?revision=2',
hash_='5133fe92b7b8f03ae19219d5f46e4177',
manifest=op.join(FSAVERAGE_MANIFEST_PATH, 'root.txt'),
            destination=subjects_dir,
),
'bem.zip': dict(
url='https://osf.io/7ve8g/download?revision=4',
hash_='b31509cdcf7908af6a83dc5ee8f49fb1',
manifest=op.join(FSAVERAGE_MANIFEST_PATH, 'bem.txt'),
destination=op.join(subjects_dir, 'fsaverage'),
),
}
for fname, data in fsaverage_data_parts.items():
_manifest_check_download(
destination=data['destination'],
manifest_path=data['manifest'],
url=data['url'],
hash_=data['hash_'],
)
return fs_dir
def _get_create_subjects_dir(subjects_dir):
from ..utils import _get_path
subjects_dir = get_subjects_dir(subjects_dir, raise_error=False)
if subjects_dir is None:
subjects_dir = _get_path(None, 'MNE_DATA', 'montage coregistration')
subjects_dir = op.join(subjects_dir, 'MNE-fsaverage-data')
os.makedirs(subjects_dir, exist_ok=True)
return subjects_dir
def _set_montage_coreg_path(subjects_dir=None):
"""Set a subject directory suitable for montage(-only) coregistration.
Parameters
----------
subjects_dir : str | None
The path to use as the subjects directory in the MNE-Python
config file. None will use the existing config variable (i.e.,
will not change anything), and if it does not exist, will use
``~/mne_data/MNE-fsaverage-data``.
Returns
-------
subjects_dir : str
The subjects directory that was used.
See Also
--------
mne.datasets.fetch_fsaverage
mne.get_config
mne.set_config
Notes
-----
If you plan to only do EEG-montage based coregistrations with fsaverage
without any MRI warping, this function can facilitate the process.
Essentially it sets the default value for ``subjects_dir`` in MNE
functions to be ``~/mne_data/MNE-fsaverage-data`` (assuming it has
not already been set to some other value).
.. versionadded:: 0.18
"""
subjects_dir = _get_create_subjects_dir(subjects_dir)
old_subjects_dir = get_subjects_dir(None, raise_error=False)
if old_subjects_dir is None:
set_config('SUBJECTS_DIR', subjects_dir)
return subjects_dir
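# A hedged usage sketch, following the commented-example convention used
# above. The bundle paths below are the ones created by the BEM recipe in
# fetch_fsaverage's comments; treat them as assumptions if your bundle
# differs:
#
# >>> import os.path as op
# >>> import mne
# >>> fs_dir = mne.datasets.fetch_fsaverage(verbose=True)  # downloads once
# >>> subjects_dir = op.dirname(fs_dir)
# >>> src = op.join(fs_dir, 'bem', 'fsaverage-ico-5-src.fif')
# >>> bem = op.join(fs_dir, 'bem', 'fsaverage-5120-5120-5120-bem-sol.fif')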
|
import aiopulse
from homeassistant.core import callback
from homeassistant.helpers import entity
from homeassistant.helpers.device_registry import async_get_registry as get_dev_reg
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_registry import async_get_registry as get_ent_reg
from .const import ACMEDA_ENTITY_REMOVE, DOMAIN, LOGGER
class AcmedaBase(entity.Entity):
"""Base representation of an Acmeda roller."""
def __init__(self, roller: aiopulse.Roller):
"""Initialize the roller."""
self.roller = roller
async def async_remove_and_unregister(self):
"""Unregister from entity and device registry and call entity remove function."""
LOGGER.error("Removing %s %s", self.__class__.__name__, self.unique_id)
ent_registry = await get_ent_reg(self.hass)
if self.entity_id in ent_registry.entities:
ent_registry.async_remove(self.entity_id)
dev_registry = await get_dev_reg(self.hass)
device = dev_registry.async_get_device(
identifiers={(DOMAIN, self.unique_id)}, connections=set()
)
if device is not None:
dev_registry.async_update_device(
device.id, remove_config_entry_id=self.registry_entry.config_entry_id
)
await self.async_remove()
async def async_added_to_hass(self):
"""Entity has been added to hass."""
self.roller.callback_subscribe(self.notify_update)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
ACMEDA_ENTITY_REMOVE.format(self.roller.id),
self.async_remove_and_unregister,
)
)
async def async_will_remove_from_hass(self):
"""Entity being removed from hass."""
self.roller.callback_unsubscribe(self.notify_update)
@callback
def notify_update(self):
"""Write updated device state information."""
LOGGER.debug("Device update notification received: %s", self.name)
self.async_write_ha_state()
@property
def should_poll(self):
"""Report that Acmeda entities do not need polling."""
return False
@property
def unique_id(self):
"""Return the unique ID of this roller."""
return self.roller.id
@property
def device_id(self):
"""Return the ID of this roller."""
return self.roller.id
@property
def name(self):
"""Return the name of roller."""
return self.roller.name
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.roller.name,
"manufacturer": "Rollease Acmeda",
"via_device": (DOMAIN, self.roller.hub.id),
}
|
import json
from homeassistant.components.myq.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
async def async_init_integration(
hass: HomeAssistant,
skip_setup: bool = False,
) -> MockConfigEntry:
"""Set up the myq integration in Home Assistant."""
devices_fixture = "myq/devices.json"
devices_json = load_fixture(devices_fixture)
devices_dict = json.loads(devices_json)
def _handle_mock_api_request(method, endpoint, **kwargs):
if endpoint == "Login":
return {"SecurityToken": 1234}
if endpoint == "My":
return {"Account": {"Id": 1}}
if endpoint == "Accounts/1/Devices":
return devices_dict
return {}
with patch("pymyq.api.API.request", side_effect=_handle_mock_api_request):
entry = MockConfigEntry(
domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import aiohttp
from wled import WLEDConnectionError
from homeassistant import data_entry_flow
from homeassistant.components.wled import config_flow
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant
from . import init_integration
from tests.async_mock import MagicMock, patch
from tests.common import load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_show_user_form(hass: HomeAssistant) -> None:
"""Test that the user set up form is served."""
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "user"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_show_zeroconf_confirm_form(hass: HomeAssistant) -> None:
"""Test that the zeroconf confirmation form is served."""
flow = config_flow.WLEDFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_ZEROCONF, CONF_NAME: "test"}
result = await flow.async_step_zeroconf_confirm()
assert result["description_placeholders"] == {CONF_NAME: "test"}
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_show_zeroconf_form(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that the zeroconf confirmation form is served."""
aioclient_mock.get(
"http://192.168.1.123:80/json/",
text=load_fixture("wled/rgb.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.WLEDFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_ZEROCONF}
result = await flow.async_step_zeroconf(
{"host": "192.168.1.123", "hostname": "example.local.", "properties": {}}
)
assert flow.context[CONF_HOST] == "192.168.1.123"
assert flow.context[CONF_NAME] == "example"
assert result["description_placeholders"] == {CONF_NAME: "example"}
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
@patch("homeassistant.components.wled.WLED.update", side_effect=WLEDConnectionError)
async def test_connection_error(
update_mock: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we show user form on WLED connection error."""
aioclient_mock.get("http://example.com/json/", exc=aiohttp.ClientError)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: "example.com"},
)
assert result["errors"] == {"base": "cannot_connect"}
assert result["step_id"] == "user"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
@patch("homeassistant.components.wled.WLED.update", side_effect=WLEDConnectionError)
async def test_zeroconf_connection_error(
update_mock: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on WLED connection error."""
aioclient_mock.get("http://192.168.1.123/json/", exc=aiohttp.ClientError)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={"host": "192.168.1.123", "hostname": "example.local.", "properties": {}},
)
assert result["reason"] == "cannot_connect"
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
@patch("homeassistant.components.wled.WLED.update", side_effect=WLEDConnectionError)
async def test_zeroconf_confirm_connection_error(
update_mock: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow on WLED connection error."""
aioclient_mock.get("http://192.168.1.123:80/json/", exc=aiohttp.ClientError)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={
"source": SOURCE_ZEROCONF,
CONF_HOST: "example.com",
CONF_NAME: "test",
},
data={"host": "192.168.1.123", "hostname": "example.com.", "properties": {}},
)
assert result["reason"] == "cannot_connect"
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
@patch("homeassistant.components.wled.WLED.update", side_effect=WLEDConnectionError)
async def test_zeroconf_no_data(
update_mock: MagicMock, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort if zeroconf provides no data."""
flow = config_flow.WLEDFlowHandler()
flow.hass = hass
result = await flow.async_step_zeroconf()
assert result["reason"] == "cannot_connect"
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_user_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if WLED device already configured."""
await init_integration(hass, aioclient_mock)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_USER},
data={CONF_HOST: "192.168.1.123"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_zeroconf_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if WLED device already configured."""
await init_integration(hass, aioclient_mock)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={"host": "192.168.1.123", "hostname": "example.local.", "properties": {}},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_zeroconf_with_mac_device_exists_abort(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort zeroconf flow if WLED device already configured."""
await init_integration(hass, aioclient_mock)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "192.168.1.123",
"hostname": "example.local.",
"properties": {CONF_MAC: "aabbccddeeff"},
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_user_flow_implementation(
hass: HomeAssistant, aioclient_mock
) -> None:
"""Test the full manual user flow from start to finish."""
aioclient_mock.get(
"http://192.168.1.123:80/json/",
text=load_fixture("wled/rgb.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "user"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_HOST: "192.168.1.123"}
)
assert result["data"][CONF_HOST] == "192.168.1.123"
assert result["data"][CONF_MAC] == "aabbccddeeff"
assert result["title"] == "192.168.1.123"
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_full_zeroconf_flow_implementation(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the full manual user flow from start to finish."""
aioclient_mock.get(
"http://192.168.1.123:80/json/",
text=load_fixture("wled/rgb.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.WLEDFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_ZEROCONF}
result = await flow.async_step_zeroconf(
{"host": "192.168.1.123", "hostname": "example.local.", "properties": {}}
)
assert flow.context[CONF_HOST] == "192.168.1.123"
assert flow.context[CONF_NAME] == "example"
assert result["description_placeholders"] == {CONF_NAME: "example"}
assert result["step_id"] == "zeroconf_confirm"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await flow.async_step_zeroconf_confirm(user_input={})
assert result["data"][CONF_HOST] == "192.168.1.123"
assert result["data"][CONF_MAC] == "aabbccddeeff"
assert result["title"] == "example"
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
|
import argparse
import logging
import sys
from paasta_tools import tron_tools
from paasta_tools.tron_tools import MASTER_NAMESPACE
log = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser(
description="Update the Tron namespace configuration for a service."
)
parser.add_argument("services", nargs="*", help="Services to update.")
parser.add_argument(
"-a",
"--all",
dest="all_namespaces",
action="store_true",
help="Update all available Tron namespaces.",
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=tron_tools.DEFAULT_SOA_DIR,
help="Use a different soa config directory",
)
parser.add_argument("-v", "--verbose", action="store_true", default=False)
parser.add_argument("--dry-run", action="store_true", default=False)
parser.add_argument(
"--cluster",
help="Cluster to read configs for. Defaults to the configuration in /etc/paasta",
default=None,
)
args = parser.parse_args()
return args
def main():
args = parse_args()
log_level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(level=log_level)
if not args.cluster:
args.cluster = tron_tools.load_tron_config().get_cluster_name()
if args.all_namespaces:
if args.services:
log.error("Do not pass service names with --all flag")
sys.exit(1)
try:
services = tron_tools.get_tron_namespaces(
cluster=args.cluster, soa_dir=args.soa_dir
)
except Exception as e:
log.error("Failed to list tron namespaces: {error}".format(error=str(e)))
sys.exit(1)
else:
services = args.services
if not services:
log.warning("No namespaces found")
sys.exit(0)
if not args.dry_run:
client = tron_tools.get_tron_client()
updated = []
failed = []
skipped = []
master_config = tron_tools.create_complete_master_config(
cluster=args.cluster, soa_dir=args.soa_dir
)
if args.dry_run:
log.info(f"Would update {MASTER_NAMESPACE} to:")
log.info(f"{master_config}")
updated.append(MASTER_NAMESPACE)
else:
if client.update_namespace(MASTER_NAMESPACE, master_config):
updated.append(MASTER_NAMESPACE)
log.debug(f"Updated {MASTER_NAMESPACE}")
else:
skipped.append(MASTER_NAMESPACE)
log.debug(f"Skipped {MASTER_NAMESPACE}")
for service in sorted(services):
try:
new_config = tron_tools.create_complete_config(
cluster=args.cluster, service=service, soa_dir=args.soa_dir
)
if args.dry_run:
log.info(f"Would update {service} to:")
log.info(f"{new_config}")
updated.append(service)
else:
if client.update_namespace(service, new_config):
updated.append(service)
log.debug(f"Updated {service}")
else:
skipped.append(service)
log.debug(f"Skipped {service}")
except Exception as e:
log.error(f"Update for {service} failed: {str(e)}")
log.debug(f"Exception while updating {service}", exc_info=1)
failed.append(service)
skipped_report = skipped if args.verbose else len(skipped)
log.info(
f"Updated following namespaces: {updated}, "
f"failed: {failed}, skipped: {skipped_report}"
)
sys.exit(1 if failed else 0)
if __name__ == "__main__":
main()
|
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
)
from homeassistant.helpers import config_validation as cv
from .const import ( # pylint: disable=unused-import
CONF_CATEGORIES,
DEFAULT_RADIUS,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
)
DATA_SCHEMA = vol.Schema(
{vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): cv.positive_int}
)
_LOGGER = logging.getLogger(__name__)
class GdacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a GDACS config flow."""
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def _show_form(self, errors=None):
"""Show the form to the user."""
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors or {}
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
_LOGGER.debug("User input: %s", user_input)
if not user_input:
return await self._show_form()
latitude = user_input.get(CONF_LATITUDE, self.hass.config.latitude)
user_input[CONF_LATITUDE] = latitude
longitude = user_input.get(CONF_LONGITUDE, self.hass.config.longitude)
user_input[CONF_LONGITUDE] = longitude
identifier = f"{user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}"
await self.async_set_unique_id(identifier)
self._abort_if_unique_id_configured()
scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds
categories = user_input.get(CONF_CATEGORIES, [])
user_input[CONF_CATEGORIES] = categories
return self.async_create_entry(title=identifier, data=user_input)
|
import natsort
__all__ = ('TreeNode', 'clone_treenode', 'flatten_tree_structure',
'sort_classifications', 'join_hierarchical_category_path',
'parse_escaped_hierarchical_category_name',)
class TreeNode(object):
"""A tree node."""
indent_levels = None # use for formatting comments as tree
indent_change_before = 0 # use for formatting comments as tree
indent_change_after = 0 # use for formatting comments as tree
    # The indent levels and changes make it possible to render a tree
    # structure without keeping track of all that information during
    # rendering.
    #
    # The indent_change_before is the difference between the current
    # comment's level and the previous comment's level; if the number
# is positive, the current level is indented further in, and if it
# is negative, it is indented further out. Positive values can be
# used to open HTML tags for each opened level.
#
# The indent_change_after is the difference between the next
# comment's level and the current comment's level. Negative values
# can be used to close HTML tags for each closed level.
#
# The indent_levels list contains one entry (index, count) per
# level, informing about the index of the current comment on that
# level and the count of comments on that level (before a comment
# of a higher level comes). This information can be used to render
# tree indicators, for example to generate a tree such as:
#
# +--- [(0,3)]
# +-+- [(1,3)]
# | +--- [(1,3), (0,2)]
# | +-+- [(1,3), (1,2)]
# | +--- [(1,3), (1,2), (0, 1)]
# +-+- [(2,3)]
# +- [(2,3), (0,1)]
#
# (The lists used as labels represent the content of the
# indent_levels property for that node.)
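    #
    # A short rendering sketch that consumes these counters follows
    # flatten_tree_structure below.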
def __init__(self, name, parent=None):
"""Initialize node."""
self.name = name
self.parent = parent
self.children = []
def get_path(self):
"""Get path."""
path = []
curr = self
while curr is not None:
path.append(curr)
curr = curr.parent
return reversed(path)
def get_children(self):
"""Get children of a node."""
return self.children
def __str__(self):
"""Stringify node (return name)."""
return self.name
def _repr_partial(self):
"""Return partial representation."""
if self.parent:
return "{0}/{1!r}".format(self.parent._repr_partial(), self.name)
else:
return repr(self.name)
def __repr__(self):
"""Return programmer-friendly node representation."""
return "<TreeNode {0}>".format(self._repr_partial())
def clone_treenode(treenode, parent=None, acceptor=lambda x: True):
"""Clone a TreeNode.
Children are only cloned if `acceptor` returns `True` when
applied on them.
Returns the cloned node if it has children or if `acceptor`
applied to it returns `True`. In case neither applies, `None`
is returned.
"""
# Copy standard TreeNode stuff
node_clone = TreeNode(treenode.name, parent)
node_clone.children = [clone_treenode(node, parent=node_clone, acceptor=acceptor) for node in treenode.children]
node_clone.children = [node for node in node_clone.children if node]
node_clone.indent_levels = treenode.indent_levels
node_clone.indent_change_before = treenode.indent_change_before
node_clone.indent_change_after = treenode.indent_change_after
if hasattr(treenode, 'classification_path'):
# Copy stuff added by taxonomies_classifier plugin
node_clone.classification_path = treenode.classification_path
node_clone.classification_name = treenode.classification_name
    # Drop this node if there are no children (left) and the acceptor fails
if not node_clone.children and not acceptor(treenode):
return None
return node_clone
def flatten_tree_structure(root_list):
"""Flatten a tree."""
elements = []
def generate(input_list, indent_levels_so_far):
"""Generate flat list of nodes."""
for index, element in enumerate(input_list):
# add to destination
elements.append(element)
# compute and set indent levels
indent_levels = indent_levels_so_far + [(index, len(input_list))]
element.indent_levels = indent_levels
# add children
children = element.get_children()
element.children_count = len(children)
generate(children, indent_levels)
generate(root_list, [])
# Add indent change counters
level = 0
last_element = None
for element in elements:
new_level = len(element.indent_levels)
# Compute level change before this element
change = new_level - level
if last_element is not None:
last_element.indent_change_after = change
element.indent_change_before = change
# Update variables
level = new_level
last_element = element
# Set level change after last element
if last_element is not None:
last_element.indent_change_after = -level
return elements
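# A hedged rendering sketch (illustration only, not part of Nikola's API):
# open one <ul> per positive indent_change_before and close one </ul> per
# negative indent_change_after, as promised by the comments on TreeNode.
# The markup is deliberately simplified and skips the <li> nesting a real
# template would add.
def _render_flat_tree_demo(flat_nodes):
    """Render a flatten_tree_structure() result as simplified list markup."""
    parts = []
    for node in flat_nodes:
        parts.append('<ul>' * max(node.indent_change_before, 0))
        parts.append('<li>{0}</li>'.format(node.name))
        parts.append('</ul>' * max(-node.indent_change_after, 0))
    return ''.join(parts)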
def parse_escaped_hierarchical_category_name(category_name):
"""Parse a category name."""
result = []
current = None
index = 0
next_backslash = category_name.find('\\', index)
next_slash = category_name.find('/', index)
while index < len(category_name):
if next_backslash == -1 and next_slash == -1:
current = (current if current else "") + category_name[index:]
index = len(category_name)
elif next_slash >= 0 and (next_backslash == -1 or next_backslash > next_slash):
result.append((current if current else "") + category_name[index:next_slash])
current = ''
index = next_slash + 1
next_slash = category_name.find('/', index)
else:
if len(category_name) == next_backslash + 1:
raise Exception("Unexpected '\\' in '{0}' at last position!".format(category_name))
esc_ch = category_name[next_backslash + 1]
if esc_ch not in {'/', '\\'}:
raise Exception("Unknown escape sequence '\\{0}' in '{1}'!".format(esc_ch, category_name))
current = (current if current else "") + category_name[index:next_backslash] + esc_ch
index = next_backslash + 2
next_backslash = category_name.find('\\', index)
if esc_ch == '/':
next_slash = category_name.find('/', index)
if current is not None:
result.append(current)
return result
def join_hierarchical_category_path(category_path):
"""Join a category path."""
def escape(s):
"""Espace one part of category path."""
return s.replace('\\', '\\\\').replace('/', '\\/')
return '/'.join([escape(p) for p in category_path])
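# A hedged round-trip sketch for the two helpers above (illustration only,
# not part of the public API): a slash inside a path element is escaped on
# join and restored on parse.
def _hierarchical_category_roundtrip_demo():
    """Demonstrate that join and parse are inverses of each other."""
    path = ['Software', 'C/C++']
    name = join_hierarchical_category_path(path)  # 'Software/C\\/C++'
    assert parse_escaped_hierarchical_category_name(name) == path
    return name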
def sort_classifications(taxonomy, classifications, lang):
"""Sort the given list of classifications of the given taxonomy and language.
``taxonomy`` must be a ``Taxonomy`` plugin.
``classifications`` must be an iterable collection of
classification strings for that taxonomy.
``lang`` is the language the classifications are for.
The result will be returned as a sorted list. Sorting will
happen according to the way the complete classification
hierarchy for the taxonomy is sorted.
"""
if taxonomy.has_hierarchy:
# To sort a hierarchy of classifications correctly, we first
# build a tree out of them (and mark for each node whether it
# appears in the list), then sort the tree node-wise, and finally
# collapse the tree into a list of recombined classifications.
# Step 1: build hierarchy. Here, each node consists of a boolean
# flag (node appears in list) and a dictionary mapping path elements
# to nodes.
root = [False, {}]
for classification in classifications:
node = root
for elt in taxonomy.extract_hierarchy(classification):
if elt not in node[1]:
node[1][elt] = [False, {}]
node = node[1][elt]
node[0] = True
# Step 2: sort hierarchy. The result for a node is a pair
# (flag, subnodes), where subnodes is a list of pairs (name, subnode).
def sort_node(node, level=0):
"""Return sorted node, with children as `(name, node)` list instead of a dictionary."""
keys = natsort.natsorted(node[1].keys(), alg=natsort.ns.F | natsort.ns.IC)
taxonomy.sort_classifications(keys, lang, level)
subnodes = []
for key in keys:
subnodes.append((key, sort_node(node[1][key], level + 1)))
return (node[0], subnodes)
root = sort_node(root)
# Step 3: collapse the tree structure into a linear sorted list,
# with a node coming before its children.
def append_node(classifications, node, path=()):
"""Append the node and then its children to the classifications list."""
if node[0]:
classifications.append(taxonomy.recombine_classification_from_hierarchy(path))
for key, subnode in node[1]:
append_node(classifications, subnode, path + (key, ))
classifications = []
append_node(classifications, root)
return classifications
else:
# Sorting a flat hierarchy is simpler. We pre-sort with
# natsorted and call taxonomy.sort_classifications.
classifications = natsort.natsorted(classifications, alg=natsort.ns.F | natsort.ns.IC)
taxonomy.sort_classifications(classifications, lang)
return classifications
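# A hedged usage sketch with a minimal stand-in taxonomy; real callers pass
# a Taxonomy plugin instance, so the stub below is an assumption made only
# for illustration of the flat branch above.
class _FlatTaxonomyStub(object):
    """Flat taxonomy stand-in that keeps the natsort order."""
    has_hierarchy = False
    def sort_classifications(self, classifications, lang, level=None):
        pass  # no extra sorting on top of natsort
# sort_classifications(_FlatTaxonomyStub(), ['tag10', 'Tag2', 'tag1'], 'en')
# returns ['tag1', 'Tag2', 'tag10'] (case-insensitive natural order).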
|
import platform
import pytest
import sys
import vcr
from assertions import assert_cassette_empty, assert_is_json
requests = pytest.importorskip("requests")
from requests.exceptions import ConnectionError  # noqa: E402
def test_status_code(httpbin_both, tmpdir):
"""Ensure that we can read the status code"""
url = httpbin_both.url + "/"
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
status_code = requests.get(url).status_code
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
assert status_code == requests.get(url).status_code
def test_headers(httpbin_both, tmpdir):
"""Ensure that we can read the headers back"""
url = httpbin_both + "/"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
headers = requests.get(url).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
assert headers == requests.get(url).headers
def test_body(tmpdir, httpbin_both):
"""Ensure the responses are all identical enough"""
url = httpbin_both + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
content = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
assert content == requests.get(url).content
def test_get_empty_content_type_json(tmpdir, httpbin_both):
"""Ensure GET with application/json content-type and empty request body doesn't crash"""
url = httpbin_both + "/status/200"
headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("get_empty_json.yaml")), match_on=("body",)):
status = requests.get(url, headers=headers).status_code
with vcr.use_cassette(str(tmpdir.join("get_empty_json.yaml")), match_on=("body",)):
assert status == requests.get(url, headers=headers).status_code
def test_effective_url(tmpdir, httpbin_both):
"""Ensure that the effective_url is captured"""
url = httpbin_both.url + "/redirect-to?url=/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
effective_url = requests.get(url).url
assert effective_url == httpbin_both.url + "/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
assert effective_url == requests.get(url).url
def test_auth(tmpdir, httpbin_both):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
url = httpbin_both + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
one = requests.get(url, auth=auth)
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
two = requests.get(url, auth=auth)
assert one.content == two.content
assert one.status_code == two.status_code
def test_auth_failed(tmpdir, httpbin_both):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
url = httpbin_both + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
one = requests.get(url, auth=auth)
two = requests.get(url, auth=auth)
assert one.content == two.content
assert one.status_code == two.status_code == 401
def test_post(tmpdir, httpbin_both):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req1 = requests.post(url, data).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req2 = requests.post(url, data).content
assert req1 == req2
def test_post_chunked_binary(tmpdir, httpbin):
"""Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str."""
data1 = iter([b"data", b"to", b"send"])
data2 = iter([b"data", b"to", b"send"])
url = httpbin.url + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req1 = requests.post(url, data1).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req2 = requests.post(url, data2).content
assert req1 == req2
@pytest.mark.skipif("sys.version_info >= (3, 6)", strict=True, raises=ConnectionError)
@pytest.mark.skipif(
(3, 5) < sys.version_info < (3, 6) and platform.python_implementation() == "CPython",
reason="Fails on CPython 3.5",
)
def test_post_chunked_binary_secure(tmpdir, httpbin_secure):
"""Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str."""
data1 = iter([b"data", b"to", b"send"])
data2 = iter([b"data", b"to", b"send"])
url = httpbin_secure.url + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req1 = requests.post(url, data1).content
print(req1)
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req2 = requests.post(url, data2).content
assert req1 == req2
def test_redirects(tmpdir, httpbin_both):
"""Ensure that we can handle redirects"""
url = httpbin_both + "/redirect-to?url=bytes/1024"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
content = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
assert content == requests.get(url).content
# Ensure that we've now cached *two* responses. One for the redirect
# and one for the final fetch
assert len(cass) == 2
assert cass.play_count == 2
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
"""Ensure that requests between schemes are treated separately"""
# First fetch a url under http, and then again under https and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
requests.get(httpbin_secure + "/")
requests.get(httpbin + "/")
assert cass.play_count == 0
assert len(cass) == 2
def test_gzip(tmpdir, httpbin_both):
"""
Ensure that requests (actually urllib3) is able to automatically decompress
the response body
"""
url = httpbin_both + "/gzip"
response = requests.get(url)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = requests.get(url)
assert_is_json(response.content)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
assert_is_json(response.content)
def test_session_and_connection_close(tmpdir, httpbin):
"""
This tests the issue in https://github.com/kevin1024/vcrpy/issues/48
If you use a requests.session and the connection is closed, then an
exception is raised in the urllib3 module vendored into requests:
`AttributeError: 'NoneType' object has no attribute 'settimeout'`
"""
with vcr.use_cassette(str(tmpdir.join("session_connection_closed.yaml"))):
session = requests.session()
session.get(httpbin + "/get", headers={"Connection": "close"})
session.get(httpbin + "/get", headers={"Connection": "close"})
def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure):
with vcr.use_cassette(str(tmpdir.join("cert_validation_disabled.yaml"))):
requests.get(httpbin_secure.url, verify=False)
def test_session_can_make_requests_after_requests_unpatched(tmpdir, httpbin):
with vcr.use_cassette(str(tmpdir.join("test_session_after_unpatched.yaml"))):
session = requests.session()
session.get(httpbin + "/get")
with vcr.use_cassette(str(tmpdir.join("test_session_after_unpatched.yaml"))):
session = requests.session()
session.get(httpbin + "/get")
session.get(httpbin + "/status/200")
def test_session_created_before_use_cassette_is_patched(tmpdir, httpbin_both):
url = httpbin_both + "/bytes/1024"
# Record arbitrary, random data to the cassette
with vcr.use_cassette(str(tmpdir.join("session_created_outside.yaml"))):
session = requests.session()
body = session.get(url).content
# Create a session outside of any cassette context manager
session = requests.session()
# Make a request to make sure that a connectionpool is instantiated
session.get(httpbin_both + "/get")
with vcr.use_cassette(str(tmpdir.join("session_created_outside.yaml"))):
# These should only be the same if the patching succeeded.
assert session.get(url).content == body
def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpdir):
"""
This tests ensures that a session that was created while one cassette was
active is patched to the use the responses of a second cassette when it
is enabled.
"""
url = httpbin_both + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("first_nested.yaml"))):
session = requests.session()
first_body = session.get(url).content
with vcr.use_cassette(str(tmpdir.join("second_nested.yaml"))):
second_body = session.get(url).content
third_body = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("second_nested.yaml"))):
session = requests.session()
assert session.get(url).content == second_body
with vcr.use_cassette(str(tmpdir.join("first_nested.yaml"))):
assert session.get(url).content == first_body
assert session.get(url).content == third_body
# Make sure that the session can now get content normally.
assert "User-agent" in session.get(httpbin_both.url + "/robots.txt").text
def test_post_file(tmpdir, httpbin_both):
"""Ensure that we handle posting a file."""
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open("tox.ini", "rb") as f:
original_response = requests.post(url, f).content
# This also tests that we do the right thing with matching the body when they are files.
with vcr.use_cassette(
str(tmpdir.join("post_file.yaml")),
match_on=("method", "scheme", "host", "port", "path", "query", "body"),
) as cass:
with open("tox.ini", "rb") as f:
tox_content = f.read()
assert cass.requests[0].body.read() == tox_content
with open("tox.ini", "rb") as f:
new_response = requests.post(url, f).content
assert original_response == new_response
def test_filter_post_params(tmpdir, httpbin_both):
"""
This tests the issue in https://github.com/kevin1024/vcrpy/issues/158
Ensure that a post request made through requests can still be filtered.
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']) as cass:
assert b'id=secret' not in cass.requests[0].body
"""
url = httpbin_both.url + "/post"
cass_loc = str(tmpdir.join("filter_post_params.yaml"))
with vcr.use_cassette(cass_loc, filter_post_data_parameters=["key"]) as cass:
requests.post(url, data={"key": "value"})
with vcr.use_cassette(cass_loc, filter_post_data_parameters=["key"]) as cass:
assert b"key=value" not in cass.requests[0].body
def test_post_unicode_match_on_body(tmpdir, httpbin_both):
"""Ensure that matching on POST body that contains Unicode characters works."""
data = {"key1": "value1", "●‿●": "٩(●̮̮̃•̃)۶"}
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml")), additional_matchers=("body",)):
req1 = requests.post(url, data).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml")), additional_matchers=("body",)):
req2 = requests.post(url, data).content
assert req1 == req2
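# Hedged sketch (not part of the original suite): makes the record-then-replay
# contract that the tests above rely on explicit; the cassette name is illustrative.
def test_record_then_replay_sketch(tmpdir, httpbin_both):
    """First pass records to the cassette; second pass replays from it."""
    url = httpbin_both + "/"
    with vcr.use_cassette(str(tmpdir.join("replay_sketch.yaml"))) as cass:
        body = requests.get(url).content
        assert cass.play_count == 0  # recording, nothing replayed yet
    with vcr.use_cassette(str(tmpdir.join("replay_sketch.yaml"))) as cass:
        assert requests.get(url).content == body
        assert cass.play_count == 1  # served from the cassette this time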
|
import pytest
from yandextank.config_converter.converter import Section
class TestSection(object):
@pytest.mark.parametrize('options, expected', [
(
[
('connection_timeout', '10'),
('ignore_target_lock', '1'),
('some_stupid_comment', 'Here I go!'),
('another_stupid_comment', 'I\'m here!'),
],
{
'ignore_target_lock': True,
'connection_timeout': 10,
'meta': {
'some_stupid_comment': 'Here I go!',
'another_stupid_comment': 'I\'m here!'
}
}
)
])
def test_merged_options(self, options, expected):
assert Section('meta', 'DataUploader', options).merged_options == expected
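        # Hedged reading of the case above: options with known types are coerced
        # ('connection_timeout' -> int, 'ignore_target_lock' -> bool), while
        # unrecognised options are collected under the 'meta' key.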
|
import os
import unittest
import warnings
import numpy as np
import numpy.testing as np_test
from pgmpy.readwrite import XMLBIFReader, XMLBIFWriter
from pgmpy.models import BayesianModel
from pgmpy.factors.discrete import TabularCPD
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
try:
import xml.etree.ElementTree as etree
except ImportError:
warnings.warn("Failed to import ElementTree from any known place")
TEST_FILE = """<?xml version="1.0"?>
<!--
Bayesian network in XMLBIF v0.3 (BayesNet Interchange Format)
Produced by JavaBayes (http://www.cs.cmu.edu/~javabayes/)
Output created Mon Aug 01 10:33:28 AEST 2016
-->
<!-- DTD for the XMLBIF 0.3 format -->
<!DOCTYPE BIF [
<!ELEMENT BIF ( NETWORK )*>
<!ATTLIST BIF VERSION CDATA #REQUIRED>
<!ELEMENT NETWORK ( NAME, ( PROPERTY | VARIABLE | DEFINITION )* )>
<!ELEMENT NAME (#PCDATA)>
<!ELEMENT VARIABLE ( NAME, ( OUTCOME | PROPERTY )* ) >
<!ATTLIST VARIABLE TYPE (nature|decision|utility) "nature">
<!ELEMENT OUTCOME (#PCDATA)>
<!ELEMENT DEFINITION ( FOR | GIVEN | TABLE | PROPERTY )* >
<!ELEMENT FOR (#PCDATA)>
<!ELEMENT GIVEN (#PCDATA)>
<!ELEMENT TABLE (#PCDATA)>
<!ELEMENT PROPERTY (#PCDATA)>
]>
<BIF VERSION="0.3">
<NETWORK>
<NAME>Dog_Problem</NAME>
<!-- Variables -->
<VARIABLE TYPE="nature">
<NAME>kid</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (100, 165)</PROPERTY>
</VARIABLE>
<VARIABLE TYPE="nature">
<NAME>light_on</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (73, 165)</PROPERTY>
</VARIABLE>
<VARIABLE TYPE="nature">
<NAME>bowel_problem</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (190, 69)</PROPERTY>
</VARIABLE>
<VARIABLE TYPE="nature">
<NAME>dog_out</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (155, 165)</PROPERTY>
</VARIABLE>
<VARIABLE TYPE="nature">
<NAME>hear_bark</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (154, 241)</PROPERTY>
</VARIABLE>
<VARIABLE TYPE="nature">
<NAME>family_out</NAME>
<OUTCOME>true</OUTCOME>
<OUTCOME>false</OUTCOME>
<PROPERTY>position = (112, 69)</PROPERTY>
</VARIABLE>
<!-- Probability distributions -->
<DEFINITION>
<FOR>kid</FOR>
<TABLE>0.3 0.7 </TABLE>
</DEFINITION>
<DEFINITION>
<FOR>light_on</FOR>
<GIVEN>family_out</GIVEN>
<TABLE>0.6 0.4 0.05 0.95 </TABLE>
</DEFINITION>
<DEFINITION>
<FOR>bowel_problem</FOR>
<TABLE>0.01 0.99 </TABLE>
</DEFINITION>
<DEFINITION>
<FOR>dog_out</FOR>
<GIVEN>bowel_problem</GIVEN>
<GIVEN>family_out</GIVEN>
<TABLE>0.99 0.01 0.97 0.03 0.9 0.1 0.3 0.7 </TABLE>
</DEFINITION>
<DEFINITION>
<FOR>hear_bark</FOR>
<GIVEN>dog_out</GIVEN>
<TABLE>0.7 0.3 0.01 0.99 </TABLE>
</DEFINITION>
<DEFINITION>
<FOR>family_out</FOR>
<TABLE>0.15 0.85 </TABLE>
</DEFINITION>
</NETWORK>
</BIF>"""
class TestXMLBIFReaderMethods(unittest.TestCase):
def setUp(self):
self.reader = XMLBIFReader(string=TEST_FILE)
def test_get_variables(self):
var_expected = [
"kid",
"light_on",
"bowel_problem",
"dog_out",
"hear_bark",
"family_out",
]
self.assertListEqual(self.reader.variables, var_expected)
def test_get_states(self):
states_expected = {
"bowel_problem": ["true", "false"],
"dog_out": ["true", "false"],
"family_out": ["true", "false"],
"hear_bark": ["true", "false"],
"kid": ["true", "false"],
"light_on": ["true", "false"],
}
states = self.reader.variable_states
for variable in states_expected:
self.assertListEqual(states_expected[variable], states[variable])
def test_get_parents(self):
parents_expected = {
"bowel_problem": [],
"dog_out": ["bowel_problem", "family_out"],
"family_out": [],
"hear_bark": ["dog_out"],
"kid": [],
"light_on": ["family_out"],
}
parents = self.reader.variable_parents
for variable in parents_expected:
self.assertListEqual(parents_expected[variable], parents[variable])
def test_get_edges(self):
edges_expected = [
["family_out", "dog_out"],
["bowel_problem", "dog_out"],
["family_out", "light_on"],
["dog_out", "hear_bark"],
]
self.assertListEqual(sorted(self.reader.edge_list), sorted(edges_expected))
def test_get_values(self):
cpd_expected = {
"bowel_problem": np.array([[0.01], [0.99]]),
"dog_out": np.array([[0.99, 0.97, 0.9, 0.3], [0.01, 0.03, 0.1, 0.7]]),
"family_out": np.array([[0.15], [0.85]]),
"hear_bark": np.array([[0.7, 0.01], [0.3, 0.99]]),
"kid": np.array([[0.3], [0.7]]),
"light_on": np.array([[0.6, 0.05], [0.4, 0.95]]),
}
cpd = self.reader.variable_CPD
for variable in cpd_expected:
np_test.assert_array_equal(cpd_expected[variable], cpd[variable])
def test_get_property(self):
property_expected = {
"bowel_problem": ["position = (190, 69)"],
"dog_out": ["position = (155, 165)"],
"family_out": ["position = (112, 69)"],
"hear_bark": ["position = (154, 241)"],
"kid": ["position = (100, 165)"],
"light_on": ["position = (73, 165)"],
}
prop = self.reader.variable_property
for variable in property_expected:
self.assertListEqual(property_expected[variable], prop[variable])
def test_model(self):
self.reader.get_model().check_model()
def tearDown(self):
del self.reader
class TestXMLBIFReaderMethodsFile(unittest.TestCase):
def setUp(self):
with open("dog_problem.xml", "w") as fout:
fout.write(TEST_FILE)
self.reader = XMLBIFReader("dog_problem.xml")
def test_get_variables(self):
var_expected = [
"kid",
"light_on",
"bowel_problem",
"dog_out",
"hear_bark",
"family_out",
]
self.assertListEqual(self.reader.variables, var_expected)
def test_get_states(self):
states_expected = {
"bowel_problem": ["true", "false"],
"dog_out": ["true", "false"],
"family_out": ["true", "false"],
"hear_bark": ["true", "false"],
"kid": ["true", "false"],
"light_on": ["true", "false"],
}
states = self.reader.variable_states
for variable in states_expected:
self.assertListEqual(states_expected[variable], states[variable])
def test_get_parents(self):
parents_expected = {
"bowel_problem": [],
"dog_out": ["bowel_problem", "family_out"],
"family_out": [],
"hear_bark": ["dog_out"],
"kid": [],
"light_on": ["family_out"],
}
parents = self.reader.variable_parents
for variable in parents_expected:
self.assertListEqual(parents_expected[variable], parents[variable])
def test_get_edges(self):
edges_expected = [
["family_out", "dog_out"],
["bowel_problem", "dog_out"],
["family_out", "light_on"],
["dog_out", "hear_bark"],
]
self.assertListEqual(sorted(self.reader.edge_list), sorted(edges_expected))
def test_get_values(self):
cpd_expected = {
"bowel_problem": np.array([[0.01], [0.99]]),
"dog_out": np.array([[0.99, 0.97, 0.9, 0.3], [0.01, 0.03, 0.1, 0.7]]),
"family_out": np.array([[0.15], [0.85]]),
"hear_bark": np.array([[0.7, 0.01], [0.3, 0.99]]),
"kid": np.array([[0.3], [0.7]]),
"light_on": np.array([[0.6, 0.05], [0.4, 0.95]]),
}
cpd = self.reader.variable_CPD
for variable in cpd_expected:
np_test.assert_array_equal(cpd_expected[variable], cpd[variable])
def test_get_property(self):
property_expected = {
"bowel_problem": ["position = (190, 69)"],
"dog_out": ["position = (155, 165)"],
"family_out": ["position = (112, 69)"],
"hear_bark": ["position = (154, 241)"],
"kid": ["position = (100, 165)"],
"light_on": ["position = (73, 165)"],
}
prop = self.reader.variable_property
for variable in property_expected:
self.assertListEqual(property_expected[variable], prop[variable])
def test_model(self):
self.reader.get_model().check_model()
def tearDown(self):
del self.reader
os.remove("dog_problem.xml")
class TestXMLBIFWriterMethodsString(unittest.TestCase):
def setUp(self):
reader = XMLBIFReader(string=TEST_FILE)
self.expected_model = reader.get_model()
self.writer = XMLBIFWriter(self.expected_model)
self.model_stateless = BayesianModel(
[("D", "G"), ("I", "G"), ("G", "L"), ("I", "S")]
)
self.cpd_d = TabularCPD(variable="D", variable_card=2, values=[[0.6], [0.4]])
self.cpd_i = TabularCPD(variable="I", variable_card=2, values=[[0.7], [0.3]])
self.cpd_g = TabularCPD(
variable="G",
variable_card=3,
values=[
[0.3, 0.05, 0.9, 0.5],
[0.4, 0.25, 0.08, 0.3],
[0.3, 0.7, 0.02, 0.2],
],
evidence=["I", "D"],
evidence_card=[2, 2],
)
self.cpd_l = TabularCPD(
variable="L",
variable_card=2,
values=[[0.1, 0.4, 0.99], [0.9, 0.6, 0.01]],
evidence=["G"],
evidence_card=[3],
)
self.cpd_s = TabularCPD(
variable="S",
variable_card=2,
values=[[0.95, 0.2], [0.05, 0.8]],
evidence=["I"],
evidence_card=[2],
)
self.model_stateless.add_cpds(
self.cpd_d, self.cpd_i, self.cpd_g, self.cpd_l, self.cpd_s
)
self.writer_stateless = XMLBIFWriter(self.model_stateless)
def test_write_xmlbif_statefull(self):
self.writer.write_xmlbif("dog_problem_output.xbif")
with open("dog_problem_output.xbif", "r") as f:
file_text = f.read()
reader = XMLBIFReader(string=file_text)
model = reader.get_model(state_name_type=str)
        self.assert_models_equivalent(self.expected_model, model)
os.remove("dog_problem_output.xbif")
def test_write_xmlbif_stateless(self):
self.writer_stateless.write_xmlbif("grade_problem_output.xbif")
with open("grade_problem_output.xbif", "r") as f:
reader = XMLBIFReader(f)
model = reader.get_model(state_name_type=int)
        self.assert_models_equivalent(self.model_stateless, model)
self.assertDictEqual(
{
"D": [0, 1],
},
model.get_cpds("D").state_names,
)
os.remove("grade_problem_output.xbif")
    def assert_models_equivalent(self, expected, got):
self.assertSetEqual(set(expected.nodes()), set(got.nodes()))
for node in expected.nodes():
self.assertListEqual(
sorted(expected.get_parents(node)), sorted(got.get_parents(node))
)
cpds_expected = expected.get_cpds(node=node)
cpds_got = got.get_cpds(node=node)
self.assertEqual(cpds_expected, cpds_got)
|
from ... import event
from .. import Widget
perf_counter = None # exists in PScript, time.perf_counter only in Python 3.3+
# todo: make it easy to enable high-res aa
class CanvasWidget(Widget):
""" A widget that provides an HTML5 canvas. The canvas is scaled with
the available space. Use ``self.node.getContext('2d')`` or
``self.node.getContext('webgl')`` in the ``init()`` method to get
    a context to perform the actual drawing.
The ``node`` of this widget is a
`<canvas> <https://developer.mozilla.org/docs/Web/HTML/Element/canvas>`_
wrapped in a `<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_
(the ``outernode``) to handle sizing.
"""
DEFAULT_MIN_SIZE = 50, 50
CSS = """
.flx-CanvasWidget {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.flx-CanvasWidget > canvas {
        /* Use absolute positioning so that the canvas does not
         * force a size on the container div. */
position: absolute;
}
"""
capture_wheel = event.BoolProp(False, settable=True, doc="""
        Whether the wheel event is "captured", i.e. not propagated to cause
        scrolling of the parent widget (or page). If True, no scrolling must
        have been performed outside of the widget for about half a second in
        order for the widget to capture scroll events.
""")
def _create_dom(self):
global window
outernode = window.document.createElement('div')
innernode = window.document.createElement('canvas')
innernode.id = self.id + '-canvas'
outernode.appendChild(innernode)
# Disable context menu so we can handle RMB clicks
        # Firefox is particularly stubborn with Shift+RMB, and RMB dbl click
for ev_name in ('contextmenu', 'click', 'dblclick'):
self._addEventListener(window.document, ev_name,
self._prevent_default_event, 0)
# If the canvas uses the wheel event for something, you'd want to
# disable browser-scroll when the mouse is over the canvas. But
# when you scroll down a page and the cursor comes over the canvas
# because of that, we don't want the canvas to capture too eagerly.
# This code only captures if there has not been scrolled elsewhere
# for about half a second.
def wheel_behavior(e):
id, t0 = window.flexx._wheel_timestamp
t1 = perf_counter()
if (t1 - t0) < 0.5:
window.flexx._wheel_timestamp = id, t1 # keep scrolling
else:
window.flexx._wheel_timestamp = e.target.id, t1 # new scroll
if not window.flexx._wheel_timestamp:
window.flexx._wheel_timestamp = 0, ''
self._addEventListener(window.document, 'wheel', wheel_behavior, 0)
return outernode, innernode
def _prevent_default_event(self, e):
""" Prevent the default action of an event unless all modifier
keys (shift, ctrl, alt) are pressed down.
"""
if e.target is self.node:
if not (e.altKey is True and e.ctrlKey is True and e.shiftKey is True):
e.preventDefault()
def _create_pointer_event(self, e):
# In a canvas, prevent browser zooming and the like
if e.type.startswith('touch'):
e.preventDefault()
return super()._create_pointer_event(e)
@event.emitter
def pointer_wheel(self, e):
global window
if self.capture_wheel <= 0:
return super().pointer_wheel(e) # normal behavior
elif window.flexx._wheel_timestamp[0] == self.node.id:
e.preventDefault()
return super().pointer_wheel(e)
@event.reaction
def _update_canvas_size(self, *events):
size = self.size
if size[0] or size[1]:
self.node.width = size[0]
self.node.height = size[1]
self.node.style.width = size[0] + 'px'
self.node.style.height = size[1] + 'px'
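# Hedged usage sketch (not part of the module): a minimal subclass following
# the pattern described in the class docstring. set_capture_wheel() is the
# setter that the settable ``capture_wheel`` property generates.
class ExampleCanvas(CanvasWidget):
    def init(self):
        self.set_capture_wheel(True)  # keep wheel events inside the canvas
        ctx = self.node.getContext('2d')
        ctx.fillStyle = '#48c'
        ctx.fillRect(10, 10, 40, 40)  # draw a small square to show the context works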
|
from __future__ import absolute_import
import unittest
from .common_imports import (
etree, BytesIO, _bytes, HelperTestCase, fileInTestDir, make_doctest, skipif
)
try:
import rnc2rng
except ImportError:
rnc2rng = None
class ETreeRelaxNGTestCase(HelperTestCase):
def test_relaxng(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(schema)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.error_log.filter_from_errors())
self.assertTrue(schema.validate(tree_valid)) # repeat valid
self.assertFalse(schema.error_log.filter_from_errors()) # repeat valid
def test_relaxng_stringio(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema_file = BytesIO('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(file=schema_file)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
def test_relaxng_elementtree_error(self):
self.assertRaises(ValueError, etree.RelaxNG, etree.ElementTree())
def test_relaxng_error(self):
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
schema = etree.RelaxNG(schema)
self.assertFalse(schema.validate(tree_invalid))
errors = schema.error_log
self.assertTrue([log for log in errors
if log.level_name == "ERROR"])
self.assertTrue([log for log in errors
if "not expect" in log.message])
def test_relaxng_generic_error(self):
tree_invalid = self.parse('''\
<test>
<reference id="my-ref">This is my unique ref.</reference>
<data ref="my-ref">Valid data</data>
<data ref="myref">Invalid data</data>
</test>
''')
schema = self.parse('''\
<grammar datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes"
xmlns="http://relaxng.org/ns/structure/1.0">
<define name="by-ref">
<data type="IDREF"/>
</define>
<start>
<element name="test">
<zeroOrMore>
<element name="reference">
<attribute name="id">
<data type="ID"/>
</attribute>
<text/>
</element>
</zeroOrMore>
<zeroOrMore>
<element name="data">
<attribute name="ref">
<data type="IDREF"/>
</attribute>
<text/>
</element>
</zeroOrMore>
</element>
</start>
</grammar>
''')
schema = etree.RelaxNG(schema)
self.assertFalse(schema.validate(tree_invalid))
errors = schema.error_log
self.assertTrue(errors)
self.assertTrue([log for log in errors if "IDREF" in log.message])
self.assertTrue([log for log in errors if "myref" in log.message])
def test_relaxng_invalid_schema(self):
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b" />
</zeroOrMore>
</element>
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema2(self):
schema = self.parse('''\
<grammar xmlns="http://relaxng.org/ns/structure/1.0" />
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema3(self):
schema = self.parse('''\
<grammar xmlns="http://relaxng.org/ns/structure/1.0">
<define name="test">
<element name="test"/>
</define>
</grammar>
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_invalid_schema4(self):
# segfault
schema = self.parse('''\
<element name="a" xmlns="mynamespace" />
''')
self.assertRaises(etree.RelaxNGParseError,
etree.RelaxNG, schema)
def test_relaxng_include(self):
        # this will only work if we access the file through a path or
        # file object.
f = open(fileInTestDir('test1.rng'), 'rb')
try:
schema = etree.RelaxNG(file=f)
finally:
f.close()
def test_relaxng_shortcut(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<zeroOrMore>
<element name="b">
<text />
</element>
</zeroOrMore>
</element>
''')
self.assertTrue(tree_valid.relaxng(schema))
self.assertFalse(tree_invalid.relaxng(schema))
def test_multiple_elementrees(self):
tree = self.parse('<a><b>B</b><c>C</c></a>')
schema = etree.RelaxNG( self.parse('''\
<element name="a" xmlns="http://relaxng.org/ns/structure/1.0">
<element name="b">
<text />
</element>
<element name="c">
<text />
</element>
</element>
''') )
self.assertTrue(schema.validate(tree))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertTrue(schema.validate(tree)) # repeat valid
self.assertFalse(schema.error_log.filter_from_errors()) # repeat valid
schema = etree.RelaxNG( self.parse('''\
<element name="b" xmlns="http://relaxng.org/ns/structure/1.0">
<text />
</element>
''') )
c_tree = etree.ElementTree(tree.getroot()[1])
self.assertEqual(self._rootstring(c_tree), _bytes('<c>C</c>'))
self.assertFalse(schema.validate(c_tree))
self.assertTrue(schema.error_log.filter_from_errors())
b_tree = etree.ElementTree(tree.getroot()[0])
self.assertEqual(self._rootstring(b_tree), _bytes('<b>B</b>'))
self.assertTrue(schema.validate(b_tree))
self.assertFalse(schema.error_log.filter_from_errors())
class RelaxNGCompactTestCase(HelperTestCase):
pytestmark = skipif('rnc2rng is None')
def test_relaxng_compact(self):
tree_valid = self.parse('<a><b>B</b><c>C</c></a>')
tree_invalid = self.parse('<a><b></b></a>')
schema = etree.RelaxNG(file=fileInTestDir('test.rnc'))
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
def test_relaxng_compact_file_obj(self):
with open(fileInTestDir('test.rnc'), 'r') as f:
schema = etree.RelaxNG(file=f)
tree_valid = self.parse('<a><b>B</b><c>C</c></a>')
tree_invalid = self.parse('<a><b></b></a>')
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
def test_relaxng_compact_str(self):
tree_valid = self.parse('<a><b>B</b></a>')
tree_invalid = self.parse('<a><b>X</b></a>')
rnc_str = 'element a { element b { "B" } }'
schema = etree.RelaxNG.from_rnc_string(rnc_str)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ETreeRelaxNGTestCase)])
suite.addTests(
[make_doctest('../../../doc/validation.txt')])
if rnc2rng is not None:
suite.addTests([unittest.makeSuite(RelaxNGCompactTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
from difflib import SequenceMatcher
from kombu import version_info_t
def escape_regex(p, white=''):
# type: (str, str) -> str
"""Escape string for use within a regular expression."""
# what's up with re.escape? that code must be neglected or something
return ''.join(c if c.isalnum() or c in white
else ('\\000' if c == '\000' else '\\' + c)
for c in p)
def fmatch_iter(needle, haystack, min_ratio=0.6):
# type: (str, Sequence[str], float) -> Iterator[Tuple[float, str]]
"""Fuzzy match: iteratively.
Yields:
Tuple: of ratio and key.
"""
for key in haystack:
ratio = SequenceMatcher(None, needle, key).ratio()
if ratio >= min_ratio:
yield ratio, key
def fmatch_best(needle, haystack, min_ratio=0.6):
# type: (str, Sequence[str], float) -> str
"""Fuzzy match - Find best match (scalar)."""
try:
return sorted(
fmatch_iter(needle, haystack, min_ratio), reverse=True,
)[0][1]
except IndexError:
pass
def version_string_as_tuple(s):
# type: (str) -> version_info_t
"""Convert version string to version info tuple."""
v = _unpack_version(*s.split('.'))
# X.Y.3a1 -> (X, Y, 3, 'a1')
if isinstance(v.micro, str):
v = version_info_t(v.major, v.minor, *_splitmicro(*v[2:]))
# X.Y.3a1-40 -> (X, Y, 3, 'a1', '40')
if not v.serial and v.releaselevel and '-' in v.releaselevel:
v = version_info_t(*list(v[0:3]) + v.releaselevel.split('-'))
return v
def _unpack_version(major, minor=0, micro=0, releaselevel='', serial=''):
# type: (int, int, int, str, str) -> version_info_t
return version_info_t(int(major), int(minor), micro, releaselevel, serial)
def _splitmicro(micro, releaselevel='', serial=''):
# type: (int, str, str) -> Tuple[int, str, str]
for index, char in enumerate(micro):
if not char.isdigit():
break
else:
return int(micro or 0), releaselevel, serial
return int(micro[:index]), micro[index:], serial
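# Hedged demo (not part of the module): exercises the helpers above.
# Expected values are derived by hand from the code.
if __name__ == '__main__':
    assert escape_regex('celery.worker?') == 'celery\\.worker\\?'
    assert fmatch_best('procee', ['proceed', 'process', 'get']) == 'proceed'
    # '4.2.0rc3-build1' -> version_info_t(4, 2, 0, 'rc3', 'build1')
    print(version_string_as_tuple('4.2.0rc3-build1'))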
|
import os
from nikola import __version__
from nikola.plugin_categories import Command
from nikola.utils import get_logger, req_missing, Commands
LOGGER = get_logger('console')
class CommandConsole(Command):
"""Start debugging console."""
name = "console"
shells = ['ipython', 'bpython', 'plain']
doc_purpose = "start an interactive Python console with access to your site"
doc_description = """\
The site engine is accessible as `site` and `nikola_site`, the config file as `conf`, and commands are available as `commands`.
If no console is specified (via -b, -i, or -p), it tries IPython first, then falls back to bpython, and finally to the plain Python console."""
header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site, nikola_site = site engine, commands = nikola commands)"
cmd_options = [
{
'name': 'bpython',
'short': 'b',
'long': 'bpython',
'type': bool,
'default': False,
'help': 'Use bpython',
},
{
'name': 'ipython',
'short': 'i',
            'long': 'ipython',
'type': bool,
'default': False,
'help': 'Use IPython',
},
{
'name': 'plain',
'short': 'p',
'long': 'plain',
'type': bool,
'default': False,
'help': 'Use the plain Python interpreter',
},
{
'name': 'command',
'short': 'c',
'long': 'command',
'type': str,
'default': None,
'help': 'Run a single command',
},
{
'name': 'script',
'short': 's',
'long': 'script',
'type': str,
'default': None,
'help': 'Execute a python script in the console context',
},
]
def ipython(self, willful=True):
"""Run an IPython shell."""
try:
import IPython
except ImportError:
if willful:
req_missing(['IPython'], 'use the IPython console')
raise # That’s how _execute knows whether to try something else.
else:
site = self.context['site'] # NOQA
nikola_site = self.context['nikola_site'] # NOQA
conf = self.context['conf'] # NOQA
commands = self.context['commands'] # NOQA
IPython.embed(header=self.header.format('IPython'))
def bpython(self, willful=True):
"""Run a bpython shell."""
try:
import bpython
except ImportError:
if willful:
req_missing(['bpython'], 'use the bpython console')
raise # That’s how _execute knows whether to try something else.
else:
bpython.embed(banner=self.header.format('bpython'), locals_=self.context)
def plain(self, willful=True):
"""Run a plain Python shell."""
import code
try:
import readline
except ImportError:
pass
else:
import rlcompleter
readline.set_completer(rlcompleter.Completer(self.context).complete)
readline.parse_and_bind("tab:complete")
pythonrc = os.environ.get("PYTHONSTARTUP")
if pythonrc and os.path.isfile(pythonrc):
try:
execfile(pythonrc) # NOQA
except NameError:
pass
code.interact(local=self.context, banner=self.header.format('Python'))
def _execute(self, options, args):
"""Start the console."""
self.site.scan_posts()
# Create nice object with all commands:
self.site.commands = Commands(self.site.doit, self.config, self._doitargs)
self.context = {
'conf': self.site.config,
'site': self.site,
'nikola_site': self.site,
'commands': self.site.commands,
}
if options['command']:
exec(options['command'], None, self.context)
elif options['script']:
with open(options['script']) as inf:
code = compile(inf.read(), options['script'], 'exec')
exec(code, None, self.context)
elif options['bpython']:
self.bpython(True)
elif options['ipython']:
self.ipython(True)
elif options['plain']:
self.plain(True)
else:
for shell in self.shells:
try:
return getattr(self, shell)(False)
except ImportError:
pass
raise ImportError
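# Hedged CLI sketch (illustrative): the -c option runs a single statement
# inside the console context, e.g.
#   nikola console -c "print(conf['BLOG_TITLE'])"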
|
from django.test import TestCase
from zinnia import flags
from zinnia.flags import get_user_flagger
from zinnia.tests.utils import skip_if_custom_user
@skip_if_custom_user
class FlagsTestCase(TestCase):
"""Test cases for zinnia.flags"""
def setUp(self):
self.clear_user_flagger_cache()
def clear_user_flagger_cache(self):
get_user_flagger.cache_clear()
def test_get_user_flagger_cache(self):
get_user_flagger()
with self.assertNumQueries(0):
get_user_flagger()
def test_get_user_flagger_does_not_exist(self):
original_user_id = flags.COMMENT_FLAG_USER_ID
flags.COMMENT_FLAG_USER_ID = 4242
flagger = get_user_flagger()
self.assertEqual(flagger.username, 'Zinnia-Flagger')
flags.COMMENT_FLAG_USER_ID = original_user_id
    def test_get_user_flagger_does_not_exist_twice_issue_245(self):
original_user_id = flags.COMMENT_FLAG_USER_ID
flags.COMMENT_FLAG_USER_ID = None
flagger = get_user_flagger()
self.assertEqual(flagger.username, 'Zinnia-Flagger')
self.clear_user_flagger_cache()
flagger = get_user_flagger()
self.assertEqual(flagger.username, 'Zinnia-Flagger')
flags.COMMENT_FLAG_USER_ID = original_user_id
|
import os.path as op
import numpy as np
from numpy.testing import assert_allclose
import pytest
from mne.datasets import testing
from mne import find_events, Epochs, pick_types
from mne.io import read_raw_fif
from mne.io.constants import FIFF
from mne.utils import run_tests_if_main
from mne.label import read_label
from mne.minimum_norm import (read_inverse_operator, apply_inverse_epochs,
prepare_inverse_operator, INVERSE_METHODS)
from mne.minimum_norm.time_frequency import (source_band_induced_power,
source_induced_power,
compute_source_psd,
compute_source_psd_epochs)
from mne.time_frequency.multitaper import psd_array_multitaper
data_path = testing.data_path(download=False)
fname_inv = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-meg-inv.fif')
fname_data = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw.fif')
fname_label = op.join(data_path, 'MEG', 'sample', 'labels', 'Aud-lh.label')
@testing.requires_testing_data
@pytest.mark.parametrize('method', INVERSE_METHODS)
def test_tfr_with_inverse_operator(method):
"""Test time freq with MNE inverse computation."""
tmin, tmax, event_id = -0.2, 0.5, 1
# Setup for reading the raw data
raw = read_raw_fif(fname_data)
events = find_events(raw, stim_channel='STI 014')
inv = read_inverse_operator(fname_inv)
inv = prepare_inverse_operator(inv, nave=1, lambda2=1. / 9., method=method)
raw.info['bads'] += ['MEG 2443', 'EEG 053'] # bads + 2 more
# picks MEG gradiometers
picks = pick_types(raw.info, meg=True, eeg=False, eog=True,
stim=False, exclude='bads')
# Load condition 1
event_id = 1
events3 = events[:3] # take 3 events to keep the computation time low
epochs = Epochs(raw, events3, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), reject=dict(grad=4000e-13, eog=150e-6),
preload=True)
# Compute a source estimate per frequency band
bands = dict(alpha=[10, 10])
label = read_label(fname_label)
# XXX someday we should refactor this so that you don't have to pass
# method -- maybe `prepare_inverse_operator` should add a `method`
# to it and when `prepared=True` the value passed in can be ignored
# (or better, default method=None means "dSPM if unprepared" and if they
# actually pass a value, we check against `inv['method']`)
stcs = source_band_induced_power(epochs, inv, bands, method=method,
n_cycles=2, use_fft=False, pca=True,
label=label, prepared=True)
stc = stcs['alpha']
assert len(stcs) == len(list(bands.keys()))
assert np.all(stc.data > 0)
assert_allclose(stc.times, epochs.times, atol=1e-6)
stcs_no_pca = source_band_induced_power(epochs, inv, bands, method=method,
n_cycles=2, use_fft=False,
pca=False, label=label,
prepared=True)
assert_allclose(stcs['alpha'].data, stcs_no_pca['alpha'].data)
# Compute a source estimate per frequency band
epochs = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks,
baseline=(None, 0), reject=dict(grad=4000e-13, eog=150e-6),
preload=True)
freqs = np.arange(7, 30, 2) # define frequencies of interest
power, phase_lock = source_induced_power(
epochs, inv, freqs, label, baseline=(-0.1, 0), baseline_mode='percent',
n_cycles=2, n_jobs=1, method=method, prepared=True)
assert np.all(phase_lock > 0)
assert np.all(phase_lock <= 1)
assert 5 < np.max(power) < 7
@testing.requires_testing_data
@pytest.mark.parametrize('method', INVERSE_METHODS)
@pytest.mark.parametrize('pick_ori', (None, 'normal')) # XXX vector someday?
def test_source_psd(method, pick_ori):
"""Test source PSD computation from raw."""
raw = read_raw_fif(fname_data)
raw.crop(0, 5).load_data()
inverse_operator = read_inverse_operator(fname_inv)
fmin, fmax = 40, 65 # Hz
n_fft = 512
assert inverse_operator['source_ori'] == FIFF.FIFFV_MNE_FREE_ORI
stc, ev = compute_source_psd(
raw, inverse_operator, lambda2=1. / 9., method=method,
fmin=fmin, fmax=fmax, pick_ori=pick_ori, n_fft=n_fft,
overlap=0., return_sensor=True, dB=True)
assert ev.data.shape == (len(ev.info['ch_names']), len(stc.times))
assert ev.times[0] >= fmin
assert ev.times[-1] <= fmax
# Time max at line frequency (60 Hz in US)
assert 58 <= ev.times[np.argmax(np.sum(ev.data, axis=0))] <= 61
assert ev.nave == 2
assert stc.shape[0] == inverse_operator['nsource']
assert stc.times[0] >= fmin
assert stc.times[-1] <= fmax
assert 58 <= stc.times[np.argmax(np.sum(stc.data, axis=0))] <= 61
if method in ('sLORETA', 'dSPM'):
stc_dspm = stc
stc_mne, _ = compute_source_psd(
raw, inverse_operator, lambda2=1. / 9., method='MNE',
fmin=fmin, fmax=fmax, pick_ori=pick_ori, n_fft=n_fft,
overlap=0., return_sensor=True, dB=True)
# normalize each source point by its power after undoing the dB
stc_dspm.data = 10 ** (stc_dspm.data / 10.)
stc_dspm /= stc_dspm.mean()
stc_mne.data = 10 ** (stc_mne.data / 10.)
stc_mne /= stc_mne.mean()
assert_allclose(stc_dspm.data, stc_mne.data, atol=1e-4)
@testing.requires_testing_data
@pytest.mark.parametrize('method', INVERSE_METHODS)
def test_source_psd_epochs(method):
"""Test multi-taper source PSD computation in label from epochs."""
raw = read_raw_fif(fname_data)
inverse_operator = read_inverse_operator(fname_inv)
label = read_label(fname_label)
event_id, tmin, tmax = 1, -0.2, 0.5
lambda2 = 1. / 9.
bandwidth = 8.
fmin, fmax = 0, 100
picks = pick_types(raw.info, meg=True, eeg=False, stim=True,
ecg=True, eog=True, include=['STI 014'],
exclude='bads')
reject = dict(grad=4000e-13, mag=4e-12, eog=150e-6)
events = find_events(raw, stim_channel='STI 014')
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), reject=reject)
# only look at one epoch
epochs.drop_bad()
one_epochs = epochs[:1]
inv = prepare_inverse_operator(inverse_operator, nave=1,
lambda2=1. / 9., method="dSPM")
# return list
stc_psd = compute_source_psd_epochs(one_epochs, inv,
lambda2=lambda2, method=method,
pick_ori="normal", label=label,
bandwidth=bandwidth,
fmin=fmin, fmax=fmax,
prepared=True)[0]
# return generator
stcs = compute_source_psd_epochs(one_epochs, inv,
lambda2=lambda2, method=method,
pick_ori="normal", label=label,
bandwidth=bandwidth,
fmin=fmin, fmax=fmax,
return_generator=True,
prepared=True)
for stc in stcs:
stc_psd_gen = stc
assert_allclose(stc_psd.data, stc_psd_gen.data, atol=1e-7)
# compare with direct computation
stc = apply_inverse_epochs(one_epochs, inv,
lambda2=lambda2, method=method,
pick_ori="normal", label=label,
prepared=True)[0]
sfreq = epochs.info['sfreq']
psd, freqs = psd_array_multitaper(stc.data, sfreq=sfreq,
bandwidth=bandwidth, fmin=fmin,
fmax=fmax)
assert_allclose(psd, stc_psd.data, atol=1e-7)
assert_allclose(freqs, stc_psd.times)
# Check corner cases caused by tiny bandwidth
with pytest.raises(ValueError, match='use a value of at least'):
compute_source_psd_epochs(
one_epochs, inv, lambda2=lambda2, method=method,
pick_ori="normal", label=label, bandwidth=0.01, low_bias=True,
fmin=fmin, fmax=fmax, return_generator=False, prepared=True)
run_tests_if_main()
|
from flask import Flask, current_app, jsonify
from sqlalchemy.sql import sqltypes
# Application imports
from sandman2.exception import (
BadRequestException,
ForbiddenException,
NotFoundException,
NotAcceptableException,
NotImplementedException,
ConflictException,
ServerErrorException,
ServiceUnavailableException,
)
from sandman2.service import Service
from sandman2.model import db, Model, AutomapModel
from sandman2.admin import CustomAdminView
from flask_admin import Admin
from flask_httpauth import HTTPBasicAuth
# Shared HTTP basic-auth handler, available to applications that protect
# their endpoints
auth = HTTPBasicAuth()
def get_app(
database_uri,
exclude_tables=None,
user_models=None,
reflect_all=True,
read_only=False,
schema=None):
"""Return an application instance connected to the database described in
*database_uri*.
:param str database_uri: The URI connection string for the database
:param list exclude_tables: A list of tables to exclude from the API
service
:param list user_models: A list of user-defined models to include in the
API service
:param bool reflect_all: Include all database tables in the API service
:param bool read_only: Only allow HTTP GET commands for all endpoints
:param str schema: Use the specified named schema instead of the default
"""
app = Flask('sandman2')
app.config['SQLALCHEMY_DATABASE_URI'] = database_uri
app.config['SANDMAN2_READ_ONLY'] = read_only
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.classes = []
db.init_app(app)
admin = Admin(app, base_template='layout.html', template_mode='bootstrap3')
_register_error_handlers(app)
if user_models:
with app.app_context():
_register_user_models(user_models, admin, schema=schema)
elif reflect_all:
with app.app_context():
_reflect_all(exclude_tables, admin, read_only, schema=schema)
@app.route('/')
def index():
"""Return a list of routes to the registered classes."""
routes = {}
for cls in app.classes:
routes[cls.__model__.__name__] = '{}{{/{}}}'.format(
cls.__model__.__url__,
cls.__model__.primary_key())
return jsonify(routes)
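    # Hedged example of the index payload for a reflected database
    # (model names are illustrative):
    #   {"Artist": "/artist{/ArtistId}", "Album": "/album{/AlbumId}"}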
return app
def _register_error_handlers(app):
"""Register error-handlers for the application.
:param app: The application instance
"""
@app.errorhandler(BadRequestException)
@app.errorhandler(ForbiddenException)
@app.errorhandler(NotAcceptableException)
@app.errorhandler(NotFoundException)
@app.errorhandler(ConflictException)
@app.errorhandler(ServerErrorException)
@app.errorhandler(NotImplementedException)
@app.errorhandler(ServiceUnavailableException)
def handle_application_error(error): # pylint:disable=unused-variable
"""Handler used to send JSON error messages rather than default HTML
ones."""
response = jsonify(error.to_dict())
response.status_code = error.code
return response
def register_service(cls, primary_key_type):
"""Register an API service endpoint.
:param cls: The class to register
:param str primary_key_type: The type (as a string) of the primary_key
field
"""
view_func = cls.as_view(cls.__name__.lower()) # pylint: disable=no-member
methods = set(cls.__model__.__methods__) # pylint: disable=no-member
if 'GET' in methods: # pylint: disable=no-member
current_app.add_url_rule(
cls.__model__.__url__ + '/', defaults={'resource_id': None},
view_func=view_func,
methods=['GET'])
current_app.add_url_rule(
'{resource}/meta'.format(resource=cls.__model__.__url__),
view_func=view_func,
methods=['GET'])
if 'POST' in methods: # pylint: disable=no-member
current_app.add_url_rule(
cls.__model__.__url__ + '/', view_func=view_func, methods=['POST', ])
current_app.add_url_rule(
'{resource}/<{pk_type}:{pk}>'.format(
resource=cls.__model__.__url__,
pk='resource_id', pk_type=primary_key_type),
view_func=view_func,
methods=methods - {'POST'})
current_app.classes.append(cls)
def _reflect_all(exclude_tables=None, admin=None, read_only=False, schema=None):
"""Register all tables in the given database as services.
:param list exclude_tables: A list of tables to exclude from the API
service
"""
AutomapModel.prepare( # pylint:disable=maybe-no-member
db.engine, reflect=True, schema=schema)
for cls in AutomapModel.classes:
if exclude_tables and cls.__table__.name in exclude_tables:
continue
if read_only:
cls.__methods__ = {'GET'}
register_model(cls, admin)
def register_model(cls, admin=None):
"""Register *cls* to be included in the API service
:param cls: Class deriving from :class:`sandman2.models.Model`
"""
cls.__url__ = '/{}'.format(cls.__name__.lower())
service_class = type(
cls.__name__ + 'Service',
(Service,),
{
'__model__': cls,
})
# inspect primary key
cols = list(cls().__table__.primary_key.columns)
# composite keys not supported (yet)
primary_key_type = 'string'
if len(cols) == 1:
col_type = cols[0].type
# types defined at http://flask.pocoo.org/docs/0.10/api/#url-route-registrations
if isinstance(col_type, sqltypes.String):
primary_key_type = 'string'
elif isinstance(col_type, sqltypes.Integer):
primary_key_type = 'int'
elif isinstance(col_type, sqltypes.Numeric):
primary_key_type = 'float'
# registration
register_service(service_class, primary_key_type)
if admin is not None:
admin.add_view(CustomAdminView(cls, db.session))
def _register_user_models(user_models, admin=None, schema=None):
"""Register any user-defined models with the API Service.
:param list user_models: A list of user-defined models to include in the
API service
"""
if any([issubclass(cls, AutomapModel) for cls in user_models]):
AutomapModel.prepare( # pylint:disable=maybe-no-member
db.engine, reflect=True, schema=schema)
for user_model in user_models:
register_model(user_model, admin)
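# Hedged usage sketch (not part of the module): serve a read-only API over a
# local SQLite database; the URI and port are illustrative.
if __name__ == '__main__':
    example_app = get_app('sqlite:///chinook.db', read_only=True)
    example_app.run(port=5000)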
|
from functools import partial
import re
from Plugwise_Smile.Smile import Smile
import jsonpickle
import pytest
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
def _read_json(environment, call):
"""Undecode the json data."""
fixture = load_fixture(f"plugwise/{environment}/{call}.json")
return jsonpickle.decode(fixture)
@pytest.fixture(name="mock_smile")
def mock_smile():
"""Create a Mock Smile for testing exceptions."""
with patch(
"homeassistant.components.plugwise.config_flow.Smile",
) as smile_mock:
smile_mock.InvalidAuthentication = Smile.InvalidAuthentication
smile_mock.ConnectionFailedError = Smile.ConnectionFailedError
smile_mock.return_value.connect.return_value = True
yield smile_mock.return_value
@pytest.fixture(name="mock_smile_unauth")
def mock_smile_unauth(aioclient_mock: AiohttpClientMocker) -> None:
"""Mock the Plugwise Smile unauthorized for Home Assistant."""
aioclient_mock.get(re.compile(".*"), status=401)
aioclient_mock.put(re.compile(".*"), status=401)
@pytest.fixture(name="mock_smile_error")
def mock_smile_error(aioclient_mock: AiohttpClientMocker) -> None:
"""Mock the Plugwise Smile server failure for Home Assistant."""
aioclient_mock.get(re.compile(".*"), status=500)
aioclient_mock.put(re.compile(".*"), status=500)
@pytest.fixture(name="mock_smile_notconnect")
def mock_smile_notconnect():
"""Mock the Plugwise Smile general connection failure for Home Assistant."""
with patch("homeassistant.components.plugwise.gateway.Smile") as smile_mock:
smile_mock.InvalidAuthentication = Smile.InvalidAuthentication
smile_mock.ConnectionFailedError = Smile.ConnectionFailedError
smile_mock.PlugwiseError = Smile.PlugwiseError
smile_mock.return_value.connect.side_effect = AsyncMock(return_value=False)
yield smile_mock.return_value
def _get_device_data(chosen_env, device_id):
"""Mock return data for specific devices."""
return _read_json(chosen_env, "get_device_data/" + device_id)
@pytest.fixture(name="mock_smile_adam")
def mock_smile_adam():
"""Create a Mock Adam environment for testing exceptions."""
chosen_env = "adam_multiple_devices_per_zone"
with patch("homeassistant.components.plugwise.gateway.Smile") as smile_mock:
smile_mock.InvalidAuthentication = Smile.InvalidAuthentication
smile_mock.ConnectionFailedError = Smile.ConnectionFailedError
smile_mock.XMLDataMissingError = Smile.XMLDataMissingError
smile_mock.return_value.gateway_id = "fe799307f1624099878210aa0b9f1475"
smile_mock.return_value.heater_id = "90986d591dcd426cae3ec3e8111ff730"
smile_mock.return_value.smile_version = "3.0.15"
smile_mock.return_value.smile_type = "thermostat"
smile_mock.return_value.smile_hostname = "smile98765"
smile_mock.return_value.connect.side_effect = AsyncMock(return_value=True)
smile_mock.return_value.full_update_device.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.single_master_thermostat.side_effect = Mock(
return_value=False
)
smile_mock.return_value.set_schedule_state.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.set_preset.side_effect = AsyncMock(return_value=True)
smile_mock.return_value.set_temperature.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.set_relay_state.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.get_all_devices.return_value = _read_json(
chosen_env, "get_all_devices"
)
smile_mock.return_value.get_device_data.side_effect = partial(
_get_device_data, chosen_env
)
yield smile_mock.return_value
@pytest.fixture(name="mock_smile_anna")
def mock_smile_anna():
"""Create a Mock Anna environment for testing exceptions."""
chosen_env = "anna_heatpump"
with patch("homeassistant.components.plugwise.gateway.Smile") as smile_mock:
smile_mock.InvalidAuthentication = Smile.InvalidAuthentication
smile_mock.ConnectionFailedError = Smile.ConnectionFailedError
smile_mock.XMLDataMissingError = Smile.XMLDataMissingError
smile_mock.return_value.gateway_id = "015ae9ea3f964e668e490fa39da3870b"
smile_mock.return_value.heater_id = "1cbf783bb11e4a7c8a6843dee3a86927"
smile_mock.return_value.smile_version = "4.0.15"
smile_mock.return_value.smile_type = "thermostat"
smile_mock.return_value.smile_hostname = "smile98765"
smile_mock.return_value.connect.side_effect = AsyncMock(return_value=True)
smile_mock.return_value.full_update_device.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.single_master_thermostat.side_effect = Mock(
return_value=True
)
smile_mock.return_value.set_schedule_state.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.set_preset.side_effect = AsyncMock(return_value=True)
smile_mock.return_value.set_temperature.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.set_relay_state.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.get_all_devices.return_value = _read_json(
chosen_env, "get_all_devices"
)
smile_mock.return_value.get_device_data.side_effect = partial(
_get_device_data, chosen_env
)
yield smile_mock.return_value
@pytest.fixture(name="mock_smile_p1")
def mock_smile_p1():
"""Create a Mock P1 DSMR environment for testing exceptions."""
chosen_env = "p1v3_full_option"
with patch("homeassistant.components.plugwise.gateway.Smile") as smile_mock:
smile_mock.InvalidAuthentication = Smile.InvalidAuthentication
smile_mock.ConnectionFailedError = Smile.ConnectionFailedError
smile_mock.XMLDataMissingError = Smile.XMLDataMissingError
smile_mock.return_value.gateway_id = "e950c7d5e1ee407a858e2a8b5016c8b3"
smile_mock.return_value.heater_id = None
smile_mock.return_value.smile_version = "3.3.9"
smile_mock.return_value.smile_type = "power"
smile_mock.return_value.smile_hostname = "smile98765"
smile_mock.return_value.connect.side_effect = AsyncMock(return_value=True)
smile_mock.return_value.full_update_device.side_effect = AsyncMock(
return_value=True
)
smile_mock.return_value.single_master_thermostat.side_effect = Mock(
return_value=None
)
smile_mock.return_value.get_all_devices.return_value = _read_json(
chosen_env, "get_all_devices"
)
smile_mock.return_value.get_device_data.side_effect = partial(
_get_device_data, chosen_env
)
yield smile_mock.return_value
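# Hedged usage sketch (illustrative only; "async_init_integration" is a
# hypothetical helper, not defined in this file):
#
#   async def test_adam_setup(hass, mock_smile_adam):
#       entry = await async_init_integration(hass, mock_smile_adam)
#       assert mock_smile_adam.gateway_id == "fe799307f1624099878210aa0b9f1475"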
|
import logging
from threading import Thread
from time import sleep
from kalliope.core.SignalModule import SignalModule
from kalliope.core.Cortex import Cortex
from kalliope.core.SynapseLauncher import SynapseLauncher
from kalliope.core.OrderListener import OrderListener
from kalliope import Utils, BrainLoader
from kalliope.core.TriggerLauncher import TriggerLauncher
from transitions import Machine
from kalliope.core.PlayerLauncher import PlayerLauncher
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.HookManager import HookManager
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Order(SignalModule, Thread):
states = ['init',
'starting_trigger',
'unpausing_trigger',
'waiting_for_trigger_callback',
'pausing_trigger',
'start_order_listener',
'waiting_for_order_listener_callback',
'analysing_order']
def __init__(self):
super(SignalModule, self).__init__()
        Thread.__init__(self, name="Order")
Utils.print_info('Starting order signal')
# load settings and brain from singleton
sl = SettingLoader()
self.settings = sl.settings
self.brain = BrainLoader().brain
# keep in memory the order to process
self.order_to_process = None
# get the player instance
self.player_instance = PlayerLauncher.get_player(settings=self.settings)
# save an instance of the trigger
self.trigger_instance = None
self.trigger_callback_called = False
# variable from notifications
        self.skip_trigger = False  # keep the status of the trigger; if True we can skip it in the state machine
self.counter_max_retry = 0 # 0 means disabled
# save the current order listener
self.order_listener = None
self.order_listener_callback_called = False
# Initialize the state machine
self.machine = Machine(model=self, states=Order.states, initial='init', queued=True)
# define transitions
self.machine.add_transition('start_trigger', 'init', 'starting_trigger')
self.machine.add_transition('unpause_trigger', 'analysing_order', 'unpausing_trigger')
self.machine.add_transition('wait_trigger_callback', 'unpausing_trigger', 'waiting_for_trigger_callback')
self.machine.add_transition('pause_trigger', 'waiting_for_trigger_callback', 'pausing_trigger')
self.machine.add_transition('wait_for_order', 'pausing_trigger', 'waiting_for_order_listener_callback')
self.machine.add_transition('analyse_order', 'waiting_for_order_listener_callback', 'analysing_order')
self.machine.add_transition('start_order_listener', 'analysing_order', 'start_order_listener')
self.machine.add_ordered_transitions()
# add method which are called when changing state
self.machine.on_enter_starting_trigger('start_trigger_process')
self.machine.on_enter_unpausing_trigger('unpausing_trigger_process')
self.machine.on_enter_waiting_for_trigger_callback('waiting_for_trigger_callback_thread')
self.machine.on_enter_pausing_trigger('pause_trigger_process')
self.machine.on_enter_start_order_listener('start_order_listener_thread')
self.machine.on_enter_waiting_for_order_listener_callback('waiting_for_order_listener_callback_thread')
self.machine.on_enter_analysing_order('analysing_order_thread')
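        # Resulting flow (summary of the transitions registered above):
        #   init -> starting_trigger -> unpausing_trigger
        #        -> waiting_for_trigger_callback -> pausing_trigger
        #        -> start_order_listener -> waiting_for_order_listener_callback
        #        -> analysing_order -> unpausing_trigger (normal loop)
        #                           -> start_order_listener (when skip_trigger)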
def run(self):
# run hook on_start
self.start_trigger()
def start_trigger_process(self):
"""
This function will start the trigger thread that listen for the hotword
"""
logger.debug("[Order] Entering state: %s" % self.state)
self.trigger_instance = TriggerLauncher.get_trigger(settings=self.settings, callback=self.trigger_callback)
self.trigger_callback_called = False
self.trigger_instance.daemon = True
# Wait that the kalliope trigger is pronounced by the user
self.trigger_instance.start()
HookManager.on_start()
self.next_state()
def unpausing_trigger_process(self):
"""
If the trigger was in pause, this method will unpause it to listen again for the hotword
"""
logger.debug("Entering state: %s" % self.state)
HookManager.on_waiting_for_trigger()
self.trigger_instance.unpause()
self.trigger_callback_called = False
self.next_state()
def waiting_for_trigger_callback_thread(self):
"""
Method to print in debug that the main process is waiting for a trigger detection
"""
logger.debug("[Order] Entering state: %s" % self.state)
if self.settings.options.deaf: # the user asked to deaf inside the deaf neuron
Utils.print_info("Kalliope is deaf")
self.trigger_instance.pause()
else:
Utils.print_info("Waiting for trigger detection")
# this loop is used to keep the main thread alive
while not self.trigger_callback_called:
sleep(0.1)
self.next_state()
def waiting_for_order_listener_callback_thread(self):
"""
Method to print in debug that the main process is waiting for an order to analyse
"""
logger.debug("[Order] Entering state: %s" % self.state)
# this loop is used to keep the main thread alive
while not self.order_listener_callback_called:
sleep(0.1)
# TODO on end listening here
self.next_state()
def trigger_callback(self):
"""
        We have detected the hotword; we can now pause the trigger for a while.
        The user can speak their order out loud during this time.
"""
logger.debug("[Order] Trigger callback called, switching to the next state")
self.trigger_callback_called = True
def pause_trigger_process(self):
"""
        The trigger has been awakened; we pause it
"""
logger.debug("[Order] Entering state: %s" % self.state)
self.trigger_instance.pause()
# if here, then the trigger has been called and paused
HookManager.on_triggered()
self.next_state()
def start_order_listener_thread(self):
"""
Start the STT engine thread
"""
logger.debug("[Order] Entering state: %s" % self.state)
# start listening for an order
HookManager.on_start_listening()
self.order_listener_callback_called = False
self.order_listener = OrderListener(callback=self.order_listener_callback)
self.order_listener.daemon = True
self.order_listener.start()
self.next_state()
def order_listener_callback(self, order):
"""
        Receive an order and try to match it in brain.yml to launch the attached synapses
:param order: the sentence received
:type order: str
"""
logger.debug("[Order] Order listener callback called. Order to process: %s" % order)
HookManager.on_stop_listening()
self.order_to_process = order
self.order_listener_callback_called = True
        # save the last order in Kalliope memory
Cortex.save('kalliope_last_order', order)
def analysing_order_thread(self):
"""
Start the order analyser with the caught order to process
"""
if self.order_to_process is None or self.order_to_process == "":
logger.debug("[Order] No audio caught from analysing_order_thread")
HookManager.on_stt_error()
else:
logger.debug("[Order] order in analysing_order_thread %s" % self.order_to_process)
SynapseLauncher.run_matching_synapse_from_order(self.order_to_process,
self.brain,
self.settings,
is_api_call=False)
if self.skip_trigger:
self.start_order_listener()
else:
self.unpause_trigger()
def on_notification_received(self, notification=None, payload=None):
logger.debug("[Order] received notification, notification: %s, payload: %s" % (notification, payload))
# skip_trigger: change the trigger status
if notification == "skip_trigger":
self.switch_trigger(payload)
if notification == "skip_trigger_max_retry":
self.set_counter_max_retry(payload)
if notification == "skip_trigger_decrease_max_retry":
self.decrease_max_retry()
def set_counter_max_retry(self, payload):
"""
        Set 'counter_max_retry', the number of retries allowed before the skip_trigger flag is automatically switched back to False
:param payload: payload that contains the max_retry counter value to set
"""
if "max_retry" in payload:
if payload["max_retry"] > 0:
self.counter_max_retry = payload["max_retry"]
logger.debug("[Order] max_retry set to %s" % self.counter_max_retry)
def decrease_max_retry(self):
"""
        Decrease the current value of 'counter_max_retry' if it is > 0.
        If the new value reaches 0, the skip_trigger flag is automatically switched to False
"""
logger.debug("[Order] current max_retry: %s" % self.counter_max_retry)
if self.counter_max_retry > 0:
self.counter_max_retry = self.counter_max_retry - 1
logger.debug("[Order] new max_retry value after decrease: %s" % self.counter_max_retry)
if self.counter_max_retry == 0:
logger.debug("[Order] max_retry reached '0'. Set skip_trigger to False")
                # the counter reached 0; we can stop skipping the trigger
self.skip_trigger = False
else:
logger.debug("[Order] cannot decrease max_retry because current value is <= 0")
def switch_trigger(self, payload):
"""
switch the skip_trigger flag
:param payload: payload dict that contains the new status of the skip_trigger flag
"""
if "status" in payload:
status = Utils.str_to_bool(payload["status"])
if status:
logger.debug("[Order] switch signals to True")
self.skip_trigger = True
else:
logger.debug("[Order] switch signals to False")
self.skip_trigger = False
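    # A minimal sketch of the notification payloads handled above (the values
    # shown are illustrative assumptions, not part of this module):
    #   order.on_notification_received("skip_trigger", {"status": "True"})
    #   order.on_notification_received("skip_trigger_max_retry", {"max_retry": 3})
    #   order.on_notification_received("skip_trigger_decrease_max_retry")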
|
from asyncio import run_coroutine_threadsafe
from aiohttp import ClientSession
import my_pypi_package
from homeassistant import config_entries, core
from homeassistant.helpers import config_entry_oauth2_flow
# TODO the following two API examples are based on our suggested best practices
# for libraries using OAuth2 with requests or aiohttp. Delete the one you won't use.
# For more info see the docs at <insert url>.
class ConfigEntryAuth(my_pypi_package.AbstractAuth):
"""Provide NEW_NAME authentication tied to an OAuth2 based config entry."""
def __init__(
self,
hass: core.HomeAssistant,
config_entry: config_entries.ConfigEntry,
implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
):
"""Initialize NEW_NAME Auth."""
self.hass = hass
self.config_entry = config_entry
self.session = config_entry_oauth2_flow.OAuth2Session(
hass, config_entry, implementation
)
super().__init__(self.session.token)
def refresh_tokens(self) -> str:
"""Refresh and return new NEW_NAME tokens using Home Assistant OAuth2 session."""
run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self.hass.loop
).result()
return self.session.token["access_token"]
class AsyncConfigEntryAuth(my_pypi_package.AbstractAuth):
"""Provide NEW_NAME authentication tied to an OAuth2 based config entry."""
def __init__(
self,
websession: ClientSession,
oauth_session: config_entry_oauth2_flow.OAuth2Session,
):
"""Initialize NEW_NAME auth."""
super().__init__(websession)
self._oauth_session = oauth_session
async def async_get_access_token(self) -> str:
"""Return a valid access token."""
if not self._oauth_session.valid_token:
await self._oauth_session.async_ensure_token_valid()
return self._oauth_session.token["access_token"]
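# A minimal usage sketch (an assumption, not part of this template): inside the
# integration's async_setup_entry one would typically build the async auth
# object from the stored config entry roughly like this (aiohttp_client is
# homeassistant.helpers.aiohttp_client):
#
#     implementation = (
#         await config_entry_oauth2_flow.async_get_config_entry_implementation(
#             hass, entry
#         )
#     )
#     session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
#     auth = AsyncConfigEntryAuth(
#         aiohttp_client.async_get_clientsession(hass), session
#     )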
|
import copy
import xml.etree.ElementTree as ET
from collections import OrderedDict
from http import client
from urllib.parse import quote
from radicale import pathutils
MIMETYPES = {
"VADDRESSBOOK": "text/vcard",
"VCALENDAR": "text/calendar"}
OBJECT_MIMETYPES = {
"VCARD": "text/vcard",
"VLIST": "text/x-vlist",
"VCALENDAR": "text/calendar"}
NAMESPACES = {
"C": "urn:ietf:params:xml:ns:caldav",
"CR": "urn:ietf:params:xml:ns:carddav",
"D": "DAV:",
"CS": "http://calendarserver.org/ns/",
"ICAL": "http://apple.com/ns/ical/",
"ME": "http://me.com/_namespace/",
"RADICALE": "http://radicale.org/ns/"}
NAMESPACES_REV = {}
for short, url in NAMESPACES.items():
NAMESPACES_REV[url] = short
ET.register_namespace("" if short == "D" else short, url)
def pretty_xml(element):
"""Indent an ElementTree ``element`` and its children."""
def pretty_xml_recursive(element, level):
indent = "\n" + level * " "
if len(element) > 0:
if not (element.text or "").strip():
element.text = indent + " "
if not (element.tail or "").strip():
element.tail = indent
for sub_element in element:
pretty_xml_recursive(sub_element, level + 1)
if not (sub_element.tail or "").strip():
sub_element.tail = indent
elif level > 0 and not (element.tail or "").strip():
element.tail = indent
element = copy.deepcopy(element)
pretty_xml_recursive(element, 0)
return '<?xml version="1.0"?>\n%s' % ET.tostring(element, "unicode")
def make_clark(human_tag):
"""Get XML Clark notation from human tag ``human_tag``.
If ``human_tag`` is already in XML Clark notation it is returned as-is.
"""
if human_tag.startswith("{"):
ns, tag = human_tag[len("{"):].split("}", maxsplit=1)
if not ns or not tag:
raise ValueError("Invalid XML tag: %r" % human_tag)
return human_tag
ns_prefix, tag = human_tag.split(":", maxsplit=1)
if not ns_prefix or not tag:
raise ValueError("Invalid XML tag: %r" % human_tag)
ns = NAMESPACES.get(ns_prefix)
if not ns:
raise ValueError("Unknown XML namespace prefix: %r" % human_tag)
return "{%s}%s" % (ns, tag)
def make_human_tag(clark_tag):
"""Replace known namespaces in XML Clark notation ``clark_tag`` with
prefix.
If the namespace is not in ``NAMESPACES`` the tag is returned as-is.
"""
if not clark_tag.startswith("{"):
ns_prefix, tag = clark_tag.split(":", maxsplit=1)
if not ns_prefix or not tag:
raise ValueError("Invalid XML tag: %r" % clark_tag)
if ns_prefix not in NAMESPACES:
raise ValueError("Unknown XML namespace prefix: %r" % clark_tag)
return clark_tag
ns, tag = clark_tag[len("{"):].split("}", maxsplit=1)
if not ns or not tag:
raise ValueError("Invalid XML tag: %r" % clark_tag)
ns_prefix = NAMESPACES_REV.get(ns)
if ns_prefix:
return "%s:%s" % (ns_prefix, tag)
return clark_tag
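# Worked examples for the two converters above, derived from NAMESPACES:
#   make_clark("D:prop")          -> "{DAV:}prop"
#   make_clark("C:calendar")      -> "{urn:ietf:params:xml:ns:caldav}calendar"
#   make_human_tag("{DAV:}prop")  -> "D:prop"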
def make_response(code):
"""Return full W3C names from HTTP status codes."""
return "HTTP/1.1 %i %s" % (code, client.responses[code])
def make_href(base_prefix, href):
"""Return prefixed href."""
assert href == pathutils.sanitize_path(href)
return quote("%s%s" % (base_prefix, href))
def webdav_error(human_tag):
"""Generate XML error message."""
root = ET.Element(make_clark("D:error"))
root.append(ET.Element(make_clark(human_tag)))
return root
def get_content_type(item, encoding):
"""Get the content-type of an item with charset and component parameters.
"""
mimetype = OBJECT_MIMETYPES[item.name]
tag = item.component_name
content_type = "%s;charset=%s" % (mimetype, encoding)
if tag:
content_type += ";component=%s" % tag
return content_type
def props_from_request(xml_request):
"""Return a list of properties as a dictionary.
Properties that should be removed are set to `None`.
"""
result = OrderedDict()
if xml_request is None:
return result
    # Requests can contain multiple <D:set> and <D:remove> elements.
    # Each of these elements must contain exactly one <D:prop> element which
    # can contain multiple properties.
# The order of the elements in the document must be respected.
props = []
for element in xml_request:
if element.tag in (make_clark("D:set"), make_clark("D:remove")):
for prop in element.findall("./%s/*" % make_clark("D:prop")):
props.append((element.tag == make_clark("D:set"), prop))
for is_set, prop in props:
key = make_human_tag(prop.tag)
value = None
if prop.tag == make_clark("D:resourcetype"):
key = "tag"
if is_set:
for resource_type in prop:
if resource_type.tag == make_clark("C:calendar"):
value = "VCALENDAR"
break
if resource_type.tag == make_clark("CR:addressbook"):
value = "VADDRESSBOOK"
break
elif prop.tag == make_clark("C:supported-calendar-component-set"):
if is_set:
value = ",".join(
supported_comp.attrib["name"] for supported_comp in prop
if supported_comp.tag == make_clark("C:comp"))
elif is_set:
value = prop.text or ""
result[key] = value
result.move_to_end(key)
return result
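# Sketch of the resulting mapping for a PROPPATCH body that sets D:displayname
# and removes ICAL:calendar-color (the values are illustrative assumptions):
#   OrderedDict([("D:displayname", "My calendar"), ("ICAL:calendar-color", None)])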
|
import pytest
import voluptuous as vol
from homeassistant.components.homekit.const import (
CONF_FEATURE,
CONF_FEATURE_LIST,
CONF_LINKED_BATTERY_SENSOR,
CONF_LOW_BATTERY_THRESHOLD,
DEFAULT_CONFIG_FLOW_PORT,
DOMAIN,
FEATURE_ON_OFF,
FEATURE_PLAY_PAUSE,
HOMEKIT_PAIRING_QR,
HOMEKIT_PAIRING_QR_SECRET,
TYPE_FAUCET,
TYPE_OUTLET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_SWITCH,
TYPE_VALVE,
)
from homeassistant.components.homekit.util import (
HomeKitSpeedMapping,
SpeedRange,
cleanup_name_for_homekit,
convert_to_float,
density_to_air_quality,
dismiss_setup_message,
find_next_available_port,
format_sw_version,
port_is_available,
show_setup_message,
temperature_to_homekit,
temperature_to_states,
validate_entity_config as vec,
validate_media_player_features,
)
from homeassistant.components.persistent_notification import (
ATTR_MESSAGE,
ATTR_NOTIFICATION_ID,
DOMAIN as PERSISTENT_NOTIFICATION_DOMAIN,
)
from homeassistant.const import (
ATTR_CODE,
ATTR_SUPPORTED_FEATURES,
CONF_NAME,
CONF_TYPE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import State
from .util import async_init_integration
from tests.common import async_mock_service
def test_validate_entity_config():
"""Test validate entities."""
configs = [
None,
[],
"string",
12345,
{"invalid_entity_id": {}},
{"demo.test": 1},
{"binary_sensor.demo": {CONF_LINKED_BATTERY_SENSOR: None}},
{"binary_sensor.demo": {CONF_LINKED_BATTERY_SENSOR: "switch.demo"}},
{"binary_sensor.demo": {CONF_LOW_BATTERY_THRESHOLD: "switch.demo"}},
{"binary_sensor.demo": {CONF_LOW_BATTERY_THRESHOLD: -10}},
{"demo.test": "test"},
{"demo.test": [1, 2]},
{"demo.test": None},
{"demo.test": {CONF_NAME: None}},
{"media_player.test": {CONF_FEATURE_LIST: [{CONF_FEATURE: "invalid_feature"}]}},
{
"media_player.test": {
CONF_FEATURE_LIST: [
{CONF_FEATURE: FEATURE_ON_OFF},
{CONF_FEATURE: FEATURE_ON_OFF},
]
}
},
{"switch.test": {CONF_TYPE: "invalid_type"}},
]
for conf in configs:
with pytest.raises(vol.Invalid):
vec(conf)
assert vec({}) == {}
assert vec({"demo.test": {CONF_NAME: "Name"}}) == {
"demo.test": {CONF_NAME: "Name", CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec(
{"binary_sensor.demo": {CONF_LINKED_BATTERY_SENSOR: "sensor.demo_battery"}}
) == {
"binary_sensor.demo": {
CONF_LINKED_BATTERY_SENSOR: "sensor.demo_battery",
CONF_LOW_BATTERY_THRESHOLD: 20,
}
}
assert vec({"binary_sensor.demo": {CONF_LOW_BATTERY_THRESHOLD: 50}}) == {
"binary_sensor.demo": {CONF_LOW_BATTERY_THRESHOLD: 50}
}
assert vec({"alarm_control_panel.demo": {}}) == {
"alarm_control_panel.demo": {ATTR_CODE: None, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"alarm_control_panel.demo": {ATTR_CODE: "1234"}}) == {
"alarm_control_panel.demo": {ATTR_CODE: "1234", CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"lock.demo": {}}) == {
"lock.demo": {ATTR_CODE: None, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"lock.demo": {ATTR_CODE: "1234"}}) == {
"lock.demo": {ATTR_CODE: "1234", CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"media_player.demo": {}}) == {
"media_player.demo": {CONF_FEATURE_LIST: {}, CONF_LOW_BATTERY_THRESHOLD: 20}
}
config = {
CONF_FEATURE_LIST: [
{CONF_FEATURE: FEATURE_ON_OFF},
{CONF_FEATURE: FEATURE_PLAY_PAUSE},
]
}
assert vec({"media_player.demo": config}) == {
"media_player.demo": {
CONF_FEATURE_LIST: {FEATURE_ON_OFF: {}, FEATURE_PLAY_PAUSE: {}},
CONF_LOW_BATTERY_THRESHOLD: 20,
}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_FAUCET}}) == {
"switch.demo": {CONF_TYPE: TYPE_FAUCET, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_OUTLET}}) == {
"switch.demo": {CONF_TYPE: TYPE_OUTLET, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_SHOWER}}) == {
"switch.demo": {CONF_TYPE: TYPE_SHOWER, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_SPRINKLER}}) == {
"switch.demo": {CONF_TYPE: TYPE_SPRINKLER, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_SWITCH}}) == {
"switch.demo": {CONF_TYPE: TYPE_SWITCH, CONF_LOW_BATTERY_THRESHOLD: 20}
}
assert vec({"switch.demo": {CONF_TYPE: TYPE_VALVE}}) == {
"switch.demo": {CONF_TYPE: TYPE_VALVE, CONF_LOW_BATTERY_THRESHOLD: 20}
}
def test_validate_media_player_features():
"""Test validate modes for media players."""
config = {}
attrs = {ATTR_SUPPORTED_FEATURES: 20873}
entity_state = State("media_player.demo", "on", attrs)
assert validate_media_player_features(entity_state, config) is True
config = {FEATURE_ON_OFF: None}
assert validate_media_player_features(entity_state, config) is True
entity_state = State("media_player.demo", "on")
assert validate_media_player_features(entity_state, config) is False
def test_convert_to_float():
"""Test convert_to_float method."""
assert convert_to_float(12) == 12
assert convert_to_float(12.4) == 12.4
assert convert_to_float(STATE_UNKNOWN) is None
assert convert_to_float(None) is None
def test_cleanup_name_for_homekit():
"""Ensure name sanitize works as expected."""
assert cleanup_name_for_homekit("abc") == "abc"
assert cleanup_name_for_homekit("a b c") == "a b c"
assert cleanup_name_for_homekit("ab_c") == "ab c"
assert (
cleanup_name_for_homekit('ab!@#$%^&*()-=":.,><?//\\ frog')
== "ab--#---&----- -.,------ frog"
)
assert cleanup_name_for_homekit("の日本_語文字セット") == "の日本 語文字セット"
def test_temperature_to_homekit():
"""Test temperature conversion from HA to HomeKit."""
assert temperature_to_homekit(20.46, TEMP_CELSIUS) == 20.5
assert temperature_to_homekit(92.1, TEMP_FAHRENHEIT) == 33.4
def test_temperature_to_states():
"""Test temperature conversion from HomeKit to HA."""
assert temperature_to_states(20, TEMP_CELSIUS) == 20.0
assert temperature_to_states(20.2, TEMP_FAHRENHEIT) == 68.5
def test_density_to_air_quality():
"""Test map PM2.5 density to HomeKit AirQuality level."""
assert density_to_air_quality(0) == 1
assert density_to_air_quality(35) == 1
assert density_to_air_quality(35.1) == 2
assert density_to_air_quality(75) == 2
assert density_to_air_quality(115) == 3
assert density_to_air_quality(150) == 4
assert density_to_air_quality(300) == 5
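    # Taken together, these assertions imply breakpoints of roughly
    # 35 / 75 / 115 / 150 µg/m³ between the five HomeKit air quality levels.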
async def test_show_setup_msg(hass, hk_driver):
"""Test show setup message as persistence notification."""
pincode = b"123-45-678"
entry = await async_init_integration(hass)
assert entry
call_create_notification = async_mock_service(
hass, PERSISTENT_NOTIFICATION_DOMAIN, "create"
)
await hass.async_add_executor_job(
show_setup_message, hass, entry.entry_id, "bridge_name", pincode, "X-HM://0"
)
await hass.async_block_till_done()
assert hass.data[DOMAIN][entry.entry_id][HOMEKIT_PAIRING_QR_SECRET]
assert hass.data[DOMAIN][entry.entry_id][HOMEKIT_PAIRING_QR]
assert call_create_notification
assert call_create_notification[0].data[ATTR_NOTIFICATION_ID] == entry.entry_id
assert pincode.decode() in call_create_notification[0].data[ATTR_MESSAGE]
async def test_dismiss_setup_msg(hass):
"""Test dismiss setup message."""
call_dismiss_notification = async_mock_service(
hass, PERSISTENT_NOTIFICATION_DOMAIN, "dismiss"
)
await hass.async_add_executor_job(dismiss_setup_message, hass, "entry_id")
await hass.async_block_till_done()
assert call_dismiss_notification
assert call_dismiss_notification[0].data[ATTR_NOTIFICATION_ID] == "entry_id"
def test_homekit_speed_mapping():
"""Test if the SpeedRanges from a speed_list are as expected."""
# A standard 2-speed fan
speed_mapping = HomeKitSpeedMapping(["off", "low", "high"])
assert speed_mapping.speed_ranges == {
"off": SpeedRange(0, 0),
"low": SpeedRange(100 / 3, 50),
"high": SpeedRange(200 / 3, 100),
}
# A standard 3-speed fan
speed_mapping = HomeKitSpeedMapping(["off", "low", "medium", "high"])
assert speed_mapping.speed_ranges == {
"off": SpeedRange(0, 0),
"low": SpeedRange(100 / 4, 100 / 3),
"medium": SpeedRange(200 / 4, 200 / 3),
"high": SpeedRange(300 / 4, 100),
}
# a Dyson-like fan with 10 speeds
speed_mapping = HomeKitSpeedMapping([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
assert speed_mapping.speed_ranges == {
0: SpeedRange(0, 0),
1: SpeedRange(10, 100 / 9),
2: SpeedRange(20, 200 / 9),
3: SpeedRange(30, 300 / 9),
4: SpeedRange(40, 400 / 9),
5: SpeedRange(50, 500 / 9),
6: SpeedRange(60, 600 / 9),
7: SpeedRange(70, 700 / 9),
8: SpeedRange(80, 800 / 9),
9: SpeedRange(90, 100),
}
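    # Pattern implied by the three cases above: with n speeds in the list
    # (including "off"), the i-th speed maps to
    #   SpeedRange(start=i * 100 / n, target=i * 100 / (n - 1)).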
def test_speed_to_homekit():
"""Test speed conversion from HA to Homekit."""
speed_mapping = HomeKitSpeedMapping(["off", "low", "high"])
assert speed_mapping.speed_to_homekit(None) is None
assert speed_mapping.speed_to_homekit("off") == 0
assert speed_mapping.speed_to_homekit("low") == 50
assert speed_mapping.speed_to_homekit("high") == 100
def test_speed_to_states():
"""Test speed conversion from Homekit to HA."""
speed_mapping = HomeKitSpeedMapping(["off", "low", "high"])
assert speed_mapping.speed_to_states(-1) == "off"
assert speed_mapping.speed_to_states(0) == "off"
assert speed_mapping.speed_to_states(33) == "off"
assert speed_mapping.speed_to_states(34) == "low"
assert speed_mapping.speed_to_states(50) == "low"
assert speed_mapping.speed_to_states(66) == "low"
assert speed_mapping.speed_to_states(67) == "high"
assert speed_mapping.speed_to_states(100) == "high"
async def test_port_is_available(hass):
"""Test we can get an available port and it is actually available."""
next_port = await hass.async_add_executor_job(
find_next_available_port, DEFAULT_CONFIG_FLOW_PORT
)
assert next_port
assert await hass.async_add_executor_job(port_is_available, next_port)
async def test_format_sw_version():
"""Test format_sw_version method."""
assert format_sw_version("soho+3.6.8+soho-release-rt120+10") == "3.6.8"
assert format_sw_version("undefined-undefined-1.6.8") == "1.6.8"
assert format_sw_version("56.0-76060") == "56.0.76060"
assert format_sw_version(3.6) == "3.6"
assert format_sw_version("unknown") is None
|
import os
import sys
from datetime import datetime
from coverage import env
# Pythons 2 and 3 differ on where to get StringIO.
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
# In py3, ConfigParser was renamed to the more-standard configparser.
# But there's a py3 backport that installs "configparser" in py2, and I don't
# want it because it has annoying deprecation warnings. So try the real py2
# import first.
try:
import ConfigParser as configparser
except ImportError:
import configparser
# What's a string called?
try:
string_class = basestring
except NameError:
string_class = str
# What's a Unicode string called?
try:
unicode_class = unicode
except NameError:
unicode_class = str
# range or xrange?
try:
range = xrange # pylint: disable=redefined-builtin
except NameError:
range = range
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
# Where do we get the thread id from?
try:
from thread import get_ident as get_thread_id
except ImportError:
from threading import get_ident as get_thread_id
try:
os.PathLike
except AttributeError:
    # This is Python 2 and Python 3 <= 3.5
path_types = (bytes, string_class, unicode_class)
else:
# 3.6+
path_types = (bytes, str, os.PathLike)
# shlex.quote is new, but there's an undocumented implementation in "pipes",
# who knew!?
try:
from shlex import quote as shlex_quote
except ImportError:
# Useful function, available under a different (undocumented) name
# in Python versions earlier than 3.3.
from pipes import quote as shlex_quote
try:
import reprlib
except ImportError:
import repr as reprlib
# A function to iterate listlessly over a dict's items, and one to get the
# items as a list.
try:
{}.iteritems
except AttributeError:
# Python 3
def iitems(d):
"""Produce the items from dict `d`."""
return d.items()
def litems(d):
"""Return a list of items from dict `d`."""
return list(d.items())
else:
# Python 2
def iitems(d):
"""Produce the items from dict `d`."""
return d.iteritems()
def litems(d):
"""Return a list of items from dict `d`."""
return d.items()
# Getting the `next` function from an iterator is different in 2 and 3.
try:
iter([]).next
except AttributeError:
def iternext(seq):
"""Get the `next` function for iterating over `seq`."""
return iter(seq).__next__
else:
def iternext(seq):
"""Get the `next` function for iterating over `seq`."""
return iter(seq).next
# Python 3.x is picky about bytes and strings, so provide methods to
# get them right, and make them no-ops in 2.x
if env.PY3:
def to_bytes(s):
"""Convert string `s` to bytes."""
return s.encode('utf8')
def to_string(b):
"""Convert bytes `b` to string."""
return b.decode('utf8')
def binary_bytes(byte_values):
"""Produce a byte string with the ints from `byte_values`."""
return bytes(byte_values)
def byte_to_int(byte):
"""Turn a byte indexed from a bytes object into an int."""
return byte
def bytes_to_ints(bytes_value):
"""Turn a bytes object into a sequence of ints."""
# In Python 3, iterating bytes gives ints.
return bytes_value
else:
def to_bytes(s):
"""Convert string `s` to bytes (no-op in 2.x)."""
return s
def to_string(b):
"""Convert bytes `b` to string."""
return b
def binary_bytes(byte_values):
"""Produce a byte string with the ints from `byte_values`."""
return "".join(chr(b) for b in byte_values)
def byte_to_int(byte):
"""Turn a byte indexed from a bytes object into an int."""
return ord(byte)
def bytes_to_ints(bytes_value):
"""Turn a bytes object into a sequence of ints."""
for byte in bytes_value:
yield ord(byte)
try:
# In Python 2.x, the builtins were in __builtin__
BUILTINS = sys.modules['__builtin__']
except KeyError:
# In Python 3.x, they're in builtins
BUILTINS = sys.modules['builtins']
# imp was deprecated in Python 3.3
try:
import importlib
import importlib.util
imp = None
except ImportError:
importlib = None
# We only want to use importlib if it has everything we need.
try:
importlib_util_find_spec = importlib.util.find_spec
except Exception:
import imp
importlib_util_find_spec = None
# What is the .pyc magic number for this version of Python?
try:
PYC_MAGIC_NUMBER = importlib.util.MAGIC_NUMBER
except AttributeError:
PYC_MAGIC_NUMBER = imp.get_magic()
def code_object(fn):
"""Get the code object from a function."""
try:
return fn.func_code
except AttributeError:
return fn.__code__
try:
from types import SimpleNamespace
except ImportError:
# The code from https://docs.python.org/3/library/types.html#types.SimpleNamespace
class SimpleNamespace:
"""Python implementation of SimpleNamespace, for Python 2."""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
def __eq__(self, other):
return self.__dict__ == other.__dict__
def format_local_datetime(dt):
"""Return a string with local timezone representing the date.
    If the Python version is lower than 3.6, the time zone is not included.
"""
try:
return dt.astimezone().strftime('%Y-%m-%d %H:%M %z')
except (TypeError, ValueError):
# Datetime.astimezone in Python 3.5 can not handle naive datetime
return dt.strftime('%Y-%m-%d %H:%M')
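# For example (an illustration, not from the source): on Python 3.6+ this
# returns something like '2020-01-01 12:30 +0000'; with a naive datetime on
# Python 3.5 the offset is simply omitted.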
def invalidate_import_caches():
"""Invalidate any import caches that may or may not exist."""
if importlib and hasattr(importlib, "invalidate_caches"):
importlib.invalidate_caches()
def import_local_file(modname, modfile=None):
"""Import a local file as a module.
Opens a file in the current directory named `modname`.py, imports it
as `modname`, and returns the module object. `modfile` is the file to
import if it isn't in the current directory.
"""
try:
from importlib.machinery import SourceFileLoader
except ImportError:
SourceFileLoader = None
if modfile is None:
modfile = modname + '.py'
if SourceFileLoader:
# pylint: disable=no-value-for-parameter, deprecated-method
mod = SourceFileLoader(modname, modfile).load_module()
else:
for suff in imp.get_suffixes(): # pragma: part covered
if suff[0] == '.py':
break
with open(modfile, 'r') as f:
# pylint: disable=undefined-loop-variable
mod = imp.load_module(modname, f, modfile, suff)
return mod
|
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, TEMP_CELSIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import (
DATA,
DEFAULT_NAME,
DOMAIN,
SIGNAL_TADO_UPDATE_RECEIVED,
TADO_BRIDGE,
TYPE_AIR_CONDITIONING,
TYPE_HEATING,
TYPE_HOT_WATER,
)
from .entity import TadoZoneEntity
_LOGGER = logging.getLogger(__name__)
ZONE_SENSORS = {
TYPE_HEATING: [
"temperature",
"humidity",
"power",
"link",
"heating",
"tado mode",
"overlay",
"early start",
"open window",
],
TYPE_AIR_CONDITIONING: [
"temperature",
"humidity",
"power",
"link",
"ac",
"tado mode",
"overlay",
"open window",
],
TYPE_HOT_WATER: ["power", "link", "tado mode", "overlay"],
}
DEVICE_SENSORS = ["tado bridge status"]
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
"""Set up the Tado sensor platform."""
tado = hass.data[DOMAIN][entry.entry_id][DATA]
# Create zone sensors
zones = tado.zones
devices = tado.devices
entities = []
for zone in zones:
zone_type = zone["type"]
if zone_type not in ZONE_SENSORS:
_LOGGER.warning("Unknown zone type skipped: %s", zone_type)
continue
entities.extend(
[
TadoZoneSensor(
tado, zone["name"], zone["id"], variable, zone["devices"][0]
)
for variable in ZONE_SENSORS[zone_type]
]
)
# Create device sensors
for device in devices:
entities.extend(
[
TadoDeviceSensor(tado, device["name"], device["id"], variable, device)
for variable in DEVICE_SENSORS
]
)
if entities:
async_add_entities(entities, True)
class TadoZoneSensor(TadoZoneEntity, Entity):
"""Representation of a tado Sensor."""
def __init__(self, tado, zone_name, zone_id, zone_variable, device_info):
"""Initialize of the Tado Sensor."""
self._tado = tado
super().__init__(zone_name, device_info, tado.device_id, zone_id)
self.zone_id = zone_id
self.zone_variable = zone_variable
self._unique_id = f"{zone_variable} {zone_id} {tado.device_id}"
self._state = None
self._state_attributes = None
self._tado_zone_data = None
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "zone", self.zone_id
),
self._async_update_callback,
)
)
self._async_update_zone_data()
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.zone_name} {self.zone_variable}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._state_attributes
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
if self.zone_variable == "temperature":
return self.hass.config.units.temperature_unit
if self.zone_variable == "humidity":
return PERCENTAGE
if self.zone_variable == "heating":
return PERCENTAGE
if self.zone_variable == "ac":
return None
@property
def icon(self):
"""Icon for the sensor."""
if self.zone_variable == "temperature":
return "mdi:thermometer"
if self.zone_variable == "humidity":
return "mdi:water-percent"
@callback
def _async_update_callback(self):
"""Update and write state."""
self._async_update_zone_data()
self.async_write_ha_state()
@callback
def _async_update_zone_data(self):
"""Handle update callbacks."""
try:
self._tado_zone_data = self._tado.data["zone"][self.zone_id]
except KeyError:
return
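        # Map the configured zone variable to the matching attribute of the
        # cached zone data; state attributes carry related timestamps where
        # available.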
if self.zone_variable == "temperature":
self._state = self.hass.config.units.temperature(
self._tado_zone_data.current_temp, TEMP_CELSIUS
)
self._state_attributes = {
"time": self._tado_zone_data.current_temp_timestamp,
"setting": 0, # setting is used in climate device
}
elif self.zone_variable == "humidity":
self._state = self._tado_zone_data.current_humidity
self._state_attributes = {
"time": self._tado_zone_data.current_humidity_timestamp
}
elif self.zone_variable == "power":
self._state = self._tado_zone_data.power
elif self.zone_variable == "link":
self._state = self._tado_zone_data.link
elif self.zone_variable == "heating":
self._state = self._tado_zone_data.heating_power_percentage
self._state_attributes = {
"time": self._tado_zone_data.heating_power_timestamp
}
elif self.zone_variable == "ac":
self._state = self._tado_zone_data.ac_power
self._state_attributes = {"time": self._tado_zone_data.ac_power_timestamp}
elif self.zone_variable == "tado bridge status":
self._state = self._tado_zone_data.connection
elif self.zone_variable == "tado mode":
self._state = self._tado_zone_data.tado_mode
elif self.zone_variable == "overlay":
self._state = self._tado_zone_data.overlay_active
self._state_attributes = (
{"termination": self._tado_zone_data.overlay_termination_type}
if self._tado_zone_data.overlay_active
else {}
)
elif self.zone_variable == "early start":
self._state = self._tado_zone_data.preparation
elif self.zone_variable == "open window":
self._state = bool(
self._tado_zone_data.open_window
or self._tado_zone_data.open_window_detected
)
self._state_attributes = self._tado_zone_data.open_window_attr
class TadoDeviceSensor(Entity):
"""Representation of a tado Sensor."""
def __init__(self, tado, device_name, device_id, device_variable, device_info):
"""Initialize of the Tado Sensor."""
self._tado = tado
self._device_info = device_info
self.device_name = device_name
self.device_id = device_id
self.device_variable = device_variable
self._unique_id = f"{device_variable} {device_id} {tado.device_id}"
self._state = None
self._state_attributes = None
self._tado_device_data = None
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "device", self.device_id
),
self._async_update_callback,
)
)
self._async_update_device_data()
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.device_name} {self.device_variable}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def should_poll(self):
"""Do not poll."""
return False
@callback
def _async_update_callback(self):
"""Update and write state."""
self._async_update_device_data()
self.async_write_ha_state()
@callback
def _async_update_device_data(self):
"""Handle update callbacks."""
try:
data = self._tado.data["device"][self.device_id]
except KeyError:
return
if self.device_variable == "tado bridge status":
self._state = data.get("connectionState", {}).get("value", False)
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self.device_id)},
"name": self.device_name,
"manufacturer": DEFAULT_NAME,
"model": TADO_BRIDGE,
}
|
import asyncio
from datetime import timedelta
from solax import real_time_api
from solax.inverter import InverterError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT, TEMP_CELSIUS
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
DEFAULT_PORT = 80
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
SCAN_INTERVAL = timedelta(seconds=30)
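# A minimal configuration.yaml sketch for this platform (the address is an
# illustrative assumption):
#   sensor:
#     - platform: solax
#       ip_address: 192.168.1.87
#       port: 80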
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Platform setup."""
api = await real_time_api(config[CONF_IP_ADDRESS], config[CONF_PORT])
endpoint = RealTimeDataEndpoint(hass, api)
resp = await api.get_data()
serial = resp.serial_number
hass.async_add_job(endpoint.async_refresh)
async_track_time_interval(hass, endpoint.async_refresh, SCAN_INTERVAL)
devices = []
for sensor, (idx, unit) in api.inverter.sensor_map().items():
if unit == "C":
unit = TEMP_CELSIUS
uid = f"{serial}-{idx}"
devices.append(Inverter(uid, serial, sensor, unit))
endpoint.sensors = devices
async_add_entities(devices)
class RealTimeDataEndpoint:
"""Representation of a Sensor."""
def __init__(self, hass, api):
"""Initialize the sensor."""
self.hass = hass
self.api = api
self.ready = asyncio.Event()
self.sensors = []
async def async_refresh(self, now=None):
"""Fetch new state data for the sensor.
This is the only method that should fetch new data for Home Assistant.
"""
try:
api_response = await self.api.get_data()
self.ready.set()
except InverterError as err:
if now is not None:
self.ready.clear()
return
raise PlatformNotReady from err
data = api_response.data
for sensor in self.sensors:
if sensor.key in data:
sensor.value = data[sensor.key]
sensor.async_schedule_update_ha_state()
class Inverter(Entity):
"""Class for a sensor."""
def __init__(self, uid, serial, key, unit):
"""Initialize an inverter sensor."""
self.uid = uid
self.serial = serial
self.key = key
self.value = None
self.unit = unit
@property
def state(self):
"""State of this inverter attribute."""
return self.value
@property
def unique_id(self):
"""Return unique id."""
return self.uid
@property
def name(self):
"""Name of this inverter attribute."""
return f"Solax {self.serial} {self.key}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self.unit
@property
def should_poll(self):
"""No polling needed."""
return False
|
import numpy as np
from ...utils import logger
from .res4 import _make_ctf_name
from .constants import CTF
from ..constants import FIFF
_kind_dict = {'nasion': CTF.CTFV_COIL_NAS, 'left ear': CTF.CTFV_COIL_LPA,
'right ear': CTF.CTFV_COIL_RPA, 'spare': CTF.CTFV_COIL_SPARE}
_coord_dict = {'relative to dewar': FIFF.FIFFV_MNE_COORD_CTF_DEVICE,
'relative to head': FIFF.FIFFV_MNE_COORD_CTF_HEAD}
def _read_one_coil_point(fid):
"""Read coil coordinate information from the hc file."""
# Descriptor
    # The file is opened in binary mode, so compare with bytes here: matching
    # one[0] against the str '#' would never succeed on Python 3.
    one = b'#'
    while len(one) > 0 and one.startswith(b'#'):
        one = fid.readline()
if len(one) == 0:
return None
one = one.strip().decode('utf-8')
if 'Unable' in one:
raise RuntimeError("HPI information not available")
# Hopefully this is an unambiguous interpretation
p = dict()
p['valid'] = ('measured' in one)
for key, val in _coord_dict.items():
if key in one:
p['coord_frame'] = val
break
else:
p['coord_frame'] = -1
for key, val in _kind_dict.items():
if key in one:
p['kind'] = val
break
else:
p['kind'] = -1
# Three coordinates
p['r'] = np.empty(3)
for ii, coord in enumerate('xyz'):
sp = fid.readline().decode('utf-8').strip()
if len(sp) == 0: # blank line
continue
sp = sp.split(' ')
if len(sp) != 3 or sp[0] != coord or sp[1] != '=':
raise RuntimeError('Bad line: %s' % one)
# We do not deal with centimeters
p['r'][ii] = float(sp[2]) / 100.0
return p
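# Illustrative shape of one coil block in an hc file, inferred from the parsing
# above (the values are assumptions; coordinates are stored in centimeters):
#   measured nasion coil position relative to head (cm):
#       x = 5.0
#       y = 0.0
#       z = 10.0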
def _read_hc(directory):
"""Read the hc file to get the HPI info and to prepare for coord trans."""
fname = _make_ctf_name(directory, 'hc', raise_error=False)
if fname is None:
logger.info(' hc data not present')
return None
s = list()
with open(fname, 'rb') as fid:
        while True:
p = _read_one_coil_point(fid)
if p is None:
# First point bad indicates that the file is empty
if len(s) == 0:
logger.info('hc file empty, no data present')
return None
# Returns None if at EOF
logger.info(' hc data read.')
return s
if p['valid']:
s.append(p)
|
from asynctest import CoroutineMock
from asynctest import patch
from mock import call
from mock import Mock
from pytest import mark
from paasta_tools.mesos import framework
from paasta_tools.mesos import master
from paasta_tools.mesos import task
@mark.asyncio
async def test_frameworks():
with patch.object(
master.MesosMaster, "_framework_list", autospec=True
) as mock_framework_list:
fake_frameworks = [{"name": "test_framework1"}, {"name": "test_framework2"}]
mock_framework_list.return_value = fake_frameworks
expected_frameworks = [
framework.Framework(config) for config in fake_frameworks
]
mesos_master = master.MesosMaster({})
assert expected_frameworks == await mesos_master.frameworks()
@mark.asyncio
async def test_framework_list_includes_completed_frameworks():
with patch.object(
master.MesosMaster, "_framework_list", autospec=True
) as mock_framework_list:
fake_frameworks = [{"name": "test_framework1"}, {"name": "test_framework2"}]
mock_framework_list.return_value = fake_frameworks
expected_frameworks = [
framework.Framework(config) for config in fake_frameworks
]
mesos_master = master.MesosMaster({})
assert expected_frameworks == await mesos_master.frameworks()
@mark.asyncio
async def test__frameworks():
with patch.object(master.MesosMaster, "fetch", autospec=True) as mock_fetch:
mesos_master = master.MesosMaster({})
mock_frameworks = Mock()
mock_fetch.return_value = CoroutineMock(
json=CoroutineMock(return_value=mock_frameworks)
)
ret = await mesos_master._frameworks()
mock_fetch.assert_called_with(mesos_master, "/master/frameworks", cached=True)
assert ret == mock_frameworks
@mark.asyncio
async def test__framework_list():
mock_frameworks = Mock()
mock_completed = Mock()
with patch.object(
master.MesosMaster,
"_frameworks",
autospec=True,
return_value={
"frameworks": [mock_frameworks],
"completed_frameworks": [mock_completed],
},
):
mesos_master = master.MesosMaster({})
ret = await mesos_master._framework_list()
expected = [mock_frameworks, mock_completed]
assert list(ret) == expected
ret = await mesos_master._framework_list(active_only=True)
expected = [mock_frameworks]
assert list(ret) == expected
@mark.asyncio
async def test__task_list():
mock_task_1 = Mock()
mock_task_2 = Mock()
mock_framework = {"tasks": [mock_task_1], "completed_tasks": [mock_task_2]}
with patch.object(
master.MesosMaster,
"_framework_list",
autospec=True,
return_value=[mock_framework],
) as mock__frameworks_list:
mesos_master = master.MesosMaster({})
ret = await mesos_master._task_list()
mock__frameworks_list.assert_called_with(mesos_master, False)
expected = [mock_task_1, mock_task_2]
assert list(ret) == expected
ret = await mesos_master._task_list(active_only=True)
expected = [mock_task_1]
assert list(ret) == expected
ret = await mesos_master._task_list(active_only=False)
expected = [mock_task_1, mock_task_2]
assert list(ret) == expected
@mark.asyncio
async def test_tasks():
with patch.object(
master.MesosMaster, "_task_list", autospec=True
) as mock__task_list, patch.object(task, "Task", autospec=True) as mock_task:
mock_task_1 = {"id": "aaa"}
mock_task_2 = {"id": "bbb"}
mock__task_list.return_value = [mock_task_1, mock_task_2]
mock_task.return_value = Mock()
mesos_master = master.MesosMaster({})
ret = await mesos_master.tasks()
mock_task.assert_has_calls(
[call(mesos_master, mock_task_1), call(mesos_master, mock_task_2)]
)
mock__task_list.assert_called_with(mesos_master, False)
expected = [mock_task.return_value, mock_task.return_value]
assert list(ret) == expected
@mark.asyncio
async def test_orphan_tasks():
mesos_master = master.MesosMaster({})
mock_task_1 = Mock()
mesos_master.state = CoroutineMock(return_value={"orphan_tasks": [mock_task_1]})
assert await mesos_master.orphan_tasks() == [mock_task_1]
|
import logging
import ssl
import time
from urllib.parse import urlparse
from plexapi.client import PlexClient
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
import plexapi.myplex
import plexapi.playqueue
import plexapi.server
from requests import Session
import requests.exceptions
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.media_player.const import (
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_VIDEO,
)
from homeassistant.const import CONF_CLIENT_ID, CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
CONF_IGNORE_NEW_SHARED_USERS,
CONF_IGNORE_PLEX_WEB_CLIENTS,
CONF_MONITORED_USERS,
CONF_SERVER,
CONF_USE_EPISODE_ART,
DEBOUNCE_TIMEOUT,
DEFAULT_VERIFY_SSL,
DOMAIN,
GDM_SCANNER,
PLAYER_SOURCE,
PLEX_NEW_MP_SIGNAL,
PLEX_UPDATE_MEDIA_PLAYER_SIGNAL,
PLEX_UPDATE_SENSOR_SIGNAL,
PLEXTV_THROTTLE,
X_PLEX_DEVICE_NAME,
X_PLEX_PLATFORM,
X_PLEX_PRODUCT,
X_PLEX_VERSION,
)
from .errors import (
MediaNotFound,
NoServersFound,
ServerNotSpecified,
ShouldUpdateConfigEntry,
)
from .media_search import lookup_movie, lookup_music, lookup_tv
_LOGGER = logging.getLogger(__name__)
# Set default headers sent by plexapi
plexapi.X_PLEX_DEVICE_NAME = X_PLEX_DEVICE_NAME
plexapi.X_PLEX_PLATFORM = X_PLEX_PLATFORM
plexapi.X_PLEX_PRODUCT = X_PLEX_PRODUCT
plexapi.X_PLEX_VERSION = X_PLEX_VERSION
class PlexServer:
"""Manages a single Plex server connection."""
def __init__(
self, hass, server_config, known_server_id=None, options=None, entry_id=None
):
"""Initialize a Plex server instance."""
self.hass = hass
self.entry_id = entry_id
self._plex_account = None
self._plex_server = None
self._created_clients = set()
self._known_clients = set()
self._known_idle = set()
self._url = server_config.get(CONF_URL)
self._token = server_config.get(CONF_TOKEN)
self._server_name = server_config.get(CONF_SERVER)
self._verify_ssl = server_config.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL)
self._server_id = known_server_id
self.options = options
self.server_choice = None
self._accounts = []
self._owner_username = None
self._plextv_clients = None
self._plextv_client_timestamp = 0
self._client_device_cache = {}
self._use_plex_tv = self._token is not None
self._version = None
self.async_update_platforms = Debouncer(
hass,
_LOGGER,
cooldown=DEBOUNCE_TIMEOUT,
immediate=True,
function=self._async_update_platforms,
).async_call
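        # Debounce platform refreshes: bursts of update requests collapse into
        # at most one _async_update_platforms call per cooldown window, with
        # the first call running immediately.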
# Header conditionally added as it is not available in config entry v1
if CONF_CLIENT_ID in server_config:
plexapi.X_PLEX_IDENTIFIER = server_config[CONF_CLIENT_ID]
plexapi.myplex.BASE_HEADERS = plexapi.reset_base_headers()
plexapi.server.BASE_HEADERS = plexapi.reset_base_headers()
@property
def account(self):
"""Return a MyPlexAccount instance."""
if not self._plex_account and self._use_plex_tv:
try:
self._plex_account = plexapi.myplex.MyPlexAccount(token=self._token)
except (BadRequest, Unauthorized):
self._use_plex_tv = False
_LOGGER.error("Not authorized to access plex.tv with provided token")
raise
return self._plex_account
def plextv_clients(self):
"""Return available clients linked to Plex account."""
if self.account is None:
return []
now = time.time()
if now - self._plextv_client_timestamp > PLEXTV_THROTTLE:
self._plextv_client_timestamp = now
self._plextv_clients = [
x
for x in self.account.resources()
if "player" in x.provides and x.presence and x.publicAddressMatches
]
_LOGGER.debug(
"Current available clients from plex.tv: %s", self._plextv_clients
)
return self._plextv_clients
def connect(self):
"""Connect to a Plex server directly, obtaining direct URL if necessary."""
config_entry_update_needed = False
def _connect_with_token():
available_servers = [
(x.name, x.clientIdentifier)
for x in self.account.resources()
if "server" in x.provides
]
if not available_servers:
raise NoServersFound
if not self._server_name and len(available_servers) > 1:
raise ServerNotSpecified(available_servers)
self.server_choice = (
self._server_name if self._server_name else available_servers[0][0]
)
self._plex_server = self.account.resource(self.server_choice).connect(
timeout=10
)
def _connect_with_url():
session = None
if self._url.startswith("https") and not self._verify_ssl:
session = Session()
session.verify = False
self._plex_server = plexapi.server.PlexServer(
self._url, self._token, session
)
def _update_plexdirect_hostname():
matching_servers = [
x.name
for x in self.account.resources()
if x.clientIdentifier == self._server_id
]
if matching_servers:
self._plex_server = self.account.resource(matching_servers[0]).connect(
timeout=10
)
return True
_LOGGER.error("Attempt to update plex.direct hostname failed")
return False
if self._url:
try:
_connect_with_url()
except requests.exceptions.SSLError as error:
while error and not isinstance(error, ssl.SSLCertVerificationError):
error = error.__context__ # pylint: disable=no-member
if isinstance(error, ssl.SSLCertVerificationError):
domain = urlparse(self._url).netloc.split(":")[0]
if domain.endswith("plex.direct") and error.args[0].startswith(
f"hostname '{domain}' doesn't match"
):
_LOGGER.warning(
"Plex SSL certificate's hostname changed, updating"
)
if _update_plexdirect_hostname():
config_entry_update_needed = True
else:
raise Unauthorized( # pylint: disable=raise-missing-from
"New certificate cannot be validated with provided token"
)
else:
raise
else:
raise
else:
_connect_with_token()
try:
system_accounts = self._plex_server.systemAccounts()
except Unauthorized:
_LOGGER.warning(
"Plex account has limited permissions, shared account filtering will not be available"
)
else:
self._accounts = [
account.name for account in system_accounts if account.name
]
_LOGGER.debug("Linked accounts: %s", self.accounts)
owner_account = [
account.name for account in system_accounts if account.accountID == 1
]
if owner_account:
self._owner_username = owner_account[0]
_LOGGER.debug("Server owner found: '%s'", self._owner_username)
self._version = self._plex_server.version
if config_entry_update_needed:
raise ShouldUpdateConfigEntry
@callback
def async_refresh_entity(self, machine_identifier, device, session):
"""Forward refresh dispatch to media_player."""
unique_id = f"{self.machine_identifier}:{machine_identifier}"
_LOGGER.debug("Refreshing %s", unique_id)
async_dispatcher_send(
self.hass,
PLEX_UPDATE_MEDIA_PLAYER_SIGNAL.format(unique_id),
device,
session,
)
def _fetch_platform_data(self):
"""Fetch all data from the Plex server in a single method."""
return (
self._plex_server.clients(),
self._plex_server.sessions(),
self.plextv_clients(),
)
async def _async_update_platforms(self):
"""Update the platform entities."""
_LOGGER.debug("Updating devices")
available_clients = {}
ignored_clients = set()
new_clients = set()
monitored_users = self.accounts
known_accounts = set(self.option_monitored_users)
if known_accounts:
monitored_users = {
user
for user in self.option_monitored_users
if self.option_monitored_users[user]["enabled"]
}
if not self.option_ignore_new_shared_users:
for new_user in self.accounts - known_accounts:
monitored_users.add(new_user)
try:
devices, sessions, plextv_clients = await self.hass.async_add_executor_job(
self._fetch_platform_data
)
except plexapi.exceptions.Unauthorized:
_LOGGER.debug(
"Token has expired for '%s', reloading integration", self.friendly_name
)
await self.hass.config_entries.async_reload(self.entry_id)
return
except (
plexapi.exceptions.BadRequest,
requests.exceptions.RequestException,
) as ex:
_LOGGER.error(
"Could not connect to Plex server: %s (%s)", self.friendly_name, ex
)
return
def process_device(source, device):
self._known_idle.discard(device.machineIdentifier)
available_clients.setdefault(device.machineIdentifier, {"device": device})
available_clients[device.machineIdentifier].setdefault(
PLAYER_SOURCE, source
)
if device.machineIdentifier not in ignored_clients:
if self.option_ignore_plexweb_clients and device.product == "Plex Web":
ignored_clients.add(device.machineIdentifier)
if device.machineIdentifier not in self._known_clients:
_LOGGER.debug(
"Ignoring %s %s: %s",
"Plex Web",
source,
device.machineIdentifier,
)
return
if device.machineIdentifier not in (
self._created_clients | ignored_clients | new_clients
):
new_clients.add(device.machineIdentifier)
_LOGGER.debug(
"New %s from %s: %s",
device.product,
source,
device.machineIdentifier,
)
for device in devices:
process_device("PMS", device)
def connect_to_client(source, baseurl, machine_identifier, name="Unknown"):
"""Connect to a Plex client and return a PlexClient instance."""
try:
client = PlexClient(
server=self._plex_server,
baseurl=baseurl,
token=self._plex_server.createToken(),
)
except requests.exceptions.ConnectionError:
_LOGGER.error(
"Direct client connection failed, will try again: %s (%s)",
name,
baseurl,
)
except Unauthorized:
_LOGGER.error(
"Direct client connection unauthorized, ignoring: %s (%s)",
name,
baseurl,
)
self._client_device_cache[machine_identifier] = None
else:
self._client_device_cache[client.machineIdentifier] = client
process_device(source, client)
def connect_to_resource(resource):
"""Connect to a plex.tv resource and return a Plex client."""
try:
client = resource.connect(timeout=3)
_LOGGER.debug("plex.tv resource connection successful: %s", client)
except NotFound:
_LOGGER.error("plex.tv resource connection failed: %s", resource.name)
else:
client.proxyThroughServer(value=False, server=self._plex_server)
self._client_device_cache[client.machineIdentifier] = client
process_device("plex.tv", client)
def connect_new_clients():
"""Create connections to newly discovered clients."""
for gdm_entry in self.hass.data[DOMAIN][GDM_SCANNER].entries:
machine_identifier = gdm_entry["data"]["Resource-Identifier"]
if machine_identifier in self._client_device_cache:
client = self._client_device_cache[machine_identifier]
if client is not None:
process_device("GDM", client)
elif machine_identifier not in available_clients:
baseurl = (
f"http://{gdm_entry['from'][0]}:{gdm_entry['data']['Port']}"
)
name = gdm_entry["data"]["Name"]
connect_to_client("GDM", baseurl, machine_identifier, name)
for plextv_client in plextv_clients:
if plextv_client.clientIdentifier in self._client_device_cache:
client = self._client_device_cache[plextv_client.clientIdentifier]
if client is not None:
process_device("plex.tv", client)
elif plextv_client.clientIdentifier not in available_clients:
connect_to_resource(plextv_client)
await self.hass.async_add_executor_job(connect_new_clients)
for session in sessions:
if session.TYPE == "photo":
_LOGGER.debug("Photo session detected, skipping: %s", session)
continue
session_username = session.usernames[0]
for player in session.players:
if session_username and session_username not in monitored_users:
ignored_clients.add(player.machineIdentifier)
_LOGGER.debug(
"Ignoring %s client owned by '%s'",
player.product,
session_username,
)
continue
process_device("session", player)
available_clients[player.machineIdentifier]["session"] = session
new_entity_configs = []
for client_id, client_data in available_clients.items():
if client_id in ignored_clients:
continue
if client_id in new_clients:
new_entity_configs.append(client_data)
self._created_clients.add(client_id)
else:
self.async_refresh_entity(
client_id, client_data["device"], client_data.get("session")
)
self._known_clients.update(new_clients | ignored_clients)
idle_clients = (
self._known_clients - self._known_idle - ignored_clients
).difference(available_clients)
for client_id in idle_clients:
self.async_refresh_entity(client_id, None, None)
self._known_idle.add(client_id)
self._client_device_cache.pop(client_id, None)
if new_entity_configs:
async_dispatcher_send(
self.hass,
PLEX_NEW_MP_SIGNAL.format(self.machine_identifier),
new_entity_configs,
)
async_dispatcher_send(
self.hass,
PLEX_UPDATE_SENSOR_SIGNAL.format(self.machine_identifier),
sessions,
)
@property
def plex_server(self):
"""Return the plexapi PlexServer instance."""
return self._plex_server
@property
def accounts(self):
"""Return accounts associated with the Plex server."""
return set(self._accounts)
@property
def owner(self):
"""Return the Plex server owner username."""
return self._owner_username
@property
def version(self):
"""Return the version of the Plex server."""
return self._version
@property
def friendly_name(self):
"""Return name of connected Plex server."""
return self._plex_server.friendlyName
@property
def machine_identifier(self):
"""Return unique identifier of connected Plex server."""
return self._plex_server.machineIdentifier
@property
def url_in_use(self):
"""Return URL used for connected Plex server."""
return self._plex_server._baseurl # pylint: disable=protected-access
@property
def option_ignore_new_shared_users(self):
"""Return ignore_new_shared_users option."""
return self.options[MP_DOMAIN].get(CONF_IGNORE_NEW_SHARED_USERS, False)
@property
def option_use_episode_art(self):
"""Return use_episode_art option."""
return self.options[MP_DOMAIN].get(CONF_USE_EPISODE_ART, False)
@property
def option_monitored_users(self):
"""Return dict of monitored users option."""
return self.options[MP_DOMAIN].get(CONF_MONITORED_USERS, {})
@property
def option_ignore_plexweb_clients(self):
"""Return ignore_plex_web_clients option."""
return self.options[MP_DOMAIN].get(CONF_IGNORE_PLEX_WEB_CLIENTS, False)
@property
def library(self):
"""Return library attribute from server object."""
return self._plex_server.library
def playlist(self, title):
"""Return playlist from server object."""
return self._plex_server.playlist(title)
def playlists(self):
"""Return available playlists from server object."""
return self._plex_server.playlists()
def create_playqueue(self, media, **kwargs):
"""Create playqueue on Plex server."""
return plexapi.playqueue.PlayQueue.create(self._plex_server, media, **kwargs)
def fetch_item(self, item):
"""Fetch item from Plex server."""
return self._plex_server.fetchItem(item)
def lookup_media(self, media_type, **kwargs):
"""Lookup a piece of media."""
media_type = media_type.lower()
if isinstance(kwargs.get("plex_key"), int):
key = kwargs["plex_key"]
try:
return self.fetch_item(key)
except NotFound:
_LOGGER.error("Media for key %s not found", key)
return None
if media_type == MEDIA_TYPE_PLAYLIST:
try:
playlist_name = kwargs["playlist_name"]
return self.playlist(playlist_name)
except KeyError:
_LOGGER.error("Must specify 'playlist_name' for this search")
return None
except NotFound:
_LOGGER.error(
"Playlist '%s' not found",
playlist_name,
)
return None
try:
library_name = kwargs.pop("library_name")
library_section = self.library.section(library_name)
except KeyError:
_LOGGER.error("Must specify 'library_name' for this search")
return None
except NotFound:
_LOGGER.error("Library '%s' not found", library_name)
return None
try:
if media_type == MEDIA_TYPE_EPISODE:
return lookup_tv(library_section, **kwargs)
if media_type == MEDIA_TYPE_MOVIE:
return lookup_movie(library_section, **kwargs)
if media_type == MEDIA_TYPE_MUSIC:
return lookup_music(library_section, **kwargs)
if media_type == MEDIA_TYPE_VIDEO:
# Legacy method for compatibility
try:
video_name = kwargs["video_name"]
return library_section.get(video_name)
except KeyError:
_LOGGER.error("Must specify 'video_name' for this search")
return None
except NotFound as err:
raise MediaNotFound(f"Video {video_name}") from err
except MediaNotFound as failed_item:
_LOGGER.error("%s not found in %s", failed_item, library_name)
return None
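# Usage sketch (hypothetical, not part of the original module): `plex_server`
# is an instance of this wrapper; the keys, titles, and any pass-through
# kwargs handed to lookup_tv/lookup_movie/lookup_music are illustrative only.
#
#   item = plex_server.lookup_media("movie", plex_key=123)
#   playlist = plex_server.lookup_media("playlist", playlist_name="Party Mix")
#   movie = plex_server.lookup_media("movie", library_name="Movies", title="Example")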
|
from dataclasses import dataclass
import datetime
import pytest
from homeassistant.components import climate, humidifier, sensor
from homeassistant.components.demo.sensor import DemoSensor
import homeassistant.components.prometheus as prometheus
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONTENT_TYPE_TEXT_PLAIN,
DEGREE,
DEVICE_CLASS_POWER,
ENERGY_KILO_WATT_HOUR,
EVENT_STATE_CHANGED,
)
from homeassistant.core import split_entity_id
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
import tests.async_mock as mock
PROMETHEUS_PATH = "homeassistant.components.prometheus"
@dataclass
class FilterTest:
"""Class for capturing a filter test."""
id: str
should_pass: bool
async def prometheus_client(hass, hass_client):
"""Initialize an hass_client with Prometheus component."""
await async_setup_component(hass, prometheus.DOMAIN, {prometheus.DOMAIN: {}})
await async_setup_component(hass, sensor.DOMAIN, {"sensor": [{"platform": "demo"}]})
await async_setup_component(
hass, climate.DOMAIN, {"climate": [{"platform": "demo"}]}
)
await hass.async_block_till_done()
await async_setup_component(
hass, humidifier.DOMAIN, {"humidifier": [{"platform": "demo"}]}
)
sensor1 = DemoSensor(
None, "Television Energy", 74, None, ENERGY_KILO_WATT_HOUR, None
)
sensor1.hass = hass
sensor1.entity_id = "sensor.television_energy"
await sensor1.async_update_ha_state()
sensor2 = DemoSensor(
None, "Radio Energy", 14, DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR, None
)
sensor2.hass = hass
sensor2.entity_id = "sensor.radio_energy"
with mock.patch(
"homeassistant.util.dt.utcnow",
return_value=datetime.datetime(1970, 1, 2, tzinfo=dt_util.UTC),
):
await sensor2.async_update_ha_state()
sensor3 = DemoSensor(
None, "Electricity price", 0.123, None, f"SEK/{ENERGY_KILO_WATT_HOUR}", None
)
sensor3.hass = hass
sensor3.entity_id = "sensor.electricity_price"
await sensor3.async_update_ha_state()
sensor4 = DemoSensor(None, "Wind Direction", 25, None, DEGREE, None)
sensor4.hass = hass
sensor4.entity_id = "sensor.wind_direction"
await sensor4.async_update_ha_state()
sensor5 = DemoSensor(
None,
"SPS30 PM <1µm Weight concentration",
3.7069,
None,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
None,
)
sensor5.hass = hass
sensor5.entity_id = "sensor.sps30_pm_1um_weight_concentration"
await sensor5.async_update_ha_state()
return await hass_client()
async def test_view(hass, hass_client):
"""Test prometheus metrics view."""
client = await prometheus_client(hass, hass_client)
resp = await client.get(prometheus.API_ENDPOINT)
assert resp.status == 200
assert resp.headers["content-type"] == CONTENT_TYPE_TEXT_PLAIN
body = await resp.text()
body = body.split("\n")
assert len(body) > 3
assert "# HELP python_info Python platform information" in body
assert (
"# HELP python_gc_objects_collected_total "
"Objects collected during gc" in body
)
assert (
'temperature_c{domain="sensor",'
'entity="sensor.outside_temperature",'
'friendly_name="Outside Temperature"} 15.6' in body
)
assert (
'battery_level_percent{domain="sensor",'
'entity="sensor.outside_temperature",'
'friendly_name="Outside Temperature"} 12.0' in body
)
assert (
'current_temperature_c{domain="climate",'
'entity="climate.heatpump",'
'friendly_name="HeatPump"} 25.0' in body
)
assert (
'humidifier_target_humidity_percent{domain="humidifier",'
'entity="humidifier.humidifier",'
'friendly_name="Humidifier"} 68.0' in body
)
assert (
'humidifier_state{domain="humidifier",'
'entity="humidifier.dehumidifier",'
'friendly_name="Dehumidifier"} 1.0' in body
)
assert (
'humidifier_mode{domain="humidifier",'
'entity="humidifier.hygrostat",'
'friendly_name="Hygrostat",'
'mode="home"} 1.0' in body
)
assert (
'humidifier_mode{domain="humidifier",'
'entity="humidifier.hygrostat",'
'friendly_name="Hygrostat",'
'mode="eco"} 0.0' in body
)
assert (
'humidity_percent{domain="sensor",'
'entity="sensor.outside_humidity",'
'friendly_name="Outside Humidity"} 54.0' in body
)
assert (
'sensor_unit_kwh{domain="sensor",'
'entity="sensor.television_energy",'
'friendly_name="Television Energy"} 74.0' in body
)
assert (
'power_kwh{domain="sensor",'
'entity="sensor.radio_energy",'
'friendly_name="Radio Energy"} 14.0' in body
)
assert (
'entity_available{domain="sensor",'
'entity="sensor.radio_energy",'
'friendly_name="Radio Energy"} 1.0' in body
)
assert (
'last_updated_time_seconds{domain="sensor",'
'entity="sensor.radio_energy",'
'friendly_name="Radio Energy"} 86400.0' in body
)
assert (
'sensor_unit_sek_per_kwh{domain="sensor",'
'entity="sensor.electricity_price",'
'friendly_name="Electricity price"} 0.123' in body
)
assert (
'sensor_unit_u0xb0{domain="sensor",'
'entity="sensor.wind_direction",'
'friendly_name="Wind Direction"} 25.0' in body
)
assert (
'sensor_unit_u0xb5g_per_mu0xb3{domain="sensor",'
'entity="sensor.sps30_pm_1um_weight_concentration",'
'friendly_name="SPS30 PM <1µm Weight concentration"} 3.7069' in body
)
@pytest.fixture(name="mock_client")
def mock_client_fixture():
"""Mock the prometheus client."""
with mock.patch(f"{PROMETHEUS_PATH}.prometheus_client") as client:
counter_client = mock.MagicMock()
client.Counter = mock.MagicMock(return_value=counter_client)
        counter_client.labels = mock.MagicMock(return_value=mock.MagicMock())
yield counter_client
@pytest.fixture
def mock_bus(hass):
"""Mock the event bus listener."""
hass.bus.listen = mock.MagicMock()
@pytest.mark.usefixtures("mock_bus")
async def test_minimal_config(hass, mock_client):
"""Test the minimal config and defaults of component."""
config = {prometheus.DOMAIN: {}}
assert await async_setup_component(hass, prometheus.DOMAIN, config)
await hass.async_block_till_done()
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
@pytest.mark.usefixtures("mock_bus")
async def test_full_config(hass, mock_client):
"""Test the full config of component."""
config = {
prometheus.DOMAIN: {
"namespace": "ns",
"default_metric": "m",
"override_metric": "m",
"component_config": {"fake.test": {"override_metric": "km"}},
"component_config_glob": {"fake.time_*": {"override_metric": "h"}},
"component_config_domain": {"climate": {"override_metric": "°C"}},
"filter": {
"include_domains": ["climate"],
"include_entity_globs": ["fake.time_*"],
"include_entities": ["fake.test"],
"exclude_domains": ["script"],
"exclude_entity_globs": ["climate.excluded_*"],
"exclude_entities": ["fake.time_excluded"],
},
}
}
assert await async_setup_component(hass, prometheus.DOMAIN, config)
await hass.async_block_till_done()
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
def make_event(entity_id):
"""Make a mock event for test."""
domain = split_entity_id(entity_id)[0]
state = mock.MagicMock(
state="not blank",
domain=domain,
entity_id=entity_id,
object_id="entity",
attributes={},
)
return mock.MagicMock(data={"new_state": state}, time_fired=12345)
async def _setup(hass, filter_config):
"""Shared set up for filtering tests."""
config = {prometheus.DOMAIN: {"filter": filter_config}}
assert await async_setup_component(hass, prometheus.DOMAIN, config)
await hass.async_block_till_done()
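    # The component registers a single EVENT_STATE_CHANGED listener; return its handler so tests can invoke it directly.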
return hass.bus.listen.call_args_list[0][0][1]
@pytest.mark.usefixtures("mock_bus")
async def test_allowlist(hass, mock_client):
"""Test an allowlist only config."""
handler_method = await _setup(
hass,
{
"include_domains": ["fake"],
"include_entity_globs": ["test.included_*"],
"include_entities": ["not_real.included"],
},
)
tests = [
FilterTest("climate.excluded", False),
FilterTest("fake.included", True),
FilterTest("test.excluded_test", False),
FilterTest("test.included_test", True),
FilterTest("not_real.included", True),
FilterTest("not_real.excluded", False),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = mock_client.labels.call_count == 1
assert test.should_pass == was_called
mock_client.labels.reset_mock()
@pytest.mark.usefixtures("mock_bus")
async def test_denylist(hass, mock_client):
"""Test a denylist only config."""
handler_method = await _setup(
hass,
{
"exclude_domains": ["fake"],
"exclude_entity_globs": ["test.excluded_*"],
"exclude_entities": ["not_real.excluded"],
},
)
tests = [
FilterTest("fake.excluded", False),
FilterTest("light.included", True),
FilterTest("test.excluded_test", False),
FilterTest("test.included_test", True),
FilterTest("not_real.included", True),
FilterTest("not_real.excluded", False),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = mock_client.labels.call_count == 1
assert test.should_pass == was_called
mock_client.labels.reset_mock()
@pytest.mark.usefixtures("mock_bus")
async def test_filtered_denylist(hass, mock_client):
"""Test a denylist config with a filtering allowlist."""
handler_method = await _setup(
hass,
{
"include_entities": ["fake.included", "test.excluded_test"],
"exclude_domains": ["fake"],
"exclude_entity_globs": ["*.excluded_*"],
"exclude_entities": ["not_real.excluded"],
},
)
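    # In the filter semantics exercised here, explicitly included entities pass
    # even when they match an exclude rule, while unlisted entities pass unless excluded.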
tests = [
FilterTest("fake.excluded", False),
FilterTest("fake.included", True),
FilterTest("alt_fake.excluded_test", False),
FilterTest("test.excluded_test", True),
FilterTest("not_real.excluded", False),
FilterTest("not_real.included", True),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = mock_client.labels.call_count == 1
assert test.should_pass == was_called
mock_client.labels.reset_mock()
|
from __future__ import absolute_import
import unittest
from .common_imports import etree, HelperTestCase, make_doctest
class ETreeSchematronTestCase(HelperTestCase):
def test_schematron(self):
tree_valid = self.parse('<AAA><BBB/><CCC/></AAA>')
tree_invalid = self.parse('<AAA><BBB/><CCC/><DDD/></AAA>')
schema = self.parse('''\
<schema xmlns="http://purl.oclc.org/dsdl/schematron" >
<pattern name="Open model">
<rule context="AAA">
<assert test="BBB"> BBB element is not present</assert>
<assert test="CCC"> CCC element is not present</assert>
</rule>
</pattern>
<pattern name="Closed model">
<rule context="AAA">
<assert test="BBB"> BBB element is not present</assert>
<assert test="CCC"> CCC element is not present</assert>
<assert test="count(BBB|CCC) = count (*)">There is an extra element</assert>
</rule>
</pattern>
</schema>
''')
schema = etree.Schematron(schema)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.error_log.filter_from_errors())
self.assertTrue(schema.validate(tree_valid)) # repeat valid
self.assertFalse(schema.error_log.filter_from_errors()) # repeat valid
def test_schematron_elementtree_error(self):
self.assertRaises(ValueError, etree.Schematron, etree.ElementTree())
def test_schematron_invalid_schema(self):
schema = self.parse('''\
<schema xmlns="http://purl.oclc.org/dsdl/schematron" >
<pattern name="Open model">
</pattern>
</schema>
''')
self.assertRaises(etree.SchematronParseError,
etree.Schematron, schema)
def test_schematron_invalid_schema_empty(self):
schema = self.parse('''\
<schema xmlns="http://purl.oclc.org/dsdl/schematron" />
''')
self.assertRaises(etree.SchematronParseError,
etree.Schematron, schema)
def test_schematron_invalid_schema_namespace(self):
        # this previously caused a segfault
schema = self.parse('''\
<schema xmlns="mynamespace" />
''')
self.assertRaises(etree.SchematronParseError,
etree.Schematron, schema)
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ETreeSchematronTestCase)])
suite.addTests(
[make_doctest('../../../doc/validation.txt')])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
import logging
from homeassistant import config_entries
from homeassistant.helpers import config_entry_oauth2_flow
from .const import DOMAIN
class OAuth2FlowHandler(
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
"""Config flow to handle Home Connect OAuth2 authentication."""
DOMAIN = DOMAIN
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
|
from datetime import timedelta
import logging
from pmsensor import co2sensor
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_TEMPERATURE,
CONCENTRATION_PARTS_PER_MILLION,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util.temperature import celsius_to_fahrenheit
_LOGGER = logging.getLogger(__name__)
CONF_SERIAL_DEVICE = "serial_device"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
DEFAULT_NAME = "CO2 Sensor"
ATTR_CO2_CONCENTRATION = "co2_concentration"
SENSOR_TEMPERATURE = "temperature"
SENSOR_CO2 = "co2"
SENSOR_TYPES = {
SENSOR_TEMPERATURE: ["Temperature", None],
SENSOR_CO2: ["CO2", CONCENTRATION_PARTS_PER_MILLION],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_SERIAL_DEVICE): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_CO2]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the available CO2 sensors."""
try:
co2sensor.read_mh_z19(config.get(CONF_SERIAL_DEVICE))
except OSError as err:
_LOGGER.error(
"Could not open serial connection to %s (%s)",
config.get(CONF_SERIAL_DEVICE),
err,
)
return False
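    # Report temperature in whichever unit this Home Assistant instance is configured to use.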
SENSOR_TYPES[SENSOR_TEMPERATURE][1] = hass.config.units.temperature_unit
data = MHZClient(co2sensor, config.get(CONF_SERIAL_DEVICE))
dev = []
name = config.get(CONF_NAME)
for variable in config[CONF_MONITORED_CONDITIONS]:
dev.append(MHZ19Sensor(data, variable, SENSOR_TYPES[variable][1], name))
add_entities(dev, True)
return True
class MHZ19Sensor(Entity):
"""Representation of an CO2 sensor."""
def __init__(self, mhz_client, sensor_type, temp_unit, name):
"""Initialize a new PM sensor."""
self._mhz_client = mhz_client
self._sensor_type = sensor_type
self._temp_unit = temp_unit
self._name = name
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._ppm = None
self._temperature = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name}: {SENSOR_TYPES[self._sensor_type][0]}"
@property
def state(self):
"""Return the state of the sensor."""
return self._ppm if self._sensor_type == SENSOR_CO2 else self._temperature
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def update(self):
"""Read from sensor and update the state."""
self._mhz_client.update()
data = self._mhz_client.data
self._temperature = data.get(SENSOR_TEMPERATURE)
if self._temperature is not None and self._temp_unit == TEMP_FAHRENHEIT:
self._temperature = round(celsius_to_fahrenheit(self._temperature), 1)
self._ppm = data.get(SENSOR_CO2)
@property
def device_state_attributes(self):
"""Return the state attributes."""
result = {}
if self._sensor_type == SENSOR_TEMPERATURE and self._ppm is not None:
result[ATTR_CO2_CONCENTRATION] = self._ppm
if self._sensor_type == SENSOR_CO2 and self._temperature is not None:
result[ATTR_TEMPERATURE] = self._temperature
return result
class MHZClient:
"""Get the latest data from the MH-Z sensor."""
def __init__(self, co2sens, serial):
"""Initialize the sensor."""
self.co2sensor = co2sens
self._serial = serial
self.data = {}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data the MH-Z19 sensor."""
self.data = {}
try:
result = self.co2sensor.read_mh_z19_with_temperature(self._serial)
if result is None:
return
co2, temperature = result
except OSError as err:
_LOGGER.error(
"Could not open serial connection to %s (%s)", self._serial, err
)
return
if temperature is not None:
self.data[SENSOR_TEMPERATURE] = temperature
if co2 is not None and 0 < co2 <= 5000:
self.data[SENSOR_CO2] = co2
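# Usage sketch (assumes a connected MH-Z19 on a serial device; the device path
# below is an illustrative placeholder):
#
#   client = MHZClient(co2sensor, "/dev/ttyUSB0")
#   client.update()  # throttled to one read per MIN_TIME_BETWEEN_UPDATES
#   client.data.get(SENSOR_CO2), client.data.get(SENSOR_TEMPERATURE)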
|
from datetime import timedelta
import json
from homeassistant.components.metoffice.const import DOMAIN
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.util import utcnow
from . import NewDateTime
from .const import (
METOFFICE_CONFIG_KINGSLYNN,
METOFFICE_CONFIG_WAVERTREE,
WAVERTREE_SENSOR_RESULTS,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_site_cannot_connect(hass, requests_mock, legacy_patchable_time):
"""Test we handle cannot connect error."""
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get("weather.met_office_wavertree") is None
for sensor_id in WAVERTREE_SENSOR_RESULTS:
sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
sensor = hass.states.get(f"sensor.wavertree_{sensor_name}")
assert sensor is None
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_site_cannot_update(hass, requests_mock, legacy_patchable_time):
"""Test we handle cannot connect error."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
entity = hass.states.get("weather.met_office_wavertree")
assert entity
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
future_time = utcnow() + timedelta(minutes=20)
async_fire_time_changed(hass, future_time)
await hass.async_block_till_done()
entity = hass.states.get("weather.met_office_wavertree")
assert entity.state == STATE_UNAVAILABLE
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_one_weather_site_running(hass, requests_mock, legacy_patchable_time):
"""Test the Met Office weather platform."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly",
text=wavertree_hourly,
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
# Wavertree weather platform expected results
entity = hass.states.get("weather.met_office_wavertree")
assert entity
assert entity.state == "sunny"
assert entity.attributes.get("temperature") == 17
assert entity.attributes.get("wind_speed") == 9
assert entity.attributes.get("wind_bearing") == "SSE"
assert entity.attributes.get("visibility") == "Good - 10-20"
assert entity.attributes.get("humidity") == 50
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_two_weather_sites_running(hass, requests_mock, legacy_patchable_time):
"""Test we handle two different weather sites both running."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
)
requests_mock.get(
"/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
entry2 = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_KINGSLYNN,
)
entry2.add_to_hass(hass)
await hass.config_entries.async_setup(entry2.entry_id)
await hass.async_block_till_done()
# Wavertree weather platform expected results
entity = hass.states.get("weather.met_office_wavertree")
assert entity
assert entity.state == "sunny"
assert entity.attributes.get("temperature") == 17
assert entity.attributes.get("wind_speed") == 9
assert entity.attributes.get("wind_bearing") == "SSE"
assert entity.attributes.get("visibility") == "Good - 10-20"
assert entity.attributes.get("humidity") == 50
# King's Lynn weather platform expected results
entity = hass.states.get("weather.met_office_king_s_lynn")
assert entity
assert entity.state == "sunny"
assert entity.attributes.get("temperature") == 14
assert entity.attributes.get("wind_speed") == 2
assert entity.attributes.get("wind_bearing") == "E"
assert entity.attributes.get("visibility") == "Very Good - 20-40"
assert entity.attributes.get("humidity") == 60
|
import json
from unittest import TestCase
from scattertext.viz import VizDataAdapter
PAYLOAD = {"info": {"not_category_name": "Republican", "category_name": "Democratic"},
"data": [{"y": 0.33763837638376387, "term": "crises", "ncat25k": 0,
"cat25k": 1, "x": 0.0, "s": 0.878755930416447},
{"y": 0.5, "term": "something else", "ncat25k": 0,
"cat25k": 1, "x": 0.0,
"s": 0.5}]}
def make_viz_data_adapter():
return VizDataAdapter(PAYLOAD)
class TestVizDataAdapter(TestCase):
def test_to_javascript(self):
js_str = make_viz_data_adapter().to_javascript()
self.assertEqual(js_str[:34], 'function getDataAndInfo() { return')
self.assertEqual(js_str[-3:], '; }')
json_str = js_str[34:-3]
self.assertEqual(PAYLOAD, json.loads(json_str))
def test_to_json(self):
json_str = make_viz_data_adapter().to_json()
self.assertEqual(PAYLOAD, json.loads(json_str))
|
import pandas as pd
from scattertext.termscoring.RankDifference import RankDifference
from scattertext.Common import QUALITATIVE_COLORS
from scattertext.termranking import AbsoluteFrequencyRanker
class CategoryColorAssigner(object):
def __init__(self,
corpus,
scorer=RankDifference(),
ranker=AbsoluteFrequencyRanker,
use_non_text_features=False,
color_palette=QUALITATIVE_COLORS):
        '''
        Assigns scores to colors for categories
        :param corpus: TermDocMatrix
        :param scorer: term scorer; defaults to RankDifference
        :param ranker: term ranker class; defaults to AbsoluteFrequencyRanker
        :param use_non_text_features: bool; if True, score non-text (metadata) features instead of terms
        :param color_palette: list of colors [[red, green, blue], ...]
        '''
self.corpus = corpus
self.scorer = scorer
self.color_palette = color_palette
my_ranker = ranker(corpus)
if use_non_text_features:
my_ranker.use_non_text_features()
tdf = my_ranker.get_ranks()
tdf_sum = tdf.sum(axis=1)
term_scores = {}
for cat in tdf.columns:
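            # Rank columns are named "<category> freq"; dropping the 5-character " freq" suffix recovers the category name.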
term_scores[cat[:-5]] = pd.Series(self.scorer.get_scores(tdf[cat], tdf_sum - tdf[cat]), index=tdf.index)
self.term_cat = pd.DataFrame(term_scores).idxmax(axis=1)
ranked_list_categories = pd.Series(corpus.get_category_names_by_row()).value_counts().index
self.category_colors = pd.Series(self.color_palette[:len(ranked_list_categories)],
index=ranked_list_categories)
def get_category_colors(self):
return self.category_colors
def get_term_colors(self):
'''
:return: dict, term -> color
'''
term_color = pd.Series(self.category_colors[self.term_cat].values, index=self.term_cat.index)
def get_hex_color(color):
return ''.join([part if len(part) == 2 else '0' + part
for part in [hex(part)[2:] for part in color]])
return term_color.apply(get_hex_color).to_dict()
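# Usage sketch (assumes `corpus` is a multi-category TermDocMatrix built
# elsewhere; the variable name is illustrative):
#
#   assigner = CategoryColorAssigner(corpus)
#   assigner.get_category_colors()  # pd.Series: category -> [red, green, blue]
#   assigner.get_term_colors()      # dict: term -> hex color string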
|
from __future__ import annotations
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from .repo_manager import Candidate
__all__ = [
"DownloaderException",
"GitException",
"InvalidRepoName",
"CopyingError",
"ExistingGitRepo",
"MissingGitRepo",
"CloningError",
"CurrentHashError",
"HardResetError",
"UpdateError",
"GitDiffError",
"NoRemoteURL",
"UnknownRevision",
"AmbiguousRevision",
"PipError",
]
class DownloaderException(Exception):
"""
Base class for Downloader exceptions.
"""
pass
class GitException(DownloaderException):
"""
Generic class for git exceptions.
"""
def __init__(self, message: str, git_command: str) -> None:
self.git_command = git_command
super().__init__(f"Git command failed: {git_command}\nError message: {message}")
class InvalidRepoName(DownloaderException):
"""
    Thrown when a repo name is invalid. Check
the message for a more detailed reason.
"""
pass
class CopyingError(DownloaderException):
"""
    Thrown when there was an issue
    while copying a module's files.
"""
pass
class ExistingGitRepo(DownloaderException):
"""
Thrown when trying to clone into a folder where a
git repo already exists.
"""
pass
class MissingGitRepo(DownloaderException):
"""
Thrown when a git repo is expected to exist but
does not.
"""
pass
class CloningError(GitException):
"""
    Thrown when git clone returns a non-zero exit code.
"""
pass
class CurrentHashError(GitException):
"""
    Thrown when git returns a non-zero exit code attempting
to determine the current commit hash.
"""
pass
class HardResetError(GitException):
"""
Thrown when there is an issue trying to execute a hard reset
(usually prior to a repo update).
"""
pass
class UpdateError(GitException):
"""
    Thrown when git pull returns a non-zero exit code.
"""
pass
class GitDiffError(GitException):
"""
Thrown when a git diff fails.
"""
pass
class NoRemoteURL(GitException):
"""
Thrown when no remote URL exists for a repo.
"""
pass
class UnknownRevision(GitException):
"""
    Thrown when the specified revision cannot be found.
"""
pass
class AmbiguousRevision(GitException):
"""
    Thrown when the specified revision is ambiguous.
"""
def __init__(self, message: str, git_command: str, candidates: List[Candidate]) -> None:
super().__init__(message, git_command)
self.candidates = candidates
class PipError(DownloaderException):
"""
Thrown when pip returns a non-zero return code.
"""
pass
|
import pytest
from homeassistant.util import async_ as hasync
from tests.async_mock import MagicMock, Mock, patch
@patch("asyncio.coroutines.iscoroutine")
@patch("concurrent.futures.Future")
@patch("threading.get_ident")
def test_fire_coroutine_threadsafe_from_inside_event_loop(
mock_ident, _, mock_iscoroutine
):
"""Testing calling fire_coroutine_threadsafe from inside an event loop."""
coro = MagicMock()
loop = MagicMock()
loop._thread_ident = None
mock_ident.return_value = 5
mock_iscoroutine.return_value = True
hasync.fire_coroutine_threadsafe(coro, loop)
assert len(loop.call_soon_threadsafe.mock_calls) == 1
loop._thread_ident = 5
mock_ident.return_value = 5
mock_iscoroutine.return_value = True
with pytest.raises(RuntimeError):
hasync.fire_coroutine_threadsafe(coro, loop)
assert len(loop.call_soon_threadsafe.mock_calls) == 1
loop._thread_ident = 1
mock_ident.return_value = 5
mock_iscoroutine.return_value = False
with pytest.raises(TypeError):
hasync.fire_coroutine_threadsafe(coro, loop)
assert len(loop.call_soon_threadsafe.mock_calls) == 1
loop._thread_ident = 1
mock_ident.return_value = 5
mock_iscoroutine.return_value = True
hasync.fire_coroutine_threadsafe(coro, loop)
assert len(loop.call_soon_threadsafe.mock_calls) == 2
@patch("concurrent.futures.Future")
@patch("threading.get_ident")
def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _):
"""Testing calling run_callback_threadsafe from inside an event loop."""
callback = MagicMock()
loop = MagicMock()
loop._thread_ident = None
mock_ident.return_value = 5
hasync.run_callback_threadsafe(loop, callback)
assert len(loop.call_soon_threadsafe.mock_calls) == 1
loop._thread_ident = 5
mock_ident.return_value = 5
with pytest.raises(RuntimeError):
hasync.run_callback_threadsafe(loop, callback)
assert len(loop.call_soon_threadsafe.mock_calls) == 1
loop._thread_ident = 1
mock_ident.return_value = 5
hasync.run_callback_threadsafe(loop, callback)
assert len(loop.call_soon_threadsafe.mock_calls) == 2
async def test_check_loop_async():
"""Test check_loop detects when called from event loop without integration context."""
with pytest.raises(RuntimeError):
hasync.check_loop()
async def test_check_loop_async_integration(caplog):
"""Test check_loop detects when called from event loop from integration context."""
with patch(
"homeassistant.util.async_.extract_stack",
return_value=[
Mock(
filename="/home/paulus/homeassistant/core.py",
lineno="23",
line="do_something()",
),
Mock(
filename="/home/paulus/homeassistant/components/hue/light.py",
lineno="23",
line="self.light.is_on",
),
Mock(
filename="/home/paulus/aiohue/lights.py",
lineno="2",
line="something()",
),
],
):
hasync.check_loop()
assert (
"Detected I/O inside the event loop. This is causing stability issues. Please report issue for hue doing I/O at homeassistant/components/hue/light.py, line 23: self.light.is_on"
in caplog.text
)
async def test_check_loop_async_custom(caplog):
"""Test check_loop detects when called from event loop with custom component context."""
with patch(
"homeassistant.util.async_.extract_stack",
return_value=[
Mock(
filename="/home/paulus/homeassistant/core.py",
lineno="23",
line="do_something()",
),
Mock(
filename="/home/paulus/config/custom_components/hue/light.py",
lineno="23",
line="self.light.is_on",
),
Mock(
filename="/home/paulus/aiohue/lights.py",
lineno="2",
line="something()",
),
],
):
hasync.check_loop()
assert (
"Detected I/O inside the event loop. This is causing stability issues. Please report issue to the custom component author for hue doing I/O at custom_components/hue/light.py, line 23: self.light.is_on"
in caplog.text
)
def test_check_loop_sync(caplog):
"""Test check_loop does nothing when called from thread."""
hasync.check_loop()
assert "Detected I/O inside the event loop" not in caplog.text
def test_protect_loop_sync():
"""Test protect_loop calls check_loop."""
calls = []
with patch("homeassistant.util.async_.check_loop") as mock_loop:
hasync.protect_loop(calls.append)(1)
assert len(mock_loop.mock_calls) == 1
assert calls == [1]
|
import asyncio
from haffmpeg.camera import CameraMjpeg
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.const import CONF_NAME
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
import homeassistant.helpers.config_validation as cv
from . import CONF_EXTRA_ARGUMENTS, CONF_INPUT, DATA_FFMPEG
DEFAULT_NAME = "FFmpeg"
DEFAULT_ARGUMENTS = "-pred 1"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_INPUT): cv.string,
vol.Optional(CONF_EXTRA_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a FFmpeg camera."""
async_add_entities([FFmpegCamera(hass, config)])
class FFmpegCamera(Camera):
"""An implementation of an FFmpeg camera."""
def __init__(self, hass, config):
"""Initialize a FFmpeg camera."""
super().__init__()
self._manager = hass.data[DATA_FFMPEG]
self._name = config.get(CONF_NAME)
self._input = config.get(CONF_INPUT)
self._extra_arguments = config.get(CONF_EXTRA_ARGUMENTS)
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_STREAM
async def stream_source(self):
"""Return the stream source."""
return self._input.split(" ")[-1]
async def async_camera_image(self):
"""Return a still image response from the camera."""
ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
image = await asyncio.shield(
ffmpeg.get_image(
self._input, output_format=IMAGE_JPEG, extra_cmd=self._extra_arguments
)
)
return image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
stream = CameraMjpeg(self._manager.binary, loop=self.hass.loop)
await stream.open_camera(self._input, extra_cmd=self._extra_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._manager.ffmpeg_stream_content_type,
)
finally:
await stream.close()
@property
def name(self):
"""Return the name of this camera."""
return self._name
|
import librouteros
from homeassistant import config_entries
from homeassistant.components import mikrotik
from . import ARP_DATA, DHCP_DATA, MOCK_DATA, MOCK_OPTIONS, WIRELESS_DATA
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def setup_mikrotik_entry(hass, **kwargs):
"""Set up Mikrotik intergation successfully."""
support_wireless = kwargs.get("support_wireless", True)
dhcp_data = kwargs.get("dhcp_data", DHCP_DATA)
wireless_data = kwargs.get("wireless_data", WIRELESS_DATA)
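    # Stand-in for MikrotikData.command that returns canned API responses per requested service.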
def mock_command(self, cmd, params=None):
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.IS_WIRELESS]:
return support_wireless
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.DHCP]:
return dhcp_data
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.WIRELESS]:
return wireless_data
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.ARP]:
return ARP_DATA
return {}
config_entry = MockConfigEntry(
domain=mikrotik.DOMAIN, data=MOCK_DATA, options=MOCK_OPTIONS
)
config_entry.add_to_hass(hass)
if "force_dhcp" in kwargs:
config_entry.options = {**config_entry.options, "force_dhcp": True}
if "arp_ping" in kwargs:
config_entry.options = {**config_entry.options, "arp_ping": True}
with patch("librouteros.connect"), patch.object(
mikrotik.hub.MikrotikData, "command", new=mock_command
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return hass.data[mikrotik.DOMAIN][config_entry.entry_id]
async def test_hub_setup_successful(hass):
"""Successful setup of Mikrotik hub."""
with patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup",
return_value=True,
) as forward_entry_setup:
hub = await setup_mikrotik_entry(hass)
assert hub.config_entry.data == {
mikrotik.CONF_NAME: "Mikrotik",
mikrotik.CONF_HOST: "0.0.0.0",
mikrotik.CONF_USERNAME: "user",
mikrotik.CONF_PASSWORD: "pass",
mikrotik.CONF_PORT: 8278,
mikrotik.CONF_VERIFY_SSL: False,
}
assert hub.config_entry.options == {
mikrotik.hub.CONF_FORCE_DHCP: False,
mikrotik.CONF_ARP_PING: False,
mikrotik.CONF_DETECTION_TIME: 300,
}
assert hub.api.available is True
assert hub.signal_update == "mikrotik-update-0.0.0.0"
assert forward_entry_setup.mock_calls[0][1] == (hub.config_entry, "device_tracker")
async def test_hub_setup_failed(hass):
"""Failed setup of Mikrotik hub."""
config_entry = MockConfigEntry(domain=mikrotik.DOMAIN, data=MOCK_DATA)
config_entry.add_to_hass(hass)
# error when connection fails
with patch(
"librouteros.connect", side_effect=librouteros.exceptions.ConnectionClosed
):
await hass.config_entries.async_setup(config_entry.entry_id)
assert config_entry.state == config_entries.ENTRY_STATE_SETUP_RETRY
# error when username or password is invalid
config_entry = MockConfigEntry(domain=mikrotik.DOMAIN, data=MOCK_DATA)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup"
) as forward_entry_setup, patch(
"librouteros.connect",
side_effect=librouteros.exceptions.TrapError("invalid user name or password"),
):
result = await hass.config_entries.async_setup(config_entry.entry_id)
assert result is False
assert len(forward_entry_setup.mock_calls) == 0
async def test_update_failed(hass):
"""Test failing to connect during update."""
hub = await setup_mikrotik_entry(hass)
with patch.object(
mikrotik.hub.MikrotikData, "command", side_effect=mikrotik.errors.CannotConnect
):
await hub.async_update()
assert hub.api.available is False
async def test_hub_not_support_wireless(hass):
"""Test updating hub devices when hub doesn't support wireless interfaces."""
# test that the devices are constructed from dhcp data
hub = await setup_mikrotik_entry(hass, support_wireless=False)
assert hub.api.devices["00:00:00:00:00:01"]._params == DHCP_DATA[0]
assert hub.api.devices["00:00:00:00:00:01"]._wireless_params is None
assert hub.api.devices["00:00:00:00:00:02"]._params == DHCP_DATA[1]
assert hub.api.devices["00:00:00:00:00:02"]._wireless_params is None
async def test_hub_support_wireless(hass):
"""Test updating hub devices when hub support wireless interfaces."""
# test that the device list is from wireless data list
hub = await setup_mikrotik_entry(hass)
assert hub.api.support_wireless is True
assert hub.api.devices["00:00:00:00:00:01"]._params == DHCP_DATA[0]
assert hub.api.devices["00:00:00:00:00:01"]._wireless_params == WIRELESS_DATA[0]
# devices not in wireless list will not be added
assert "00:00:00:00:00:02" not in hub.api.devices
async def test_force_dhcp(hass):
"""Test updating hub devices with forced dhcp method."""
# test that the devices are constructed from dhcp data
hub = await setup_mikrotik_entry(hass, force_dhcp=True)
assert hub.api.support_wireless is True
assert hub.api.devices["00:00:00:00:00:01"]._params == DHCP_DATA[0]
assert hub.api.devices["00:00:00:00:00:01"]._wireless_params == WIRELESS_DATA[0]
# devices not in wireless list are added from dhcp
assert hub.api.devices["00:00:00:00:00:02"]._params == DHCP_DATA[1]
assert hub.api.devices["00:00:00:00:00:02"]._wireless_params is None
async def test_arp_ping(hass):
"""Test arp ping devices to confirm they are connected."""
# test device show as home if arp ping returns value
with patch.object(mikrotik.hub.MikrotikData, "do_arp_ping", return_value=True):
hub = await setup_mikrotik_entry(hass, arp_ping=True, force_dhcp=True)
assert hub.api.devices["00:00:00:00:00:01"].last_seen is not None
assert hub.api.devices["00:00:00:00:00:02"].last_seen is not None
# test device show as away if arp ping times out
with patch.object(mikrotik.hub.MikrotikData, "do_arp_ping", return_value=False):
hub = await setup_mikrotik_entry(hass, arp_ping=True, force_dhcp=True)
assert hub.api.devices["00:00:00:00:00:01"].last_seen is not None
# this device is not wireless so it will show as away
assert hub.api.devices["00:00:00:00:00:02"].last_seen is None
|
from aiohue.groups import Groups
from aiohue.lights import Lights
from aiohue.scenes import Scenes
from aiohue.sensors import Sensors
import pytest
from homeassistant import config_entries
from homeassistant.components import hue
from homeassistant.components.hue import sensor_base as hue_sensor_base
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def setup_component(hass):
"""Hue component."""
with patch.object(hue, "async_setup_entry", return_value=True):
assert (
await async_setup_component(
hass,
hue.DOMAIN,
{},
)
is True
)
async def test_hue_activate_scene_both_responds(
hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2
):
"""Test that makes both bridges successfully activate a scene."""
await setup_component(hass)
await setup_bridge(hass, mock_bridge1, mock_config_entry1)
await setup_bridge(hass, mock_bridge2, mock_config_entry2)
with patch.object(
mock_bridge1, "hue_activate_scene", return_value=None
) as mock_hue_activate_scene1, patch.object(
mock_bridge2, "hue_activate_scene", return_value=None
) as mock_hue_activate_scene2:
await hass.services.async_call(
"hue",
"hue_activate_scene",
{"group_name": "group_2", "scene_name": "my_scene"},
blocking=True,
)
mock_hue_activate_scene1.assert_called_once()
mock_hue_activate_scene2.assert_called_once()
async def test_hue_activate_scene_one_responds(
hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2
):
"""Test that makes only one bridge successfully activate a scene."""
await setup_component(hass)
await setup_bridge(hass, mock_bridge1, mock_config_entry1)
await setup_bridge(hass, mock_bridge2, mock_config_entry2)
with patch.object(
mock_bridge1, "hue_activate_scene", return_value=None
) as mock_hue_activate_scene1, patch.object(
mock_bridge2, "hue_activate_scene", return_value=False
) as mock_hue_activate_scene2:
await hass.services.async_call(
"hue",
"hue_activate_scene",
{"group_name": "group_2", "scene_name": "my_scene"},
blocking=True,
)
mock_hue_activate_scene1.assert_called_once()
mock_hue_activate_scene2.assert_called_once()
async def test_hue_activate_scene_zero_responds(
hass, mock_bridge1, mock_bridge2, mock_config_entry1, mock_config_entry2
):
"""Test that makes no bridge successfully activate a scene."""
await setup_component(hass)
await setup_bridge(hass, mock_bridge1, mock_config_entry1)
await setup_bridge(hass, mock_bridge2, mock_config_entry2)
with patch.object(
mock_bridge1, "hue_activate_scene", return_value=False
) as mock_hue_activate_scene1, patch.object(
mock_bridge2, "hue_activate_scene", return_value=False
) as mock_hue_activate_scene2:
await hass.services.async_call(
"hue",
"hue_activate_scene",
{"group_name": "group_2", "scene_name": "my_scene"},
blocking=True,
)
# both were retried
assert mock_hue_activate_scene1.call_count == 2
assert mock_hue_activate_scene2.call_count == 2
async def setup_bridge(hass, mock_bridge, config_entry):
"""Load the Hue light platform with the provided bridge."""
mock_bridge.config_entry = config_entry
hass.data[hue.DOMAIN][config_entry.entry_id] = mock_bridge
await hass.config_entries.async_forward_entry_setup(config_entry, "light")
# To flush out the service call to update the group
await hass.async_block_till_done()
@pytest.fixture
def mock_config_entry1(hass):
"""Mock a config entry."""
return create_config_entry()
@pytest.fixture
def mock_config_entry2(hass):
"""Mock a config entry."""
return create_config_entry()
def create_config_entry():
"""Mock a config entry."""
return config_entries.ConfigEntry(
1,
hue.DOMAIN,
"Mock Title",
{"host": "mock-host"},
"test",
config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
)
@pytest.fixture
def mock_bridge1(hass):
"""Mock a Hue bridge."""
return create_mock_bridge(hass)
@pytest.fixture
def mock_bridge2(hass):
"""Mock a Hue bridge."""
return create_mock_bridge(hass)
def create_mock_bridge(hass):
"""Create a mock Hue bridge."""
bridge = Mock(
hass=hass,
available=True,
authorized=True,
allow_unreachable=False,
allow_groups=False,
api=Mock(),
reset_jobs=[],
spec=hue.HueBridge,
)
bridge.sensor_manager = hue_sensor_base.SensorManager(bridge)
bridge.mock_requests = []
async def mock_request(method, path, **kwargs):
kwargs["method"] = method
kwargs["path"] = path
bridge.mock_requests.append(kwargs)
return {}
async def async_request_call(task):
await task()
bridge.async_request_call = async_request_call
bridge.api.config.apiversion = "9.9.9"
bridge.api.lights = Lights({}, mock_request)
bridge.api.groups = Groups({}, mock_request)
bridge.api.sensors = Sensors({}, mock_request)
bridge.api.scenes = Scenes({}, mock_request)
return bridge
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from perfkitbenchmarker import errors
from perfkitbenchmarker import providers
from perfkitbenchmarker.configs import spec
import six
class ConfigOptionDecoder(six.with_metaclass(abc.ABCMeta, object)):
"""Verifies and decodes a config option value.
Attributes:
option: None or string. Name of the config option.
required: boolean. True if the config option is required. False if not.
"""
def __init__(self, option=None, **kwargs):
"""Initializes a ConfigOptionDecoder.
Args:
option: None or string. Name of the config option.
**kwargs: May optionally contain a 'default' key mapping to a value or
callable object. If a value is provided, the config option is
optional, and the provided value is the default if the user does not
set a value for the config option. If a callable object is provided,
the config option is optional, and the provided object is called to
determine the value if the user does not set a value for the config
option. If not provided, the config option is required.
"""
self.option = option
self.required = 'default' not in kwargs
if not self.required:
self._default = kwargs.pop('default')
assert not kwargs, ('__init__() received unexpected keyword arguments: '
'{0}'.format(kwargs))
def _GetOptionFullName(self, component_full_name):
"""Returns the fully qualified name of a config option.
Args:
component_full_name: string. Fully qualified name of a configurable object
to which the option belongs.
"""
return (component_full_name if self.option is None
else '{0}.{1}'.format(component_full_name, self.option))
@property
def default(self):
"""Gets the config option's default value.
Returns:
Default value of an optional config option.
"""
assert not self.required, (
'Attempted to get the default value of required config option '
'"{0}".'.format(self.option))
if hasattr(self._default, '__call__'):
return self._default()
return self._default
@abc.abstractmethod
def Decode(self, value, component_full_name, flag_values):
"""Verifies and decodes a config option value.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
The decoded value.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
raise NotImplementedError()
class EnumDecoder(ConfigOptionDecoder):
"""Verifies that the config options value is in the allowed set.
Passes through the value unmodified
"""
def __init__(self, valid_values, **kwargs):
"""Initializes the EnumVerifier.
Args:
valid_values: list of the allowed values
**kwargs: Keyword arguments to pass to the base class.
"""
super(EnumDecoder, self).__init__(**kwargs)
self.valid_values = valid_values
def Decode(self, value, component_full_name, flag_values):
"""Verifies that the provided value is in the allowed set.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the
configurable component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be
propagated to the BaseSpec constructors.
Returns:
The valid value.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
if value in self.valid_values:
return value
else:
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}". Value must be one of the following: '
'{2}.'.format(self._GetOptionFullName(component_full_name), value,
', '.join(str(t) for t in self.valid_values)))
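# Usage sketch (illustrative option name and values; flag_values is unused by
# this decoder, so None is passed):
#
#   decoder = EnumDecoder(['on', 'off'], option='power_state', default='off')
#   decoder.Decode('on', 'vm_group', None)    # -> 'on'
#   decoder.Decode('auto', 'vm_group', None)  # raises errors.Config.InvalidValue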
class TypeVerifier(ConfigOptionDecoder):
"""Verifies that a config option value's type belongs to an allowed set.
Passes value through unmodified.
"""
def __init__(self, valid_types, none_ok=False, **kwargs):
"""Initializes a TypeVerifier.
Args:
valid_types: tuple of allowed types.
none_ok: boolean. If True, None is also an allowed option value.
**kwargs: Keyword arguments to pass to the base class.
"""
super(TypeVerifier, self).__init__(**kwargs)
if none_ok:
self._valid_types = (type(None),) + valid_types
else:
self._valid_types = valid_types
def Decode(self, value, component_full_name, flag_values):
"""Verifies that the provided value is of an allowed type.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
The valid value.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
if not isinstance(value, self._valid_types):
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}" (of type "{2}"). Value must be one of the '
'following types: {3}.'.format(
self._GetOptionFullName(component_full_name), value,
value.__class__.__name__,
', '.join(t.__name__ for t in self._valid_types)))
return value
class BooleanDecoder(TypeVerifier):
"""Verifies and decodes a config option value when a boolean is expected."""
def __init__(self, **kwargs):
super(BooleanDecoder, self).__init__((bool,), **kwargs)
class IntDecoder(TypeVerifier):
"""Verifies and decodes a config option value when an integer is expected.
Attributes:
max: None or int. If provided, it specifies the maximum accepted value.
min: None or int. If provided, it specifies the minimum accepted value.
"""
def __init__(self, max=None, min=None, **kwargs):
super(IntDecoder, self).__init__((int,), **kwargs)
self.max = max
self.min = min
def Decode(self, value, component_full_name, flag_values):
"""Verifies that the provided value is an int.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
int. The valid value.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
value = super(IntDecoder, self).Decode(value, component_full_name,
flag_values)
if value is not None:
if self.max is not None and value > self.max:
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}". Value must be at most {2}.'.format(
self._GetOptionFullName(component_full_name), value, self.max))
if self.min is not None and value < self.min:
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}". Value must be at least {2}.'.format(
self._GetOptionFullName(component_full_name), value, self.min))
return value
class FloatDecoder(TypeVerifier):
"""Verifies and decodes a config option value when a float is expected.
Attributes:
max: None or float. If provided, it specifies the maximum accepted value.
min: None or float. If provided, it specifies the minimum accepted value.
"""
def __init__(self, max=None, min=None, **kwargs):
super(FloatDecoder, self).__init__((float, int), **kwargs)
self.max = max
self.min = min
def Decode(self, value, component_full_name, flag_values):
"""Verifies that the provided value is a float.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
float. The valid value.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
value = super(FloatDecoder, self).Decode(value, component_full_name,
flag_values)
if value is not None:
if self.max is not None and value > self.max:
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}". Value must be at most {2}.'.format(
self._GetOptionFullName(component_full_name), value, self.max))
if self.min is not None and value < self.min:
raise errors.Config.InvalidValue(
'Invalid {0} value: "{1}". Value must be at least {2}.'.format(
self._GetOptionFullName(component_full_name), value, self.min))
return value
class StringDecoder(TypeVerifier):
"""Verifies and decodes a config option value when a string is expected."""
def __init__(self, **kwargs):
super(StringDecoder, self).__init__(six.string_types, **kwargs)
class ListDecoder(TypeVerifier):
"""Verifies and decodes a config option value when a list is expected."""
def __init__(self, item_decoder, **kwargs):
"""Initializes a ListDecoder.
Args:
item_decoder: ConfigOptionDecoder. Used to decode the items of an input
list.
**kwargs: Keyword arguments to pass to the base class.
"""
super(ListDecoder, self).__init__((list,), **kwargs)
self._item_decoder = item_decoder
def Decode(self, value, component_full_name, flag_values):
"""Verifies that the provided value is a list with appropriate items.
Args:
value: The value specified in the config.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
None if the input value was None. Otherwise, a list containing the decoded
value of each item in the input list.
Raises:
errors.Config.InvalidValue upon invalid input value.
"""
input_list = super(ListDecoder, self).Decode(value, component_full_name,
flag_values)
if input_list is None:
return None
list_full_name = self._GetOptionFullName(component_full_name)
result = []
for index, input_item in enumerate(input_list):
item_full_name = '{0}[{1}]'.format(list_full_name, index)
result.append(self._item_decoder.Decode(input_item, item_full_name,
flag_values))
return result
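# Usage sketch: decoders compose, so a list of bounded ints can be validated in
# one call (the option name is illustrative):
#
#   decoder = ListDecoder(IntDecoder(min=1, max=65535), option='ports', default=None)
#   decoder.Decode([80, 443], 'vm_group', None)  # -> [80, 443]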
class _PerCloudConfigSpec(spec.BaseSpec):
"""Contains one config dict attribute per cloud provider.
The name of each attribute is the name of the cloud provider.
"""
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword arguments
to construct in order to decode the named option.
"""
result = super(_PerCloudConfigSpec, cls)._GetOptionDecoderConstructions()
for cloud in providers.VALID_CLOUDS:
result[cloud] = TypeVerifier, {
'default': None,
'valid_types': (dict,)
}
return result
class PerCloudConfigDecoder(TypeVerifier):
"""Decodes the disk_spec or vm_spec option of a VM group config object."""
def __init__(self, **kwargs):
super(PerCloudConfigDecoder, self).__init__(valid_types=(dict,), **kwargs)
def Decode(self, value, component_full_name, flag_values):
"""Decodes the disk_spec or vm_spec option of a VM group config object.
Args:
value: None or dict mapping cloud provider name string to a dict.
component_full_name: string. Fully qualified name of the configurable
component containing the config option.
flag_values: flags.FlagValues. Runtime flag values to be propagated to
BaseSpec constructors.
Returns:
_PerCloudConfigSpec decoded from the input dict.
"""
input_dict = super(PerCloudConfigDecoder, self).Decode(
value, component_full_name, flag_values)
return None if input_dict is None else _PerCloudConfigSpec(
self._GetOptionFullName(component_full_name),
flag_values=flag_values,
**input_dict)
|
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_MESSAGE,
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN, SERVICE_SEND_MESSAGE
CONF_DEFAULT_ROOM = "default_room"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_DEFAULT_ROOM): cv.string})
def get_service(hass, config, discovery_info=None):
"""Get the Matrix notification service."""
return MatrixNotificationService(config[CONF_DEFAULT_ROOM])
class MatrixNotificationService(BaseNotificationService):
"""Send notifications to a Matrix room."""
def __init__(self, default_room):
"""Set up the Matrix notification service."""
self._default_room = default_room
def send_message(self, message="", **kwargs):
"""Send the message to the Matrix server."""
target_rooms = kwargs.get(ATTR_TARGET) or [self._default_room]
service_data = {ATTR_TARGET: target_rooms, ATTR_MESSAGE: message}
return self.hass.services.call(
DOMAIN, SERVICE_SEND_MESSAGE, service_data=service_data
)
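# Flow sketch: a notify call with {"message": ..., "target": [...]} is
# forwarded unchanged to the matrix integration's SERVICE_SEND_MESSAGE service;
# when no target is supplied, the configured default_room is used.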
|
from typing import Callable, Dict, Optional
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_FAHRENHEIT,
TIME_MINUTES,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import PairedSensorEntity, ValveControllerEntity
from .const import (
API_SENSOR_PAIRED_SENSOR_STATUS,
API_SYSTEM_DIAGNOSTICS,
API_SYSTEM_ONBOARD_SENSOR_STATUS,
CONF_UID,
DATA_COORDINATOR,
DATA_UNSUB_DISPATCHER_CONNECT,
DOMAIN,
SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED,
)
SENSOR_KIND_BATTERY = "battery"
SENSOR_KIND_TEMPERATURE = "temperature"
SENSOR_KIND_UPTIME = "uptime"
SENSOR_ATTRS_MAP = {
SENSOR_KIND_BATTERY: ("Battery", DEVICE_CLASS_BATTERY, None, PERCENTAGE),
SENSOR_KIND_TEMPERATURE: (
"Temperature",
DEVICE_CLASS_TEMPERATURE,
None,
TEMP_FAHRENHEIT,
),
SENSOR_KIND_UPTIME: ("Uptime", None, "mdi:timer", TIME_MINUTES),
}
PAIRED_SENSOR_SENSORS = [SENSOR_KIND_BATTERY, SENSOR_KIND_TEMPERATURE]
VALVE_CONTROLLER_SENSORS = [SENSOR_KIND_TEMPERATURE, SENSOR_KIND_UPTIME]
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable
) -> None:
"""Set up Guardian switches based on a config entry."""
@callback
def add_new_paired_sensor(uid: str) -> None:
"""Add a new paired sensor."""
coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
API_SENSOR_PAIRED_SENSOR_STATUS
][uid]
entities = []
for kind in PAIRED_SENSOR_SENSORS:
name, device_class, icon, unit = SENSOR_ATTRS_MAP[kind]
entities.append(
PairedSensorSensor(
entry, coordinator, kind, name, device_class, icon, unit
)
)
async_add_entities(entities, True)
# Handle adding paired sensors after HASS startup:
hass.data[DOMAIN][DATA_UNSUB_DISPATCHER_CONNECT][entry.entry_id].append(
async_dispatcher_connect(
hass,
SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED.format(entry.data[CONF_UID]),
add_new_paired_sensor,
)
)
sensors = []
    # Add all valve controller-specific sensors:
for kind in VALVE_CONTROLLER_SENSORS:
name, device_class, icon, unit = SENSOR_ATTRS_MAP[kind]
sensors.append(
ValveControllerSensor(
entry,
hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id],
kind,
name,
device_class,
icon,
unit,
)
)
    # Add all paired sensor-specific sensors:
for coordinator in hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
API_SENSOR_PAIRED_SENSOR_STATUS
].values():
for kind in PAIRED_SENSOR_SENSORS:
name, device_class, icon, unit = SENSOR_ATTRS_MAP[kind]
sensors.append(
PairedSensorSensor(
entry, coordinator, kind, name, device_class, icon, unit
)
)
async_add_entities(sensors)
class PairedSensorSensor(PairedSensorEntity):
"""Define a binary sensor related to a Guardian valve controller."""
def __init__(
self,
entry: ConfigEntry,
coordinator: DataUpdateCoordinator,
kind: str,
name: str,
device_class: Optional[str],
icon: Optional[str],
unit: Optional[str],
) -> None:
"""Initialize."""
super().__init__(entry, coordinator, kind, name, device_class, icon)
self._state = None
self._unit = unit
@property
def available(self) -> bool:
"""Return whether the entity is available."""
return self.coordinator.last_update_success
@property
def state(self) -> str:
"""Return the sensor state."""
return self._state
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of this entity, if any."""
return self._unit
@callback
def _async_update_from_latest_data(self) -> None:
"""Update the entity."""
if self._kind == SENSOR_KIND_BATTERY:
self._state = self.coordinator.data["battery"]
elif self._kind == SENSOR_KIND_TEMPERATURE:
self._state = self.coordinator.data["temperature"]
class ValveControllerSensor(ValveControllerEntity):
"""Define a generic Guardian sensor."""
def __init__(
self,
entry: ConfigEntry,
coordinators: Dict[str, DataUpdateCoordinator],
kind: str,
name: str,
device_class: Optional[str],
icon: Optional[str],
unit: Optional[str],
) -> None:
"""Initialize."""
super().__init__(entry, coordinators, kind, name, device_class, icon)
self._state = None
self._unit = unit
@property
def available(self) -> bool:
"""Return whether the entity is available."""
if self._kind == SENSOR_KIND_TEMPERATURE:
return self.coordinators[
API_SYSTEM_ONBOARD_SENSOR_STATUS
].last_update_success
if self._kind == SENSOR_KIND_UPTIME:
return self.coordinators[API_SYSTEM_DIAGNOSTICS].last_update_success
return False
@property
def state(self) -> str:
"""Return the sensor state."""
return self._state
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of this entity, if any."""
return self._unit
async def _async_continue_entity_setup(self) -> None:
"""Register API interest (and related tasks) when the entity is added."""
if self._kind == SENSOR_KIND_TEMPERATURE:
self.async_add_coordinator_update_listener(API_SYSTEM_ONBOARD_SENSOR_STATUS)
@callback
def _async_update_from_latest_data(self) -> None:
"""Update the entity."""
if self._kind == SENSOR_KIND_TEMPERATURE:
self._state = self.coordinators[API_SYSTEM_ONBOARD_SENSOR_STATUS].data[
"temperature"
]
elif self._kind == SENSOR_KIND_UPTIME:
self._state = self.coordinators[API_SYSTEM_DIAGNOSTICS].data["uptime"]
|
import logging
import math
from homeassistant.components.fan import (
DOMAIN as FAN_DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
_LOGGER = logging.getLogger(__name__)
SUPPORTED_FEATURES = SUPPORT_SET_SPEED
# The raw 0-99 speed value is first scaled down to an integer 0-3:
VALUE_TO_SPEED = {0: SPEED_OFF, 1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH}
SPEED_TO_VALUE = {SPEED_OFF: 0, SPEED_LOW: 1, SPEED_MEDIUM: 50, SPEED_HIGH: 99}
SPEED_LIST = [*SPEED_TO_VALUE]
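# Note the asymmetry: outgoing speeds are sent as representative device values
# (1/50/99), while incoming 0-99 values are bucketed back to 0-3 in the speed
# property below.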
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Fan from Config Entry."""
@callback
def async_add_fan(values):
"""Add Z-Wave Fan."""
fan = ZwaveFan(values)
async_add_entities([fan])
hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
async_dispatcher_connect(hass, f"{DOMAIN}_new_{FAN_DOMAIN}", async_add_fan)
)
class ZwaveFan(ZWaveDeviceEntity, FanEntity):
"""Representation of a Z-Wave fan."""
def __init__(self, values):
"""Initialize the fan."""
super().__init__(values)
self._previous_speed = None
async def async_set_speed(self, speed):
"""Set the speed of the fan."""
if speed not in SPEED_TO_VALUE:
_LOGGER.warning("Invalid speed received: %s", speed)
return
self._previous_speed = speed
self.values.primary.send_value(SPEED_TO_VALUE[speed])
async def async_turn_on(self, speed=None, **kwargs):
"""Turn the device on."""
if speed is None:
# Value 255 tells device to return to previous value
self.values.primary.send_value(255)
else:
await self.async_set_speed(speed)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
self.values.primary.send_value(0)
@property
def is_on(self):
"""Return true if device is on (speed above 0)."""
return self.values.primary.value > 0
@property
def speed(self):
"""Return the current speed.
The Z-Wave speed value is a byte 0-255. 255 means previous value.
The normal range of the speed is 0-99. 0 means off.
"""
value = math.ceil(self.values.primary.value * 3 / 100)
return VALUE_TO_SPEED.get(value, self._previous_speed)
@property
def speed_list(self):
"""Get the list of available speeds."""
return SPEED_LIST
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORTED_FEATURES
|
from homeassistant.components.climate import ATTR_TEMPERATURE
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODES,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_IDLE,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
)
from .common import setup_ozw
async def test_climate(hass, climate_data, sent_messages, climate_msg, caplog):
    """Test ozw climate entities set up from a config entry."""
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=climate_data)
# Test multi-setpoint thermostat (node 7 in dump)
# mode is heat, this should be single setpoint
state = hass.states.get("climate.ct32_thermostat_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
]
assert state.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 23.1
assert state.attributes[ATTR_TEMPERATURE] == 21.1
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes[ATTR_FAN_MODE] == "Auto Low"
assert state.attributes[ATTR_FAN_MODES] == ["Auto Low", "On Low"]
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{"entity_id": "climate.ct32_thermostat_mode", "temperature": 26.1},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
    # Celsius is converted to Fahrenheit here (26.1 °C -> 78.98 °F)!
assert round(msg["payload"]["Value"], 2) == 78.98
assert msg["payload"]["ValueIDKey"] == 281475099443218
# Test hvac_mode with set_temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.ct32_thermostat_mode",
"temperature": 24.1,
"hvac_mode": "cool",
},
blocking=True,
)
    assert len(sent_messages) == 3  # 2 new messages: hvac mode + setpoint
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
    # Celsius is converted to Fahrenheit here (24.1 °C -> 75.38 °F)!
assert round(msg["payload"]["Value"], 2) == 75.38
assert msg["payload"]["ValueIDKey"] == 281475099443218
# Test set mode
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": "climate.ct32_thermostat_mode", "hvac_mode": HVAC_MODE_HEAT_COOL},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 3, "ValueIDKey": 122683412}
    # Test setting an hvac mode the device does not support
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": "climate.ct32_thermostat_mode", "hvac_mode": "fan_only"},
blocking=True,
)
assert len(sent_messages) == 4
assert "Received an invalid hvac mode: fan_only" in caplog.text
# Test set fan mode
await hass.services.async_call(
"climate",
"set_fan_mode",
{"entity_id": "climate.ct32_thermostat_mode", "fan_mode": "On Low"},
blocking=True,
)
assert len(sent_messages) == 5
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 1, "ValueIDKey": 122748948}
# Test set invalid fan mode
await hass.services.async_call(
"climate",
"set_fan_mode",
{"entity_id": "climate.ct32_thermostat_mode", "fan_mode": "invalid fan mode"},
blocking=True,
)
assert len(sent_messages) == 5
assert "Received an invalid fan mode: invalid fan mode" in caplog.text
# Test incoming mode change to auto,
# resulting in multiple setpoints
receive_message(climate_msg)
await hass.async_block_till_done()
state = hass.states.get("climate.ct32_thermostat_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT_COOL
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes[ATTR_TARGET_TEMP_LOW] == 21.1
assert state.attributes[ATTR_TARGET_TEMP_HIGH] == 25.6
# Test setting high/low temp on multiple setpoints
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.ct32_thermostat_mode",
"target_temp_low": 20,
"target_temp_high": 25,
},
blocking=True,
)
    assert len(sent_messages) == 7  # 2 new messages: low + high setpoint
msg = sent_messages[-2] # low setpoint
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert round(msg["payload"]["Value"], 2) == 68.0
assert msg["payload"]["ValueIDKey"] == 281475099443218
msg = sent_messages[-1] # high setpoint
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert round(msg["payload"]["Value"], 2) == 77.0
assert msg["payload"]["ValueIDKey"] == 562950076153874
# Test basic/single-setpoint thermostat (node 16 in dump)
state = hass.states.get("climate.komforthaus_spirit_z_wave_plus_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
]
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 17.3
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 19
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes[ATTR_PRESET_MODES] == [
"none",
"Heat Eco",
"Full Power",
"Manufacturer Specific",
]
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 8
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475250438162,
}
# Test set preset mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "Heat Eco",
},
blocking=True,
)
assert len(sent_messages) == 9
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 11,
"ValueIDKey": 273678356,
}
    # Test set preset mode "none"
    # Selecting this preset should restore the current hvac mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "none",
},
blocking=True,
)
assert len(sent_messages) == 10
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 1,
"ValueIDKey": 273678356,
}
# Test set invalid preset mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "invalid preset mode",
},
blocking=True,
)
assert len(sent_messages) == 10
assert "Received an invalid preset mode: invalid preset mode" in caplog.text
    # Test thermostat device without a mode command class
state = hass.states.get("climate.danfoss_living_connect_z_v1_06_014g0013_heating_1")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_HEAT,
]
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 21
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes.get(ATTR_PRESET_MODE) is None
assert state.attributes.get(ATTR_PRESET_MODES) is None
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.danfoss_living_connect_z_v1_06_014g0013_heating_1",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 11
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475116220434,
}
await hass.services.async_call(
"climate",
"set_hvac_mode",
{
"entity_id": "climate.danfoss_living_connect_z_v1_06_014g0013_heating_1",
"hvac_mode": HVAC_MODE_HEAT,
},
blocking=True,
)
assert len(sent_messages) == 11
assert "does not support setting a mode" in caplog.text
    # Test thermostat device without a mode command class
state = hass.states.get("climate.secure_srt321_zwave_stat_tx_heating_1")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_HEAT,
]
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 29.0
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 16
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes.get(ATTR_PRESET_MODE) is None
assert state.attributes.get(ATTR_PRESET_MODES) is None
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.secure_srt321_zwave_stat_tx_heating_1",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 12
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475267215378,
}
await hass.services.async_call(
"climate",
"set_hvac_mode",
{
"entity_id": "climate.secure_srt321_zwave_stat_tx_heating_1",
"hvac_mode": HVAC_MODE_HEAT,
},
blocking=True,
)
assert len(sent_messages) == 12
assert "does not support setting a mode" in caplog.text
|
import asyncio
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN
WAIT_UNTIL_CHANGE = 3
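# Seconds to wait after a remote-update signal before re-reading the connection
# state (assumption: gives the remote connection time to settle).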
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the cloud binary sensors."""
if discovery_info is None:
return
cloud = hass.data[DOMAIN]
async_add_entities([CloudRemoteBinary(cloud)])
class CloudRemoteBinary(BinarySensorEntity):
"""Representation of an Cloud Remote UI Connection binary sensor."""
def __init__(self, cloud):
"""Initialize the binary sensor."""
self.cloud = cloud
self._unsub_dispatcher = None
@property
def name(self) -> str:
"""Return the name of the binary sensor, if any."""
return "Remote UI"
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "cloud-remote-ui-connectivity"
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.cloud.remote.is_connected
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_CONNECTIVITY
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.cloud.remote.certificate is not None
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state."""
return False
async def async_added_to_hass(self):
"""Register update dispatcher."""
async def async_state_update(data):
"""Update callback."""
await asyncio.sleep(WAIT_UNTIL_CHANGE)
self.async_write_ha_state()
self._unsub_dispatcher = async_dispatcher_connect(
self.hass, DISPATCHER_REMOTE_UPDATE, async_state_update
)
async def async_will_remove_from_hass(self):
"""Register update dispatcher."""
if self._unsub_dispatcher is not None:
self._unsub_dispatcher()
self._unsub_dispatcher = None
|
import pytest
BINDINGS = {'prompt': {'<Ctrl-a>': 'message-info ctrla',
'a': 'message-info a',
'ba': 'message-info ba',
'ax': 'message-info ax',
'ccc': 'message-info ccc',
'yY': 'yank -s',
'0': 'message-info 0',
'1': 'message-info 1'},
'command': {'foo': 'message-info bar',
'<Ctrl+X>': 'message-info ctrlx'},
'normal': {'a': 'message-info a', 'ba': 'message-info ba'}}
MAPPINGS = {
'x': 'a',
'b': 'a',
}
@pytest.fixture
def keyinput_bindings(config_stub, key_config_stub):
"""Register some test bindings."""
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = dict(BINDINGS)
config_stub.val.bindings.key_mappings = dict(MAPPINGS)
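# Usage sketch (hypothetical test; relies only on the names defined above):
#
# def test_bindings_registered(config_stub, keyinput_bindings):
#     assert config_stub.val.bindings.commands['normal']['a'] == 'message-info a'
#     assert config_stub.val.bindings.key_mappings['x'] == 'a'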
|