import pytest
from unittest.mock import Mock
from kombu import Connection, Producer
from kombu import pools
from kombu.connection import ConnectionPool
from kombu.utils.collections import eqhash
class test_ProducerPool:
Pool = pools.ProducerPool
class MyPool(pools.ProducerPool):
def __init__(self, *args, **kwargs):
self.instance = Mock()
pools.ProducerPool.__init__(self, *args, **kwargs)
def Producer(self, connection):
return self.instance
def setup(self):
self.connections = Mock()
self.pool = self.Pool(self.connections, limit=10)
def test_close_resource(self):
self.pool.close_resource(Mock(name='resource'))
def test_releases_connection_when_Producer_raises(self):
self.pool.Producer = Mock()
self.pool.Producer.side_effect = IOError()
acq = self.pool._acquire_connection = Mock()
conn = acq.return_value = Mock()
with pytest.raises(IOError):
self.pool.create_producer()
conn.release.assert_called_with()
def test_prepare_release_connection_on_error(self):
pp = Mock()
p = pp.return_value = Mock()
p.revive.side_effect = IOError()
acq = self.pool._acquire_connection = Mock()
conn = acq.return_value = Mock()
p._channel = None
with pytest.raises(IOError):
self.pool.prepare(pp)
conn.release.assert_called_with()
def test_release_releases_connection(self):
p = Mock()
p.__connection__ = Mock()
self.pool.release(p)
p.__connection__.release.assert_called_with()
p.__connection__ = None
self.pool.release(p)
def test_init(self):
assert self.pool.connections is self.connections
def test_Producer(self):
assert isinstance(self.pool.Producer(Mock()), Producer)
def test_acquire_connection(self):
self.pool._acquire_connection()
self.connections.acquire.assert_called_with(block=True)
def test_new(self):
promise = self.pool.new()
producer = promise()
assert isinstance(producer, Producer)
self.connections.acquire.assert_called_with(block=True)
def test_setup_unlimited(self):
pool = self.Pool(self.connections, limit=None)
pool.setup()
assert not pool._resource.queue
def test_setup(self):
assert len(self.pool._resource.queue) == self.pool.limit
first = self.pool._resource.get_nowait()
producer = first()
assert isinstance(producer, Producer)
def test_prepare(self):
connection = self.connections.acquire.return_value = Mock()
pool = self.MyPool(self.connections, limit=10)
pool.instance._channel = None
first = pool._resource.get_nowait()
producer = pool.prepare(first)
self.connections.acquire.assert_called()
producer.revive.assert_called_with(connection)
def test_prepare_channel_already_created(self):
self.connections.acquire.return_value = Mock()
pool = self.MyPool(self.connections, limit=10)
pool.instance._channel = Mock()
first = pool._resource.get_nowait()
        self.connections.acquire.reset_mock()
producer = pool.prepare(first)
producer.revive.assert_not_called()
def test_prepare_not_callable(self):
x = Producer(Mock)
self.pool.prepare(x)
def test_release(self):
p = Mock()
p.channel = Mock()
p.__connection__ = Mock()
self.pool.release(p)
p.__connection__.release.assert_called_with()
assert p.channel is None
class test_PoolGroup:
Group = pools.PoolGroup
class MyGroup(pools.PoolGroup):
def create(self, resource, limit):
return resource, limit
def test_interface_create(self):
g = self.Group()
with pytest.raises(NotImplementedError):
g.create(Mock(), 10)
def test_getitem_using_global_limit(self):
g = self.MyGroup(limit=pools.use_global_limit)
res = g['foo']
assert res == ('foo', pools.get_limit())
def test_getitem_using_custom_limit(self):
g = self.MyGroup(limit=102456)
res = g['foo']
assert res == ('foo', 102456)
def test_delitem(self):
g = self.MyGroup()
g['foo']
        del g['foo']
assert 'foo' not in g
def test_Connections(self):
conn = Connection('memory://')
p = pools.connections[conn]
assert p
assert isinstance(p, ConnectionPool)
assert p.connection is conn
assert p.limit == pools.get_limit()
def test_Producers(self):
conn = Connection('memory://')
p = pools.producers[conn]
assert p
assert isinstance(p, pools.ProducerPool)
assert p.connections is pools.connections[conn]
assert p.limit == p.connections.limit
assert p.limit == pools.get_limit()
def test_all_groups(self):
conn = Connection('memory://')
pools.connections[conn]
assert list(pools._all_pools())
def test_reset(self):
pools.reset()
class MyGroup(dict):
clear_called = False
def clear(self):
self.clear_called = True
p1 = pools.connections['foo'] = Mock()
g1 = MyGroup()
pools._groups.append(g1)
pools.reset()
p1.force_close_all.assert_called_with()
assert g1.clear_called
p1 = pools.connections['foo'] = Mock()
p1.force_close_all.side_effect = KeyError()
pools.reset()
def test_set_limit(self):
pools.reset()
pools.set_limit(34576)
limit = pools.get_limit()
assert limit == 34576
conn = Connection('memory://')
pool = pools.connections[conn]
with pool.acquire():
pools.set_limit(limit + 1)
assert pools.get_limit() == limit + 1
limit = pools.get_limit()
with pytest.raises(RuntimeError):
pools.set_limit(limit - 1)
pools.set_limit(limit - 1, force=True)
assert pools.get_limit() == limit - 1
pools.set_limit(pools.get_limit())
def test_remove_limit(self):
conn = Connection('memory://')
pool = pools.connections[conn]
pool.limit = 10
with pool.acquire():
pool.limit = 0
class test_fun_PoolGroup:
def test_connections_behavior(self):
c1u = 'memory://localhost:123'
c2u = 'memory://localhost:124'
c1 = Connection(c1u)
c2 = Connection(c2u)
c3 = Connection(c1u)
assert eqhash(c1) != eqhash(c2)
assert eqhash(c1) == eqhash(c3)
c4 = Connection(c1u, transport_options={'confirm_publish': True})
assert eqhash(c3) != eqhash(c4)
p1 = pools.connections[c1]
p2 = pools.connections[c2]
p3 = pools.connections[c3]
assert p1 is not p2
assert p1 is p3
r1 = p1.acquire()
assert p1._dirty
assert p3._dirty
assert not p2._dirty
r1.release()
assert not p1._dirty
assert not p3._dirty
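# A minimal usage sketch (not part of the test suite) of the producer-pool API
# these tests exercise: index pools.producers by a connection to get a
# ProducerPool, acquire a producer from it, and publish.  The in-memory
# transport and the exchange/queue/routing-key names are illustrative
# assumptions only.
def _example_producer_pool_usage():
    from kombu import Exchange, Queue

    connection = Connection('memory://')
    exchange = Exchange('example_exchange', type='direct')
    queue = Queue('example_queue', exchange, routing_key='example')

    # pools.producers is a PoolGroup; indexing it by a connection returns a
    # ProducerPool bound to that connection's ConnectionPool.
    with pools.producers[connection].acquire(block=True) as producer:
        producer.publish(
            {'hello': 'world'},
            exchange=exchange,
            routing_key='example',
            declare=[queue],
            serializer='json',
        )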
|
import re
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import docker
FLAGS = flags.FLAGS
flags.DEFINE_integer('cloudsuite_data_serving_rec_count',
1000,
'Record count in the database.',
lower_bound=1)
flags.DEFINE_integer('cloudsuite_data_serving_op_count',
1000,
'Operation count to be executed.',
lower_bound=1)
BENCHMARK_NAME = 'cloudsuite_data_serving'
BENCHMARK_CONFIG = """
cloudsuite_data_serving:
description: >
Run YCSB client against Cassandra servers.
Specify record count and operation count with
--cloudsuite_data_serving_rec_count and
--cloudsuite_data_serving_op_count.
vm_groups:
server_seed:
vm_spec: *default_single_core
vm_count: 1
servers:
vm_spec: *default_single_core
vm_count: 1
client:
vm_spec: *default_single_core
vm_count: 1
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Prepare docker containers and set the dataset up.
Install docker. Pull the required images from DockerHub.
  Create a table on the seed server and load the dataset.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
server_seed = benchmark_spec.vm_groups['server_seed'][0]
servers = benchmark_spec.vm_groups['servers']
client = benchmark_spec.vm_groups['client'][0]
def PrepareCommon(vm):
if not docker.IsInstalled(vm):
vm.Install('docker')
def PrepareServerSeed(vm):
PrepareCommon(vm)
vm.Install('cloudsuite/data-serving:server')
vm.RemoteCommand('sudo docker run -d --name cassandra-server-seed '
'--net host cloudsuite/data-serving:server')
def PrepareServer(vm):
PrepareCommon(vm)
vm.Install('cloudsuite/data-serving:server')
start_server_cmd = ('sudo docker run -d --name cassandra-server '
'-e CASSANDRA_SEEDS=%s --net host '
'cloudsuite/data-serving:server' %
server_seed.internal_ip)
vm.RemoteCommand(start_server_cmd)
def PrepareClient(vm):
PrepareCommon(vm)
vm.Install('cloudsuite/data-serving:client')
target_arg_tuples = ([(PrepareServerSeed, [server_seed], {})] +
[(PrepareServer, [vm], {}) for vm in servers] +
[(PrepareClient, [client], {})])
vm_util.RunParallelThreads(target_arg_tuples, len(target_arg_tuples))
def Run(benchmark_spec):
"""Run the data_serving benchmark.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
server_seed = benchmark_spec.vm_groups['server_seed'][0]
servers = benchmark_spec.vm_groups['servers']
client = benchmark_spec.vm_groups['client'][0]
results = []
server_ips_arr = []
server_ips_arr.append(server_seed.internal_ip)
for vm in servers:
server_ips_arr.append(vm.internal_ip)
server_ips = ','.join(server_ips_arr)
rec_count_cfg = '-e RECORDCOUNT=%d' % FLAGS.cloudsuite_data_serving_rec_count
op_count_cfg = '-e OPERATIONCOUNT=%d' % FLAGS.cloudsuite_data_serving_op_count
benchmark_cmd = ('sudo docker run %s %s --rm --name cassandra-client'
' --net host cloudsuite/data-serving:client %s' %
(rec_count_cfg, op_count_cfg, server_ips))
stdout, _ = client.RemoteCommand(benchmark_cmd, should_log=True)
def GetResults(match_str, result_label, result_metric):
matches = re.findall(match_str, stdout)
if len(matches) != 1:
raise errors.Benchmarks.RunError('Expected to find result label: %s' %
result_label)
results.append(sample.Sample(result_label, float(matches[0]),
result_metric))
  GetResults(r'\[OVERALL\], RunTime\(ms\), (\d+.?\d*)',
             'OVERALL RunTime', 'ms')
  GetResults(r'\[OVERALL\], Throughput\(ops\/sec\), (\d+.?\d*)',
             'OVERALL Throughput', 'ops/sec')
  GetResults(r'\[CLEANUP\], Operations, (\d+.?\d*)',
             'CLEANUP Operations', 'ops')
  GetResults(r'\[CLEANUP\], AverageLatency\(us\), (\d+.?\d*)',
             'CLEANUP AverageLatency', 'us')
  GetResults(r'\[CLEANUP\], MinLatency\(us\), (\d+.?\d*)',
             'CLEANUP MinLatency', 'us')
  GetResults(r'\[CLEANUP\], MaxLatency\(us\), (\d+.?\d*)',
             'CLEANUP MaxLatency', 'us')
  GetResults(r'\[CLEANUP\], 95thPercentileLatency\(ms\), (\d+.?\d*)',
             'CLEANUP 95thPercentileLatency', 'ms')
  GetResults(r'\[CLEANUP\], 99thPercentileLatency\(ms\), (\d+.?\d*)',
             'CLEANUP 99thPercentileLatency', 'ms')
  GetResults(r'\[READ\], Operations, (\d+.?\d*)',
             'READ Operations', 'ops')
  GetResults(r'\[READ\], AverageLatency\(us\), (\d+.?\d*)',
             'READ AverageLatency', 'us')
  GetResults(r'\[READ\], MinLatency\(us\), (\d+.?\d*)',
             'READ MinLatency', 'us')
  GetResults(r'\[READ\], MaxLatency\(us\), (\d+.?\d*)',
             'READ MaxLatency', 'us')
  GetResults(r'\[READ\], 95thPercentileLatency\(ms\), (\d+.?\d*)',
             'READ 95thPercentileLatency', 'ms')
  GetResults(r'\[READ\], 99thPercentileLatency\(ms\), (\d+.?\d*)',
             'READ 99thPercentileLatency', 'ms')
  GetResults(r'\[UPDATE\], Operations, (\d+.?\d*)',
             'UPDATE Operations', 'ops')
  GetResults(r'\[UPDATE\], AverageLatency\(us\), (\d+.?\d*)',
             'UPDATE AverageLatency', 'us')
  GetResults(r'\[UPDATE\], MinLatency\(us\), (\d+.?\d*)',
             'UPDATE MinLatency', 'us')
  GetResults(r'\[UPDATE\], MaxLatency\(us\), (\d+.?\d*)',
             'UPDATE MaxLatency', 'us')
  GetResults(r'\[UPDATE\], 95thPercentileLatency\(ms\), (\d+.?\d*)',
             'UPDATE 95thPercentileLatency', 'ms')
  GetResults(r'\[UPDATE\], 99thPercentileLatency\(ms\), (\d+.?\d*)',
             'UPDATE 99thPercentileLatency', 'ms')
return results
def Cleanup(benchmark_spec):
"""Stop and remove docker containers. Remove images.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
server_seed = benchmark_spec.vm_groups['server_seed'][0]
servers = benchmark_spec.vm_groups['servers']
def CleanupServerCommon(vm):
vm.RemoteCommand('sudo docker rm cassandra-server')
def CleanupServerSeed(vm):
vm.RemoteCommand('sudo docker stop cassandra-server-seed')
CleanupServerCommon(vm)
def CleanupServer(vm):
vm.RemoteCommand('sudo docker stop cassandra-server')
CleanupServerCommon(vm)
target_arg_tuples = ([(CleanupServerSeed, [server_seed], {})] +
[(CleanupServer, [vm], {}) for vm in servers])
vm_util.RunParallelThreads(target_arg_tuples, len(target_arg_tuples))
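# Illustrative sketch of how the regexes in Run() pull metrics out of the YCSB
# client output.  The sample line below is an assumed example of that output,
# shown only to make the parsing concrete; it is not produced by this module.
def _ExampleParseYcsbLine():
  sample_stdout = '[OVERALL], Throughput(ops/sec), 1234.5'
  matches = re.findall(r'\[OVERALL\], Throughput\(ops\/sec\), (\d+.?\d*)',
                       sample_stdout)
  assert matches == ['1234.5']
  return float(matches[0])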
|
import datetime
import json
import logging
from urllib.parse import urljoin
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, HTTP_OK, HTTP_UNAUTHORIZED
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = datetime.timedelta(seconds=30)
API_URL = "https://backend.sigfox.com/api/"
CONF_API_LOGIN = "api_login"
CONF_API_PASSWORD = "api_password"
DEFAULT_NAME = "sigfox"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_LOGIN): cv.string,
vol.Required(CONF_API_PASSWORD): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sigfox sensor."""
api_login = config[CONF_API_LOGIN]
api_password = config[CONF_API_PASSWORD]
name = config[CONF_NAME]
try:
sigfox = SigfoxAPI(api_login, api_password)
except ValueError:
return False
auth = sigfox.auth
devices = sigfox.devices
sensors = []
for device in devices:
sensors.append(SigfoxDevice(device, auth, name))
add_entities(sensors, True)
def epoch_to_datetime(epoch_time):
"""Take an ms since epoch and return datetime string."""
return datetime.datetime.fromtimestamp(epoch_time).isoformat()
class SigfoxAPI:
"""Class for interacting with the SigFox API."""
def __init__(self, api_login, api_password):
"""Initialise the API object."""
self._auth = requests.auth.HTTPBasicAuth(api_login, api_password)
if self.check_credentials():
device_types = self.get_device_types()
self._devices = self.get_devices(device_types)
def check_credentials(self):
"""Check API credentials are valid."""
url = urljoin(API_URL, "devicetypes")
response = requests.get(url, auth=self._auth, timeout=10)
if response.status_code != HTTP_OK:
if response.status_code == HTTP_UNAUTHORIZED:
_LOGGER.error("Invalid credentials for Sigfox API")
else:
_LOGGER.error(
"Unable to login to Sigfox API, error code %s",
str(response.status_code),
)
raise ValueError("Sigfox integration not set up")
return True
def get_device_types(self):
"""Get a list of device types."""
url = urljoin(API_URL, "devicetypes")
response = requests.get(url, auth=self._auth, timeout=10)
device_types = []
for device in json.loads(response.text)["data"]:
device_types.append(device["id"])
return device_types
def get_devices(self, device_types):
"""Get the device_id of each device registered."""
devices = []
for unique_type in device_types:
location_url = f"devicetypes/{unique_type}/devices"
url = urljoin(API_URL, location_url)
response = requests.get(url, auth=self._auth, timeout=10)
devices_data = json.loads(response.text)["data"]
for device in devices_data:
devices.append(device["id"])
return devices
@property
def auth(self):
"""Return the API authentication."""
return self._auth
@property
def devices(self):
"""Return the list of device_id."""
return self._devices
class SigfoxDevice(Entity):
"""Class for single sigfox device."""
def __init__(self, device_id, auth, name):
"""Initialise the device object."""
self._device_id = device_id
self._auth = auth
self._message_data = {}
self._name = f"{name}_{device_id}"
self._state = None
def get_last_message(self):
"""Return the last message from a device."""
device_url = f"devices/{self._device_id}/messages?limit=1"
url = urljoin(API_URL, device_url)
response = requests.get(url, auth=self._auth, timeout=10)
data = json.loads(response.text)["data"][0]
payload = bytes.fromhex(data["data"]).decode("utf-8")
lat = data["rinfos"][0]["lat"]
lng = data["rinfos"][0]["lng"]
snr = data["snr"]
epoch_time = data["time"]
return {
"lat": lat,
"lng": lng,
"payload": payload,
"snr": snr,
"time": epoch_to_datetime(epoch_time),
}
def update(self):
"""Fetch the latest device message."""
self._message_data = self.get_last_message()
self._state = self._message_data["payload"]
@property
def name(self):
"""Return the HA name of the sensor."""
return self._name
@property
def state(self):
"""Return the payload of the last message."""
return self._state
@property
def device_state_attributes(self):
"""Return other details about the last message."""
return self._message_data
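# A small sketch of how a raw Sigfox message becomes the attributes exposed by
# SigfoxDevice.  The message dict below is an assumed example payload, not real
# API output: the hex "data" field decodes to text and the epoch timestamp is
# converted with epoch_to_datetime().
def _example_parse_message():
    data = {
        "data": "68656c6c6f",  # hex-encoded "hello"
        "rinfos": [{"lat": "57.7", "lng": "11.9"}],
        "snr": "20.0",
        "time": 1580000000,
    }
    return {
        "payload": bytes.fromhex(data["data"]).decode("utf-8"),
        "lat": data["rinfos"][0]["lat"],
        "lng": data["rinfos"][0]["lng"],
        "snr": data["snr"],
        "time": epoch_to_datetime(data["time"]),
    }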
|
import json
import logging
from pexpect import exceptions, pxssh
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_SSH_PORT = 22
UNIFI_COMMAND = 'mca-dump | tr -d "\n"'
UNIFI_SSID_TABLE = "vap_table"
UNIFI_CLIENT_TABLE = "sta_table"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_SSH_PORT): cv.port,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a Unifi direct scanner."""
scanner = UnifiDeviceScanner(config[DOMAIN])
if not scanner.connected:
return False
return scanner
class UnifiDeviceScanner(DeviceScanner):
"""This class queries Unifi wireless access point."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.port = config[CONF_PORT]
self.ssh = None
self.connected = False
self.last_results = {}
self._connect()
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
result = _response_to_json(self._get_update())
if result:
self.last_results = result
return self.last_results.keys()
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
hostname = next(
(
value.get("hostname")
for key, value in self.last_results.items()
if key.upper() == device.upper()
),
None,
)
if hostname is not None:
hostname = str(hostname)
return hostname
def _connect(self):
"""Connect to the Unifi AP SSH server."""
self.ssh = pxssh.pxssh()
try:
self.ssh.login(
self.host, self.username, password=self.password, port=self.port
)
self.connected = True
except exceptions.EOF:
_LOGGER.error("Connection refused. SSH enabled?")
self._disconnect()
def _disconnect(self):
"""Disconnect the current SSH connection."""
try:
self.ssh.logout()
except Exception: # pylint: disable=broad-except
pass
finally:
self.ssh = None
self.connected = False
def _get_update(self):
try:
if not self.connected:
self._connect()
# If we still aren't connected at this point
# don't try to send anything to the AP.
if not self.connected:
return None
self.ssh.sendline(UNIFI_COMMAND)
self.ssh.prompt()
return self.ssh.before
except pxssh.ExceptionPxssh as err:
_LOGGER.error("Unexpected SSH error: %s", str(err))
self._disconnect()
return None
except (AssertionError, exceptions.EOF) as err:
_LOGGER.error("Connection to AP unavailable: %s", str(err))
self._disconnect()
return None
def _response_to_json(response):
try:
json_response = json.loads(str(response)[31:-1].replace("\\", ""))
_LOGGER.debug(str(json_response))
ssid_table = json_response.get(UNIFI_SSID_TABLE)
active_clients = {}
for ssid in ssid_table:
client_table = ssid.get(UNIFI_CLIENT_TABLE)
for client in client_table:
active_clients[client.get("mac")] = client
return active_clients
except (ValueError, TypeError):
_LOGGER.error("Failed to decode response from AP")
return {}
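# Sketch of the JSON structure _response_to_json() expects from the
# `mca-dump` output: each SSID entry in "vap_table" carries a "sta_table" of
# associated clients.  The dict below is an assumption for illustration, not a
# captured AP response.
def _example_parse_mca_dump():
    mca_dump = {
        UNIFI_SSID_TABLE: [
            {
                UNIFI_CLIENT_TABLE: [
                    {"mac": "AA:BB:CC:DD:EE:FF", "hostname": "laptop"}
                ]
            }
        ]
    }
    active_clients = {}
    for ssid in mca_dump[UNIFI_SSID_TABLE]:
        for client in ssid[UNIFI_CLIENT_TABLE]:
            active_clients[client["mac"]] = client
    return active_clients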
|
from lemur.plugins.bases import SourcePlugin
class TestSourcePlugin(SourcePlugin):
title = "Test"
slug = "test-source"
description = "Enables testing"
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur.git"
def __init__(self, *args, **kwargs):
super(TestSourcePlugin, self).__init__(*args, **kwargs)
def get_certificates(self):
return
def update_endpoint(self, endpoint, certificate):
return
|
import itest_utils
from behave import then
from behave import when
from marathon import MarathonApp
from paasta_tools.utils import _run
from paasta_tools.utils import remove_ansi_escape_sequences
CONTAINER = {"type": "DOCKER", "docker": {"network": "BRIDGE", "image": "busybox"}}
@when("all zookeepers are unavailable")
def all_zookeepers_unavailable(context):
pass
@when("all mesos masters are unavailable")
def all_mesos_masters_unavailable(context):
pass
@when('an app with id "{app_id}" using high memory is launched')
def run_paasta_metastatus_high_mem(context, app_id):
context.marathon_clients.current[0].create_app(
app_id,
MarathonApp(cmd="/bin/sleep 100000", mem=490, instances=3, container=CONTAINER),
)
@when('an app with id "{app_id}" using high disk is launched')
def run_paasta_metastatus_high_disk(context, app_id):
context.marathon_clients.current[0].create_app(
app_id,
MarathonApp(cmd="/bin/sleep 100000", disk=95, instances=3, container=CONTAINER),
)
@when('an app with id "{app_id}" using high cpu is launched')
def run_paasta_metastatus_high_cpu(context, app_id):
context.marathon_clients.current[0].create_app(
app_id,
MarathonApp(
cmd="/bin/sleep 100000", cpus=9.1, instances=3, container=CONTAINER
),
)
@when('a task belonging to the app with id "{app_id}" is in the task list')
def marathon_task_is_ready(context, app_id):
"""Wait for a task with a matching task name to be ready. time out in 60 seconds """
marathon_tasks_are_ready(context, 1, app_id)
@when('{num:d} tasks belonging to the app with id "{app_id}" are in the task list')
def marathon_tasks_are_ready(context, num, app_id):
"""Wait for the specified number of tasks with matching task names to be ready. time out in 60 seconds """
itest_utils.wait_for_app_to_launch_tasks(
context.marathon_clients.current[0], app_id, num
)
@then(
'paasta_metastatus{flags} exits with return code "{expected_return_code}" and output "{expected_output}"'
)
def check_metastatus_return_code_with_flags(
context, flags, expected_return_code, expected_output
):
# We don't want to invoke the "paasta metastatus" wrapper because by
# default it will check every cluster. This is also the way sensu invokes
# this check.
cmd = "python -m paasta_tools.paasta_metastatus%s" % flags
print("Running cmd %s" % (cmd))
exit_code, output = _run(cmd)
# we don't care about the colouring here, so remove any ansi escape sequences
escaped_output = remove_ansi_escape_sequences(output)
print(f"Got exitcode {exit_code} with output:\n{output}")
print()
assert exit_code == int(expected_return_code)
assert expected_output in escaped_output
@then(
'paasta_metastatus exits with return code "{expected_return_code}" and output "{expected_output}"'
)
def check_metastatus_return_code_no_flags(
context, expected_return_code, expected_output
):
check_metastatus_return_code_with_flags(
context=context,
flags="",
expected_return_code=expected_return_code,
expected_output=expected_output,
)
|
from . import nodes
from .compiler import CodeGenerator
class TrackingCodeGenerator(CodeGenerator):
"""We abuse the code generator for introspection."""
def __init__(self, environment):
CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
self.undeclared_identifiers = set()
def write(self, x):
"""Don't write."""
def enter_frame(self, frame):
"""Remember all undeclared identifiers."""
CodeGenerator.enter_frame(self, frame)
for _, (action, param) in frame.symbols.loads.items():
if action == "resolve" and param not in self.environment.globals:
self.undeclared_identifiers.add(param)
def find_undeclared_variables(ast):
"""Returns a set of all variables in the AST that will be looked up from
the context at runtime. Because at compile time it's not known which
variables will be used depending on the path the execution takes at
runtime, all variables are returned.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
>>> meta.find_undeclared_variables(ast) == {'bar'}
True
.. admonition:: Implementation
Internally the code generator is used for finding undeclared variables.
This is good to know because the code generator might raise a
:exc:`TemplateAssertionError` during compilation and as a matter of
fact this function can currently raise that exception as well.
"""
codegen = TrackingCodeGenerator(ast.environment)
codegen.visit(ast)
return codegen.undeclared_identifiers
def find_referenced_templates(ast):
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast))
['layout.html', None]
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed.
"""
for node in ast.find_all(
(nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
):
if not isinstance(node.template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
for template_name in node.template.items:
# something const, only yield the strings and ignore
# non-string consts that really just make no sense
if isinstance(template_name, nodes.Const):
if isinstance(template_name.value, str):
yield template_name.value
# something dynamic in there
else:
yield None
# something dynamic we don't know about here
else:
yield None
continue
        # constant is a string; direct template name
if isinstance(node.template.value, str):
yield node.template.value
# a tuple or list (latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
elif isinstance(node, nodes.Include) and isinstance(
node.template.value, (tuple, list)
):
for template_name in node.template.value:
if isinstance(template_name, str):
yield template_name
# something else we don't care about, we could warn here
else:
yield None
|
from test import CollectorTestCase
from test import get_collector_config
from snmp import SNMPCollector
class TestSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('SNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = SNMPCollector(config, None)
def test_import(self):
self.assertTrue(SNMPCollector)
|
import mock
from paasta_tools import marathon_dashboard
from paasta_tools.marathon_tools import MarathonClients
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.utils import SystemPaastaConfig
@mock.patch("paasta_tools.marathon_dashboard.load_system_paasta_config", autospec=True)
def test_main(mock_load_system_paasta_config):
soa_dir = "/fake/soa/dir"
cluster = "fake_cluster"
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{}, "fake_directory"
)
with mock.patch(
"paasta_tools.marathon_dashboard.create_marathon_dashboard",
autospec=True,
return_value={},
) as create_marathon_dashboard:
marathon_dashboard.main(("--soa-dir", soa_dir, "--cluster", cluster))
create_marathon_dashboard.assert_called_once_with(
cluster=cluster, soa_dir=soa_dir
)
@mock.patch("paasta_tools.marathon_dashboard.load_system_paasta_config", autospec=True)
@mock.patch("paasta_tools.marathon_dashboard.PaastaServiceConfigLoader", autospec=True)
@mock.patch("paasta_tools.marathon_dashboard.get_services_for_cluster", autospec=True)
def test_create_marathon_dashboard(
mock_get_services_for_cluster, mock_pscl, mock_load_system_paasta_config
):
soa_dir = "/fake/soa/dir"
cluster = "fake_cluster"
mock_load_system_paasta_config.return_value = SystemPaastaConfig(
{"dashboard_links": {}}, "fake_directory"
)
mock_get_services_for_cluster.return_value = [
("fake_service", "foo"),
("fake_service", "bar"),
]
mock_pscl.return_value.instance_configs.return_value = [
MarathonServiceConfig("fake_service", "fake_cluster", "foo", {}, {}, soa_dir),
MarathonServiceConfig("fake_service", "fake_cluster", "bar", {}, {}, soa_dir),
]
mock_client = mock.Mock(servers=["hi"])
mock_clients = MarathonClients(current=[mock_client], previous=[mock_client])
expected_output = {
"fake_cluster": [
{"service": "fake_service", "instance": "foo", "shard_url": "hi"},
{"service": "fake_service", "instance": "bar", "shard_url": "hi"},
]
}
assert (
marathon_dashboard.create_marathon_dashboard(
cluster=cluster, soa_dir=soa_dir, marathon_clients=mock_clients
)
== expected_output
)
|
from typing import Any
from homeassistant.components.scene import Scene
from .const import DATA_BROKERS, DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add switches for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
async_add_entities([SmartThingsScene(scene) for scene in broker.scenes.values()])
class SmartThingsScene(Scene):
"""Define a SmartThings scene."""
def __init__(self, scene):
"""Init the scene class."""
self._scene = scene
async def async_activate(self, **kwargs: Any) -> None:
"""Activate scene."""
await self._scene.execute()
@property
def device_state_attributes(self):
"""Get attributes about the state."""
return {
"icon": self._scene.icon,
"color": self._scene.color,
"location_id": self._scene.location_id,
}
@property
def name(self) -> str:
"""Return the name of the device."""
return self._scene.name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._scene.scene_id
|
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Fan states."""
hass.states.async_set("fan.entity_off", "off", {})
hass.states.async_set("fan.entity_on", "on", {})
hass.states.async_set("fan.entity_speed", "on", {"speed": "high"})
hass.states.async_set("fan.entity_oscillating", "on", {"oscillating": True})
hass.states.async_set("fan.entity_direction", "on", {"direction": "forward"})
turn_on_calls = async_mock_service(hass, "fan", "turn_on")
turn_off_calls = async_mock_service(hass, "fan", "turn_off")
set_direction_calls = async_mock_service(hass, "fan", "set_direction")
oscillate_calls = async_mock_service(hass, "fan", "oscillate")
set_speed_calls = async_mock_service(hass, "fan", "set_speed")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("fan.entity_off", "off"),
State("fan.entity_on", "on"),
State("fan.entity_speed", "on", {"speed": "high"}),
State("fan.entity_oscillating", "on", {"oscillating": True}),
State("fan.entity_direction", "on", {"direction": "forward"}),
],
)
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(set_direction_calls) == 0
assert len(oscillate_calls) == 0
assert len(set_speed_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("fan.entity_off", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(set_direction_calls) == 0
assert len(oscillate_calls) == 0
assert len(set_speed_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("fan.entity_on", "off"),
State("fan.entity_off", "on"),
State("fan.entity_speed", "on", {"speed": "low"}),
State("fan.entity_oscillating", "on", {"oscillating": False}),
State("fan.entity_direction", "on", {"direction": "reverse"}),
# Should not raise
State("fan.non_existing", "on"),
],
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "fan"
assert turn_on_calls[0].data == {"entity_id": "fan.entity_off"}
assert len(set_direction_calls) == 1
assert set_direction_calls[0].domain == "fan"
assert set_direction_calls[0].data == {
"entity_id": "fan.entity_direction",
"direction": "reverse",
}
assert len(oscillate_calls) == 1
assert oscillate_calls[0].domain == "fan"
assert oscillate_calls[0].data == {
"entity_id": "fan.entity_oscillating",
"oscillating": False,
}
assert len(set_speed_calls) == 1
assert set_speed_calls[0].domain == "fan"
assert set_speed_calls[0].data == {"entity_id": "fan.entity_speed", "speed": "low"}
assert len(turn_off_calls) == 1
assert turn_off_calls[0].domain == "fan"
assert turn_off_calls[0].data == {"entity_id": "fan.entity_on"}
|
from collections import namedtuple
import datetime
import logging
from hangups import user, hangouts_pb2
logger = logging.getLogger(__name__)
##############################################################################
# Message parsing utils
##############################################################################
def from_timestamp(microsecond_timestamp):
"""Convert a microsecond timestamp to a UTC datetime instance."""
# Create datetime without losing precision from floating point (yes, this
# is actually needed):
return datetime.datetime.fromtimestamp(
microsecond_timestamp // 1000000, datetime.timezone.utc
).replace(microsecond=(microsecond_timestamp % 1000000))
def to_timestamp(datetime_timestamp):
"""Convert UTC datetime to microsecond timestamp used by Hangouts."""
return int(datetime_timestamp.timestamp() * 1000000)
def from_participantid(participant_id):
"""Convert hangouts_pb2.ParticipantId to UserID."""
return user.UserID(
chat_id=participant_id.chat_id,
gaia_id=participant_id.gaia_id
)
def to_participantid(user_id):
"""Convert UserID to hangouts_pb2.ParticipantId."""
return hangouts_pb2.ParticipantId(
chat_id=user_id.chat_id,
gaia_id=user_id.gaia_id
)
##############################################################################
# Message types and parsers
##############################################################################
TypingStatusMessage = namedtuple(
'TypingStatusMessage', ['conv_id', 'user_id', 'timestamp', 'status']
)
"""
A notification about a user's typing status in a conversation.
Args:
conv_id (str): ID of the conversation.
user_id (hangups.user.UserID): ID of the affected user.
timestamp (datetime.datetime): When the notification was generated.
status: The new status; one of ``TYPING_TYPE_STARTED``,
``TYPING_TYPE_PAUSED``, or ``TYPING_TYPE_STOPPED``.
"""
def parse_typing_status_message(p):
"""Return TypingStatusMessage from hangouts_pb2.SetTypingNotification.
The same status may be sent multiple times consecutively, and when a
message is sent the typing status will not change to stopped.
"""
return TypingStatusMessage(
conv_id=p.conversation_id.id,
user_id=from_participantid(p.sender_id),
timestamp=from_timestamp(p.timestamp),
status=p.type,
)
WatermarkNotification = namedtuple(
'WatermarkNotification', ['conv_id', 'user_id', 'read_timestamp']
)
"""A notification about a user's watermark (read timestamp).
Args:
conv_id (str): ID of the conversation.
user_id (hangups.user.UserID): ID of the affected user.
read_timestamp (datetime.datetime): The new watermark.
"""
def parse_watermark_notification(p):
"""Return WatermarkNotification from hangouts_pb2.WatermarkNotification."""
return WatermarkNotification(
conv_id=p.conversation_id.id,
user_id=from_participantid(p.sender_id),
read_timestamp=from_timestamp(
p.latest_read_timestamp
),
)
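# Small sketch of the precision point noted in from_timestamp(): the integer
# division plus replace() keeps the exact microsecond, whereas a naive
# fromtimestamp(us / 1000000.0) may land on a neighbouring microsecond due to
# floating point rounding.  The timestamp value is an arbitrary example.
def _example_timestamp_precision():
    microsecond_timestamp = 1415219910679613
    dt = from_timestamp(microsecond_timestamp)
    assert dt.microsecond == 679613
    return dt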
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
DOMAIN as LIGHT_DOMAIN,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SIGNAL_ADD_ENTITIES
from .insteon_entity import InsteonEntity
from .utils import async_add_insteon_entities
MAX_BRIGHTNESS = 255
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Insteon lights from a config entry."""
def add_entities(discovery_info=None):
"""Add the Insteon entities for the platform."""
async_add_insteon_entities(
hass, LIGHT_DOMAIN, InsteonDimmerEntity, async_add_entities, discovery_info
)
signal = f"{SIGNAL_ADD_ENTITIES}_{LIGHT_DOMAIN}"
async_dispatcher_connect(hass, signal, add_entities)
add_entities()
class InsteonDimmerEntity(InsteonEntity, LightEntity):
"""A Class for an Insteon light entity."""
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._insteon_device_group.value
@property
def is_on(self):
"""Return the boolean response if the node is on."""
return bool(self.brightness)
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
async def async_turn_on(self, **kwargs):
"""Turn light on."""
if ATTR_BRIGHTNESS in kwargs:
brightness = int(kwargs[ATTR_BRIGHTNESS])
await self._insteon_device.async_on(
on_level=brightness, group=self._insteon_device_group.group
)
else:
await self._insteon_device.async_on(group=self._insteon_device_group.group)
async def async_turn_off(self, **kwargs):
"""Turn light off."""
await self._insteon_device.async_off(self._insteon_device_group.group)
|
import urwid
import os
class ExampleTreeWidget(urwid.TreeWidget):
""" Display widget for leaf nodes """
def get_display_text(self):
return self.get_node().get_value()['name']
class ExampleNode(urwid.TreeNode):
""" Data storage object for leaf nodes """
def load_widget(self):
return ExampleTreeWidget(self)
class ExampleParentNode(urwid.ParentNode):
""" Data storage object for interior/parent nodes """
def load_widget(self):
return ExampleTreeWidget(self)
def load_child_keys(self):
data = self.get_value()
return range(len(data['children']))
def load_child_node(self, key):
"""Return either an ExampleNode or ExampleParentNode"""
childdata = self.get_value()['children'][key]
childdepth = self.get_depth() + 1
if 'children' in childdata:
childclass = ExampleParentNode
else:
childclass = ExampleNode
return childclass(childdata, parent=self, key=key, depth=childdepth)
class ExampleTreeBrowser:
palette = [
('body', 'black', 'light gray'),
('focus', 'light gray', 'dark blue', 'standout'),
('head', 'yellow', 'black', 'standout'),
('foot', 'light gray', 'black'),
('key', 'light cyan', 'black','underline'),
('title', 'white', 'black', 'bold'),
('flag', 'dark gray', 'light gray'),
('error', 'dark red', 'light gray'),
]
footer_text = [
('title', "Example Data Browser"), " ",
('key', "UP"), ",", ('key', "DOWN"), ",",
('key', "PAGE UP"), ",", ('key', "PAGE DOWN"),
" ",
('key', "+"), ",",
('key', "-"), " ",
('key', "LEFT"), " ",
('key', "HOME"), " ",
('key', "END"), " ",
('key', "Q"),
]
def __init__(self, data=None):
self.topnode = ExampleParentNode(data)
self.listbox = urwid.TreeListBox(urwid.TreeWalker(self.topnode))
self.listbox.offset_rows = 1
self.header = urwid.Text( "" )
self.footer = urwid.AttrWrap( urwid.Text( self.footer_text ),
'foot')
self.view = urwid.Frame(
urwid.AttrWrap( self.listbox, 'body' ),
header=urwid.AttrWrap(self.header, 'head' ),
footer=self.footer )
def main(self):
"""Run the program."""
self.loop = urwid.MainLoop(self.view, self.palette,
unhandled_input=self.unhandled_input)
self.loop.run()
def unhandled_input(self, k):
if k in ('q','Q'):
raise urwid.ExitMainLoop()
def get_example_tree():
""" generate a quick 100 leaf tree for demo purposes """
retval = {"name":"parent","children":[]}
for i in range(10):
retval['children'].append({"name":"child " + str(i)})
retval['children'][i]['children']=[]
for j in range(10):
retval['children'][i]['children'].append({"name":"grandchild " +
str(i) + "." + str(j)})
return retval
def main():
sample = get_example_tree()
ExampleTreeBrowser(sample).main()
if __name__=="__main__":
main()
|
import enum
from django.conf import settings
from django.db import models
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
postgresql_engine_names = [
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
]
if settings.DATABASES['default']['ENGINE'] in postgresql_engine_names:
from django.contrib.postgres.fields import JSONField as _JSONField
else:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': dict})
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['default']
return name, path, args, kwargs
class ChoiceEnumMeta(enum.EnumMeta):
def __call__(cls, value, *args, **kwargs):
if isinstance(value, str):
try:
value = cls.__members__[value]
except KeyError:
pass # let the super method complain
return super().__call__(value, *args, **kwargs)
def __new__(metacls, classname, bases, classdict):
labels = {}
for key in classdict._member_names:
source_value = classdict[key]
if isinstance(source_value, (list, tuple)):
try:
val, labels[key] = source_value
except ValueError:
raise ValueError("Invalid ChoiceEnum member '{}'".format(key))
else:
val = source_value
labels[key] = key.replace("_", " ").title()
# Use dict.__setitem__() to suppress defenses against
# double assignment in enum's classdict
dict.__setitem__(classdict, key, val)
cls = super().__new__(metacls, classname, bases, classdict)
for key, label in labels.items():
getattr(cls, key).label = label
return cls
@property
def choices(cls):
return [(k.value, k.label) for k in cls]
@property
def default(cls):
try:
return next(iter(cls))
except StopIteration:
return None
class ChoiceEnum(enum.Enum, metaclass=ChoiceEnumMeta):
"""
Utility class to handle choices in Django model and/or form fields.
Usage:
class Color(ChoiceEnum):
WHITE = 0, "White"
RED = 1, "Red"
GREEN = 2, "Green"
BLUE = 3, "Blue"
green = Color.GREEN
color = forms.ChoiceField(
choices=Color.choices,
default=Color.default,
)
"""
def __str__(self):
return force_str(self.label)
class ChoiceEnumField(models.PositiveSmallIntegerField):
description = _("Customer recognition state")
def __init__(self, *args, **kwargs):
        self.enum_type = kwargs.pop('enum_type', ChoiceEnum)  # fallback is required for migrations
if not issubclass(self.enum_type, ChoiceEnum):
raise ValueError("enum_type must be a subclass of `ChoiceEnum`.")
kwargs.update(choices=self.enum_type.choices)
kwargs.setdefault('default', self.enum_type.default)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if 'choices' in kwargs:
del kwargs['choices']
if kwargs['default'] is self.enum_type.default:
del kwargs['default']
elif isinstance(kwargs['default'], self.enum_type):
kwargs['default'] = kwargs['default'].value
return name, path, args, kwargs
def from_db_value(self, value, expression, connection):
try:
return self.enum_type(value)
except ValueError:
return value
def get_prep_value(self, state):
if isinstance(state, self.enum_type):
return state.value
return state
def to_python(self, state):
return self.enum_type(state)
def value_to_string(self, obj):
value = getattr(obj, self.name, obj)
if not isinstance(value, self.enum_type):
raise ValueError("Value must be of type {}".format(self.enum_type))
return value.name
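# Minimal sketch of how ChoiceEnum is meant to be consumed; Color mirrors the
# docstring example above, and the assertions only exercise behaviour defined
# in this module (choices, default, and name-based lookup via
# ChoiceEnumMeta.__call__).
def _example_choice_enum():
    class Color(ChoiceEnum):
        WHITE = 0, "White"
        RED = 1, "Red"

    assert Color.choices == [(0, "White"), (1, "Red")]
    assert Color.default is Color.WHITE
    assert Color('RED') is Color.RED  # string names resolve to members
    return Color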
|
from datetime import timedelta
import logging
import vasttrafik
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util.dt import now
_LOGGER = logging.getLogger(__name__)
ATTR_ACCESSIBILITY = "accessibility"
ATTR_DIRECTION = "direction"
ATTR_LINE = "line"
ATTR_TRACK = "track"
ATTRIBUTION = "Data provided by Västtrafik"
CONF_DELAY = "delay"
CONF_DEPARTURES = "departures"
CONF_FROM = "from"
CONF_HEADING = "heading"
CONF_LINES = "lines"
CONF_KEY = "key"
CONF_SECRET = "secret"
DEFAULT_DELAY = 0
ICON = "mdi:train"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_KEY): cv.string,
vol.Required(CONF_SECRET): cv.string,
vol.Optional(CONF_DEPARTURES): [
{
vol.Required(CONF_FROM): cv.string,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): cv.positive_int,
vol.Optional(CONF_HEADING): cv.string,
vol.Optional(CONF_LINES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NAME): cv.string,
}
],
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the departure sensor."""
planner = vasttrafik.JournyPlanner(config.get(CONF_KEY), config.get(CONF_SECRET))
sensors = []
for departure in config.get(CONF_DEPARTURES):
sensors.append(
VasttrafikDepartureSensor(
planner,
departure.get(CONF_NAME),
departure.get(CONF_FROM),
departure.get(CONF_HEADING),
departure.get(CONF_LINES),
departure.get(CONF_DELAY),
)
)
add_entities(sensors, True)
class VasttrafikDepartureSensor(Entity):
"""Implementation of a Vasttrafik Departure Sensor."""
def __init__(self, planner, name, departure, heading, lines, delay):
"""Initialize the sensor."""
self._planner = planner
self._name = name or departure
self._departure = planner.location_name(departure)[0]
self._heading = planner.location_name(heading)[0] if heading else None
self._lines = lines if lines else None
self._delay = timedelta(minutes=delay)
self._departureboard = None
self._state = None
self._attributes = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon for the frontend."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def state(self):
"""Return the next departure time."""
return self._state
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the departure board."""
try:
self._departureboard = self._planner.departureboard(
self._departure["id"],
direction=self._heading["id"] if self._heading else None,
date=now() + self._delay,
)
except vasttrafik.Error:
_LOGGER.debug("Unable to read departure board, updating token")
self._planner.update_token()
if not self._departureboard:
_LOGGER.debug(
"No departures from %s heading %s",
self._departure["name"],
self._heading["name"] if self._heading else "ANY",
)
self._state = None
self._attributes = {}
else:
for departure in self._departureboard:
line = departure.get("sname")
if not self._lines or line in self._lines:
if "rtTime" in self._departureboard[0]:
self._state = self._departureboard[0]["rtTime"]
else:
self._state = self._departureboard[0]["time"]
params = {
ATTR_ACCESSIBILITY: departure.get("accessibility"),
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_DIRECTION: departure.get("direction"),
ATTR_LINE: departure.get("sname"),
ATTR_TRACK: departure.get("track"),
}
self._attributes = {k: v for k, v in params.items() if v}
break
|
from typing import Any, Callable, Optional, Sequence, cast
import voluptuous as vol
from homeassistant.components import switch
from homeassistant.components.light import PLATFORM_SCHEMA, LightEntity
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_ENTITY_ID,
CONF_NAME,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.typing import (
ConfigType,
DiscoveryInfoType,
HomeAssistantType,
)
# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs
DEFAULT_NAME = "Light Switch"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_ENTITY_ID): cv.entity_domain(switch.DOMAIN),
}
)
async def async_setup_platform(
hass: HomeAssistantType,
config: ConfigType,
async_add_entities: Callable[[Sequence[Entity], bool], None],
discovery_info: Optional[DiscoveryInfoType] = None,
) -> None:
"""Initialize Light Switch platform."""
registry = await hass.helpers.entity_registry.async_get_registry()
wrapped_switch = registry.async_get(config[CONF_ENTITY_ID])
unique_id = wrapped_switch.unique_id if wrapped_switch else None
async_add_entities(
[
LightSwitch(
cast(str, config.get(CONF_NAME)),
config[CONF_ENTITY_ID],
unique_id,
)
],
True,
)
class LightSwitch(LightEntity):
"""Represents a Switch as a Light."""
def __init__(self, name: str, switch_entity_id: str, unique_id: str) -> None:
"""Initialize Light Switch."""
self._name = name
self._switch_entity_id = switch_entity_id
self._unique_id = unique_id
self._is_on = False
self._available = False
self._async_unsub_state_changed: Optional[CALLBACK_TYPE] = None
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def is_on(self) -> bool:
"""Return true if light switch is on."""
return self._is_on
@property
def available(self) -> bool:
"""Return true if light switch is on."""
return self._available
@property
def should_poll(self) -> bool:
"""No polling needed for a light switch."""
return False
@property
def unique_id(self):
"""Return the unique id of the light switch."""
return self._unique_id
async def async_turn_on(self, **kwargs):
"""Forward the turn_on command to the switch in this light switch."""
data = {ATTR_ENTITY_ID: self._switch_entity_id}
await self.hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_ON,
data,
blocking=True,
context=self._context,
)
async def async_turn_off(self, **kwargs):
"""Forward the turn_off command to the switch in this light switch."""
data = {ATTR_ENTITY_ID: self._switch_entity_id}
await self.hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_OFF,
data,
blocking=True,
context=self._context,
)
async def async_update(self):
"""Query the switch in this light switch and determine the state."""
switch_state = self.hass.states.get(self._switch_entity_id)
if switch_state is None:
self._available = False
return
self._is_on = switch_state.state == STATE_ON
self._available = switch_state.state != STATE_UNAVAILABLE
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@callback
def async_state_changed_listener(*_: Any) -> None:
"""Handle child updates."""
self.async_schedule_update_ha_state(True)
assert self.hass is not None
self._async_unsub_state_changed = async_track_state_change_event(
self.hass, [self._switch_entity_id], async_state_changed_listener
)
async def async_will_remove_from_hass(self):
"""Handle removal from Home Assistant."""
if self._async_unsub_state_changed is not None:
self._async_unsub_state_changed()
self._async_unsub_state_changed = None
self._available = False
|
import filelock
import numpy as np
import os
from chainer.dataset import download
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv import utils
root = 'pfnet/chainercv/cub'
url = 'http://www.vision.caltech.edu/visipedia-data/CUB-200-2011/'\
'CUB_200_2011.tgz'
prob_map_url = 'http://www.vision.caltech.edu/visipedia-data/'\
'CUB-200-2011/segmentations.tgz'
def get_cub():
# To support ChainerMN, the target directory should be locked.
with filelock.FileLock(os.path.join(download.get_dataset_directory(
'pfnet/chainercv/.lock'), 'cub.lock')):
data_root = download.get_dataset_directory(root)
base_path = os.path.join(data_root, 'CUB_200_2011')
if os.path.exists(base_path):
# skip downloading
return base_path
download_file_path = utils.cached_download(url)
ext = os.path.splitext(url)[1]
utils.extractall(download_file_path, data_root, ext)
return base_path
def get_cub_prob_map():
# To support ChainerMN, the target directory should be locked.
with filelock.FileLock(os.path.join(download.get_dataset_directory(
'pfnet/chainercv/.lock'), 'cub.lock')):
data_root = download.get_dataset_directory(root)
base_path = os.path.join(data_root, 'segmentations')
if os.path.exists(base_path):
# skip downloading
return base_path
prob_map_download_file_path = utils.cached_download(prob_map_url)
prob_map_ext = os.path.splitext(prob_map_url)[1]
utils.extractall(
prob_map_download_file_path, data_root, prob_map_ext)
return base_path
class CUBDatasetBase(GetterDataset):
"""Base class for CUB dataset.
"""
def __init__(self, data_dir='auto', prob_map_dir='auto'):
super(CUBDatasetBase, self).__init__()
if data_dir == 'auto':
data_dir = get_cub()
if prob_map_dir == 'auto':
prob_map_dir = get_cub_prob_map()
self.data_dir = data_dir
self.prob_map_dir = prob_map_dir
imgs_file = os.path.join(data_dir, 'images.txt')
bbs_file = os.path.join(data_dir, 'bounding_boxes.txt')
self.paths = [
line.strip().split()[1] for line in open(imgs_file)]
# (x_min, y_min, width, height)
bbs = np.array([
tuple(map(float, line.split()[1:5]))
for line in open(bbs_file)])
# (x_min, y_min, width, height) -> (x_min, y_min, x_max, y_max)
bbs[:, 2:] += bbs[:, :2]
# (x_min, y_min, width, height) -> (y_min, x_min, y_max, x_max)
bbs[:] = bbs[:, [1, 0, 3, 2]]
self.bbs = bbs.astype(np.float32)
self.prob_map_paths = [
os.path.join(self.prob_map_dir, os.path.splitext(path)[0] + '.png')
for path in self.paths]
self.add_getter('bbox', self._get_bbox)
self.add_getter('prob_map', self._get_prob_map)
def __len__(self):
return len(self.paths)
def _get_bbox(self, i):
return self.bbs[i][None]
def _get_prob_map(self, i):
prob_map = utils.read_label(self.prob_map_paths[i], dtype=np.uint8)
prob_map = prob_map.astype(np.float32) / 255 # [0, 255] -> [0, 1]
return prob_map
cub_label_names = (
'Black_footed_Albatross',
'Laysan_Albatross',
'Sooty_Albatross',
'Groove_billed_Ani',
'Crested_Auklet',
'Least_Auklet',
'Parakeet_Auklet',
'Rhinoceros_Auklet',
'Brewer_Blackbird',
'Red_winged_Blackbird',
'Rusty_Blackbird',
'Yellow_headed_Blackbird',
'Bobolink',
'Indigo_Bunting',
'Lazuli_Bunting',
'Painted_Bunting',
'Cardinal',
'Spotted_Catbird',
'Gray_Catbird',
'Yellow_breasted_Chat',
'Eastern_Towhee',
'Chuck_will_Widow',
'Brandt_Cormorant',
'Red_faced_Cormorant',
'Pelagic_Cormorant',
'Bronzed_Cowbird',
'Shiny_Cowbird',
'Brown_Creeper',
'American_Crow',
'Fish_Crow',
'Black_billed_Cuckoo',
'Mangrove_Cuckoo',
'Yellow_billed_Cuckoo',
'Gray_crowned_Rosy_Finch',
'Purple_Finch',
'Northern_Flicker',
'Acadian_Flycatcher',
'Great_Crested_Flycatcher',
'Least_Flycatcher',
'Olive_sided_Flycatcher',
'Scissor_tailed_Flycatcher',
'Vermilion_Flycatcher',
'Yellow_bellied_Flycatcher',
'Frigatebird',
'Northern_Fulmar',
'Gadwall',
'American_Goldfinch',
'European_Goldfinch',
'Boat_tailed_Grackle',
'Eared_Grebe',
'Horned_Grebe',
'Pied_billed_Grebe',
'Western_Grebe',
'Blue_Grosbeak',
'Evening_Grosbeak',
'Pine_Grosbeak',
'Rose_breasted_Grosbeak',
'Pigeon_Guillemot',
'California_Gull',
'Glaucous_winged_Gull',
'Heermann_Gull',
'Herring_Gull',
'Ivory_Gull',
'Ring_billed_Gull',
'Slaty_backed_Gull',
'Western_Gull',
'Anna_Hummingbird',
'Ruby_throated_Hummingbird',
'Rufous_Hummingbird',
'Green_Violetear',
'Long_tailed_Jaeger',
'Pomarine_Jaeger',
'Blue_Jay',
'Florida_Jay',
'Green_Jay',
'Dark_eyed_Junco',
'Tropical_Kingbird',
'Gray_Kingbird',
'Belted_Kingfisher',
'Green_Kingfisher',
'Pied_Kingfisher',
'Ringed_Kingfisher',
'White_breasted_Kingfisher',
'Red_legged_Kittiwake',
'Horned_Lark',
'Pacific_Loon',
'Mallard',
'Western_Meadowlark',
'Hooded_Merganser',
'Red_breasted_Merganser',
'Mockingbird',
'Nighthawk',
'Clark_Nutcracker',
'White_breasted_Nuthatch',
'Baltimore_Oriole',
'Hooded_Oriole',
'Orchard_Oriole',
'Scott_Oriole',
'Ovenbird',
'Brown_Pelican',
'White_Pelican',
'Western_Wood_Pewee',
'Sayornis',
'American_Pipit',
'Whip_poor_Will',
'Horned_Puffin',
'Common_Raven',
'White_necked_Raven',
'American_Redstart',
'Geococcyx',
'Loggerhead_Shrike',
'Great_Grey_Shrike',
'Baird_Sparrow',
'Black_throated_Sparrow',
'Brewer_Sparrow',
'Chipping_Sparrow',
'Clay_colored_Sparrow',
'House_Sparrow',
'Field_Sparrow',
'Fox_Sparrow',
'Grasshopper_Sparrow',
'Harris_Sparrow',
'Henslow_Sparrow',
'Le_Conte_Sparrow',
'Lincoln_Sparrow',
'Nelson_Sharp_tailed_Sparrow',
'Savannah_Sparrow',
'Seaside_Sparrow',
'Song_Sparrow',
'Tree_Sparrow',
'Vesper_Sparrow',
'White_crowned_Sparrow',
'White_throated_Sparrow',
'Cape_Glossy_Starling',
'Bank_Swallow',
'Barn_Swallow',
'Cliff_Swallow',
'Tree_Swallow',
'Scarlet_Tanager',
'Summer_Tanager',
'Artic_Tern',
'Black_Tern',
'Caspian_Tern',
'Common_Tern',
'Elegant_Tern',
'Forsters_Tern',
'Least_Tern',
'Green_tailed_Towhee',
'Brown_Thrasher',
'Sage_Thrasher',
'Black_capped_Vireo',
'Blue_headed_Vireo',
'Philadelphia_Vireo',
'Red_eyed_Vireo',
'Warbling_Vireo',
'White_eyed_Vireo',
'Yellow_throated_Vireo',
'Bay_breasted_Warbler',
'Black_and_white_Warbler',
'Black_throated_Blue_Warbler',
'Blue_winged_Warbler',
'Canada_Warbler',
'Cape_May_Warbler',
'Cerulean_Warbler',
'Chestnut_sided_Warbler',
'Golden_winged_Warbler',
'Hooded_Warbler',
'Kentucky_Warbler',
'Magnolia_Warbler',
'Mourning_Warbler',
'Myrtle_Warbler',
'Nashville_Warbler',
'Orange_crowned_Warbler',
'Palm_Warbler',
'Pine_Warbler',
'Prairie_Warbler',
'Prothonotary_Warbler',
'Swainson_Warbler',
'Tennessee_Warbler',
'Wilson_Warbler',
'Worm_eating_Warbler',
'Yellow_Warbler',
'Northern_Waterthrush',
'Louisiana_Waterthrush',
'Bohemian_Waxwing',
'Cedar_Waxwing',
'American_Three_toed_Woodpecker',
'Pileated_Woodpecker',
'Red_bellied_Woodpecker',
'Red_cockaded_Woodpecker',
'Red_headed_Woodpecker',
'Downy_Woodpecker',
'Bewick_Wren',
'Cactus_Wren',
'Carolina_Wren',
'House_Wren',
'Marsh_Wren',
'Rock_Wren',
'Winter_Wren',
'Common_Yellowthroat',
)
|
from datetime import timedelta
import logging
import pytest
from homeassistant.components import duckdns
from homeassistant.components.duckdns import async_track_time_interval_backoff
from homeassistant.loader import bind_hass
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.common import async_fire_time_changed
DOMAIN = "bla"
TOKEN = "abcdefgh"
_LOGGER = logging.getLogger(__name__)
INTERVAL = duckdns.INTERVAL
@bind_hass
async def async_set_txt(hass, txt):
"""Set the txt record. Pass in None to remove it.
This is a legacy helper method. Do not use it for new tests.
"""
await hass.services.async_call(
duckdns.DOMAIN, duckdns.SERVICE_SET_TXT, {duckdns.ATTR_TXT: txt}, blocking=True
)
@pytest.fixture
def setup_duckdns(hass, aioclient_mock):
"""Fixture that sets up DuckDNS."""
aioclient_mock.get(
duckdns.UPDATE_URL, params={"domains": DOMAIN, "token": TOKEN}, text="OK"
)
hass.loop.run_until_complete(
async_setup_component(
hass, duckdns.DOMAIN, {"duckdns": {"domain": DOMAIN, "access_token": TOKEN}}
)
)
async def test_setup(hass, aioclient_mock):
"""Test setup works if update passes."""
aioclient_mock.get(
duckdns.UPDATE_URL, params={"domains": DOMAIN, "token": TOKEN}, text="OK"
)
result = await async_setup_component(
hass, duckdns.DOMAIN, {"duckdns": {"domain": DOMAIN, "access_token": TOKEN}}
)
await hass.async_block_till_done()
assert result
assert aioclient_mock.call_count == 1
async_fire_time_changed(hass, utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
async def test_setup_backoff(hass, aioclient_mock):
"""Test setup fails if first update fails."""
aioclient_mock.get(
duckdns.UPDATE_URL, params={"domains": DOMAIN, "token": TOKEN}, text="KO"
)
result = await async_setup_component(
hass, duckdns.DOMAIN, {"duckdns": {"domain": DOMAIN, "access_token": TOKEN}}
)
assert result
await hass.async_block_till_done()
assert aioclient_mock.call_count == 1
# Copy of the DuckDNS intervals from duckdns/__init__.py
intervals = (
INTERVAL,
timedelta(minutes=1),
timedelta(minutes=5),
timedelta(minutes=15),
timedelta(minutes=30),
)
tme = utcnow()
await hass.async_block_till_done()
_LOGGER.debug("Backoff...")
for idx in range(1, len(intervals)):
tme += intervals[idx]
async_fire_time_changed(hass, tme)
await hass.async_block_till_done()
assert aioclient_mock.call_count == idx + 1
async def test_service_set_txt(hass, aioclient_mock, setup_duckdns):
"""Test set txt service call."""
# Empty the fixture mock requests
aioclient_mock.clear_requests()
aioclient_mock.get(
duckdns.UPDATE_URL,
params={"domains": DOMAIN, "token": TOKEN, "txt": "some-txt"},
text="OK",
)
assert aioclient_mock.call_count == 0
await async_set_txt(hass, "some-txt")
assert aioclient_mock.call_count == 1
async def test_service_clear_txt(hass, aioclient_mock, setup_duckdns):
"""Test clear txt service call."""
# Empty the fixture mock requests
aioclient_mock.clear_requests()
aioclient_mock.get(
duckdns.UPDATE_URL,
params={"domains": DOMAIN, "token": TOKEN, "txt": "", "clear": "true"},
text="OK",
)
assert aioclient_mock.call_count == 0
await async_set_txt(hass, None)
assert aioclient_mock.call_count == 1
async def test_async_track_time_interval_backoff(hass):
"""Test setup fails if first update fails."""
ret_val = False
call_count = 0
tme = None
async def _return(now):
nonlocal call_count, ret_val, tme
if tme is None:
tme = now
call_count += 1
return ret_val
intervals = (
INTERVAL,
INTERVAL * 2,
INTERVAL * 5,
INTERVAL * 9,
INTERVAL * 10,
INTERVAL * 11,
INTERVAL * 12,
)
async_track_time_interval_backoff(hass, _return, intervals)
await hass.async_block_till_done()
assert call_count == 1
_LOGGER.debug("Backoff...")
for idx in range(1, len(intervals)):
tme += intervals[idx]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == idx + 1
_LOGGER.debug("Max backoff reached - intervals[-1]")
for _idx in range(1, 10):
tme += intervals[-1]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == idx + 1 + _idx
_LOGGER.debug("Reset backoff")
call_count = 0
ret_val = True
tme += intervals[-1]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == 1
_LOGGER.debug("No backoff - intervals[0]")
for _idx in range(2, 10):
tme += intervals[0]
async_fire_time_changed(hass, tme + timedelta(seconds=0.1))
await hass.async_block_till_done()
assert call_count == _idx
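# A minimal sketch of the backoff policy the tests above exercise
# (assumption: async_track_time_interval_backoff steps forward through
# the given intervals after every failed call, stays on the last
# interval once it is reached, and restarts from the first interval
# after a successful call).
def _next_backoff_index(index, succeeded, intervals):
    """Return the next interval index under the illustrative policy above."""
    if succeeded:
        return 0
    return min(index + 1, len(intervals) - 1)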
|
import os.path
import sys
import codecs
import argparse
from lark import Lark, InlineTransformer
nearley_grammar = r"""
start: (ruledef|directive)+
directive: "@" NAME (STRING|NAME)
| "@" JS -> js_code
ruledef: NAME "->" expansions
| NAME REGEXP "->" expansions -> macro
expansions: expansion ("|" expansion)*
expansion: expr+ js
?expr: item (":" /[+*?]/)?
?item: rule|string|regexp|null
| "(" expansions ")"
rule: NAME
string: STRING
regexp: REGEXP
null: "null"
JS: /{%.*?%}/s
js: JS?
NAME: /[a-zA-Z_$]\w*/
COMMENT: /#[^\n]*/
REGEXP: /\[.*?\]/
STRING: _STRING "i"?
%import common.ESCAPED_STRING -> _STRING
%import common.WS
%ignore WS
%ignore COMMENT
"""
nearley_grammar_parser = Lark(nearley_grammar, parser='earley', lexer='standard')
def _get_rulename(name):
name = {'_': '_ws_maybe', '__':'_ws'}.get(name, name)
return 'n_' + name.replace('$', '__DOLLAR__').lower()
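# Illustrative results, derived from the mapping and escaping above:
#   _get_rulename('__')        == 'n__ws'
#   _get_rulename('Number$1')  == 'n_number__dollar__1'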
class NearleyToLark(InlineTransformer):
def __init__(self):
self._count = 0
self.extra_rules = {}
self.extra_rules_rev = {}
self.alias_js_code = {}
def _new_function(self, code):
name = 'alias_%d' % self._count
self._count += 1
self.alias_js_code[name] = code
return name
def _extra_rule(self, rule):
if rule in self.extra_rules_rev:
return self.extra_rules_rev[rule]
name = 'xrule_%d' % len(self.extra_rules)
assert name not in self.extra_rules
self.extra_rules[name] = rule
self.extra_rules_rev[rule] = name
return name
def rule(self, name):
return _get_rulename(name)
def ruledef(self, name, exps):
return '!%s: %s' % (_get_rulename(name), exps)
def expr(self, item, op):
rule = '(%s)%s' % (item, op)
return self._extra_rule(rule)
def regexp(self, r):
return '/%s/' % r
def null(self):
return ''
def string(self, s):
return self._extra_rule(s)
def expansion(self, *x):
x, js = x[:-1], x[-1]
if js.children:
js_code, = js.children
js_code = js_code[2:-2]
alias = '-> ' + self._new_function(js_code)
else:
alias = ''
return ' '.join(x) + alias
def expansions(self, *x):
return '%s' % ('\n |'.join(x))
def start(self, *rules):
return '\n'.join(filter(None, rules))
def _nearley_to_lark(g, builtin_path, n2l, js_code, folder_path, includes):
rule_defs = []
tree = nearley_grammar_parser.parse(g)
for statement in tree.children:
if statement.data == 'directive':
directive, arg = statement.children
if directive in ('builtin', 'include'):
folder = builtin_path if directive == 'builtin' else folder_path
path = os.path.join(folder, arg[1:-1])
if path not in includes:
includes.add(path)
with codecs.open(path, encoding='utf8') as f:
text = f.read()
rule_defs += _nearley_to_lark(text, builtin_path, n2l, js_code, os.path.abspath(os.path.dirname(path)), includes)
else:
assert False, directive
elif statement.data == 'js_code':
code, = statement.children
code = code[2:-2]
js_code.append(code)
elif statement.data == 'macro':
pass # TODO Add support for macros!
elif statement.data == 'ruledef':
rule_defs.append( n2l.transform(statement) )
else:
raise Exception("Unknown statement: %s" % statement)
return rule_defs
def create_code_for_nearley_grammar(g, start, builtin_path, folder_path, es6=False):
import js2py
emit_code = []
def emit(x=None):
if x:
emit_code.append(x)
emit_code.append('\n')
js_code = ['function id(x) {return x[0];}']
n2l = NearleyToLark()
rule_defs = _nearley_to_lark(g, builtin_path, n2l, js_code, folder_path, set())
lark_g = '\n'.join(rule_defs)
lark_g += '\n'+'\n'.join('!%s: %s' % item for item in n2l.extra_rules.items())
emit('from lark import Lark, Transformer')
emit()
emit('grammar = ' + repr(lark_g))
emit()
for alias, code in n2l.alias_js_code.items():
js_code.append('%s = (%s);' % (alias, code))
if es6:
emit(js2py.translate_js6('\n'.join(js_code)))
else:
emit(js2py.translate_js('\n'.join(js_code)))
emit('class TransformNearley(Transformer):')
for alias in n2l.alias_js_code:
emit(" %s = var.get('%s').to_python()" % (alias, alias))
emit(" __default__ = lambda self, n, c, m: c if c else None")
emit()
emit('parser = Lark(grammar, start="n_%s", maybe_placeholders=False)' % start)
emit('def parse(text):')
emit(' return TransformNearley().transform(parser.parse(text))')
return ''.join(emit_code)
def main(fn, start, nearley_lib, es6=False):
with codecs.open(fn, encoding='utf8') as f:
grammar = f.read()
return create_code_for_nearley_grammar(grammar, start, os.path.join(nearley_lib, 'builtin'), os.path.abspath(os.path.dirname(fn)), es6=es6)
def get_arg_parser():
parser = argparse.ArgumentParser(description='Reads a Nearley grammar (with js functions), and outputs an equivalent lark parser.')
parser.add_argument('nearley_grammar', help='Path to the file containing the nearley grammar')
parser.add_argument('start_rule', help='Rule within the nearley grammar to make the base rule')
parser.add_argument('nearley_lib', help='Path to root directory of nearley codebase (used for including builtins)')
parser.add_argument('--es6', help='Enable experimental ES6 support', action='store_true')
return parser
if __name__ == '__main__':
parser = get_arg_parser()
args = parser.parse_args()
print(main(fn=args.nearley_grammar, start=args.start_rule, nearley_lib=args.nearley_lib, es6=args.es6))
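# Example invocation (the script name and paths are hypothetical):
#   python nearley.py grammars/arithmetic.ne main /path/to/nearley > arithmetic_parser.py
# The emitted module defines ``grammar``, ``TransformNearley`` and a
# ``parse(text)`` helper built on the generated Lark parser.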
|
import unittest
from chainercv.chainer_experimental.datasets.sliceable import SliceableDataset
from chainercv.chainer_experimental.datasets.sliceable import TransformDataset
from chainercv.utils import testing
class SampleDataset(SliceableDataset):
def __len__(self):
return 10
@property
def keys(self):
return ('item0', 'item1', 'item2')
def get_example_by_keys(self, i, key_indices):
return tuple(
'{:s}({:d})'.format(self.keys[key_index], i)
for key_index in key_indices)
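# e.g. get_example_by_keys(3, (1, 2)) == ('item1(3)', 'item2(3)')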
@testing.parameterize(*testing.product_dict(
[
{'iterable': tuple},
{'iterable': list},
],
[
{
'keys': 'item1',
'func': lambda in_data: 'transformed_' + in_data[1],
'expected_sample': 'transformed_item1(3)',
},
{
'keys': ('item1',),
'func': lambda in_data: ('transformed_' + in_data[1],),
'expected_sample': ('transformed_item1(3)',),
},
{
'keys': ('item0', 'item2'),
'func': lambda in_data: (
'transformed_' + in_data[0],
'transformed_' + in_data[2]),
'expected_sample':
('transformed_item0(3)', 'transformed_item2(3)'),
},
],
))
class TestTransformDataset(unittest.TestCase):
def setUp(self):
self.dataset = SampleDataset()
def _check(self, dataset, expected_keys):
self.assertIsInstance(dataset, SliceableDataset)
self.assertEqual(len(dataset), len(self.dataset))
self.assertEqual(dataset.keys, expected_keys)
self.assertEqual(dataset[3], self.expected_sample)
def test_transform(self):
if isinstance(self.keys, tuple):
keys = self.iterable(self.keys)
else:
keys = self.keys
dataset = TransformDataset(self.dataset, keys, self.func)
self._check(dataset, self.keys)
def test_transform_compat(self):
if isinstance(self.keys, tuple):
expected_keys = (None,) * len(self.keys)
else:
expected_keys = None
dataset = TransformDataset(self.dataset, self.func)
self._check(dataset, expected_keys)
testing.run_module(__name__, __file__)
|
from __future__ import print_function
import unittest
import boto3
from botocore.stub import Stubber
from credstash import KeyService, KmsError
class TestKeyService(unittest.TestCase):
def test_generate_key_data_success(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {}
with Stubber(kms_client) as stubber:
stubber.add_response('generate_data_key', {
'CiphertextBlob': b'ciphertext',
'Plaintext': b'plaintext',
'KeyId': 'string'
}, expected_params = {
'KeyId': key_id,
'EncryptionContext': encryption_context,
'NumberOfBytes': 1
})
key_service = KeyService(kms_client, key_id, encryption_context)
response = key_service.generate_key_data(1)
self.assertEqual(response[0], b'plaintext')
self.assertEqual(response[1], b'ciphertext')
def test_generate_key_data_error(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {}
with Stubber(kms_client) as stubber:
stubber.add_client_error(
'generate_data_key',
'KeyUnavailableException',
'The request was rejected because the specified CMK was not available. The request can be retried.',
500,
expected_params={
'KeyId': key_id,
'EncryptionContext': encryption_context,
'NumberOfBytes': 1
})
key_service = KeyService(kms_client, key_id, encryption_context)
with self.assertRaises(KmsError) as e:
key_service.generate_key_data(1)
self.assertEqual(e, KmsError("Could not generate key using KMS key %s (Details: %s)" % (key_id, 'The request was rejected because the specified CMK was not available. The request can be retried.')))
def test_decrypt_success(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {}
with Stubber(kms_client) as stubber:
stubber.add_response('decrypt', {
'KeyId': 'key_id',
'Plaintext': b'plaintext'
}, expected_params = {
'CiphertextBlob': 'encoded_key',
'EncryptionContext': encryption_context
})
key_service = KeyService(kms_client, key_id, encryption_context)
response = key_service.decrypt('encoded_key')
self.assertEqual(response, b'plaintext')
def test_decrypt_error(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {}
with Stubber(kms_client) as stubber:
stubber.add_client_error(
'decrypt',
'NotFoundException',
'The request was rejected because the specified entity or resource could not be found.',
400,
expected_params = {
'CiphertextBlob': 'encoded_key',
'EncryptionContext': encryption_context
})
key_service = KeyService(kms_client, key_id, encryption_context)
with self.assertRaises(KmsError) as e:
response = key_service.decrypt('encoded_key')
self.assertEqual(e, KmsError("Decryption error The request was rejected because the specified entity or resource could not be found."))
def test_decrypt_invalid_ciphertext_error_no_context(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {}
with Stubber(kms_client) as stubber:
stubber.add_client_error(
'decrypt',
'InvalidCiphertextException',
'The request was rejected because the specified ciphertext, or additional authenticated data incorporated into the ciphertext, such as the encryption context, is corrupted, missing, or otherwise invalid.',
400,
expected_params = {
'CiphertextBlob': 'encoded_key',
'EncryptionContext': encryption_context
})
key_service = KeyService(kms_client, key_id, encryption_context)
with self.assertRaises(KmsError) as e:
msg = ("Could not decrypt hmac key with KMS. The credential may "
"require that an encryption context be provided to decrypt "
"it.")
response = key_service.decrypt('encoded_key')
self.assertEqual(e, KmsError(msg))
def test_decrypt_invalid_ciphertext_error_with_context(self):
kms_client = boto3.client('kms')
key_id = "test"
encryption_context = {
'key': 'value'
}
with Stubber(kms_client) as stubber:
stubber.add_client_error(
'decrypt',
'InvalidCiphertextException',
'The request was rejected because the specified ciphertext, or additional authenticated data incorporated into the ciphertext, such as the encryption context, is corrupted, missing, or otherwise invalid.',
400,
expected_params = {
'CiphertextBlob': 'encoded_key',
'EncryptionContext': encryption_context
})
key_service = KeyService(kms_client, key_id, encryption_context)
with self.assertRaises(KmsError) as e:
msg = ("Could not decrypt hmac key with KMS. The encryption "
"context provided may not match the one used when the "
"credential was stored.")
response = key_service.decrypt('encoded_key')
self.assertEqual(e, KmsError(msg))
|
import logging
from yeelight import (
BulbException,
BulbType,
HSVTransition,
LightType,
PowerMode,
RGBTransition,
SceneClass,
SleepTransition,
TemperatureTransition,
transitions,
)
from yeelight.flow import Flow
from yeelight.main import _MODEL_SPECS
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_KELVIN,
ATTR_RGB_COLOR,
ATTR_TRANSITION,
FLASH_LONG,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.components.yeelight import (
ATTR_COUNT,
ATTR_TRANSITIONS,
CONF_CUSTOM_EFFECTS,
CONF_FLOW_PARAMS,
CONF_MODE_MUSIC,
CONF_NIGHTLIGHT_SWITCH,
CONF_SAVE_ON_CHANGE,
CONF_TRANSITION,
DEFAULT_MODE_MUSIC,
DEFAULT_NIGHTLIGHT_SWITCH,
DEFAULT_SAVE_ON_CHANGE,
DEFAULT_TRANSITION,
DOMAIN,
YEELIGHT_HSV_TRANSACTION,
YEELIGHT_RGB_TRANSITION,
YEELIGHT_SLEEP_TRANSACTION,
YEELIGHT_TEMPERATURE_TRANSACTION,
)
from homeassistant.components.yeelight.light import (
ATTR_MINUTES,
ATTR_MODE,
EFFECT_DISCO,
EFFECT_FACEBOOK,
EFFECT_FAST_RANDOM_LOOP,
EFFECT_STOP,
EFFECT_TWITTER,
EFFECT_WHATSAPP,
SERVICE_SET_AUTO_DELAY_OFF_SCENE,
SERVICE_SET_COLOR_FLOW_SCENE,
SERVICE_SET_COLOR_SCENE,
SERVICE_SET_COLOR_TEMP_SCENE,
SERVICE_SET_HSV_SCENE,
SERVICE_SET_MODE,
SERVICE_START_FLOW,
SUPPORT_YEELIGHT,
SUPPORT_YEELIGHT_RGB,
SUPPORT_YEELIGHT_WHITE_TEMP,
YEELIGHT_COLOR_EFFECT_LIST,
YEELIGHT_MONO_EFFECT_LIST,
YEELIGHT_TEMP_ONLY_EFFECT_LIST,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
from homeassistant.util.color import (
color_hs_to_RGB,
color_hs_to_xy,
color_RGB_to_hs,
color_RGB_to_xy,
color_temperature_kelvin_to_mired,
color_temperature_mired_to_kelvin,
)
from . import (
ENTITY_LIGHT,
ENTITY_NIGHTLIGHT,
IP_ADDRESS,
MODULE,
NAME,
PROPERTIES,
UNIQUE_NAME,
_mocked_bulb,
_patch_discovery,
)
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry
CONFIG_ENTRY_DATA = {
CONF_HOST: IP_ADDRESS,
CONF_TRANSITION: DEFAULT_TRANSITION,
CONF_MODE_MUSIC: DEFAULT_MODE_MUSIC,
CONF_SAVE_ON_CHANGE: DEFAULT_SAVE_ON_CHANGE,
CONF_NIGHTLIGHT_SWITCH: DEFAULT_NIGHTLIGHT_SWITCH,
}
async def test_services(hass: HomeAssistant, caplog):
"""Test Yeelight services."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={
**CONFIG_ENTRY_DATA,
CONF_MODE_MUSIC: True,
CONF_SAVE_ON_CHANGE: True,
CONF_NIGHTLIGHT_SWITCH: True,
},
)
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb()
with _patch_discovery(MODULE), patch(f"{MODULE}.Bulb", return_value=mocked_bulb):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
async def _async_test_service(service, data, method, payload=None, domain=DOMAIN):
err_count = len([x for x in caplog.records if x.levelno == logging.ERROR])
# success
mocked_method = MagicMock()
setattr(type(mocked_bulb), method, mocked_method)
await hass.services.async_call(domain, service, data, blocking=True)
if payload is None:
mocked_method.assert_called_once()
elif isinstance(payload, list):
mocked_method.assert_called_once_with(*payload)
else:
mocked_method.assert_called_once_with(**payload)
assert (
len([x for x in caplog.records if x.levelno == logging.ERROR]) == err_count
)
# failure
mocked_method = MagicMock(side_effect=BulbException)
setattr(type(mocked_bulb), method, mocked_method)
await hass.services.async_call(domain, service, data, blocking=True)
assert (
len([x for x in caplog.records if x.levelno == logging.ERROR])
== err_count + 1
)
# turn_on
brightness = 100
color_temp = 200
transition = 1
await hass.services.async_call(
"light",
SERVICE_TURN_ON,
{
ATTR_ENTITY_ID: ENTITY_LIGHT,
ATTR_BRIGHTNESS: brightness,
ATTR_COLOR_TEMP: color_temp,
ATTR_FLASH: FLASH_LONG,
ATTR_EFFECT: EFFECT_STOP,
ATTR_TRANSITION: transition,
},
blocking=True,
)
mocked_bulb.turn_on.assert_called_once_with(
duration=transition * 1000,
light_type=LightType.Main,
power_mode=PowerMode.NORMAL,
)
mocked_bulb.turn_on.reset_mock()
mocked_bulb.start_music.assert_called_once()
mocked_bulb.set_brightness.assert_called_once_with(
brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main
)
mocked_bulb.set_color_temp.assert_called_once_with(
color_temperature_mired_to_kelvin(color_temp),
duration=transition * 1000,
light_type=LightType.Main,
)
mocked_bulb.start_flow.assert_called_once() # flash
mocked_bulb.stop_flow.assert_called_once_with(light_type=LightType.Main)
# turn_on nightlight
await _async_test_service(
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: ENTITY_NIGHTLIGHT},
"turn_on",
payload={
"duration": DEFAULT_TRANSITION,
"light_type": LightType.Main,
"power_mode": PowerMode.MOONLIGHT,
},
domain="light",
)
# turn_off
await _async_test_service(
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_TRANSITION: transition},
"turn_off",
domain="light",
payload={"duration": transition * 1000, "light_type": LightType.Main},
)
# set_mode
mode = "rgb"
await _async_test_service(
SERVICE_SET_MODE,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_MODE: "rgb"},
"set_power_mode",
[PowerMode[mode.upper()]],
)
# start_flow
await _async_test_service(
SERVICE_START_FLOW,
{
ATTR_ENTITY_ID: ENTITY_LIGHT,
ATTR_TRANSITIONS: [{YEELIGHT_TEMPERATURE_TRANSACTION: [1900, 2000, 60]}],
},
"start_flow",
)
# set_color_scene
await _async_test_service(
SERVICE_SET_COLOR_SCENE,
{
ATTR_ENTITY_ID: ENTITY_LIGHT,
ATTR_RGB_COLOR: [10, 20, 30],
ATTR_BRIGHTNESS: 50,
},
"set_scene",
[SceneClass.COLOR, 10, 20, 30, 50],
)
# set_hsv_scene
await _async_test_service(
SERVICE_SET_HSV_SCENE,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_HS_COLOR: [180, 50], ATTR_BRIGHTNESS: 50},
"set_scene",
[SceneClass.HSV, 180, 50, 50],
)
# set_color_temp_scene
await _async_test_service(
SERVICE_SET_COLOR_TEMP_SCENE,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_KELVIN: 4000, ATTR_BRIGHTNESS: 50},
"set_scene",
[SceneClass.CT, 4000, 50],
)
# set_color_flow_scene
await _async_test_service(
SERVICE_SET_COLOR_FLOW_SCENE,
{
ATTR_ENTITY_ID: ENTITY_LIGHT,
ATTR_TRANSITIONS: [{YEELIGHT_TEMPERATURE_TRANSACTION: [1900, 2000, 60]}],
},
"set_scene",
)
# set_auto_delay_off_scene
await _async_test_service(
SERVICE_SET_AUTO_DELAY_OFF_SCENE,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_MINUTES: 1, ATTR_BRIGHTNESS: 50},
"set_scene",
[SceneClass.AUTO_DELAY_OFF, 50, 1],
)
# test _cmd wrapper error handler
err_count = len([x for x in caplog.records if x.levelno == logging.ERROR])
type(mocked_bulb).turn_on = MagicMock()
type(mocked_bulb).set_brightness = MagicMock(side_effect=BulbException)
await hass.services.async_call(
"light",
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: 50},
blocking=True,
)
assert (
len([x for x in caplog.records if x.levelno == logging.ERROR]) == err_count + 1
)
async def test_device_types(hass: HomeAssistant):
"""Test different device types."""
mocked_bulb = _mocked_bulb()
properties = {**PROPERTIES}
properties.pop("active_mode")
properties["color_mode"] = "3"
mocked_bulb.last_properties = properties
async def _async_setup(config_entry):
with patch(f"{MODULE}.Bulb", return_value=mocked_bulb):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
async def _async_test(
bulb_type,
model,
target_properties,
nightlight_properties=None,
name=UNIQUE_NAME,
entity_id=ENTITY_LIGHT,
):
config_entry = MockConfigEntry(
domain=DOMAIN,
data={
**CONFIG_ENTRY_DATA,
CONF_NIGHTLIGHT_SWITCH: False,
},
)
config_entry.add_to_hass(hass)
mocked_bulb.bulb_type = bulb_type
model_specs = _MODEL_SPECS.get(model)
type(mocked_bulb).get_model_specs = MagicMock(return_value=model_specs)
await _async_setup(config_entry)
state = hass.states.get(entity_id)
assert state.state == "on"
target_properties["friendly_name"] = name
target_properties["flowing"] = False
target_properties["night_light"] = True
assert dict(state.attributes) == target_properties
await hass.config_entries.async_unload(config_entry.entry_id)
await config_entry.async_remove(hass)
registry = await entity_registry.async_get_registry(hass)
registry.async_clear_config_entry(config_entry.entry_id)
# nightlight
if nightlight_properties is None:
return
config_entry = MockConfigEntry(
domain=DOMAIN,
data={
**CONFIG_ENTRY_DATA,
CONF_NIGHTLIGHT_SWITCH: True,
},
)
config_entry.add_to_hass(hass)
await _async_setup(config_entry)
assert hass.states.get(entity_id).state == "off"
state = hass.states.get(f"{entity_id}_nightlight")
assert state.state == "on"
nightlight_properties["friendly_name"] = f"{name} nightlight"
nightlight_properties["icon"] = "mdi:weather-night"
nightlight_properties["flowing"] = False
nightlight_properties["night_light"] = True
assert dict(state.attributes) == nightlight_properties
await hass.config_entries.async_unload(config_entry.entry_id)
await config_entry.async_remove(hass)
registry.async_clear_config_entry(config_entry.entry_id)
bright = round(255 * int(PROPERTIES["bright"]) / 100)
current_brightness = round(255 * int(PROPERTIES["current_brightness"]) / 100)
ct = color_temperature_kelvin_to_mired(int(PROPERTIES["ct"]))
hue = int(PROPERTIES["hue"])
sat = int(PROPERTIES["sat"])
hs_color = (round(hue / 360 * 65536, 3), round(sat / 100 * 255, 3))
rgb_color = color_hs_to_RGB(*hs_color)
xy_color = color_hs_to_xy(*hs_color)
bg_bright = round(255 * int(PROPERTIES["bg_bright"]) / 100)
bg_ct = color_temperature_kelvin_to_mired(int(PROPERTIES["bg_ct"]))
bg_rgb = int(PROPERTIES["bg_rgb"])
bg_rgb_color = ((bg_rgb >> 16) & 0xFF, (bg_rgb >> 8) & 0xFF, bg_rgb & 0xFF)
bg_hs_color = color_RGB_to_hs(*bg_rgb_color)
bg_xy_color = color_RGB_to_xy(*bg_rgb_color)
nl_br = round(255 * int(PROPERTIES["nl_br"]) / 100)
# Default
await _async_test(
None,
"mono",
{
"effect_list": YEELIGHT_MONO_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT,
"brightness": bright,
},
)
# White
await _async_test(
BulbType.White,
"mono",
{
"effect_list": YEELIGHT_MONO_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT,
"brightness": bright,
},
)
# Color
model_specs = _MODEL_SPECS["color"]
await _async_test(
BulbType.Color,
"color",
{
"effect_list": YEELIGHT_COLOR_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT_RGB,
"min_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["max"]
),
"max_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["min"]
),
"brightness": current_brightness,
"color_temp": ct,
"hs_color": hs_color,
"rgb_color": rgb_color,
"xy_color": xy_color,
},
{"supported_features": 0},
)
# WhiteTemp
model_specs = _MODEL_SPECS["ceiling1"]
await _async_test(
BulbType.WhiteTemp,
"ceiling1",
{
"effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT_WHITE_TEMP,
"min_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["max"]
),
"max_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["min"]
),
"brightness": current_brightness,
"color_temp": ct,
},
{
"effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT,
"brightness": nl_br,
},
)
# WhiteTempMood
properties.pop("power")
properties["main_power"] = "on"
model_specs = _MODEL_SPECS["ceiling4"]
await _async_test(
BulbType.WhiteTempMood,
"ceiling4",
{
"friendly_name": NAME,
"effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST,
"flowing": False,
"night_light": True,
"supported_features": SUPPORT_YEELIGHT_WHITE_TEMP,
"min_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["max"]
),
"max_mireds": color_temperature_kelvin_to_mired(
model_specs["color_temp"]["min"]
),
"brightness": current_brightness,
"color_temp": ct,
},
{
"effect_list": YEELIGHT_TEMP_ONLY_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT,
"brightness": nl_br,
},
)
await _async_test(
BulbType.WhiteTempMood,
"ceiling4",
{
"effect_list": YEELIGHT_COLOR_EFFECT_LIST,
"supported_features": SUPPORT_YEELIGHT_RGB,
"min_mireds": color_temperature_kelvin_to_mired(6500),
"max_mireds": color_temperature_kelvin_to_mired(1700),
"brightness": bg_bright,
"color_temp": bg_ct,
"hs_color": bg_hs_color,
"rgb_color": bg_rgb_color,
"xy_color": bg_xy_color,
},
name=f"{UNIQUE_NAME} ambilight",
entity_id=f"{ENTITY_LIGHT}_ambilight",
)
async def test_effects(hass: HomeAssistant):
"""Test effects."""
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_CUSTOM_EFFECTS: [
{
CONF_NAME: "mock_effect",
CONF_FLOW_PARAMS: {
ATTR_COUNT: 3,
ATTR_TRANSITIONS: [
{YEELIGHT_HSV_TRANSACTION: [300, 50, 500, 50]},
{YEELIGHT_RGB_TRANSITION: [100, 100, 100, 300, 30]},
{YEELIGHT_TEMPERATURE_TRANSACTION: [3000, 200, 20]},
{YEELIGHT_SLEEP_TRANSACTION: [800]},
],
},
},
],
},
},
)
config_entry = MockConfigEntry(
domain=DOMAIN,
data=CONFIG_ENTRY_DATA,
)
config_entry.add_to_hass(hass)
mocked_bulb = _mocked_bulb()
with _patch_discovery(MODULE), patch(f"{MODULE}.Bulb", return_value=mocked_bulb):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ENTITY_LIGHT).attributes.get(
"effect_list"
) == YEELIGHT_COLOR_EFFECT_LIST + ["mock_effect"]
async def _async_test_effect(name, target=None, called=True):
mocked_start_flow = MagicMock()
type(mocked_bulb).start_flow = mocked_start_flow
await hass.services.async_call(
"light",
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_EFFECT: name},
blocking=True,
)
if not called:
return
mocked_start_flow.assert_called_once()
if target is None:
return
args, _ = mocked_start_flow.call_args
flow = args[0]
assert flow.count == target.count
assert flow.action == target.action
assert str(flow.transitions) == str(target.transitions)
effects = {
"mock_effect": Flow(
count=3,
transitions=[
HSVTransition(300, 50, 500, 50),
RGBTransition(100, 100, 100, 300, 30),
TemperatureTransition(3000, 200, 20),
SleepTransition(800),
],
),
EFFECT_DISCO: Flow(transitions=transitions.disco()),
EFFECT_FAST_RANDOM_LOOP: None,
EFFECT_WHATSAPP: Flow(count=2, transitions=transitions.pulse(37, 211, 102)),
EFFECT_FACEBOOK: Flow(count=2, transitions=transitions.pulse(59, 89, 152)),
EFFECT_TWITTER: Flow(count=2, transitions=transitions.pulse(0, 172, 237)),
}
for name, target in effects.items():
await _async_test_effect(name, target)
await _async_test_effect("not_existed", called=False)
|
import argparse
import sys
from a_sync import block
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.metrics.metastatus_lib import assert_frameworks_exist
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--expected",
"-e",
dest="expected",
type=str,
default="",
help="Comma separated list of frameworks to expect.\n"
"Will fail if any of these are not found",
)
return parser.parse_args()
def check_mesos_active_frameworks() -> None:
options = parse_args()
expected = options.expected.split(",")
master = get_mesos_master()
try:
state = block(master.state)
except MasterNotAvailableException as e:
print("CRITICAL: %s" % e.args[0])
sys.exit(2)
result = assert_frameworks_exist(state, expected)
if result.healthy:
print("OK: " + result.message)
sys.exit(0)
else:
print(result.message)
sys.exit(2)
if __name__ == "__main__":
check_mesos_active_frameworks()
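# Example invocation (framework names are illustrative):
#   check_mesos_active_frameworks --expected "marathon,chronos"
# Exits 0 when every expected framework is registered with the Mesos
# master and 2 (CRITICAL) when one is missing or the master is unreachable.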
|
revision = "33de094da890"
down_revision = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table(
"certificate_replacement_associations",
sa.Column("replaced_certificate_id", sa.Integer(), nullable=True),
sa.Column("certificate_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["certificate_id"], ["certificates.id"], ondelete="cascade"
),
sa.ForeignKeyConstraint(
["replaced_certificate_id"], ["certificates.id"], ondelete="cascade"
),
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table("certificate_replacement_associations")
### end Alembic commands ###
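# Example usage with the Alembic CLI (assuming a standard Alembic
# environment for this project):
#   alembic upgrade 33de094da890   # create certificate_replacement_associations
#   alembic downgrade -1           # drop it again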
|
from collections import abc, ChainMap
from typing import Hashable, MutableMapping, Sequence
from cerberus import errors
from cerberus.base import (
normalize_schema,
RulesSetRegistry,
SchemaError,
SchemaRegistry,
TypeDefinition,
UnconcernedValidator,
normalize_rulesset,
)
from cerberus.platform import _GenericAlias
from cerberus.utils import schema_hash
class SchemaValidator(UnconcernedValidator):
"""
This validator provides mechanics to validate schemas passed to a Cerberus
validator.
"""
types_mapping = UnconcernedValidator.types_mapping.copy()
types_mapping.update(
{
"container_but_not_string": TypeDefinition(
"container_but_not_string", (abc.Container,), (str,)
),
"generic_type_alias": TypeDefinition(
"generic_type_alias", (_GenericAlias,), ()
),
}
)
def __init__(self, *args, **kwargs):
kwargs.setdefault("known_rules_set_refs", set())
kwargs.setdefault("known_schema_refs", set())
super().__init__(*args, **kwargs)
@property
def known_rules_set_refs(self):
""" The encountered references to rules set registry items. """
return self._config["known_rules_set_refs"]
@property
def known_schema_refs(self):
""" The encountered references to schema registry items. """
return self._config["known_schema_refs"]
@property
def target_validator(self):
""" The validator whose schema is being validated. """
return self._config['target_validator']
def _check_with_dependencies(self, field, value):
if isinstance(value, str):
return
elif isinstance(value, abc.Mapping):
validator = self._get_child_validator(
document_crumb=field,
schema={'valuesrules': {'type': ('list',)}},
allow_unknown=True,
)
if not validator(value, normalize=False):
self._error(validator._errors)
elif isinstance(value, Sequence):
if not all(isinstance(x, Hashable) for x in value):
path = self.document_path + (field,)
self._error(path, 'All dependencies must be a hashable type.')
def _check_with_items(self, field, value):
self._check_with_schema(
field, {i: rules_set for i, rules_set in enumerate(value)}
)
def _check_with_rulesset(self, field, value):
# resolve schema registry reference
if isinstance(value, str):
if value in self.known_rules_set_refs:
return
else:
self.known_rules_set_refs.add(value)
definition = self.target_validator.rules_set_registry.get(value)
if definition is None:
self._error(field, "Rules set definition '{}' not found.".format(value))
return
else:
value = definition
_hash = (
schema_hash({'turing': value}),
schema_hash(self.target_validator.types_mapping),
)
if _hash in self.target_validator._valid_schemas:
return
validator = self._get_child_validator(
document_crumb=field,
allow_unknown=False,
schema=self.target_validator.rules,
)
validator(value, normalize=False)
if validator._errors:
self._error(validator._errors)
else:
self.target_validator._valid_schemas.add(_hash)
def _check_with_schema(self, field, value):
if isinstance(value, str):
if value in self.known_schema_refs:
return
self.known_schema_refs.add(value)
definition = self.target_validator.schema_registry.get(value)
if definition is None:
path = self.document_path + (field,)
self._error(path, "Schema definition '{}' not found.".format(value))
else:
definition = value
_hash = (
schema_hash(definition),
schema_hash(self.target_validator.types_mapping),
)
if _hash in self.target_validator._valid_schemas:
return
validator = self._get_child_validator(
document_crumb=field, schema=None, allow_unknown=self.root_allow_unknown
)
validator(self._expand_rules_set_refs(definition), normalize=False)
if validator._errors:
self._error(validator._errors)
else:
self.target_validator._valid_schemas.add(_hash)
def _check_with_type_names(self, field, value):
if value not in self.target_validator.types_mapping:
self._error(field, 'Unsupported type name: {}'.format(value))
def _expand_rules_set_refs(self, schema):
result = {}
for k, v in schema.items():
if isinstance(v, str):
result[k] = self.target_validator.rules_set_registry.get(v)
else:
result[k] = v
return result
def _validate_logical(self, rule, field, value):
""" {'allowed': ('allof', 'anyof', 'noneof', 'oneof')} """
if not isinstance(value, Sequence):
self._error(field, errors.TYPE)
return
validator = self._get_child_validator(
document_crumb=rule,
allow_unknown=False,
schema=self.target_validator.validation_rules,
)
for constraints in value:
_hash = (
schema_hash({'turing': constraints}),
schema_hash(self.target_validator.types_mapping),
)
if _hash in self.target_validator._valid_schemas:
continue
validator(constraints, normalize=False)
if validator._errors:
self._error(validator._errors)
else:
self.target_validator._valid_schemas.add(_hash)
# FIXME this rule seems to be called with very unexpected values
# def _validate_type(self, data_type, field, value):
# assert isinstance(value, tuple), (self.schema_path, value)
# return super()._validate_type(data_type, field, value)
class ValidatedSchema(MutableMapping):
""" A dict-subclass for caching of validated schemas. """
def __init__(self, validator, schema=None):
"""
:param validator: An instance of Validator-(sub-)class that uses this
schema.
:param schema: A definition-schema as ``dict``. Defaults to an empty
one.
"""
self._repr = ("unvalidated schema: {}", schema)
if not isinstance(validator, UnconcernedValidator):
raise RuntimeError('validator argument must be a Validator instance.')
self.validator = validator
self.regenerate_validation_schema()
self.schema_validator = SchemaValidator(
None,
allow_unknown=self.validation_schema,
error_handler=errors.SchemaErrorHandler,
target_validator=validator,
)
if isinstance(schema, str):
schema = validator.schema_registry.get(schema, schema)
if not isinstance(schema, abc.Mapping):
raise SchemaError(errors.SCHEMA_TYPE.format(schema))
else:
schema = normalize_schema(schema)
self.validate(schema)
self._repr = ("{}", schema)
self.schema = schema
def __delitem__(self, key):
self.schema.pop(key)
def __getitem__(self, item):
return self.schema[item]
def __iter__(self):
return iter(self.schema)
def __len__(self):
return len(self.schema)
def __repr__(self):
# TODO include id
return str(self)
def __setitem__(self, key, value):
value = normalize_rulesset(value)
self.validate({key: value})
self.schema[key] = value
def __str__(self):
return self._repr[0].format(self._repr[1])
def copy(self):
return self.__class__(self.validator, self.schema.copy())
def update(self, schema):
if not isinstance(schema, abc.Mapping):
raise TypeError("Value must be of Mapping Type.")
new_schema = ChainMap(schema, self.schema)
self.validate(new_schema)
self.schema = new_schema
def regenerate_validation_schema(self):
self.validation_schema = {
'allow_unknown': False,
'schema': self.validator.rules,
'type': ('Mapping',),
}
def validate(self, schema=None):
if schema is None:
schema = self.schema
_hash = (schema_hash(schema), schema_hash(self.validator.types_mapping))
if _hash not in self.validator._valid_schemas:
self._validate(schema)
self.validator._valid_schemas.add(_hash)
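# Note: the cache key pairs the schema hash with the hash of the
# validator's types_mapping, so an identical schema is re-validated
# whenever the set of known types changes.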
def _validate(self, schema):
"""
Validates a schema that defines rules against supported rules.
:param schema: The schema to be validated as a legal cerberus schema
according to the rules of the related Validator object.
"""
if isinstance(schema, str):
schema = self.validator.schema_registry.get(schema, schema)
if schema is None:
raise SchemaError(errors.SCHEMA_MISSING)
resolved = {
k: self.validator.rules_set_registry.get(v, v)
for k, v in schema.items()
if isinstance(v, str)
}
if not self.schema_validator(ChainMap(resolved, schema), normalize=False):
raise SchemaError(self.schema_validator.errors)
__all__ = (RulesSetRegistry.__name__, SchemaRegistry.__name__)
|
from datetime import timedelta
import logging
from meteoalertapi import Meteoalert
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_SAFETY,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Information provided by MeteoAlarm"
CONF_COUNTRY = "country"
CONF_LANGUAGE = "language"
CONF_PROVINCE = "province"
DEFAULT_NAME = "meteoalarm"
SCAN_INTERVAL = timedelta(minutes=30)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_COUNTRY): cv.string,
vol.Required(CONF_PROVINCE): cv.string,
vol.Optional(CONF_LANGUAGE, default="en"): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
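# A minimal configuration sketch for this platform (values are
# illustrative; only country and province are required):
#
#   binary_sensor:
#     - platform: meteoalarm
#       country: "NL"
#       province: "Groningen"
#       language: "nl"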
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the MeteoAlarm binary sensor platform."""
country = config[CONF_COUNTRY]
province = config[CONF_PROVINCE]
language = config[CONF_LANGUAGE]
name = config[CONF_NAME]
try:
api = Meteoalert(country, province, language)
except KeyError:
_LOGGER.error("Wrong country digits or province name")
return
add_entities([MeteoAlertBinarySensor(api, name)], True)
class MeteoAlertBinarySensor(BinarySensorEntity):
"""Representation of a MeteoAlert binary sensor."""
def __init__(self, api, name):
"""Initialize the MeteoAlert binary sensor."""
self._name = name
self._attributes = {}
self._state = None
self._api = api
@property
def name(self):
"""Return the name of the binary sensor."""
return self._name
@property
def is_on(self):
"""Return the status of the binary sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION
return self._attributes
@property
def device_class(self):
"""Return the device class of this binary sensor."""
return DEVICE_CLASS_SAFETY
def update(self):
"""Update device state."""
alert = self._api.get_alert()
if alert:
self._attributes = alert
self._state = True
else:
self._attributes = {}
self._state = False
|
from copy import deepcopy
import numpy as np
from .constants import FIFF
from .tag import read_tag
from .tree import dir_tree_find
from .write import start_block, end_block, write_int
from .matrix import write_named_matrix, _read_named_matrix
from ..utils import logger, verbose, _pl
def _add_kind(one):
"""Convert CTF kind to MNE kind."""
if one['ctfkind'] == int('47314252', 16):
one['kind'] = 1
elif one['ctfkind'] == int('47324252', 16):
one['kind'] = 2
elif one['ctfkind'] == int('47334252', 16):
one['kind'] = 3
else:
one['kind'] = int(one['ctfkind'])
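# Illustrative mapping, derived from the branches above:
#   ctfkind 0x47314252 -> kind 1, 0x47324252 -> 2, 0x47334252 -> 3,
#   anything else      -> int(ctfkind)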
def _calibrate_comp(comp, chs, row_names, col_names,
mult_keys=('range', 'cal'), flip=False):
"""Get row and column cals."""
ch_names = [c['ch_name'] for c in chs]
row_cals = np.zeros(len(row_names))
col_cals = np.zeros(len(col_names))
for names, cals, inv in zip((row_names, col_names), (row_cals, col_cals),
(False, True)):
for ii in range(len(cals)):
p = ch_names.count(names[ii])
if p != 1:
raise RuntimeError('Channel %s does not appear exactly once '
'in data, found %d instance%s'
% (names[ii], p, _pl(p)))
idx = ch_names.index(names[ii])
val = chs[idx][mult_keys[0]] * chs[idx][mult_keys[1]]
val = float(1. / val) if inv else float(val)
val = 1. / val if flip else val
cals[ii] = val
comp['rowcals'] = row_cals
comp['colcals'] = col_cals
comp['data']['data'] = (row_cals[:, None] *
comp['data']['data'] * col_cals[None, :])
@verbose
def read_ctf_comp(fid, node, chs, verbose=None):
"""Read the CTF software compensation data from the given node.
Parameters
----------
fid : file
The file descriptor.
node : dict
The node in the FIF tree.
chs : list
The list of channels from info['chs'] to match with
compensators that are read.
%(verbose)s
Returns
-------
compdata : list
The compensation data
"""
compdata = []
comps = dir_tree_find(node, FIFF.FIFFB_MNE_CTF_COMP_DATA)
for node in comps:
# Read the data we need
mat = _read_named_matrix(fid, node, FIFF.FIFF_MNE_CTF_COMP_DATA)
for p in range(node['nent']):
kind = node['directory'][p].kind
pos = node['directory'][p].pos
if kind == FIFF.FIFF_MNE_CTF_COMP_KIND:
tag = read_tag(fid, pos)
break
else:
raise Exception('Compensation type not found')
# Get the compensation kind and map it to a simple number
one = dict(ctfkind=tag.data)
del tag
_add_kind(one)
for p in range(node['nent']):
kind = node['directory'][p].kind
pos = node['directory'][p].pos
if kind == FIFF.FIFF_MNE_CTF_COMP_CALIBRATED:
tag = read_tag(fid, pos)
calibrated = tag.data
break
else:
calibrated = False
one['save_calibrated'] = bool(calibrated)
one['data'] = mat
if not calibrated:
# Calibrate...
_calibrate_comp(one, chs, mat['row_names'], mat['col_names'])
else:
one['rowcals'] = np.ones(mat['data'].shape[0], dtype=np.float64)
one['colcals'] = np.ones(mat['data'].shape[1], dtype=np.float64)
compdata.append(one)
if len(compdata) > 0:
logger.info(' Read %d compensation matrices' % len(compdata))
return compdata
###############################################################################
# Writing
def write_ctf_comp(fid, comps):
"""Write the CTF compensation data into a fif file.
Parameters
----------
fid : file
The open FIF file descriptor
comps : list
The compensation data to write
"""
if len(comps) <= 0:
return
# This is very simple in fact
start_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
for comp in comps:
start_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
# Write the compensation kind
write_int(fid, FIFF.FIFF_MNE_CTF_COMP_KIND, comp['ctfkind'])
if comp.get('save_calibrated', False):
write_int(fid, FIFF.FIFF_MNE_CTF_COMP_CALIBRATED,
comp['save_calibrated'])
if not comp.get('save_calibrated', True):
# Undo calibration
comp = deepcopy(comp)
data = ((1. / comp['rowcals'][:, None]) * comp['data']['data'] *
(1. / comp['colcals'][None, :]))
comp['data']['data'] = data
write_named_matrix(fid, FIFF.FIFF_MNE_CTF_COMP_DATA, comp['data'])
end_block(fid, FIFF.FIFFB_MNE_CTF_COMP_DATA)
end_block(fid, FIFF.FIFFB_MNE_CTF_COMP)
|
from kombu.utils.compat import _detect_environment
from kombu.utils.imports import symbol_by_name
def supports_librabbitmq():
"""Return true if :pypi:`librabbitmq` can be used."""
if _detect_environment() == 'default':
try:
import librabbitmq # noqa
except ImportError: # pragma: no cover
pass
else: # pragma: no cover
return True
TRANSPORT_ALIASES = {
'amqp': 'kombu.transport.pyamqp:Transport',
'amqps': 'kombu.transport.pyamqp:SSLTransport',
'pyamqp': 'kombu.transport.pyamqp:Transport',
'librabbitmq': 'kombu.transport.librabbitmq:Transport',
'memory': 'kombu.transport.memory:Transport',
'redis': 'kombu.transport.redis:Transport',
'rediss': 'kombu.transport.redis:Transport',
'SQS': 'kombu.transport.SQS:Transport',
'sqs': 'kombu.transport.SQS:Transport',
'mongodb': 'kombu.transport.mongodb:Transport',
'zookeeper': 'kombu.transport.zookeeper:Transport',
'sqlalchemy': 'kombu.transport.sqlalchemy:Transport',
'sqla': 'kombu.transport.sqlalchemy:Transport',
'SLMQ': 'kombu.transport.SLMQ.Transport',
'slmq': 'kombu.transport.SLMQ.Transport',
'filesystem': 'kombu.transport.filesystem:Transport',
'qpid': 'kombu.transport.qpid:Transport',
'sentinel': 'kombu.transport.redis:SentinelTransport',
'consul': 'kombu.transport.consul:Transport',
'etcd': 'kombu.transport.etcd:Transport',
'azurestoragequeues': 'kombu.transport.azurestoragequeues:Transport',
'azureservicebus': 'kombu.transport.azureservicebus:Transport',
'pyro': 'kombu.transport.pyro:Transport'
}
_transport_cache = {}
def resolve_transport(transport=None):
"""Get transport by name.
Arguments:
transport (Union[str, type]): This can be either
an actual transport class, or the fully qualified
path to a transport class, or the alias of a transport.
"""
if isinstance(transport, str):
try:
transport = TRANSPORT_ALIASES[transport]
except KeyError:
if '.' not in transport and ':' not in transport:
from kombu.utils.text import fmatch_best
alt = fmatch_best(transport, TRANSPORT_ALIASES)
if alt:
raise KeyError(
'No such transport: {}. Did you mean {}?'.format(
transport, alt))
raise KeyError(f'No such transport: {transport}')
else:
if callable(transport):
transport = transport()
return symbol_by_name(transport)
return transport
def get_transport_cls(transport=None):
"""Get transport class by name.
The transport string is the full path to a transport class, e.g.::
"kombu.transport.pyamqp:Transport"
If the name does not include `"."` (is not fully qualified),
the alias table will be consulted.
"""
if transport not in _transport_cache:
_transport_cache[transport] = resolve_transport(transport)
return _transport_cache[transport]
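# A minimal usage sketch (assumptions: this module is importable as
# ``kombu.transport`` and the dependency-free in-memory transport is
# available). Alias and fully qualified path resolve to the same class,
# and repeated lookups are served from ``_transport_cache``.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    by_alias = get_transport_cls('memory')
    by_path = get_transport_cls('kombu.transport.memory:Transport')
    assert by_alias is by_path
    assert get_transport_cls('memory') is by_alias  # second lookup hits the cache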
|
from homeassistant.components import config
from homeassistant.const import EVENT_COMPONENT_LOADED
from homeassistant.setup import ATTR_COMPONENT, async_setup_component
from tests.async_mock import patch
from tests.common import mock_component
async def test_config_setup(hass, loop):
"""Test it sets up hassbian."""
await async_setup_component(hass, "config", {})
assert "config" in hass.config.components
async def test_load_on_demand_already_loaded(hass, aiohttp_client):
"""Test getting suites."""
mock_component(hass, "zwave")
with patch.object(config, "SECTIONS", []), patch.object(
config, "ON_DEMAND", ["zwave"]
), patch(
"homeassistant.components.config.zwave.async_setup", return_value=True
) as stp:
await async_setup_component(hass, "config", {})
await hass.async_block_till_done()
assert stp.called
async def test_load_on_demand_on_load(hass, aiohttp_client):
"""Test getting suites."""
with patch.object(config, "SECTIONS", []), patch.object(
config, "ON_DEMAND", ["zwave"]
):
await async_setup_component(hass, "config", {})
assert "config.zwave" not in hass.config.components
with patch(
"homeassistant.components.config.zwave.async_setup", return_value=True
) as stp:
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: "zwave"})
await hass.async_block_till_done()
assert stp.called
|
import logging
from pyotgw import vars as gw_vars
from homeassistant.components.climate import ENTITY_ID_FORMAT, ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
PRESET_AWAY,
PRESET_NONE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_ID,
PRECISION_HALVES,
PRECISION_TENTHS,
PRECISION_WHOLE,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import async_generate_entity_id
from . import DOMAIN
from .const import CONF_FLOOR_TEMP, CONF_PRECISION, DATA_GATEWAYS, DATA_OPENTHERM_GW
_LOGGER = logging.getLogger(__name__)
DEFAULT_FLOOR_TEMP = False
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up an OpenTherm Gateway climate entity."""
ents = []
ents.append(
OpenThermClimate(
hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]],
config_entry.options,
)
)
async_add_entities(ents)
class OpenThermClimate(ClimateEntity):
"""Representation of a climate device."""
def __init__(self, gw_dev, options):
"""Initialize the device."""
self._gateway = gw_dev
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, gw_dev.gw_id, hass=gw_dev.hass
)
self.friendly_name = gw_dev.name
self.floor_temp = options.get(CONF_FLOOR_TEMP, DEFAULT_FLOOR_TEMP)
self.temp_precision = options.get(CONF_PRECISION)
self._available = False
self._current_operation = None
self._current_temperature = None
self._hvac_mode = HVAC_MODE_HEAT
self._new_target_temperature = None
self._target_temperature = None
self._away_mode_a = None
self._away_mode_b = None
self._away_state_a = False
self._away_state_b = False
self._unsub_options = None
self._unsub_updates = None
@callback
def update_options(self, entry):
"""Update climate entity options."""
self.floor_temp = entry.options[CONF_FLOOR_TEMP]
self.temp_precision = entry.options[CONF_PRECISION]
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Connect to the OpenTherm Gateway device."""
_LOGGER.debug("Added OpenTherm Gateway climate device %s", self.friendly_name)
self._unsub_updates = async_dispatcher_connect(
self.hass, self._gateway.update_signal, self.receive_report
)
self._unsub_options = async_dispatcher_connect(
self.hass, self._gateway.options_update_signal, self.update_options
)
async def async_will_remove_from_hass(self):
"""Unsubscribe from updates from the component."""
_LOGGER.debug("Removing OpenTherm Gateway climate %s", self.friendly_name)
self._unsub_options()
self._unsub_updates()
@callback
def receive_report(self, status):
"""Receive and handle a new report from the Gateway."""
self._available = bool(status)
ch_active = status.get(gw_vars.DATA_SLAVE_CH_ACTIVE)
flame_on = status.get(gw_vars.DATA_SLAVE_FLAME_ON)
cooling_active = status.get(gw_vars.DATA_SLAVE_COOLING_ACTIVE)
if ch_active and flame_on:
self._current_operation = CURRENT_HVAC_HEAT
self._hvac_mode = HVAC_MODE_HEAT
elif cooling_active:
self._current_operation = CURRENT_HVAC_COOL
self._hvac_mode = HVAC_MODE_COOL
else:
self._current_operation = CURRENT_HVAC_IDLE
self._current_temperature = status.get(gw_vars.DATA_ROOM_TEMP)
temp_upd = status.get(gw_vars.DATA_ROOM_SETPOINT)
if self._target_temperature != temp_upd:
self._new_target_temperature = None
self._target_temperature = temp_upd
# GPIO mode 5: 0 == Away
# GPIO mode 6: 1 == Away
gpio_a_state = status.get(gw_vars.OTGW_GPIO_A)
if gpio_a_state == 5:
self._away_mode_a = 0
elif gpio_a_state == 6:
self._away_mode_a = 1
else:
self._away_mode_a = None
gpio_b_state = status.get(gw_vars.OTGW_GPIO_B)
if gpio_b_state == 5:
self._away_mode_b = 0
elif gpio_b_state == 6:
self._away_mode_b = 1
else:
self._away_mode_b = None
if self._away_mode_a is not None:
self._away_state_a = (
status.get(gw_vars.OTGW_GPIO_A_STATE) == self._away_mode_a
)
if self._away_mode_b is not None:
self._away_state_b = (
status.get(gw_vars.OTGW_GPIO_B_STATE) == self._away_mode_b
)
self.async_write_ha_state()
@property
def available(self):
"""Return availability of the sensor."""
return self._available
@property
def name(self):
"""Return the friendly name."""
return self.friendly_name
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {(DOMAIN, self._gateway.gw_id)},
"name": self._gateway.name,
"manufacturer": "Schelte Bron",
"model": "OpenTherm Gateway",
"sw_version": self._gateway.gw_version,
}
@property
def unique_id(self):
"""Return a unique ID."""
return self._gateway.gw_id
@property
def precision(self):
"""Return the precision of the system."""
if self.temp_precision is not None and self.temp_precision != 0:
return self.temp_precision
if self.hass.config.units.temperature_unit == TEMP_CELSIUS:
return PRECISION_HALVES
return PRECISION_WHOLE
@property
def should_poll(self):
"""Disable polling for this entity."""
return False
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def hvac_action(self):
"""Return current HVAC operation."""
return self._current_operation
@property
def hvac_mode(self):
"""Return current HVAC mode."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return available HVAC modes."""
return []
def set_hvac_mode(self, hvac_mode):
"""Set the HVAC mode."""
_LOGGER.warning("Changing HVAC mode is not supported")
@property
def current_temperature(self):
"""Return the current temperature."""
if self._current_temperature is None:
return
if self.floor_temp is True:
if self.precision == PRECISION_HALVES:
return int(2 * self._current_temperature) / 2
if self.precision == PRECISION_TENTHS:
return int(10 * self._current_temperature) / 10
return int(self._current_temperature)
return self._current_temperature
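# Illustrative rounding when floor_temp is enabled, derived from the
# branches above, for a reading of 21.67 degrees:
#   PRECISION_HALVES -> int(2 * 21.67) / 2   == 21.5
#   PRECISION_TENTHS -> int(10 * 21.67) / 10 == 21.6
#   otherwise        -> int(21.67)           == 21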
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._new_target_temperature or self._target_temperature
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return self.precision
@property
def preset_mode(self):
"""Return current preset mode."""
if self._away_state_a or self._away_state_b:
return PRESET_AWAY
return PRESET_NONE
@property
def preset_modes(self):
"""Available preset modes to set."""
return []
def set_preset_mode(self, preset_mode):
"""Set the preset mode."""
_LOGGER.warning("Changing preset mode is not supported")
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
if ATTR_TEMPERATURE in kwargs:
temp = float(kwargs[ATTR_TEMPERATURE])
if temp == self.target_temperature:
return
self._new_target_temperature = await self._gateway.gateway.set_target_temp(
temp
)
self.async_write_ha_state()
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def min_temp(self):
"""Return the minimum temperature."""
return 1
@property
def max_temp(self):
"""Return the maximum temperature."""
return 30
|
from aiohomekit.model.characteristics import (
CharacteristicsTypes,
InUseValues,
IsConfiguredValues,
)
from aiohomekit.model.services import ServicesTypes
from tests.components.homekit_controller.common import setup_test_component
def create_switch_service(accessory):
"""Define outlet characteristics."""
service = accessory.add_service(ServicesTypes.OUTLET)
on_char = service.add_char(CharacteristicsTypes.ON)
on_char.value = False
outlet_in_use = service.add_char(CharacteristicsTypes.OUTLET_IN_USE)
outlet_in_use.value = False
def create_valve_service(accessory):
"""Define valve characteristics."""
service = accessory.add_service(ServicesTypes.VALVE)
on_char = service.add_char(CharacteristicsTypes.ACTIVE)
on_char.value = False
in_use = service.add_char(CharacteristicsTypes.IN_USE)
in_use.value = InUseValues.IN_USE
configured = service.add_char(CharacteristicsTypes.IS_CONFIGURED)
configured.value = IsConfiguredValues.CONFIGURED
remaining = service.add_char(CharacteristicsTypes.REMAINING_DURATION)
remaining.value = 99
async def test_switch_change_outlet_state(hass, utcnow):
"""Test that we can turn a HomeKit outlet on and off again."""
helper = await setup_test_component(hass, create_switch_service)
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True
)
assert helper.characteristics[("outlet", "on")].value == 1
await hass.services.async_call(
"switch", "turn_off", {"entity_id": "switch.testdevice"}, blocking=True
)
assert helper.characteristics[("outlet", "on")].value == 0
async def test_switch_read_outlet_state(hass, utcnow):
"""Test that we can read the state of a HomeKit outlet accessory."""
helper = await setup_test_component(hass, create_switch_service)
# Initial state is that the switch is off and the outlet isn't in use
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "off"
assert switch_1.attributes["outlet_in_use"] is False
# Simulate that someone switched on the device in the real world not via HA
helper.characteristics[("outlet", "on")].set_value(True)
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "on"
assert switch_1.attributes["outlet_in_use"] is False
# Simulate that device switched off in the real world not via HA
helper.characteristics[("outlet", "on")].set_value(False)
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "off"
# Simulate that someone plugged something into the device
helper.characteristics[("outlet", "outlet-in-use")].value = True
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "off"
assert switch_1.attributes["outlet_in_use"] is True
async def test_valve_change_active_state(hass, utcnow):
"""Test that we can turn a valve on and off again."""
helper = await setup_test_component(hass, create_valve_service)
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True
)
assert helper.characteristics[("valve", "active")].value == 1
await hass.services.async_call(
"switch", "turn_off", {"entity_id": "switch.testdevice"}, blocking=True
)
assert helper.characteristics[("valve", "active")].value == 0
async def test_valve_read_state(hass, utcnow):
"""Test that we can read the state of a valve accessory."""
helper = await setup_test_component(hass, create_valve_service)
    # Initial state is that the valve is off, in use, configured, with 99s remaining
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "off"
assert switch_1.attributes["in_use"] is True
assert switch_1.attributes["is_configured"] is True
assert switch_1.attributes["remaining_duration"] == 99
# Simulate that someone switched on the device in the real world not via HA
helper.characteristics[("valve", "active")].set_value(True)
switch_1 = await helper.poll_and_get_state()
assert switch_1.state == "on"
# Simulate that someone configured the device in the real world not via HA
helper.characteristics[
("valve", "is-configured")
].value = IsConfiguredValues.NOT_CONFIGURED
switch_1 = await helper.poll_and_get_state()
assert switch_1.attributes["is_configured"] is False
    # Simulate that someone is using the device in the real world, not via HA
helper.characteristics[("valve", "in-use")].value = InUseValues.NOT_IN_USE
switch_1 = await helper.poll_and_get_state()
assert switch_1.attributes["in_use"] is False
|
from __future__ import absolute_import
import os
try:
import cPickle as _pickle
except ImportError:
import pickle as _pickle
from gensim import utils
from gensim.models.doc2vec import Doc2Vec
from gensim.models.word2vec import Word2Vec
from gensim.models.fasttext import FastText
from gensim.models import KeyedVectors
_NOANNOY = ImportError("Annoy not installed. To use the Annoy indexer, please run `pip install annoy`.")
class AnnoyIndexer():
"""This class allows the use of `Annoy <https://github.com/spotify/annoy>`_ for fast (approximate)
vector retrieval in `most_similar()` calls of
:class:`~gensim.models.word2vec.Word2Vec`, :class:`~gensim.models.doc2vec.Doc2Vec`,
:class:`~gensim.models.fasttext.FastText` and :class:`~gensim.models.keyedvectors.Word2VecKeyedVectors` models.
"""
def __init__(self, model=None, num_trees=None):
"""
Parameters
----------
model : trained model, optional
Use vectors from this model as the source for the index.
num_trees : int, optional
Number of trees for Annoy indexer.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.similarities.annoy import AnnoyIndexer
>>> from gensim.models import Word2Vec
>>>
>>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']]
>>> model = Word2Vec(sentences, min_count=1, seed=1)
>>>
>>> indexer = AnnoyIndexer(model, 2)
        >>> model.wv.most_similar("cat", topn=2, indexer=indexer)
[('cat', 1.0), ('dog', 0.32011348009109497)]
"""
self.index = None
self.labels = None
self.model = model
self.num_trees = num_trees
if model and num_trees:
# Extract the KeyedVectors object from whatever model we were given.
if isinstance(self.model, Doc2Vec):
kv = self.model.dv
elif isinstance(self.model, (Word2Vec, FastText)):
kv = self.model.wv
elif isinstance(self.model, (KeyedVectors,)):
kv = self.model
else:
raise ValueError("Only a Word2Vec, Doc2Vec, FastText or KeyedVectors instance can be used")
self._build_from_model(kv.get_normed_vectors(), kv.index_to_key, kv.vector_size)
def save(self, fname, protocol=2):
"""Save AnnoyIndexer instance to disk.
Parameters
----------
fname : str
Path to output. Save will produce 2 files:
`fname`: Annoy index itself.
`fname.dict`: Index metadata.
protocol : int, optional
Protocol for pickle.
Notes
-----
This method saves **only the index**. The trained model isn't preserved.
"""
self.index.save(fname)
d = {'f': self.model.vector_size, 'num_trees': self.num_trees, 'labels': self.labels}
with utils.open(fname + '.dict', 'wb') as fout:
_pickle.dump(d, fout, protocol=protocol)
def load(self, fname):
"""Load an AnnoyIndexer instance from disk.
Parameters
----------
fname : str
The path as previously used by ``save()``.
Examples
--------
.. sourcecode:: pycon
        >>> from gensim.similarities.annoy import AnnoyIndexer
>>> from gensim.models import Word2Vec
>>> from tempfile import mkstemp
>>>
>>> sentences = [['cute', 'cat', 'say', 'meow'], ['cute', 'dog', 'say', 'woof']]
>>> model = Word2Vec(sentences, min_count=1, seed=1, epochs=10)
>>>
>>> indexer = AnnoyIndexer(model, 2)
>>> _, temp_fn = mkstemp()
>>> indexer.save(temp_fn)
>>>
>>> new_indexer = AnnoyIndexer()
>>> new_indexer.load(temp_fn)
>>> new_indexer.model = model
"""
fname_dict = fname + '.dict'
if not (os.path.exists(fname) and os.path.exists(fname_dict)):
raise IOError(
f"Can't find index files '{fname}' and '{fname_dict}' - unable to restore AnnoyIndexer state."
)
try:
from annoy import AnnoyIndex
except ImportError:
raise _NOANNOY
with utils.open(fname_dict, 'rb') as f:
d = _pickle.loads(f.read())
self.num_trees = d['num_trees']
self.index = AnnoyIndex(d['f'], metric='angular')
self.index.load(fname)
self.labels = d['labels']
def _build_from_model(self, vectors, labels, num_features):
try:
from annoy import AnnoyIndex
except ImportError:
raise _NOANNOY
index = AnnoyIndex(num_features, metric='angular')
for vector_num, vector in enumerate(vectors):
index.add_item(vector_num, vector)
index.build(self.num_trees)
self.index = index
self.labels = labels
def most_similar(self, vector, num_neighbors):
"""Find `num_neighbors` most similar items.
Parameters
----------
vector : numpy.array
Vector for word/document.
num_neighbors : int
Number of most similar items
Returns
-------
list of (str, float)
List of most similar items in format [(`item`, `cosine_distance`), ... ]
"""
ids, distances = self.index.get_nns_by_vector(
vector, num_neighbors, include_distances=True)
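        # Annoy's angular distance lies in [0, 2]; map it to a similarity score in [0, 1].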
return [(self.labels[ids[i]], 1 - distances[i] / 2) for i in range(len(ids))]
|
from datetime import timedelta
import logging
from typing import Any, Dict, Optional
import aiohttp
from geniushubclient import GeniusHub
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
CONF_HOST,
CONF_MAC,
CONF_PASSWORD,
CONF_TOKEN,
CONF_USERNAME,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DOMAIN = "geniushub"
# temperature is repeated here, as it gives access to high-precision temps
GH_ZONE_ATTRS = ["mode", "temperature", "type", "occupied", "override"]
GH_DEVICE_ATTRS = {
"luminance": "luminance",
"measuredTemperature": "measured_temperature",
"occupancyTrigger": "occupancy_trigger",
"setback": "setback",
"setTemperature": "set_temperature",
"wakeupInterval": "wakeup_interval",
}
SCAN_INTERVAL = timedelta(seconds=60)
MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$"
V1_API_SCHEMA = vol.Schema(
{
vol.Required(CONF_TOKEN): cv.string,
vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
}
)
V3_API_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Any(V3_API_SCHEMA, V1_API_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
ATTR_ZONE_MODE = "mode"
ATTR_DURATION = "duration"
SVC_SET_ZONE_MODE = "set_zone_mode"
SVC_SET_ZONE_OVERRIDE = "set_zone_override"
SET_ZONE_MODE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_ZONE_MODE): vol.In(["off", "timer", "footprint"]),
}
)
SET_ZONE_OVERRIDE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_TEMPERATURE): vol.All(
vol.Coerce(float), vol.Range(min=4, max=28)
),
vol.Optional(ATTR_DURATION): vol.All(
cv.time_period,
vol.Range(min=timedelta(minutes=5), max=timedelta(days=1)),
),
}
)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Create a Genius Hub system."""
hass.data[DOMAIN] = {}
kwargs = dict(config[DOMAIN])
if CONF_HOST in kwargs:
args = (kwargs.pop(CONF_HOST),)
else:
args = (kwargs.pop(CONF_TOKEN),)
hub_uid = kwargs.pop(CONF_MAC, None)
client = GeniusHub(*args, **kwargs, session=async_get_clientsession(hass))
broker = hass.data[DOMAIN]["broker"] = GeniusBroker(hass, client, hub_uid)
try:
await client.update()
except aiohttp.ClientResponseError as err:
_LOGGER.error("Setup failed, check your configuration, %s", err)
return False
broker.make_debug_log_entries()
async_track_time_interval(hass, broker.async_update, SCAN_INTERVAL)
for platform in ["climate", "water_heater", "sensor", "binary_sensor", "switch"]:
hass.async_create_task(async_load_platform(hass, platform, DOMAIN, {}, config))
setup_service_functions(hass, broker)
return True
@callback
def setup_service_functions(hass: HomeAssistantType, broker):
"""Set up the service functions."""
@verify_domain_control(hass, DOMAIN)
async def set_zone_mode(call) -> None:
"""Set the system mode."""
entity_id = call.data[ATTR_ENTITY_ID]
registry = await hass.helpers.entity_registry.async_get_registry()
registry_entry = registry.async_get(entity_id)
if registry_entry is None or registry_entry.platform != DOMAIN:
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
if registry_entry.domain != "climate":
raise ValueError(f"'{entity_id}' is not an {DOMAIN} zone")
payload = {
"unique_id": registry_entry.unique_id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
hass.services.async_register(
DOMAIN, SVC_SET_ZONE_MODE, set_zone_mode, schema=SET_ZONE_MODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_mode, schema=SET_ZONE_OVERRIDE_SCHEMA
)
class GeniusBroker:
"""Container for geniushub client and data."""
def __init__(self, hass, client, hub_uid) -> None:
"""Initialize the geniushub client."""
self.hass = hass
self.client = client
self._hub_uid = hub_uid
self._connect_error = False
@property
def hub_uid(self) -> int:
"""Return the Hub UID (MAC address)."""
# pylint: disable=no-member
return self._hub_uid if self._hub_uid is not None else self.client.uid
async def async_update(self, now, **kwargs) -> None:
"""Update the geniushub client's data."""
try:
await self.client.update()
if self._connect_error:
self._connect_error = False
_LOGGER.info("Connection to geniushub re-established")
except (
aiohttp.ClientResponseError,
aiohttp.client_exceptions.ClientConnectorError,
) as err:
if not self._connect_error:
self._connect_error = True
_LOGGER.error(
"Connection to geniushub failed (unable to update), message is: %s",
err,
)
return
self.make_debug_log_entries()
async_dispatcher_send(self.hass, DOMAIN)
def make_debug_log_entries(self) -> None:
"""Make any useful debug log entries."""
# pylint: disable=protected-access
_LOGGER.debug(
"Raw JSON: \n\nclient._zones = %s \n\nclient._devices = %s",
self.client._zones,
self.client._devices,
)
class GeniusEntity(Entity):
"""Base for all Genius Hub entities."""
def __init__(self) -> None:
"""Initialize the entity."""
self._unique_id = self._name = None
async def async_added_to_hass(self) -> None:
"""Set up a listener when this entity is added to HA."""
self.async_on_remove(async_dispatcher_connect(self.hass, DOMAIN, self._refresh))
async def _refresh(self, payload: Optional[dict] = None) -> None:
"""Process any signals."""
self.async_schedule_update_ha_state(force_refresh=True)
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
return self._unique_id
@property
def name(self) -> str:
"""Return the name of the geniushub entity."""
return self._name
@property
def should_poll(self) -> bool:
"""Return False as geniushub entities should not be polled."""
return False
class GeniusDevice(GeniusEntity):
"""Base for all Genius Hub devices."""
def __init__(self, broker, device) -> None:
"""Initialize the Device."""
super().__init__()
self._device = device
self._unique_id = f"{broker.hub_uid}_device_{device.id}"
self._last_comms = self._state_attr = None
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
attrs = {}
attrs["assigned_zone"] = self._device.data["assignedZones"][0]["name"]
if self._last_comms:
attrs["last_comms"] = self._last_comms.isoformat()
state = dict(self._device.data["state"])
if "_state" in self._device.data: # only via v3 API
state.update(self._device.data["_state"])
attrs["state"] = {
GH_DEVICE_ATTRS[k]: v for k, v in state.items() if k in GH_DEVICE_ATTRS
}
return attrs
async def async_update(self) -> None:
"""Update an entity's state data."""
if "_state" in self._device.data: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(
self._device.data["_state"]["lastComms"]
)
class GeniusZone(GeniusEntity):
"""Base for all Genius Hub zones."""
def __init__(self, broker, zone) -> None:
"""Initialize the Zone."""
super().__init__()
self._zone = zone
self._unique_id = f"{broker.hub_uid}_zone_{zone.id}"
async def _refresh(self, payload: Optional[dict] = None) -> None:
"""Process any signals."""
if payload is None:
self.async_schedule_update_ha_state(force_refresh=True)
return
if payload["unique_id"] != self._unique_id:
return
if payload["service"] == SVC_SET_ZONE_OVERRIDE:
temperature = round(payload["data"][ATTR_TEMPERATURE] * 10) / 10
duration = payload["data"].get(ATTR_DURATION, timedelta(hours=1))
await self._zone.set_override(temperature, int(duration.total_seconds()))
return
mode = payload["data"][ATTR_ZONE_MODE]
# pylint: disable=protected-access
if mode == "footprint" and not self._zone._has_pir:
raise TypeError(
f"'{self.entity_id}' can not support footprint mode (it has no PIR)"
)
await self._zone.set_mode(mode)
@property
def name(self) -> str:
"""Return the name of the climate device."""
return self._zone.name
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
status = {k: v for k, v in self._zone.data.items() if k in GH_ZONE_ATTRS}
return {"status": status}
class GeniusHeatingZone(GeniusZone):
"""Base for Genius Heating Zones."""
def __init__(self, broker, zone) -> None:
"""Initialize the Zone."""
super().__init__(broker, zone)
self._max_temp = self._min_temp = self._supported_features = None
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return self._zone.data.get("temperature")
@property
def target_temperature(self) -> float:
"""Return the temperature we try to reach."""
return self._zone.data["setpoint"]
@property
def min_temp(self) -> float:
"""Return max valid temperature that can be set."""
return self._min_temp
@property
def max_temp(self) -> float:
"""Return max valid temperature that can be set."""
return self._max_temp
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def supported_features(self) -> int:
"""Return the bitmask of supported features."""
return self._supported_features
async def async_set_temperature(self, **kwargs) -> None:
"""Set a new target temperature for this zone."""
await self._zone.set_override(
kwargs[ATTR_TEMPERATURE], kwargs.get(ATTR_DURATION, 3600)
)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from udp import UDPCollector
##########################################################################
class TestUDPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('UDPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = UDPCollector(config, None)
def test_import(self):
self.assertTrue(UDPCollector)
@patch('os.access', Mock(return_value=True))
@patch('__builtin__.open')
@patch.object(Collector, 'publish')
def test_should_open_proc_net_snmp(self, publish_mock, open_mock):
UDPCollector.PROC = ['/proc/net/snmp']
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/net/snmp')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
self.setUp([])
UDPCollector.PROC = [
self.getFixturePath('proc_net_snmp_1'),
]
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
UDPCollector.PROC = [
self.getFixturePath('proc_net_snmp_2'),
]
self.collector.collect()
metrics = {
'InDatagrams': 352320636.0,
'InErrors': 5.0,
'NoPorts': 449.0,
'OutDatagrams': 352353358.0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import os.path as op
import numpy as np
import pytest
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_equal)
from mne import io, Epochs, read_events, pick_types
from mne.decoding.csp import CSP, _ajd_pham, SPoC
from mne.utils import requires_sklearn
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
# if stop is too small pca may fail in some cases, but we're okay on this file
start, stop = 0, 8
def simulate_data(target, n_trials=100, n_channels=10, random_state=42):
"""Simulate data according to an instantaneous mixin model.
Data are simulated in the statistical source space, where one source is
modulated according to a target variable, before being mixed with a
random mixing matrix.
"""
rs = np.random.RandomState(random_state)
    # generate an orthogonal mixing matrix
mixing_mat = np.linalg.svd(rs.randn(n_channels, n_channels))[0]
S = rs.randn(n_trials, n_channels, 50)
S[:, 0] *= np.atleast_2d(np.sqrt(target)).T
S[:, 1:] *= 0.01 # less noise
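    # np.dot yields shape (n_channels, n_trials, n_times); transpose back to
    # (n_trials, n_channels, n_times)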
X = np.dot(mixing_mat, S).transpose((1, 0, 2))
return X, mixing_mat
def deterministic_toy_data(classes=('class_a', 'class_b')):
"""Generate a small deterministic toy data set.
Four independent sources are modulated by the target class and mixed
into signal space.
"""
sources_a = np.array([[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]],
dtype=float) * 2 - 1
sources_b = np.array([[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]],
dtype=float) * 2 - 1
sources_a[0, :] *= 1
sources_a[1, :] *= 2
sources_b[2, :] *= 3
sources_b[3, :] *= 4
mixing = np.array([[1.0, 0.8, 0.6, 0.4],
[0.8, 1.0, 0.8, 0.6],
[0.6, 0.8, 1.0, 0.8],
[0.4, 0.6, 0.8, 1.0]])
x_class_a = mixing @ sources_a
x_class_b = mixing @ sources_b
x = np.stack([x_class_a, x_class_b])
y = np.array(classes)
return x, y
@pytest.mark.slowtest
def test_csp():
"""Test Common Spatial Patterns algorithm on epochs."""
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[2:12:3] # subselect channels -> disable proj!
raw.add_proj([], remove_existing=True)
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True, proj=False)
epochs_data = epochs.get_data()
n_channels = epochs_data.shape[1]
y = epochs.events[:, -1]
# Init
pytest.raises(ValueError, CSP, n_components='foo', norm_trace=False)
for reg in ['foo', -0.1, 1.1]:
csp = CSP(reg=reg, norm_trace=False)
pytest.raises(ValueError, csp.fit, epochs_data, epochs.events[:, -1])
for reg in ['oas', 'ledoit_wolf', 0, 0.5, 1.]:
CSP(reg=reg, norm_trace=False)
for cov_est in ['foo', None]:
pytest.raises(ValueError, CSP, cov_est=cov_est, norm_trace=False)
with pytest.raises(TypeError, match='instance of bool'):
CSP(norm_trace='foo')
for cov_est in ['concat', 'epoch']:
CSP(cov_est=cov_est, norm_trace=False)
n_components = 3
# Fit
for norm_trace in [True, False]:
csp = CSP(n_components=n_components, norm_trace=norm_trace)
csp.fit(epochs_data, epochs.events[:, -1])
assert_equal(len(csp.mean_), n_components)
assert_equal(len(csp.std_), n_components)
# Transform
X = csp.fit_transform(epochs_data, y)
sources = csp.transform(epochs_data)
assert (sources.shape[1] == n_components)
assert (csp.filters_.shape == (n_channels, n_channels))
assert (csp.patterns_.shape == (n_channels, n_channels))
assert_array_almost_equal(sources, X)
# Test data exception
pytest.raises(ValueError, csp.fit, epochs_data,
np.zeros_like(epochs.events))
pytest.raises(ValueError, csp.fit, epochs, y)
pytest.raises(ValueError, csp.transform, epochs)
# Test plots
epochs.pick_types(meg='mag')
cmap = ('RdBu', True)
components = np.arange(n_components)
for plot in (csp.plot_patterns, csp.plot_filters):
plot(epochs.info, components=components, res=12, show=False, cmap=cmap)
# Test with more than 2 classes
epochs = Epochs(raw, events, tmin=tmin, tmax=tmax, picks=picks,
event_id=dict(aud_l=1, aud_r=2, vis_l=3, vis_r=4),
baseline=(None, 0), proj=False, preload=True)
epochs_data = epochs.get_data()
    n_channels = epochs_data.shape[1]
for cov_est in ['concat', 'epoch']:
csp = CSP(n_components=n_components, cov_est=cov_est, norm_trace=False)
csp.fit(epochs_data, epochs.events[:, 2]).transform(epochs_data)
assert_equal(len(csp._classes), 4)
assert_array_equal(csp.filters_.shape, [n_channels, n_channels])
assert_array_equal(csp.patterns_.shape, [n_channels, n_channels])
# Test average power transform
n_components = 2
assert (csp.transform_into == 'average_power')
feature_shape = [len(epochs_data), n_components]
X_trans = dict()
for log in (None, True, False):
csp = CSP(n_components=n_components, log=log, norm_trace=False)
assert (csp.log is log)
Xt = csp.fit_transform(epochs_data, epochs.events[:, 2])
assert_array_equal(Xt.shape, feature_shape)
X_trans[str(log)] = Xt
# log=None => log=True
assert_array_almost_equal(X_trans['None'], X_trans['True'])
# Different normalization return different transform
assert (np.sum((X_trans['True'] - X_trans['False']) ** 2) > 1.)
# Check wrong inputs
pytest.raises(ValueError, CSP, transform_into='average_power', log='foo')
# Test csp space transform
csp = CSP(transform_into='csp_space', norm_trace=False)
assert (csp.transform_into == 'csp_space')
for log in ('foo', True, False):
pytest.raises(ValueError, CSP, transform_into='csp_space', log=log,
norm_trace=False)
n_components = 2
csp = CSP(n_components=n_components, transform_into='csp_space',
norm_trace=False)
Xt = csp.fit(epochs_data, epochs.events[:, 2]).transform(epochs_data)
feature_shape = [len(epochs_data), n_components, epochs_data.shape[2]]
assert_array_equal(Xt.shape, feature_shape)
# Check mixing matrix on simulated data
y = np.array([100] * 50 + [1] * 50)
X, A = simulate_data(y)
for cov_est in ['concat', 'epoch']:
# fit csp
csp = CSP(n_components=1, cov_est=cov_est, norm_trace=False)
csp.fit(X, y)
        # check that the first pattern matches the mixing matrix
# the sign might change
corr = np.abs(np.corrcoef(csp.patterns_[0, :].T, A[:, 0])[0, 1])
assert np.abs(corr) > 0.99
# check output
out = csp.transform(X)
corr = np.abs(np.corrcoef(out[:, 0], y)[0, 1])
assert np.abs(corr) > 0.95
@requires_sklearn
def test_regularized_csp():
"""Test Common Spatial Patterns algorithm using regularized covariance."""
raw = io.read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
n_channels = epochs_data.shape[1]
n_components = 3
reg_cov = [None, 0.05, 'ledoit_wolf', 'oas']
for reg in reg_cov:
csp = CSP(n_components=n_components, reg=reg, norm_trace=False,
rank=None)
csp.fit(epochs_data, epochs.events[:, -1])
y = epochs.events[:, -1]
X = csp.fit_transform(epochs_data, y)
assert (csp.filters_.shape == (n_channels, n_channels))
assert (csp.patterns_.shape == (n_channels, n_channels))
assert_array_almost_equal(csp.fit(epochs_data, y).
transform(epochs_data), X)
# test init exception
pytest.raises(ValueError, csp.fit, epochs_data,
np.zeros_like(epochs.events))
pytest.raises(ValueError, csp.fit, epochs, y)
pytest.raises(ValueError, csp.transform, epochs)
csp.n_components = n_components
sources = csp.transform(epochs_data)
assert (sources.shape[1] == n_components)
@requires_sklearn
def test_csp_pipeline():
"""Test if CSP works in a pipeline."""
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
csp = CSP(reg=1, norm_trace=False)
svc = SVC()
pipe = Pipeline([("CSP", csp), ("SVC", svc)])
pipe.set_params(CSP__reg=0.2)
assert (pipe.get_params()["CSP__reg"] == 0.2)
def test_ajd():
"""Test approximate joint diagonalization."""
    # The implementation should obtain the same
    # results as the Matlab implementation by Pham Dinh-Tuan.
    # Generate a set of covariance matrices for test purposes
n_times, n_channels = 10, 3
seed = np.random.RandomState(0)
diags = 2.0 + 0.1 * seed.randn(n_times, n_channels)
A = 2 * seed.rand(n_channels, n_channels) - 1
A /= np.atleast_2d(np.sqrt(np.sum(A ** 2, 1))).T
covmats = np.empty((n_times, n_channels, n_channels))
for i in range(n_times):
covmats[i] = np.dot(np.dot(A, np.diag(diags[i])), A.T)
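    # By construction covmats[i] = A @ diag(diags[i]) @ A.T, so the set is exactly
    # jointly diagonalizable and V should approximate inv(A) up to scaling and
    # row permutation.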
V, D = _ajd_pham(covmats)
# Results obtained with original matlab implementation
V_matlab = [[-3.507280775058041, -5.498189967306344, 7.720624541198574],
[0.694689013234610, 0.775690358505945, -1.162043086446043],
[-0.592603135588066, -0.598996925696260, 1.009550086271192]]
assert_array_almost_equal(V, V_matlab)
def test_spoc():
"""Test SPoC."""
X = np.random.randn(10, 10, 20)
y = np.random.randn(10)
spoc = SPoC(n_components=4)
spoc.fit(X, y)
Xt = spoc.transform(X)
assert_array_equal(Xt.shape, [10, 4])
spoc = SPoC(n_components=4, transform_into='csp_space')
spoc.fit(X, y)
Xt = spoc.transform(X)
assert_array_equal(Xt.shape, [10, 4, 20])
assert_array_equal(spoc.filters_.shape, [10, 10])
assert_array_equal(spoc.patterns_.shape, [10, 10])
# check y
pytest.raises(ValueError, spoc.fit, X, y * 0)
    # Check that SPoC doesn't take CSP-specific input
pytest.raises(TypeError, SPoC, cov_est='epoch')
# Check mixing matrix on simulated data
rs = np.random.RandomState(42)
y = rs.rand(100) * 50 + 1
X, A = simulate_data(y)
# fit spoc
spoc = SPoC(n_components=1)
spoc.fit(X, y)
# check the first patterns match the mixing matrix
corr = np.abs(np.corrcoef(spoc.patterns_[0, :].T, A[:, 0])[0, 1])
assert np.abs(corr) > 0.99
# check output
out = spoc.transform(X)
corr = np.abs(np.corrcoef(out[:, 0], y)[0, 1])
assert np.abs(corr) > 0.85
def test_csp_twoclass_symmetry():
"""Test that CSP is symmetric when swapping classes."""
x, y = deterministic_toy_data(['class_a', 'class_b'])
csp = CSP(norm_trace=False, transform_into='average_power', log=True)
log_power = csp.fit_transform(x, y)
log_power_ratio_ab = log_power[0] - log_power[1]
x, y = deterministic_toy_data(['class_b', 'class_a'])
csp = CSP(norm_trace=False, transform_into='average_power', log=True)
log_power = csp.fit_transform(x, y)
log_power_ratio_ba = log_power[0] - log_power[1]
assert_array_almost_equal(log_power_ratio_ab,
log_power_ratio_ba)
def test_csp_component_ordering():
"""Test that CSP component ordering works as expected."""
x, y = deterministic_toy_data(['class_a', 'class_b'])
pytest.raises(ValueError, CSP, component_order='invalid')
# component_order='alternate' only works with two classes
csp = CSP(component_order='alternate')
with pytest.raises(ValueError):
csp.fit(np.zeros((3, 0, 0)), ['a', 'b', 'c'])
p_alt = CSP(component_order='alternate').fit(x, y).patterns_
p_mut = CSP(component_order='mutual_info').fit(x, y).patterns_
# This permutation of p_alt and p_mut is explained by the particular
# eigenvalues of the toy data: [0.06, 0.1, 0.5, 0.8].
# p_alt arranges them to [0.8, 0.06, 0.5, 0.1]
# p_mut arranges them to [0.06, 0.1, 0.8, 0.5]
assert_array_almost_equal(p_alt, p_mut[[2, 0, 3, 1]])
|
import keras
import tensorflow as tf
from matchzoo.engine import hyper_spaces
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
class DUET(BaseModel):
"""
DUET Model.
Examples:
>>> model = DUET()
>>> model.params['embedding_input_dim'] = 1000
>>> model.params['embedding_output_dim'] = 300
>>> model.params['lm_filters'] = 32
>>> model.params['lm_hidden_sizes'] = [64, 32]
>>> model.params['dropout_rate'] = 0.5
>>> model.params['dm_filters'] = 32
>>> model.params['dm_kernel_size'] = 3
>>> model.params['dm_d_mpool'] = 4
>>> model.params['dm_hidden_sizes'] = [64, 32]
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls):
"""Get default parameters."""
params = super().get_default_params(with_embedding=True)
        params.add(Param(name='lm_filters', value=32,
                         desc="Number of filters of the 1D convolution "
                              "layer in the local model."))
params.add(Param(name='lm_hidden_sizes', value=[32],
desc="A list of hidden size of the MLP layer "
"in the local model."))
        params.add(Param(name='dm_filters', value=32,
                         desc="Number of filters of the 1D convolution "
                              "layer in the distributed model."))
params.add(Param(name='dm_kernel_size', value=3,
desc="Kernel size of 1D convolution layer in "
"the distributed model."))
params.add(Param(name='dm_q_hidden_size', value=32,
desc="Hidden size of the MLP layer for the "
"left text in the distributed model."))
params.add(Param(name='dm_d_mpool', value=3,
desc="Max pooling size for the right text in "
"the distributed model."))
params.add(Param(name='dm_hidden_sizes', value=[32],
desc="A list of hidden size of the MLP layer "
"in the distributed model."))
params.add(Param(name='padding', value='same',
desc="The padding mode in the convolution "
"layer. It should be one of `same`, "
"`valid`, ""and `causal`."))
params.add(Param(name='activation_func', value='relu',
desc="Activation function in the convolution"
" layer."))
params.add(Param(
name='dropout_rate', value=0.5,
hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
q=0.02),
desc="The dropout rate."))
return params
def build(self):
"""Build model."""
query, doc = self._make_inputs()
embedding = self._make_embedding_layer()
q_embed = embedding(query)
d_embed = embedding(doc)
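        # Local model: operates on a binary exact-match interaction matrix built
        # directly from the query and document term ids.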
lm_xor = keras.layers.Lambda(self._xor_match)([query, doc])
lm_conv = keras.layers.Conv1D(
self._params['lm_filters'],
self._params['input_shapes'][1][0],
padding=self._params['padding'],
activation=self._params['activation_func']
)(lm_xor)
lm_conv = keras.layers.Dropout(self._params['dropout_rate'])(
lm_conv)
lm_feat = keras.layers.Reshape((-1,))(lm_conv)
for hidden_size in self._params['lm_hidden_sizes']:
lm_feat = keras.layers.Dense(
hidden_size,
activation=self._params['activation_func']
)(lm_feat)
lm_drop = keras.layers.Dropout(self._params['dropout_rate'])(
lm_feat)
lm_score = keras.layers.Dense(1)(lm_drop)
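        # Distributed model: convolutional representations of the query and document
        # embeddings, combined position-wise with a Hadamard product.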
dm_q_conv = keras.layers.Conv1D(
self._params['dm_filters'],
self._params['dm_kernel_size'],
padding=self._params['padding'],
activation=self._params['activation_func']
)(q_embed)
dm_q_conv = keras.layers.Dropout(self._params['dropout_rate'])(
dm_q_conv)
dm_q_mp = keras.layers.MaxPooling1D(
pool_size=self._params['input_shapes'][0][0])(dm_q_conv)
dm_q_rep = keras.layers.Reshape((-1,))(dm_q_mp)
dm_q_rep = keras.layers.Dense(self._params['dm_q_hidden_size'])(
dm_q_rep)
dm_q_rep = keras.layers.Lambda(lambda x: tf.expand_dims(x, 1))(
dm_q_rep)
dm_d_conv1 = keras.layers.Conv1D(
self._params['dm_filters'],
self._params['dm_kernel_size'],
padding=self._params['padding'],
activation=self._params['activation_func']
)(d_embed)
dm_d_conv1 = keras.layers.Dropout(self._params['dropout_rate'])(
dm_d_conv1)
dm_d_mp = keras.layers.MaxPooling1D(
pool_size=self._params['dm_d_mpool'])(dm_d_conv1)
dm_d_conv2 = keras.layers.Conv1D(
self._params['dm_filters'], 1,
padding=self._params['padding'],
activation=self._params['activation_func']
)(dm_d_mp)
dm_d_conv2 = keras.layers.Dropout(self._params['dropout_rate'])(
dm_d_conv2)
h_dot = keras.layers.Lambda(self._hadamard_dot)([dm_q_rep, dm_d_conv2])
dm_feat = keras.layers.Reshape((-1,))(h_dot)
for hidden_size in self._params['dm_hidden_sizes']:
dm_feat = keras.layers.Dense(hidden_size)(dm_feat)
dm_feat_drop = keras.layers.Dropout(self._params['dropout_rate'])(
dm_feat)
dm_score = keras.layers.Dense(1)(dm_feat_drop)
add = keras.layers.Add()([lm_score, dm_score])
out = self._make_output_layer()(add)
self._backend = keras.Model(inputs=[query, doc], outputs=out)
@classmethod
def _xor_match(cls, x):
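        # Per sample, build a (query_len x doc_len) binary matrix: entry (i, j) is
        # 1.0 where query term i equals document term j.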
t1 = x[0]
t2 = x[1]
t1_shape = t1.get_shape()
t2_shape = t2.get_shape()
t1_expand = tf.stack([t1] * t2_shape[1], 2)
t2_expand = tf.stack([t2] * t1_shape[1], 1)
out_bool = tf.equal(t1_expand, t2_expand)
out = tf.cast(out_bool, tf.float32)
return out
@classmethod
def _hadamard_dot(cls, x):
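        # Element-wise (Hadamard) product of the two representations.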
x1 = x[0]
x2 = x[1]
out = x1 * x2
return out
|
from datetime import timedelta
import pytest
from homeassistant.components import namecheapdns
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.common import async_fire_time_changed
HOST = "test"
DOMAIN = "bla"
PASSWORD = "abcdefgh"
@pytest.fixture
def setup_namecheapdns(hass, aioclient_mock):
"""Fixture that sets up NamecheapDNS."""
aioclient_mock.get(
namecheapdns.UPDATE_URL,
params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
text="<interface-response><ErrCount>0</ErrCount></interface-response>",
)
hass.loop.run_until_complete(
async_setup_component(
hass,
namecheapdns.DOMAIN,
{"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
)
)
async def test_setup(hass, aioclient_mock):
"""Test setup works if update passes."""
aioclient_mock.get(
namecheapdns.UPDATE_URL,
params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
text="<interface-response><ErrCount>0</ErrCount></interface-response>",
)
result = await async_setup_component(
hass,
namecheapdns.DOMAIN,
{"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
)
assert result
assert aioclient_mock.call_count == 1
async_fire_time_changed(hass, utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
async def test_setup_fails_if_update_fails(hass, aioclient_mock):
"""Test setup fails if first update fails."""
aioclient_mock.get(
namecheapdns.UPDATE_URL,
params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
text="<interface-response><ErrCount>1</ErrCount></interface-response>",
)
result = await async_setup_component(
hass,
namecheapdns.DOMAIN,
{"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
)
assert not result
assert aioclient_mock.call_count == 1
|
import gc100
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
CONF_PORTS = "ports"
DEFAULT_PORT = 4998
DOMAIN = "gc100"
DATA_GC100 = "gc100"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
},
extra=vol.ALLOW_EXTRA,
)
# pylint: disable=no-member
def setup(hass, base_config):
"""Set up the gc100 component."""
config = base_config[DOMAIN]
host = config[CONF_HOST]
port = config[CONF_PORT]
gc_device = gc100.GC100SocketClient(host, port)
def cleanup_gc100(event):
"""Stuff to do before stopping."""
gc_device.quit()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gc100)
hass.data[DATA_GC100] = GC100Device(hass, gc_device)
return True
class GC100Device:
"""The GC100 component."""
def __init__(self, hass, gc_device):
"""Init a gc100 device."""
self.hass = hass
self.gc_device = gc_device
def read_sensor(self, port_addr, callback):
"""Read a value from a digital input."""
self.gc_device.read_sensor(port_addr, callback)
def write_switch(self, port_addr, state, callback):
"""Write a value to a relay."""
self.gc_device.write_switch(port_addr, state, callback)
def subscribe(self, port_addr, callback):
"""Add detection for RISING and FALLING events."""
self.gc_device.subscribe_notify(port_addr, callback)
|
from importlib import import_module
import tablib
class Format:
def get_title(self):
return type(self)
def create_dataset(self, in_stream):
"""
Create dataset from given string.
"""
raise NotImplementedError()
def export_data(self, dataset, **kwargs):
"""
Returns format representation for given dataset.
"""
raise NotImplementedError()
def is_binary(self):
"""
        Returns whether this format is binary.
"""
return True
def get_read_mode(self):
"""
Returns mode for opening files.
"""
return 'rb'
def get_extension(self):
"""
        Returns the file extension used by this format.
"""
return ""
def get_content_type(self):
# For content types see
# https://www.iana.org/assignments/media-types/media-types.xhtml
return 'application/octet-stream'
@classmethod
def is_available(cls):
return True
def can_import(self):
return False
def can_export(self):
return False
class TablibFormat(Format):
TABLIB_MODULE = None
CONTENT_TYPE = 'application/octet-stream'
def get_format(self):
"""
        Import and return the tablib module for this format.
"""
try:
# Available since tablib 1.0
from tablib.formats import registry
except ImportError:
return import_module(self.TABLIB_MODULE)
else:
key = self.TABLIB_MODULE.split('.')[-1].replace('_', '')
return registry.get_format(key)
@classmethod
def is_available(cls):
try:
cls().get_format()
except (tablib.core.UnsupportedFormat, ImportError):
return False
return True
def get_title(self):
return self.get_format().title
def create_dataset(self, in_stream, **kwargs):
return tablib.import_set(in_stream, format=self.get_title())
def export_data(self, dataset, **kwargs):
return dataset.export(self.get_title(), **kwargs)
def get_extension(self):
return self.get_format().extensions[0]
def get_content_type(self):
return self.CONTENT_TYPE
def can_import(self):
return hasattr(self.get_format(), 'import_set')
def can_export(self):
return hasattr(self.get_format(), 'export_set')
class TextFormat(TablibFormat):
def get_read_mode(self):
return 'r'
def is_binary(self):
return False
class CSV(TextFormat):
TABLIB_MODULE = 'tablib.formats._csv'
CONTENT_TYPE = 'text/csv'
def create_dataset(self, in_stream, **kwargs):
return super().create_dataset(in_stream, **kwargs)
class JSON(TextFormat):
TABLIB_MODULE = 'tablib.formats._json'
CONTENT_TYPE = 'application/json'
class YAML(TextFormat):
TABLIB_MODULE = 'tablib.formats._yaml'
# See https://stackoverflow.com/questions/332129/yaml-mime-type
CONTENT_TYPE = 'text/yaml'
class TSV(TextFormat):
TABLIB_MODULE = 'tablib.formats._tsv'
CONTENT_TYPE = 'text/tab-separated-values'
def create_dataset(self, in_stream, **kwargs):
return super().create_dataset(in_stream, **kwargs)
class ODS(TextFormat):
TABLIB_MODULE = 'tablib.formats._ods'
CONTENT_TYPE = 'application/vnd.oasis.opendocument.spreadsheet'
class HTML(TextFormat):
TABLIB_MODULE = 'tablib.formats._html'
CONTENT_TYPE = 'text/html'
class XLS(TablibFormat):
TABLIB_MODULE = 'tablib.formats._xls'
CONTENT_TYPE = 'application/vnd.ms-excel'
def create_dataset(self, in_stream):
"""
Create dataset from first sheet.
"""
import xlrd
xls_book = xlrd.open_workbook(file_contents=in_stream)
dataset = tablib.Dataset()
sheet = xls_book.sheets()[0]
dataset.headers = sheet.row_values(0)
for i in range(1, sheet.nrows):
dataset.append(sheet.row_values(i))
return dataset
class XLSX(TablibFormat):
TABLIB_MODULE = 'tablib.formats._xlsx'
CONTENT_TYPE = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
def create_dataset(self, in_stream):
"""
Create dataset from first sheet.
"""
from io import BytesIO
import openpyxl
xlsx_book = openpyxl.load_workbook(BytesIO(in_stream), read_only=True)
dataset = tablib.Dataset()
sheet = xlsx_book.active
# obtain generator
rows = sheet.rows
dataset.headers = [cell.value for cell in next(rows)]
for row in rows:
row_values = [cell.value for cell in row]
dataset.append(row_values)
return dataset
#: These are the default formats for import and export. Whether they can be
#: used or not depends on their implementation in the tablib library.
DEFAULT_FORMATS = [fmt for fmt in (
CSV,
XLS,
XLSX,
TSV,
ODS,
JSON,
YAML,
HTML,
) if fmt.is_available()]
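# A minimal usage sketch (illustrative only, not part of the original module):
# export a small dataset with the CSV format, assuming tablib's csv support is
# installed.
#
#     fmt = CSV()
#     if fmt.is_available() and fmt.can_export():
#         data = tablib.Dataset(headers=['id', 'name'])
#         data.append([1, 'example'])
#         print(fmt.export_data(data))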
|
from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import QNetworkReply, QNetworkAccessManager
from qutebrowser.browser import qutescheme
from qutebrowser.browser.webkit.network import networkreply
from qutebrowser.utils import log, qtutils
def handler(request, operation, current_url):
"""Scheme handler for qute:// URLs.
Args:
request: QNetworkRequest to answer to.
operation: The HTTP operation being done.
current_url: The page we're on currently.
Return:
A QNetworkReply.
"""
if operation != QNetworkAccessManager.GetOperation:
return networkreply.ErrorNetworkReply(
request, "Unsupported request type",
QNetworkReply.ContentOperationNotPermittedError)
url = request.url()
if ((url.scheme(), url.host(), url.path()) ==
('qute', 'settings', '/set')):
if current_url != QUrl('qute://settings/'):
log.network.warning("Blocking malicious request from {} to {}"
.format(current_url.toDisplayString(),
url.toDisplayString()))
return networkreply.ErrorNetworkReply(
request, "Invalid qute://settings request",
QNetworkReply.ContentAccessDenied)
try:
mimetype, data = qutescheme.data_for_url(url)
except qutescheme.Error as e:
errors = {
qutescheme.NotFoundError:
QNetworkReply.ContentNotFoundError,
qutescheme.UrlInvalidError:
QNetworkReply.ContentOperationNotPermittedError,
qutescheme.RequestDeniedError:
QNetworkReply.ContentAccessDenied,
qutescheme.SchemeOSError:
QNetworkReply.ContentNotFoundError,
qutescheme.Error:
QNetworkReply.InternalServerError,
}
exctype = type(e)
log.misc.error("{} while handling qute://* URL".format(
exctype.__name__))
return networkreply.ErrorNetworkReply(request, str(e), errors[exctype])
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
return networkreply.RedirectNetworkReply(e.url)
return networkreply.FixedDataNetworkReply(request, data, mimetype)
|
from panasonic_viera import TV_TYPE_ENCRYPTED, TV_TYPE_NONENCRYPTED, SOAPError
import pytest
from homeassistant import config_entries
from homeassistant.components.panasonic_viera.const import (
ATTR_DEVICE_INFO,
ATTR_FRIENDLY_NAME,
ATTR_MANUFACTURER,
ATTR_MODEL_NUMBER,
ATTR_UDN,
CONF_APP_ID,
CONF_ENCRYPTION_KEY,
CONF_ON_ACTION,
DEFAULT_MANUFACTURER,
DEFAULT_MODEL_NUMBER,
DEFAULT_NAME,
DEFAULT_PORT,
DOMAIN,
ERROR_INVALID_PIN_CODE,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PIN, CONF_PORT
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
@pytest.fixture(name="panasonic_viera_setup", autouse=True)
def panasonic_viera_setup_fixture():
"""Mock panasonic_viera setup."""
with patch(
"homeassistant.components.panasonic_viera.async_setup", return_value=True
), patch(
"homeassistant.components.panasonic_viera.async_setup_entry",
return_value=True,
):
yield
def get_mock_remote(
host="1.2.3.4",
request_error=None,
authorize_error=None,
encrypted=False,
app_id=None,
encryption_key=None,
name=DEFAULT_NAME,
manufacturer=DEFAULT_MANUFACTURER,
model_number=DEFAULT_MODEL_NUMBER,
unique_id="mock-unique-id",
):
"""Return a mock remote."""
mock_remote = Mock()
mock_remote.type = TV_TYPE_ENCRYPTED if encrypted else TV_TYPE_NONENCRYPTED
mock_remote.app_id = app_id
mock_remote.enc_key = encryption_key
def request_pin_code(name=None):
if request_error is not None:
raise request_error
mock_remote.request_pin_code = request_pin_code
def authorize_pin_code(pincode):
if pincode == "1234":
return
if authorize_error is not None:
raise authorize_error
mock_remote.authorize_pin_code = authorize_pin_code
def get_device_info():
return {
ATTR_FRIENDLY_NAME: name,
ATTR_MANUFACTURER: manufacturer,
ATTR_MODEL_NUMBER: model_number,
ATTR_UDN: unique_id,
}
mock_remote.get_device_info = get_device_info
return mock_remote
async def test_flow_non_encrypted(hass):
"""Test flow without encryption."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=False)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "create_entry"
assert result["title"] == DEFAULT_NAME
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: None,
ATTR_DEVICE_INFO: {
ATTR_FRIENDLY_NAME: DEFAULT_NAME,
ATTR_MANUFACTURER: DEFAULT_MANUFACTURER,
ATTR_MODEL_NUMBER: DEFAULT_MODEL_NUMBER,
ATTR_UDN: "mock-unique-id",
},
}
async def test_flow_not_connected_error(hass):
"""Test flow with connection error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
side_effect=TimeoutError,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_unknown_abort(hass):
"""Test flow with unknown error abortion."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
side_effect=Exception,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_flow_encrypted_not_connected_pin_code_request(hass):
"""Test flow with encryption and PIN code request connection error abortion during pairing request step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=True, request_error=TimeoutError)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_encrypted_unknown_pin_code_request(hass):
"""Test flow with encryption and PIN code request unknown error abortion during pairing request step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=True, request_error=Exception)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_flow_encrypted_valid_pin_code(hass):
"""Test flow with encryption and valid PIN code."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(
encrypted=True,
app_id="test-app-id",
encryption_key="test-encryption-key",
)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "1234"},
)
assert result["type"] == "create_entry"
assert result["title"] == DEFAULT_NAME
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: None,
CONF_APP_ID: "test-app-id",
CONF_ENCRYPTION_KEY: "test-encryption-key",
ATTR_DEVICE_INFO: {
ATTR_FRIENDLY_NAME: DEFAULT_NAME,
ATTR_MANUFACTURER: DEFAULT_MANUFACTURER,
ATTR_MODEL_NUMBER: DEFAULT_MODEL_NUMBER,
ATTR_UDN: "mock-unique-id",
},
}
async def test_flow_encrypted_invalid_pin_code_error(hass):
"""Test flow with encryption and invalid PIN code error during pairing step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=True, authorize_error=SOAPError)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
assert result["errors"] == {"base": ERROR_INVALID_PIN_CODE}
async def test_flow_encrypted_not_connected_abort(hass):
"""Test flow with encryption and PIN code connection error abortion during pairing step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=True, authorize_error=TimeoutError)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_encrypted_unknown_abort(hass):
"""Test flow with encryption and PIN code unknown error abortion during pairing step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
mock_remote = get_mock_remote(encrypted=True, authorize_error=Exception)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_flow_non_encrypted_already_configured_abort(hass):
"""Test flow without encryption and existing config entry abortion."""
MockConfigEntry(
domain=DOMAIN,
unique_id="1.2.3.4",
data={CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME, CONF_PORT: DEFAULT_PORT},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_flow_encrypted_already_configured_abort(hass):
"""Test flow with encryption and existing config entry abortion."""
MockConfigEntry(
domain=DOMAIN,
unique_id="1.2.3.4",
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_APP_ID: "test-app-id",
CONF_ENCRYPTION_KEY: "test-encryption-key",
},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_imported_flow_non_encrypted(hass):
"""Test imported flow without encryption."""
mock_remote = get_mock_remote(encrypted=False)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "create_entry"
assert result["title"] == DEFAULT_NAME
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
ATTR_DEVICE_INFO: {
ATTR_FRIENDLY_NAME: DEFAULT_NAME,
ATTR_MANUFACTURER: DEFAULT_MANUFACTURER,
ATTR_MODEL_NUMBER: DEFAULT_MODEL_NUMBER,
ATTR_UDN: "mock-unique-id",
},
}
async def test_imported_flow_encrypted_valid_pin_code(hass):
"""Test imported flow with encryption and valid PIN code."""
mock_remote = get_mock_remote(
encrypted=True,
app_id="test-app-id",
encryption_key="test-encryption-key",
)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "1234"},
)
assert result["type"] == "create_entry"
assert result["title"] == DEFAULT_NAME
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
CONF_APP_ID: "test-app-id",
CONF_ENCRYPTION_KEY: "test-encryption-key",
ATTR_DEVICE_INFO: {
ATTR_FRIENDLY_NAME: DEFAULT_NAME,
ATTR_MANUFACTURER: DEFAULT_MANUFACTURER,
ATTR_MODEL_NUMBER: DEFAULT_MODEL_NUMBER,
ATTR_UDN: "mock-unique-id",
},
}
async def test_imported_flow_encrypted_invalid_pin_code_error(hass):
"""Test imported flow with encryption and invalid PIN code error during pairing step."""
mock_remote = get_mock_remote(encrypted=True, authorize_error=SOAPError)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
assert result["errors"] == {"base": ERROR_INVALID_PIN_CODE}
async def test_imported_flow_encrypted_not_connected_abort(hass):
"""Test imported flow with encryption and PIN code connection error abortion during pairing step."""
mock_remote = get_mock_remote(encrypted=True, authorize_error=TimeoutError)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_imported_flow_encrypted_unknown_abort(hass):
"""Test imported flow with encryption and PIN code unknown error abortion during pairing step."""
mock_remote = get_mock_remote(encrypted=True, authorize_error=Exception)
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
return_value=mock_remote,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "form"
assert result["step_id"] == "pairing"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PIN: "0000"},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_imported_flow_not_connected_error(hass):
"""Test imported flow with connection error abortion."""
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
side_effect=TimeoutError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_imported_flow_unknown_abort(hass):
"""Test imported flow with unknown error abortion."""
with patch(
"homeassistant.components.panasonic_viera.config_flow.RemoteControl",
side_effect=Exception,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_imported_flow_non_encrypted_already_configured_abort(hass):
"""Test imported flow without encryption and existing config entry abortion."""
MockConfigEntry(
domain=DOMAIN,
unique_id="1.2.3.4",
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_imported_flow_encrypted_already_configured_abort(hass):
"""Test imported flow with encryption and existing config entry abortion."""
MockConfigEntry(
domain=DOMAIN,
unique_id="1.2.3.4",
data={
CONF_HOST: "1.2.3.4",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: "test-on-action",
CONF_APP_ID: "test-app-id",
CONF_ENCRYPTION_KEY: "test-encryption-key",
},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: "1.2.3.4", CONF_NAME: DEFAULT_NAME},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
|
from abc import ABCMeta, abstractmethod
import numpy as np
import pandas as pd
from scipy import stats
from scipy.sparse import vstack
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termranking.TermRanker import TermRanker
try:
    from future.utils import with_metaclass
except ImportError:
    from six import with_metaclass
def sparse_var(X):
'''
    Compute the column-wise variance of a sparse matrix.
    :param X: scipy.sparse matrix
    :return: np.array of per-column variances
'''
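    # Var[X] = E[X^2] - (E[X])^2, evaluated column-wise. Only the stored
    # (nonzero) entries are squared, so the matrix is never densified.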
Xc = X.copy()
Xc.data **= 2
return np.array(Xc.mean(axis=0) - np.power(X.mean(axis=0), 2))[0]
class NeedToSetCategoriesException(Exception):
pass
class CorpusBasedTermScorer(with_metaclass(ABCMeta, object)):
def __init__(self, corpus, *args, **kwargs):
self.corpus_ = corpus
self.category_ids_ = corpus._y
self.tdf_ = None
self._set_scorer_args(**kwargs)
self.term_ranker_ = AbsoluteFrequencyRanker(corpus)
self.use_metadata_ = False
self.category_name_is_set_ = False
@abstractmethod
def _set_scorer_args(self, **kwargs):
pass
def use_metadata(self):
self.use_metadata_ = True
self.term_ranker_.use_non_text_features()
return self
def set_term_ranker(self, term_ranker):
assert issubclass(term_ranker, TermRanker)
self.term_ranker_ = term_ranker(self.corpus_)
if self.use_metadata_:
self.term_ranker_.use_non_text_features()
return self
def is_category_name_set(self):
return self.category_name_is_set_
def set_categories(self,
category_name,
not_category_names=[],
neutral_category_names=[]):
'''
Specify the category to score. Optionally, score against a specific set of categories.
'''
tdf = self.term_ranker_.get_ranks()
d = {'cat': tdf[category_name + ' freq']}
if not_category_names == []:
not_category_names = [c + ' freq' for c in self.corpus_.get_categories()
if c != category_name]
else:
not_category_names = [c + ' freq' for c in not_category_names]
d['ncat'] = tdf[not_category_names].sum(axis=1)
if neutral_category_names == []:
# neutral_category_names = [c + ' freq' for c in self.corpus.get_categories()
# if c != category_name and c not in not_category_names]
pass
else:
neutral_category_names = [c + ' freq' for c in neutral_category_names]
for i, c in enumerate(neutral_category_names):
d['neut%s' % (i)] = tdf[c]
self.tdf_ = pd.DataFrame(d)
self.category_name = category_name
self.not_category_names = [c[:-5] for c in not_category_names]
self.neutral_category_names = [c[:-5] for c in neutral_category_names]
self.category_name_is_set_ = True
return self
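    # Rough usage sketch (the concrete scorer subclass name is assumed):
    #     scorer = SomeTermScorer(corpus).set_categories('democrat', ['republican'])
    #     t_stats, p_values = scorer.get_t_statistics()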
def _get_X(self):
return self.corpus_.get_metadata_doc_mat() if self.use_metadata_ else self.term_ranker_.get_X()
def get_t_statistics(self):
'''
In this case, parameters a and b aren't used, since this information is taken
directly from the corpus categories.
        Returns
        -------
        (array of t-statistics, array of p-values), one entry per term
'''
X = self._get_X()
cat_X, ncat_X = self._get_cat_and_ncat(X)
mean_delta = self._get_mean_delta(cat_X, ncat_X)
cat_var = sparse_var(cat_X)
ncat_var = sparse_var(ncat_X)
cat_n = cat_X.shape[0]
ncat_n = ncat_X.shape[0]
pooled_stderr = np.sqrt(cat_var / cat_n + ncat_var / ncat_n)
tt = mean_delta / pooled_stderr
        # Use the Welch-Satterthwaite approximation for degrees of freedom:
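        # df ~= (v1/n1 + v2/n2)**2 / ((v1/n1)**2/(n1 - 1) + (v2/n2)**2/(n2 - 1))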
        degs_of_freedom = (cat_var / cat_n + ncat_var / ncat_n) ** 2 / (
            (cat_var / cat_n) ** 2 / (cat_n - 1)
            + (ncat_var / ncat_n) ** 2 / (ncat_n - 1)
        )
only_in_neutral_mask = self.tdf_[['cat', 'ncat']].sum(axis=1) == 0
pvals = stats.t.sf(np.abs(tt), degs_of_freedom)
tt[only_in_neutral_mask] = 0
pvals[only_in_neutral_mask] = 0
return tt, pvals
def _get_mean_delta(self, cat_X, ncat_X):
return np.array(cat_X.mean(axis=0) - ncat_X.mean(axis=0))[0]
def _get_cat_and_ncat(self, X):
if self.category_name_is_set_ is False:
raise NeedToSetCategoriesException()
cat_X = X[np.isin(self.corpus_.get_category_names_by_row(),
[self.category_name] + self.neutral_category_names), :]
ncat_X = X[np.isin(self.corpus_.get_category_names_by_row(),
self.not_category_names + self.neutral_category_names), :]
if len(self.neutral_category_names) > 0:
            neut_X = X[np.isin(self.corpus_.get_category_names_by_row(), self.neutral_category_names), :]
cat_X = vstack([cat_X, neut_X])
ncat_X = vstack([ncat_X, neut_X])
return cat_X, ncat_X
def _get_index(self):
return self.corpus_.get_metadata() if self.use_metadata_ else self.corpus_.get_terms()
@abstractmethod
def get_scores(self, *args):
'''
        Args are ignored.
        Returns
        -------
        A sequence of scores, one per term.
'''
@abstractmethod
def get_name(self):
pass
|
from datetime import timedelta
import logging
from homeassistant.components.lock import LockEntity
from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_LOCKED, STATE_UNLOCKED
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=120)
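# The lock is polled every SCAN_INTERVAL; each update() call refreshes the
# battery level, name, locked/unlocked state and availability.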
TAHOMA_STATE_LOCKED = "locked"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tahoma lock."""
if discovery_info is None:
return
controller = hass.data[TAHOMA_DOMAIN]["controller"]
devices = []
for device in hass.data[TAHOMA_DOMAIN]["devices"]["lock"]:
devices.append(TahomaLock(device, controller))
add_entities(devices, True)
class TahomaLock(TahomaDevice, LockEntity):
"""Representation a Tahoma lock."""
def __init__(self, tahoma_device, controller):
"""Initialize the device."""
super().__init__(tahoma_device, controller)
self._lock_status = None
self._available = False
self._battery_level = None
self._name = None
def update(self):
"""Update method."""
self.controller.get_states([self.tahoma_device])
self._battery_level = self.tahoma_device.active_states["core:BatteryState"]
self._name = self.tahoma_device.active_states["core:NameState"]
if (
self.tahoma_device.active_states.get("core:LockedUnlockedState")
== TAHOMA_STATE_LOCKED
):
self._lock_status = STATE_LOCKED
else:
self._lock_status = STATE_UNLOCKED
self._available = (
self.tahoma_device.active_states.get("core:AvailabilityState")
== "available"
)
def unlock(self, **kwargs):
"""Unlock method."""
_LOGGER.debug("Unlocking %s", self._name)
self.apply_action("unlock")
def lock(self, **kwargs):
"""Lock method."""
_LOGGER.debug("Locking %s", self._name)
self.apply_action("lock")
@property
def name(self):
"""Return the name of the lock."""
return self._name
@property
def available(self):
"""Return True if the lock is available."""
return self._available
@property
def is_locked(self):
"""Return True if the lock is locked."""
return self._lock_status == STATE_LOCKED
@property
def device_state_attributes(self):
"""Return the lock state attributes."""
attr = {
ATTR_BATTERY_LEVEL: self._battery_level,
}
super_attr = super().device_state_attributes
if super_attr is not None:
attr.update(super_attr)
return attr
|
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOVING,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import ATTR_ATTRIBUTION
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_CREATED_AT,
ATTR_DROPLET_ID,
ATTR_DROPLET_NAME,
ATTR_FEATURES,
ATTR_IPV4_ADDRESS,
ATTR_IPV6_ADDRESS,
ATTR_MEMORY,
ATTR_REGION,
ATTR_VCPUS,
ATTRIBUTION,
CONF_DROPLETS,
DATA_DIGITAL_OCEAN,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Droplet"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_DROPLETS): vol.All(cv.ensure_list, [cv.string])}
)
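# A hedged configuration.yaml sketch for this platform (platform key and
# droplet names are placeholders assumed here):
#
#   binary_sensor:
#     - platform: digital_ocean
#       droplets:
#         - droplet-01
#         - droplet-02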
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Digital Ocean droplet sensor."""
digital = hass.data.get(DATA_DIGITAL_OCEAN)
if not digital:
return False
droplets = config[CONF_DROPLETS]
dev = []
for droplet in droplets:
droplet_id = digital.get_droplet_id(droplet)
if droplet_id is None:
_LOGGER.error("Droplet %s is not available", droplet)
return False
dev.append(DigitalOceanBinarySensor(digital, droplet_id))
add_entities(dev, True)
class DigitalOceanBinarySensor(BinarySensorEntity):
"""Representation of a Digital Ocean droplet sensor."""
def __init__(self, do, droplet_id):
"""Initialize a new Digital Ocean sensor."""
self._digital_ocean = do
self._droplet_id = droplet_id
self._state = None
self.data = None
@property
def name(self):
"""Return the name of the sensor."""
return self.data.name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.data.status == "active"
@property
def device_class(self):
"""Return the class of this sensor."""
return DEVICE_CLASS_MOVING
@property
def device_state_attributes(self):
"""Return the state attributes of the Digital Ocean droplet."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_CREATED_AT: self.data.created_at,
ATTR_DROPLET_ID: self.data.id,
ATTR_DROPLET_NAME: self.data.name,
ATTR_FEATURES: self.data.features,
ATTR_IPV4_ADDRESS: self.data.ip_address,
ATTR_IPV6_ADDRESS: self.data.ip_v6_address,
ATTR_MEMORY: self.data.memory,
ATTR_REGION: self.data.region["name"],
ATTR_VCPUS: self.data.vcpus,
}
def update(self):
"""Update state of sensor."""
self._digital_ocean.update()
for droplet in self._digital_ocean.data:
if droplet.id == self._droplet_id:
self.data = droplet
|
import pytest
import math
import os
import shutil
import numpy as np
import tensorflow as tf
import tensorflow.keras
from tensorflow.keras import backend as K
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import Conv2D, Dense, Flatten
from tensornetwork.tn_keras.layers import Conv2DMPO
LAYER_NAME = 'conv_layer'
@pytest.fixture(params=[(100, 8, 8, 16)])
def dummy_data(request):
np.random.seed(42)
data = np.random.rand(*request.param)
labels = np.concatenate((np.ones((50, 1)), np.zeros((50, 1))))
return data, labels
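# The fixture above yields 100 random samples of shape (8, 8, 16) together with
# binary labels: the first 50 samples are labeled 1 and the remaining 50 are labeled 0.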
@pytest.fixture()
def make_model(dummy_data):
# pylint: disable=redefined-outer-name
data, _ = dummy_data
model = Sequential()
model.add(
Conv2DMPO(filters=4,
kernel_size=3,
num_nodes=2,
bond_dim=10,
padding='same',
input_shape=data.shape[1:],
name=LAYER_NAME)
)
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))
return model
def test_train(dummy_data, make_model):
# pylint: disable=redefined-outer-name
model = make_model
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
data, labels = dummy_data
# Train the model for 10 epochs
history = model.fit(data, labels, epochs=10, batch_size=32)
# Check that loss decreases and accuracy increases
assert history.history['loss'][0] > history.history['loss'][-1]
assert history.history['accuracy'][0] < history.history['accuracy'][-1]
def test_weights_change(dummy_data, make_model):
# pylint: disable=redefined-outer-name
data, labels = dummy_data
model = make_model
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
before = model.get_weights()
model.fit(data, labels, epochs=5, batch_size=32)
after = model.get_weights()
# Make sure every layer's weights changed
for b, a in zip(before, after):
assert (b != a).any()
def test_output_shape(dummy_data, make_model):
# pylint: disable=redefined-outer-name
data, _ = dummy_data
data = K.constant(data)
model = make_model
l = model.get_layer(LAYER_NAME)
actual_output_shape = l(data).shape
expected_output_shape = l.compute_output_shape(data.shape)
np.testing.assert_equal(expected_output_shape, actual_output_shape)
def test_num_parameters(dummy_data, make_model):
# pylint: disable=redefined-outer-name
data, _ = dummy_data
model = make_model
l = model.get_layer(LAYER_NAME)
in_dim = math.ceil(data.shape[-1] ** (1. / l.num_nodes))
out_dim = math.ceil(l.filters ** (1. / l.num_nodes))
exp_num_parameters = ((l.num_nodes - 2) *
(l.bond_dim * 2 * in_dim * out_dim) +
(l.kernel_size[0] * out_dim * in_dim * l.bond_dim) +
(l.kernel_size[1] * out_dim * in_dim * l.bond_dim) +
(l.filters))
np.testing.assert_equal(exp_num_parameters, l.count_params())
def test_config(make_model):
# pylint: disable=redefined-outer-name
model = make_model
expected_num_parameters = model.layers[0].count_params()
# Serialize model and use config to create new layer
l = model.get_layer(LAYER_NAME)
layer_config = l.get_config()
new_model = Conv2DMPO.from_config(layer_config)
# Build the layer so we can count params below
new_model.build(layer_config['batch_input_shape'])
np.testing.assert_equal(expected_num_parameters, new_model.count_params())
assert layer_config == new_model.get_config()
def test_model_save(dummy_data, make_model, tmp_path):
# pylint: disable=redefined-outer-name
data, labels = dummy_data
model = make_model
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
# Train the model for 5 epochs
model.fit(data, labels, epochs=5)
for save_path in [tmp_path / 'test_model', tmp_path / 'test_model.h5']:
# Save model to a SavedModel folder or h5 file, then load model
print('save_path: ', save_path)
model.save(save_path)
loaded_model = load_model(save_path)
# Clean up SavedModel folder
if os.path.isdir(save_path):
shutil.rmtree(save_path)
# Clean up h5 file
if os.path.exists(save_path):
os.remove(save_path)
# Compare model predictions and loaded_model predictions
np.testing.assert_almost_equal(model.predict(data),
loaded_model.predict(data))
|
import pytest
from molecule.model import schema_v2
@pytest.fixture
def _model_dependency_section_data():
return {
'dependency': {
'name': 'galaxy',
'enabled': True,
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'foo',
'FOO_BAR': 'foo_bar',
},
}
}
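# The fixture above mirrors a molecule.yml dependency section roughly like:
#
#   dependency:
#     name: galaxy
#     enabled: true
#     options:
#       foo: bar
#     env:
#       FOO: foo
#       FOO_BAR: foo_bar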
@pytest.mark.parametrize(
'_config', ['_model_dependency_section_data'], indirect=True)
def test_dependency(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_dependency_errors_section_data():
return {
'dependency': {
'name': int(),
'command': None,
'enabled': str(),
'options': [],
'env': {
'foo': 'foo',
'foo-bar': 'foo-bar',
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_dependency_errors_section_data'], indirect=True)
def test_dependency_has_errors(_config):
x = {
'dependency': [{
'name': ['must be of string type'],
'enabled': ['must be of boolean type'],
'options': ['must be of dict type'],
'env': [{
'foo': ["value does not match regex '^[A-Z0-9_-]+$'"],
'foo-bar': ["value does not match regex '^[A-Z0-9_-]+$'"],
}],
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_dependency_allows_galaxy_section_data():
return {
'dependency': {
'name': 'galaxy',
}
}
@pytest.fixture
def _model_dependency_allows_gilt_section_data():
return {
'dependency': {
'name': 'gilt',
}
}
@pytest.fixture
def _model_dependency_allows_shell_section_data():
return {
'dependency': {
'name': 'shell',
}
}
@pytest.mark.parametrize(
'_config', [
('_model_dependency_allows_galaxy_section_data'),
('_model_dependency_allows_gilt_section_data'),
('_model_dependency_allows_shell_section_data'),
],
indirect=True)
def test_dependency_allows_shell_name(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_dependency_shell_errors_section_data():
return {
'dependency': {
'name': 'shell',
'command': None,
}
}
@pytest.mark.parametrize(
'_config', ['_model_dependency_shell_errors_section_data'], indirect=True)
def test_dependency_shell_has_errors(_config):
x = {'dependency': [{'command': ['null value not allowed']}]}
assert x == schema_v2.validate(_config)
|
from homeassistant.components import sensor, tellduslive
from homeassistant.const import (
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
LENGTH_MILLIMETERS,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
SPEED_METERS_PER_SECOND,
TEMP_CELSIUS,
TIME_HOURS,
UV_INDEX,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .entry import TelldusLiveEntity
SENSOR_TYPE_TEMPERATURE = "temp"
SENSOR_TYPE_HUMIDITY = "humidity"
SENSOR_TYPE_RAINRATE = "rrate"
SENSOR_TYPE_RAINTOTAL = "rtot"
SENSOR_TYPE_WINDDIRECTION = "wdir"
SENSOR_TYPE_WINDAVERAGE = "wavg"
SENSOR_TYPE_WINDGUST = "wgust"
SENSOR_TYPE_UV = "uv"
SENSOR_TYPE_WATT = "watt"
SENSOR_TYPE_LUMINANCE = "lum"
SENSOR_TYPE_DEW_POINT = "dewp"
SENSOR_TYPE_BAROMETRIC_PRESSURE = "barpress"
SENSOR_TYPES = {
SENSOR_TYPE_TEMPERATURE: [
"Temperature",
TEMP_CELSIUS,
None,
DEVICE_CLASS_TEMPERATURE,
],
SENSOR_TYPE_HUMIDITY: ["Humidity", PERCENTAGE, None, DEVICE_CLASS_HUMIDITY],
SENSOR_TYPE_RAINRATE: [
"Rain rate",
f"{LENGTH_MILLIMETERS}/{TIME_HOURS}",
"mdi:water",
None,
],
SENSOR_TYPE_RAINTOTAL: ["Rain total", LENGTH_MILLIMETERS, "mdi:water", None],
SENSOR_TYPE_WINDDIRECTION: ["Wind direction", "", "", None],
SENSOR_TYPE_WINDAVERAGE: ["Wind average", SPEED_METERS_PER_SECOND, "", None],
SENSOR_TYPE_WINDGUST: ["Wind gust", SPEED_METERS_PER_SECOND, "", None],
SENSOR_TYPE_UV: ["UV", UV_INDEX, "", None],
SENSOR_TYPE_WATT: ["Power", POWER_WATT, "", None],
SENSOR_TYPE_LUMINANCE: ["Luminance", LIGHT_LUX, None, DEVICE_CLASS_ILLUMINANCE],
SENSOR_TYPE_DEW_POINT: ["Dew Point", TEMP_CELSIUS, None, DEVICE_CLASS_TEMPERATURE],
SENSOR_TYPE_BAROMETRIC_PRESSURE: ["Barometric Pressure", "kPa", "", None],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tellduslive sensors dynamically."""
async def async_discover_sensor(device_id):
"""Discover and add a discovered sensor."""
client = hass.data[tellduslive.DOMAIN]
async_add_entities([TelldusLiveSensor(client, device_id)])
async_dispatcher_connect(
hass,
tellduslive.TELLDUS_DISCOVERY_NEW.format(sensor.DOMAIN, tellduslive.DOMAIN),
async_discover_sensor,
)
class TelldusLiveSensor(TelldusLiveEntity):
"""Representation of a Telldus Live sensor."""
@property
def device_id(self):
"""Return id of the device."""
return self._id[0]
@property
def _type(self):
"""Return the type of the sensor."""
return self._id[1]
@property
def _value(self):
"""Return value of the sensor."""
return self.device.value(*self._id[1:])
@property
def _value_as_temperature(self):
"""Return the value as temperature."""
return round(float(self._value), 1)
@property
def _value_as_luminance(self):
"""Return the value as luminance."""
return round(float(self._value), 1)
@property
def _value_as_humidity(self):
"""Return the value as humidity."""
return int(round(float(self._value)))
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(super().name, self.quantity_name or "").strip()
@property
def state(self):
"""Return the state of the sensor."""
if not self.available:
return None
if self._type == SENSOR_TYPE_TEMPERATURE:
return self._value_as_temperature
if self._type == SENSOR_TYPE_HUMIDITY:
return self._value_as_humidity
if self._type == SENSOR_TYPE_LUMINANCE:
return self._value_as_luminance
return self._value
@property
def quantity_name(self):
"""Name of quantity."""
return SENSOR_TYPES[self._type][0] if self._type in SENSOR_TYPES else None
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSOR_TYPES[self._type][1] if self._type in SENSOR_TYPES else None
@property
def icon(self):
"""Return the icon."""
return SENSOR_TYPES[self._type][2] if self._type in SENSOR_TYPES else None
@property
def device_class(self):
"""Return the device class."""
return SENSOR_TYPES[self._type][3] if self._type in SENSOR_TYPES else None
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "{}-{}-{}".format(*self._id)
|
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from paasta_tools.paastaapi.model.envoy_status import EnvoyStatus
from paasta_tools.paastaapi.model.instance_status_kubernetes_autoscaling_status import InstanceStatusKubernetesAutoscalingStatus
from paasta_tools.paastaapi.model.kubernetes_pod import KubernetesPod
from paasta_tools.paastaapi.model.kubernetes_replica_set import KubernetesReplicaSet
from paasta_tools.paastaapi.model.smartstack_status import SmartstackStatus
globals()['EnvoyStatus'] = EnvoyStatus
globals()['InstanceStatusKubernetesAutoscalingStatus'] = InstanceStatusKubernetesAutoscalingStatus
globals()['KubernetesPod'] = KubernetesPod
globals()['KubernetesReplicaSet'] = KubernetesReplicaSet
globals()['SmartstackStatus'] = SmartstackStatus
class InstanceStatusKubernetes(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('bounce_method',): {
'BRUTAL': "brutal",
'UPTHENDOWN': "upthendown",
'DOWNTHENUP': "downthenup",
'CROSSOVER': "crossover",
},
('desired_state',): {
'START': "start",
'STOP': "stop",
},
('deploy_status',): {
'RUNNING': "Running",
'DEPLOYING': "Deploying",
'STOPPED': "Stopped",
'DELAYED': "Delayed",
'WAITING': "Waiting",
'NOTRUNNING': "NotRunning",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'app_count': (int,), # noqa: E501
'bounce_method': (str,), # noqa: E501
'desired_state': (str,), # noqa: E501
'active_shas': ([[str, none_type]],), # noqa: E501
'app_id': (str,), # noqa: E501
'autoscaling_status': (InstanceStatusKubernetesAutoscalingStatus,), # noqa: E501
'backoff_seconds': (int,), # noqa: E501
'create_timestamp': (float,), # noqa: E501
'deploy_status': (str,), # noqa: E501
'deploy_status_message': (str,), # noqa: E501
'error_message': (str,), # noqa: E501
'expected_instance_count': (int,), # noqa: E501
'namespace': (str,), # noqa: E501
'pods': ([KubernetesPod],), # noqa: E501
'replicasets': ([KubernetesReplicaSet],), # noqa: E501
'running_instance_count': (int,), # noqa: E501
'smartstack': (SmartstackStatus,), # noqa: E501
'envoy': (EnvoyStatus,), # noqa: E501
'evicted_count': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'app_count': 'app_count', # noqa: E501
'bounce_method': 'bounce_method', # noqa: E501
'desired_state': 'desired_state', # noqa: E501
'active_shas': 'active_shas', # noqa: E501
'app_id': 'app_id', # noqa: E501
'autoscaling_status': 'autoscaling_status', # noqa: E501
'backoff_seconds': 'backoff_seconds', # noqa: E501
'create_timestamp': 'create_timestamp', # noqa: E501
'deploy_status': 'deploy_status', # noqa: E501
'deploy_status_message': 'deploy_status_message', # noqa: E501
'error_message': 'error_message', # noqa: E501
'expected_instance_count': 'expected_instance_count', # noqa: E501
'namespace': 'namespace', # noqa: E501
'pods': 'pods', # noqa: E501
'replicasets': 'replicasets', # noqa: E501
'running_instance_count': 'running_instance_count', # noqa: E501
'smartstack': 'smartstack', # noqa: E501
'envoy': 'envoy', # noqa: E501
'evicted_count': 'evicted_count', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, app_count, bounce_method, desired_state, *args, **kwargs): # noqa: E501
"""InstanceStatusKubernetes - a model defined in OpenAPI
Args:
app_count (int): The number of different running versions of the same service (0 for stopped, 1 for running and 1+ for bouncing)
bounce_method (str): Method to transit between new and old versions of a service
desired_state (str): Desired state of a service, for Kubernetes
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
active_shas ([[str, none_type]]): List of git/config SHAs running.. [optional] # noqa: E501
app_id (str): ID of the desired version of a service instance. [optional] # noqa: E501
autoscaling_status (InstanceStatusKubernetesAutoscalingStatus): [optional] # noqa: E501
backoff_seconds (int): backoff in seconds before launching the next task. [optional] # noqa: E501
create_timestamp (float): Unix timestamp when this app was created. [optional] # noqa: E501
deploy_status (str): Deploy status of a Kubernetes service. [optional] # noqa: E501
deploy_status_message (str): Reason for the deploy status. [optional] # noqa: E501
error_message (str): Error message when a kubernetes object (Deployment/Statefulset) cannot be found. [optional] # noqa: E501
expected_instance_count (int): The number of desired instances of the service. [optional] # noqa: E501
namespace (str): The namespace this app is running. [optional] # noqa: E501
pods ([KubernetesPod]): Pods associated to this app. [optional] # noqa: E501
replicasets ([KubernetesReplicaSet]): ReplicaSets associated to this app. [optional] # noqa: E501
running_instance_count (int): The number of actual running instances of the service. [optional] # noqa: E501
smartstack (SmartstackStatus): [optional] # noqa: E501
envoy (EnvoyStatus): [optional] # noqa: E501
evicted_count (int): Number of pods with status reason \"Evicted\". [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.app_count = app_count
self.bounce_method = bounce_method
self.desired_state = desired_state
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
|
from asyncio import Event
import logging
from threading import Thread
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
DOMAIN = "ptvsd"
CONF_WAIT = "wait"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST, default="0.0.0.0"): cv.string,
vol.Optional(CONF_PORT, default=5678): cv.port,
vol.Optional(CONF_WAIT, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
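# A minimal configuration.yaml sketch (the values shown are the schema defaults):
#
#   ptvsd:
#     host: 0.0.0.0
#     port: 5678
#     wait: false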
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up ptvsd debugger."""
_LOGGER.warning(
"ptvsd is deprecated and will be removed in Home Assistant Core 0.120."
"The debugpy integration can be used as a full replacement for ptvsd"
)
    # This is a local import: importing ptvsd at module level hooks into
    # `sys.settrace`, which `coverage` also uses to collect coverage data, so a
    # top-level import would conflict with it and leave test coverage incomplete.
import ptvsd # pylint: disable=import-outside-toplevel
conf = config[DOMAIN]
host = conf[CONF_HOST]
port = conf[CONF_PORT]
ptvsd.enable_attach((host, port))
wait = conf[CONF_WAIT]
if wait:
_LOGGER.warning("Waiting for ptvsd connection on %s:%s", host, port)
ready = Event()
def waitfor():
ptvsd.wait_for_attach()
hass.loop.call_soon_threadsafe(ready.set)
Thread(target=waitfor).start()
await ready.wait()
else:
_LOGGER.warning("Listening for ptvsd connection on %s:%s", host, port)
return True
|
from __future__ import unicode_literals # at top of module
import os
import sys
import base64
import requests
import json
from gunicorn.config import make_settings
from cryptography.fernet import Fernet
from flask import current_app
from flask_script import Manager, Command, Option, prompt_pass
from flask_migrate import Migrate, MigrateCommand, stamp
from flask_script.commands import ShowUrls, Clean, Server
from lemur.dns_providers.cli import manager as dns_provider_manager
from lemur.acme_providers.cli import manager as acme_manager
from lemur.sources.cli import manager as source_manager
from lemur.policies.cli import manager as policy_manager
from lemur.reporting.cli import manager as report_manager
from lemur.endpoints.cli import manager as endpoint_manager
from lemur.certificates.cli import manager as certificate_manager
from lemur.notifications.cli import manager as notification_manager
from lemur.pending_certificates.cli import manager as pending_certificate_manager
from lemur import database
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.policies import service as policy_service
from lemur.notifications import service as notification_service
from lemur.common.utils import validate_conf
from lemur import create_app
# Needed to be imported so that SQLAlchemy create_all can find our models
from lemur.users.models import User # noqa
from lemur.roles.models import Role # noqa
from lemur.authorities.models import Authority # noqa
from lemur.certificates.models import Certificate # noqa
from lemur.destinations.models import Destination # noqa
from lemur.domains.models import Domain # noqa
from lemur.notifications.models import Notification # noqa
from lemur.sources.models import Source # noqa
from lemur.logs.models import Log # noqa
from lemur.endpoints.models import Endpoint # noqa
from lemur.policies.models import RotationPolicy # noqa
from lemur.pending_certificates.models import PendingCertificate # noqa
from lemur.dns_providers.models import DnsProvider # noqa
from sqlalchemy.sql import text
manager = Manager(create_app)
manager.add_option("-c", "--config", dest="config_path", required=False)
migrate = Migrate(create_app)
REQUIRED_VARIABLES = [
"LEMUR_SECURITY_TEAM_EMAIL",
"LEMUR_DEFAULT_ORGANIZATIONAL_UNIT",
"LEMUR_DEFAULT_ORGANIZATION",
"LEMUR_DEFAULT_LOCATION",
"LEMUR_DEFAULT_COUNTRY",
"LEMUR_DEFAULT_STATE",
"SQLALCHEMY_DATABASE_URI",
]
KEY_LENGTH = 40
DEFAULT_CONFIG_PATH = "~/.lemur/lemur.conf.py"
DEFAULT_SETTINGS = "lemur.conf.server"
SETTINGS_ENVVAR = "LEMUR_CONF"
CONFIG_TEMPLATE = """
# This is just Python which means you can inherit and tweak settings
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
THREADS_PER_PAGE = 8
# General
# These will need to be set to `True` if you are developing locally
CORS = False
debug = False
# this is the secret key used by flask session management
SECRET_KEY = '{flask_secret_key}'
# You should consider storing these separately from your config
LEMUR_TOKEN_SECRET = '{secret_token}'
LEMUR_ENCRYPTION_KEYS = '{encryption_key}'
# List of domain regular expressions that non-admin users can issue
LEMUR_ALLOWED_DOMAINS = []
# Mail Server
LEMUR_EMAIL = ''
LEMUR_SECURITY_TEAM_EMAIL = []
# Certificate Defaults
LEMUR_DEFAULT_COUNTRY = ''
LEMUR_DEFAULT_STATE = ''
LEMUR_DEFAULT_LOCATION = ''
LEMUR_DEFAULT_ORGANIZATION = ''
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = ''
# Authentication Providers
ACTIVE_PROVIDERS = []
# Metrics Providers
METRIC_PROVIDERS = []
# Logging
LOG_LEVEL = "DEBUG"
LOG_FILE = "lemur.log"
LOG_UPGRADE_FILE = "db_upgrade.log"
# Database
# modify this if you are not using a local database
SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'
# AWS
#LEMUR_INSTANCE_PROFILE = 'Lemur'
# Issuers
# These will be dependent on which 3rd party that Lemur is
# configured to use.
# VERISIGN_URL = ''
# VERISIGN_PEM_PATH = ''
# VERISIGN_FIRST_NAME = ''
# VERISIGN_LAST_NAME = ''
# VERSIGN_EMAIL = ''
"""
@MigrateCommand.command
def create():
database.db.engine.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm"))
database.db.create_all()
stamp(revision="head")
@MigrateCommand.command
def drop_all():
database.db.drop_all()
@manager.shell
def make_shell_context():
"""
Creates a python REPL with several default imports
in the context of the current_app
    :return: dict of objects exposed in the shell (currently just current_app)
"""
return dict(current_app=current_app)
def generate_settings():
"""
    This is run when ``default_path`` doesn't exist or when ``init`` is invoked,
    and returns a string containing the default data to write to the settings
    file.
"""
output = CONFIG_TEMPLATE.format(
# we use Fernet.generate_key to make sure that the key length is
# compatible with Fernet
encryption_key=Fernet.generate_key().decode("utf-8"),
secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode("utf-8"),
flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode("utf-8"),
)
return output
class InitializeApp(Command):
"""
This command will bootstrap our database with any destinations as
specified by our config.
Additionally a Lemur user will be created as a default user
and be used when certificates are discovered by Lemur.
"""
option_list = (Option("-p", "--password", dest="password"),)
def run(self, password):
create()
user = user_service.get_by_username("lemur")
admin_role = role_service.get_by_name("admin")
if admin_role:
sys.stdout.write("[-] Admin role already created, skipping...!\n")
else:
# we create an admin role
admin_role = role_service.create(
"admin", description="This is the Lemur administrator role."
)
sys.stdout.write("[+] Created 'admin' role\n")
operator_role = role_service.get_by_name("operator")
if operator_role:
sys.stdout.write("[-] Operator role already created, skipping...!\n")
else:
# we create an operator role
operator_role = role_service.create(
"operator", description="This is the Lemur operator role."
)
sys.stdout.write("[+] Created 'operator' role\n")
read_only_role = role_service.get_by_name("read-only")
if read_only_role:
sys.stdout.write("[-] Read only role already created, skipping...!\n")
else:
# we create an read only role
read_only_role = role_service.create(
"read-only", description="This is the Lemur read only role."
)
sys.stdout.write("[+] Created 'read-only' role\n")
if not user:
if not password:
sys.stdout.write("We need to set Lemur's password to continue!\n")
password = prompt_pass("Password")
password1 = prompt_pass("Confirm Password")
if password != password1:
sys.stderr.write("[!] Passwords do not match!\n")
sys.exit(1)
user_service.create(
"lemur", password, "[email protected]", True, None, [admin_role]
)
sys.stdout.write(
"[+] Created the user 'lemur' and granted it the 'admin' role!\n"
)
else:
sys.stdout.write(
"[-] Default user has already been created, skipping...!\n"
)
intervals = current_app.config.get(
"LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", []
)
sys.stdout.write(
"[!] Creating {num} notifications for {intervals} days as specified by LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\n".format(
num=len(intervals), intervals=",".join([str(x) for x in intervals])
)
)
recipients = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")
sys.stdout.write("[+] Creating expiration email notifications!\n")
sys.stdout.write(
"[!] Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\n".format(
recipients
)
)
notification_service.create_default_expiration_notifications(
"DEFAULT_SECURITY", recipients=recipients
)
_DEFAULT_ROTATION_INTERVAL = "default"
default_rotation_interval = policy_service.get_by_name(
_DEFAULT_ROTATION_INTERVAL
)
if default_rotation_interval:
sys.stdout.write(
"[-] Default rotation interval policy already created, skipping...!\n"
)
else:
days = current_app.config.get("LEMUR_DEFAULT_ROTATION_INTERVAL", 30)
sys.stdout.write(
"[+] Creating default certificate rotation policy of {days} days before issuance.\n".format(
days=days
)
)
policy_service.create(days=days, name=_DEFAULT_ROTATION_INTERVAL)
sys.stdout.write("[/] Done!\n")
class CreateUser(Command):
"""
This command allows for the creation of a new user within Lemur.
"""
option_list = (
Option("-u", "--username", dest="username", required=True),
Option("-e", "--email", dest="email", required=True),
Option("-a", "--active", dest="active", default=True),
Option("-r", "--roles", dest="roles", action="append", default=[]),
Option("-p", "--password", dest="password", default=None),
)
def run(self, username, email, active, roles, password):
role_objs = []
for r in roles:
role_obj = role_service.get_by_name(r)
if role_obj:
role_objs.append(role_obj)
else:
sys.stderr.write("[!] Cannot find role {0}\n".format(r))
sys.exit(1)
if not password:
password1 = prompt_pass("Password")
password2 = prompt_pass("Confirm Password")
password = password1
if password1 != password2:
sys.stderr.write("[!] Passwords do not match!\n")
sys.exit(1)
user_service.create(username, password, email, active, None, role_objs)
sys.stdout.write("[+] Created new user: {0}\n".format(username))
class ResetPassword(Command):
"""
This command allows you to reset a user's password.
"""
option_list = (Option("-u", "--username", dest="username", required=True),)
def run(self, username):
user = user_service.get_by_username(username)
if not user:
sys.stderr.write("[!] No user found for username: {0}\n".format(username))
sys.exit(1)
sys.stderr.write("[+] Resetting password for {0}\n".format(username))
password1 = prompt_pass("Password")
password2 = prompt_pass("Confirm Password")
if password1 != password2:
sys.stderr.write("[!] Passwords do not match\n")
sys.exit(1)
user.password = password1
user.hash_password()
database.commit()
class CreateRole(Command):
"""
This command allows for the creation of a new role within Lemur
"""
option_list = (
Option("-n", "--name", dest="name", required=True),
Option("-u", "--users", dest="users", default=[]),
Option("-d", "--description", dest="description", required=True),
)
def run(self, name, users, description):
user_objs = []
for u in users:
user_obj = user_service.get_by_username(u)
if user_obj:
user_objs.append(user_obj)
else:
sys.stderr.write("[!] Cannot find user {0}".format(u))
sys.exit(1)
role_service.create(name, description=description, users=users)
sys.stdout.write("[+] Created new role: {0}".format(name))
class LemurServer(Command):
"""
This is the main Lemur server, it runs the flask app with gunicorn and
uses any configuration options passed to it.
You can pass all standard gunicorn flags to this command as if you were
running gunicorn itself.
For example:
lemur start -w 4 -b 127.0.0.0:8002
Will start gunicorn with 4 workers bound to 127.0.0.0:8002
"""
description = "Run the app within Gunicorn"
def get_options(self):
settings = make_settings()
options = []
for setting, klass in settings.items():
if klass.cli:
if klass.action:
if klass.action == "store_const":
options.append(
Option(*klass.cli, const=klass.const, action=klass.action)
)
else:
options.append(Option(*klass.cli, action=klass.action))
else:
options.append(Option(*klass.cli))
return options
def run(self, *args, **kwargs):
from gunicorn.app.wsgiapp import WSGIApplication
app = WSGIApplication()
# run startup tasks on an app like object
validate_conf(current_app, REQUIRED_VARIABLES)
app.app_uri = 'lemur:create_app(config_path="{0}")'.format(
current_app.config.get("CONFIG_PATH")
)
return app.run()
@manager.command
def create_config(config_path=None):
"""
Creates a new configuration file if one does not already exist
"""
if not config_path:
config_path = DEFAULT_CONFIG_PATH
config_path = os.path.expanduser(config_path)
dir = os.path.dirname(config_path)
if not os.path.exists(dir):
os.makedirs(dir)
config = generate_settings()
with open(config_path, "w") as f:
f.write(config)
sys.stdout.write("[+] Created a new configuration file {0}\n".format(config_path))
@manager.command
def lock(path=None):
"""
Encrypts a given path. This directory can be used to store secrets needed for normal
Lemur operation. This is especially useful for storing secrets needed for communication
with third parties (e.g. external certificate authorities).
Lemur does not assume anything about the contents of the directory and will attempt to
encrypt all files contained within. Currently this has only been tested against plain
text files.
Path defaults ~/.lemur/keys
:param: path
"""
if not path:
path = os.path.expanduser("~/.lemur/keys")
dest_dir = os.path.join(path, "encrypted")
sys.stdout.write("[!] Generating a new key...\n")
key = Fernet.generate_key()
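    # A fresh Fernet key is generated on every run; files under <path>/decrypted
    # are written encrypted to <path>/encrypted, and the key is printed at the end
    # so it can be stored securely and supplied later to `unlock`.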
if not os.path.exists(dest_dir):
sys.stdout.write("[+] Creating encryption directory: {0}\n".format(dest_dir))
os.makedirs(dest_dir)
for root, dirs, files in os.walk(os.path.join(path, "decrypted")):
for f in files:
source = os.path.join(root, f)
dest = os.path.join(dest_dir, f + ".enc")
with open(source, "rb") as in_file, open(dest, "wb") as out_file:
f = Fernet(key)
data = f.encrypt(in_file.read())
out_file.write(data)
sys.stdout.write(
"[+] Writing file: {0} Source: {1}\n".format(dest, source)
)
sys.stdout.write("[+] Keys have been encrypted with key {0}\n".format(key))
@manager.command
def unlock(path=None):
"""
Decrypts all of the files in a given directory with provided password.
This is most commonly used during the startup sequence of Lemur
allowing it to go from source code to something that can communicate
with external services.
Path defaults ~/.lemur/keys
:param: path
"""
key = prompt_pass("[!] Please enter the encryption password")
if not path:
path = os.path.expanduser("~/.lemur/keys")
dest_dir = os.path.join(path, "decrypted")
source_dir = os.path.join(path, "encrypted")
if not os.path.exists(dest_dir):
sys.stdout.write("[+] Creating decryption directory: {0}\n".format(dest_dir))
os.makedirs(dest_dir)
for root, dirs, files in os.walk(source_dir):
for f in files:
source = os.path.join(source_dir, f)
dest = os.path.join(dest_dir, ".".join(f.split(".")[:-1]))
with open(source, "rb") as in_file, open(dest, "wb") as out_file:
f = Fernet(key)
data = f.decrypt(in_file.read())
out_file.write(data)
sys.stdout.write(
"[+] Writing file: {0} Source: {1}\n".format(dest, source)
)
sys.stdout.write("[+] Keys have been unencrypted!\n")
@manager.command
def publish_verisign_units():
"""
    Simple function that queries Verisign for API units and posts the metrics to
Atlas API for other teams to consume.
:return:
"""
from lemur.plugins import plugins
v = plugins.get("verisign-issuer")
units = v.get_available_units()
metrics = {}
for item in units:
if item["@type"] in metrics.keys():
metrics[item["@type"]] += int(item["@remaining"])
else:
metrics.update({item["@type"]: int(item["@remaining"])})
for name, value in metrics.items():
metric = [
{
"timestamp": 1321351651,
"type": "GAUGE",
"name": "Symantec {0} Unit Count".format(name),
"tags": {},
"value": value,
}
]
requests.post("http://localhost:8078/metrics", data=json.dumps(metric))
def main():
manager.add_command("start", LemurServer())
manager.add_command("runserver", Server(host="127.0.0.1", threaded=True))
manager.add_command("clean", Clean())
manager.add_command("show_urls", ShowUrls())
manager.add_command("db", MigrateCommand)
manager.add_command("init", InitializeApp())
manager.add_command("create_user", CreateUser())
manager.add_command("reset_password", ResetPassword())
manager.add_command("create_role", CreateRole())
manager.add_command("source", source_manager)
manager.add_command("certificate", certificate_manager)
manager.add_command("notify", notification_manager)
manager.add_command("endpoint", endpoint_manager)
manager.add_command("report", report_manager)
manager.add_command("policy", policy_manager)
manager.add_command("pending_certs", pending_certificate_manager)
manager.add_command("dns_providers", dns_provider_manager)
manager.add_command("acme", acme_manager)
manager.run()
if __name__ == "__main__":
main()
|
from homeassistant.components.remote import (
ATTR_COMMAND,
DOMAIN as REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.components.directv import setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
ATTR_UNIQUE_ID = "unique_id"
CLIENT_ENTITY_ID = f"{REMOTE_DOMAIN}.client"
MAIN_ENTITY_ID = f"{REMOTE_DOMAIN}.host"
UNAVAILABLE_ENTITY_ID = f"{REMOTE_DOMAIN}.unavailable_client"
# pylint: disable=redefined-outer-name
async def test_setup(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setup with basic config."""
await setup_integration(hass, aioclient_mock)
assert hass.states.get(MAIN_ENTITY_ID)
assert hass.states.get(CLIENT_ENTITY_ID)
assert hass.states.get(UNAVAILABLE_ENTITY_ID)
async def test_unique_id(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test unique id."""
await setup_integration(hass, aioclient_mock)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
main = entity_registry.async_get(MAIN_ENTITY_ID)
assert main.unique_id == "028877455858"
client = entity_registry.async_get(CLIENT_ENTITY_ID)
assert client.unique_id == "2CA17D1CD30X"
unavailable_client = entity_registry.async_get(UNAVAILABLE_ENTITY_ID)
assert unavailable_client.unique_id == "9XXXXXXXXXX9"
async def test_main_services(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the different services."""
await setup_integration(hass, aioclient_mock)
with patch("directv.DIRECTV.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("poweroff", "0")
with patch("directv.DIRECTV.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("poweron", "0")
with patch("directv.DIRECTV.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID, ATTR_COMMAND: ["dash"]},
blocking=True,
)
remote_mock.assert_called_once_with("dash", "0")
|
import pytest
import voluptuous as vol
from homeassistant.components.climate.const import (
ATTR_AUX_HEAT,
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_FAN_MODE,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_HUMIDITY,
ATTR_MAX_TEMP,
ATTR_MIN_HUMIDITY,
ATTR_MIN_TEMP,
ATTR_PRESET_MODE,
ATTR_SWING_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
DOMAIN,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_ECO,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.unit_system import METRIC_SYSTEM
ENTITY_CLIMATE = "climate.hvac"
ENTITY_ECOBEE = "climate.ecobee"
ENTITY_HEATPUMP = "climate.heatpump"
@pytest.fixture(autouse=True)
async def setup_demo_climate(hass):
"""Initialize setup demo climate."""
hass.config.units = METRIC_SYSTEM
assert await async_setup_component(hass, DOMAIN, {"climate": {"platform": "demo"}})
await hass.async_block_till_done()
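# The demo platform provides the three entities exercised below:
# climate.hvac, climate.ecobee and climate.heatpump.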
def test_setup_params(hass):
"""Test the initial parameters."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_COOL
assert state.attributes.get(ATTR_TEMPERATURE) == 21
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 22
assert state.attributes.get(ATTR_FAN_MODE) == "On High"
assert state.attributes.get(ATTR_HUMIDITY) == 67
assert state.attributes.get(ATTR_CURRENT_HUMIDITY) == 54
assert state.attributes.get(ATTR_SWING_MODE) == "Off"
    assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
assert state.attributes.get(ATTR_HVAC_MODES) == [
"off",
"heat",
"cool",
"auto",
"dry",
"fan_only",
]
def test_default_setup_params(hass):
"""Test the setup with default parameters."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_MIN_TEMP) == 7
assert state.attributes.get(ATTR_MAX_TEMP) == 35
assert state.attributes.get(ATTR_MIN_HUMIDITY) == 30
assert state.attributes.get(ATTR_MAX_HUMIDITY) == 99
async def test_set_only_target_temp_bad_attr(hass):
"""Test setting the target temperature without required attribute."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_TEMPERATURE) == 21
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_TEMPERATURE) == 21
async def test_set_only_target_temp(hass):
"""Test the setting of the target temperature."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_TEMPERATURE) == 21
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: 30},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_TEMPERATURE) == 30.0
async def test_set_only_target_temp_with_convert(hass):
"""Test the setting of the target temperature."""
state = hass.states.get(ENTITY_HEATPUMP)
assert state.attributes.get(ATTR_TEMPERATURE) == 20
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_HEATPUMP, ATTR_TEMPERATURE: 21},
blocking=True,
)
state = hass.states.get(ENTITY_HEATPUMP)
assert state.attributes.get(ATTR_TEMPERATURE) == 21.0
async def test_set_target_temp_range(hass):
"""Test the setting of the target temperature with range."""
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 21.0
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{
ATTR_ENTITY_ID: ENTITY_ECOBEE,
ATTR_TARGET_TEMP_LOW: 20,
ATTR_TARGET_TEMP_HIGH: 25,
},
blocking=True,
)
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 20.0
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25.0
async def test_set_target_temp_range_bad_attr(hass):
"""Test setting the target temperature range without attribute."""
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 21.0
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{
ATTR_ENTITY_ID: ENTITY_ECOBEE,
ATTR_TARGET_TEMP_LOW: None,
ATTR_TARGET_TEMP_HIGH: None,
},
blocking=True,
)
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 21.0
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0
async def test_set_target_humidity_bad_attr(hass):
"""Test setting the target humidity without required attribute."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HUMIDITY) == 67
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HUMIDITY,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HUMIDITY) == 67
async def test_set_target_humidity(hass):
"""Test the setting of the target humidity."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HUMIDITY) == 67
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HUMIDITY,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: 64},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HUMIDITY) == 64.0
async def test_set_fan_mode_bad_attr(hass):
"""Test setting fan mode without required attribute."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_FAN_MODE) == "On High"
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_FAN_MODE) == "On High"
async def test_set_fan_mode(hass):
"""Test setting of new fan mode."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_FAN_MODE) == "On High"
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: "On Low"},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_FAN_MODE) == "On Low"
async def test_set_swing_mode_bad_attr(hass):
"""Test setting swing mode without required attribute."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_SWING_MODE) == "Off"
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_SWING_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_SWING_MODE) == "Off"
async def test_set_swing(hass):
"""Test setting of new swing mode."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_SWING_MODE) == "Off"
await hass.services.async_call(
DOMAIN,
SERVICE_SET_SWING_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: "Auto"},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_SWING_MODE) == "Auto"
async def test_set_hvac_bad_attr_and_state(hass):
"""Test setting hvac mode without required attribute.
Also check the state.
"""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HVAC_ACTION) == CURRENT_HVAC_COOL
assert state.state == HVAC_MODE_COOL
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_HVAC_ACTION) == CURRENT_HVAC_COOL
assert state.state == HVAC_MODE_COOL
async def test_set_hvac(hass):
"""Test setting of new hvac mode."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_COOL
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_HEAT
async def test_set_hold_mode_away(hass):
"""Test setting the hold mode away."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_AWAY},
blocking=True,
)
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_PRESET_MODE) == PRESET_AWAY
async def test_set_hold_mode_eco(hass):
"""Test setting the hold mode eco."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_ECO},
blocking=True,
)
state = hass.states.get(ENTITY_ECOBEE)
assert state.attributes.get(ATTR_PRESET_MODE) == PRESET_ECO
async def test_set_aux_heat_bad_attr(hass):
"""Test setting the auxiliary heater without required attribute."""
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
with pytest.raises(vol.Invalid):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_AUX_HEAT,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_AUX_HEAT: None},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
async def test_set_aux_heat_on(hass):
"""Test setting the axillary heater on/true."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_AUX_HEAT,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_AUX_HEAT: True},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_AUX_HEAT) == STATE_ON
async def test_set_aux_heat_off(hass):
"""Test setting the auxiliary heater off/false."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_AUX_HEAT,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_AUX_HEAT: False},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
async def test_turn_on(hass):
"""Test turn on device."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVAC_MODE_OFF},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_OFF
await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_HEAT
async def test_turn_off(hass):
"""Test turn on device."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
blocking=True,
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_HEAT
await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True
)
state = hass.states.get(ENTITY_CLIMATE)
assert state.state == HVAC_MODE_OFF
|
import copy
import json
from hatasmota.utils import (
get_topic_stat_result,
get_topic_tele_state,
get_topic_tele_will,
)
from homeassistant.components import switch
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
from tests.components.switch import common
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the switch on and verify MQTT message is sent
await common.async_turn_on(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
# Turn the switch off and verify MQTT message is sent
await common.async_turn_off(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "OFF", 0, False
)
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_relay_as_light(hass, mqtt_mock, setup_tasmota):
"""Test relay does not show up as switch in light mode."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state is None
state = hass.states.get("light.test")
assert state is not None
async def test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test availability after MQTT disconnection."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
await help_test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, switch.DOMAIN, config
)
async def test_availability(hass, mqtt_mock, setup_tasmota):
"""Test availability."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
await help_test_availability(hass, mqtt_mock, switch.DOMAIN, config)
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test availability discovery update."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
await help_test_availability_discovery_update(
hass, mqtt_mock, switch.DOMAIN, config
)
async def test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test polling after MQTT connection (re)established."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
poll_topic = "tasmota_49A3BC/cmnd/STATE"
await help_test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, switch.DOMAIN, config, poll_topic, ""
)
async def test_discovery_removal_switch(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered switch."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 0
await help_test_discovery_removal(
hass, mqtt_mock, caplog, switch.DOMAIN, config1, config2
)
async def test_discovery_removal_relay_as_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered relay as light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config1["so"]["30"] = 0 # Disable Home Assistant auto-discovery as light
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 1
config2["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
await help_test_discovery_removal(
hass, mqtt_mock, caplog, switch.DOMAIN, config1, config2
)
async def test_discovery_update_unchanged_switch(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test update of discovered switch."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
with patch(
"homeassistant.components.tasmota.switch.TasmotaSwitch.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, switch.DOMAIN, config, discovery_update
)
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
unique_id = f"{DEFAULT_CONFIG['mac']}_switch_relay_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, switch.DOMAIN, unique_id, config
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
topics = [
get_topic_stat_result(config),
get_topic_tele_state(config),
get_topic_tele_will(config),
]
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, switch.DOMAIN, config, topics
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test MQTT discovery update when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, switch.DOMAIN, config
)
|
from typing import (
TYPE_CHECKING,
Dict,
Hashable,
Iterable,
List,
Optional,
Set,
Tuple,
Union,
overload,
)
import pandas as pd
from . import dtypes, utils
from .alignment import align
from .duck_array_ops import lazy_array_equiv
from .merge import _VALID_COMPAT, merge_attrs, unique_variable
from .variable import IndexVariable, Variable, as_variable
from .variable import concat as concat_vars
if TYPE_CHECKING:
from .dataarray import DataArray
from .dataset import Dataset
@overload
def concat(
objs: Iterable["Dataset"],
dim: Union[str, "DataArray", pd.Index],
data_vars: Union[str, List[str]] = "all",
coords: Union[str, List[str]] = "different",
compat: str = "equals",
positions: Optional[Iterable[int]] = None,
fill_value: object = dtypes.NA,
join: str = "outer",
combine_attrs: str = "override",
) -> "Dataset":
...
@overload
def concat(
objs: Iterable["DataArray"],
dim: Union[str, "DataArray", pd.Index],
data_vars: Union[str, List[str]] = "all",
coords: Union[str, List[str]] = "different",
compat: str = "equals",
positions: Optional[Iterable[int]] = None,
fill_value: object = dtypes.NA,
join: str = "outer",
combine_attrs: str = "override",
) -> "DataArray":
...
def concat(
objs,
dim,
data_vars="all",
coords="different",
compat="equals",
positions=None,
fill_value=dtypes.NA,
join="outer",
combine_attrs="override",
):
"""Concatenate xarray objects along a new or existing dimension.
Parameters
----------
objs : sequence of Dataset and DataArray
xarray objects to concatenate together. Each object is expected to
consist of variables and coordinates with matching shapes except for
along the concatenated dimension.
dim : str or DataArray or pandas.Index
Name of the dimension to concatenate along. This can either be a new
dimension name, in which case it is added along axis=0, or an existing
dimension name, in which case the location of the dimension is
unchanged. If dimension is provided as a DataArray or Index, its name
is used as the dimension to concatenate along and the values are added
as a coordinate.
data_vars : {"minimal", "different", "all"} or list of str, optional
These data variables will be concatenated together:
* "minimal": Only data variables in which the dimension already
appears are included.
* "different": Data variables which are not equal (ignoring
attributes) across all datasets are also concatenated (as well as
all for which dimension already appears). Beware: this option may
load the data payload of data variables into memory if they are not
already loaded.
* "all": All data variables will be concatenated.
* list of str: The listed data variables will be concatenated, in
addition to the "minimal" data variables.
If objects are DataArrays, data_vars must be "all".
coords : {"minimal", "different", "all"} or list of str, optional
These coordinate variables will be concatenated together:
* "minimal": Only coordinates in which the dimension already appears
are included.
* "different": Coordinates which are not equal (ignoring attributes)
across all datasets are also concatenated (as well as all for which
dimension already appears). Beware: this option may load the data
payload of coordinate variables into memory if they are not already
loaded.
* "all": All coordinate variables will be concatenated, except
those corresponding to other dimensions.
* list of str: The listed coordinate variables will be concatenated,
in addition to the "minimal" coordinates.
compat : {"identical", "equals", "broadcast_equals", "no_conflicts", "override"}, optional
String indicating how to compare non-concatenated variables of the same name for
potential conflicts. This is passed down to merge.
- "broadcast_equals": all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- "equals": all values and dimensions must be the same.
- "identical": all values, dimensions and attributes must be the
same.
- "no_conflicts": only values which are not null in both datasets
must be equal. The returned dataset then contains the combination
of all non-null values.
- "override": skip comparing and pick variable from first dataset
positions : None or list of integer arrays, optional
List of integer arrays which specifies the integer positions to which
to assign each dataset along the concatenated dimension. If not
supplied, objects are concatenated in the provided order.
fill_value : scalar or dict-like, optional
Value to use for newly missing values. If a dict-like, maps
variable names to fill values. Use a data array's name to
refer to its values.
join : {"outer", "inner", "left", "right", "exact"}, optional
String indicating how to combine differing indexes
(excluding dim) in objects
- "outer": use the union of object indexes
- "inner": use the intersection of object indexes
- "left": use indexes from the first object with each dimension
- "right": use indexes from the last object with each dimension
- "exact": instead of aligning, raise `ValueError` when indexes to be
aligned are not equal
- "override": if indexes are of same size, rewrite indexes to be
those of the first object with that dimension. Indexes for the same
dimension must have the same size in all objects.
combine_attrs : {"drop", "identical", "no_conflicts", "override"}, \
default: "override"
String indicating how to combine attrs of the objects being merged:
- "drop": empty attrs on returned Dataset.
- "identical": all attrs must be the same on every object.
- "no_conflicts": attrs from all objects are combined, any that have
the same name must also have the same value.
- "override": skip comparing and copy attrs from the first dataset to
the result.
Returns
-------
concatenated : type of objs
See also
--------
merge
Examples
--------
>>> da = xr.DataArray(
... np.arange(6).reshape(2, 3), [("x", ["a", "b"]), ("y", [10, 20, 30])]
... )
>>> da
<xarray.DataArray (x: 2, y: 3)>
array([[0, 1, 2],
[3, 4, 5]])
Coordinates:
* x (x) <U1 'a' 'b'
* y (y) int64 10 20 30
>>> xr.concat([da.isel(y=slice(0, 1)), da.isel(y=slice(1, None))], dim="y")
<xarray.DataArray (x: 2, y: 3)>
array([[0, 1, 2],
[3, 4, 5]])
Coordinates:
* x (x) <U1 'a' 'b'
* y (y) int64 10 20 30
>>> xr.concat([da.isel(x=0), da.isel(x=1)], "x")
<xarray.DataArray (x: 2, y: 3)>
array([[0, 1, 2],
[3, 4, 5]])
Coordinates:
* x (x) object 'a' 'b'
* y (y) int64 10 20 30
>>> xr.concat([da.isel(x=0), da.isel(x=1)], "new_dim")
<xarray.DataArray (new_dim: 2, y: 3)>
array([[0, 1, 2],
[3, 4, 5]])
Coordinates:
x (new_dim) <U1 'a' 'b'
* y (y) int64 10 20 30
Dimensions without coordinates: new_dim
>>> xr.concat([da.isel(x=0), da.isel(x=1)], pd.Index([-90, -100], name="new_dim"))
<xarray.DataArray (new_dim: 2, y: 3)>
array([[0, 1, 2],
[3, 4, 5]])
Coordinates:
x (new_dim) <U1 'a' 'b'
* y (y) int64 10 20 30
* new_dim (new_dim) int64 -90 -100
"""
# TODO: add ignore_index arguments copied from pandas.concat
# TODO: support concatenating scalar coordinates even if the concatenated
# dimension already exists
from .dataarray import DataArray
from .dataset import Dataset
try:
first_obj, objs = utils.peek_at(objs)
except StopIteration:
raise ValueError("must supply at least one object to concatenate")
if compat not in _VALID_COMPAT:
raise ValueError(
"compat=%r invalid: must be 'broadcast_equals', 'equals', 'identical', 'no_conflicts' or 'override'"
% compat
)
if isinstance(first_obj, DataArray):
f = _dataarray_concat
elif isinstance(first_obj, Dataset):
f = _dataset_concat
else:
raise TypeError(
"can only concatenate xarray Dataset and DataArray "
"objects, got %s" % type(first_obj)
)
return f(
objs, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs
)
def _calc_concat_dim_coord(dim):
"""
Infer the dimension name and 1d coordinate variable (if appropriate)
for concatenating along the new dimension.
"""
from .dataarray import DataArray
if isinstance(dim, str):
coord = None
elif not isinstance(dim, (DataArray, Variable)):
dim_name = getattr(dim, "name", None)
if dim_name is None:
dim_name = "concat_dim"
coord = IndexVariable(dim_name, dim)
dim = dim_name
elif not isinstance(dim, DataArray):
coord = as_variable(dim).to_index_variable()
(dim,) = coord.dims
else:
coord = dim
(dim,) = coord.dims
return dim, coord
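# For illustration (assumed behaviour, based on the branches above): a plain
# string like "time" yields ("time", None); a pandas.Index named "time" yields
# ("time", IndexVariable("time", index)); an unnamed Index falls back to the
# dimension name "concat_dim".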
def _calc_concat_over(datasets, dim, dim_names, data_vars, coords, compat):
"""
    Determine which dataset variables need to be concatenated in the result.
"""
# Return values
concat_over = set()
equals = {}
if dim in dim_names:
concat_over_existing_dim = True
concat_over.add(dim)
else:
concat_over_existing_dim = False
concat_dim_lengths = []
for ds in datasets:
if concat_over_existing_dim:
if dim not in ds.dims:
if dim in ds:
ds = ds.set_coords(dim)
concat_over.update(k for k, v in ds.variables.items() if dim in v.dims)
concat_dim_lengths.append(ds.dims.get(dim, 1))
def process_subset_opt(opt, subset):
if isinstance(opt, str):
if opt == "different":
if compat == "override":
raise ValueError(
"Cannot specify both %s='different' and compat='override'."
% subset
)
# all nonindexes that are not the same in each dataset
for k in getattr(datasets[0], subset):
if k not in concat_over:
equals[k] = None
variables = []
for ds in datasets:
if k in ds.variables:
variables.append(ds.variables[k])
if len(variables) == 1:
# coords="different" doesn't make sense when only one object
# contains a particular variable.
break
elif len(variables) != len(datasets) and opt == "different":
raise ValueError(
f"{k!r} not present in all datasets and coords='different'. "
f"Either add {k!r} to datasets where it is missing or "
"specify coords='minimal'."
)
# first check without comparing values i.e. no computes
for var in variables[1:]:
equals[k] = getattr(variables[0], compat)(
var, equiv=lazy_array_equiv
)
if equals[k] is not True:
# exit early if we know these are not equal or that
# equality cannot be determined i.e. one or all of
# the variables wraps a numpy array
break
if equals[k] is False:
concat_over.add(k)
elif equals[k] is None:
# Compare the variable of all datasets vs. the one
# of the first dataset. Perform the minimum amount of
# loads in order to avoid multiple loads from disk
# while keeping the RAM footprint low.
v_lhs = datasets[0].variables[k].load()
# We'll need to know later on if variables are equal.
computed = []
for ds_rhs in datasets[1:]:
v_rhs = ds_rhs.variables[k].compute()
computed.append(v_rhs)
if not getattr(v_lhs, compat)(v_rhs):
concat_over.add(k)
equals[k] = False
# computed variables are not to be re-computed
# again in the future
for ds, v in zip(datasets[1:], computed):
ds.variables[k].data = v.data
break
else:
equals[k] = True
elif opt == "all":
concat_over.update(
set(getattr(datasets[0], subset)) - set(datasets[0].dims)
)
elif opt == "minimal":
pass
else:
raise ValueError(f"unexpected value for {subset}: {opt}")
else:
invalid_vars = [k for k in opt if k not in getattr(datasets[0], subset)]
if invalid_vars:
if subset == "coords":
raise ValueError(
"some variables in coords are not coordinates on "
"the first dataset: %s" % (invalid_vars,)
)
else:
raise ValueError(
"some variables in data_vars are not data variables "
"on the first dataset: %s" % (invalid_vars,)
)
concat_over.update(opt)
process_subset_opt(data_vars, "data_vars")
process_subset_opt(coords, "coords")
return concat_over, equals, concat_dim_lengths
# determine dimensional coordinate names and a dict mapping name to DataArray
def _parse_datasets(
datasets: Iterable["Dataset"],
) -> Tuple[Dict[Hashable, Variable], Dict[Hashable, int], Set[Hashable], Set[Hashable]]:
dims: Set[Hashable] = set()
all_coord_names: Set[Hashable] = set()
data_vars: Set[Hashable] = set() # list of data_vars
dim_coords: Dict[Hashable, Variable] = {} # maps dim name to variable
dims_sizes: Dict[Hashable, int] = {} # shared dimension sizes to expand variables
for ds in datasets:
dims_sizes.update(ds.dims)
all_coord_names.update(ds.coords)
data_vars.update(ds.data_vars)
# preserves ordering of dimensions
for dim in ds.dims:
if dim in dims:
continue
if dim not in dim_coords:
dim_coords[dim] = ds.coords[dim].variable
dims = dims | set(ds.dims)
return dim_coords, dims_sizes, all_coord_names, data_vars
def _dataset_concat(
datasets: List["Dataset"],
dim: Union[str, "DataArray", pd.Index],
data_vars: Union[str, List[str]],
coords: Union[str, List[str]],
compat: str,
positions: Optional[Iterable[int]],
fill_value: object = dtypes.NA,
join: str = "outer",
combine_attrs: str = "override",
) -> "Dataset":
"""
Concatenate a sequence of datasets along a new or existing dimension
"""
from .dataset import Dataset
dim, coord = _calc_concat_dim_coord(dim)
# Make sure we're working on a copy (we'll be loading variables)
datasets = [ds.copy() for ds in datasets]
datasets = list(
align(*datasets, join=join, copy=False, exclude=[dim], fill_value=fill_value)
)
dim_coords, dims_sizes, coord_names, data_names = _parse_datasets(datasets)
dim_names = set(dim_coords)
unlabeled_dims = dim_names - coord_names
both_data_and_coords = coord_names & data_names
if both_data_and_coords:
raise ValueError(
"%r is a coordinate in some datasets but not others." % both_data_and_coords
)
# we don't want the concat dimension in the result dataset yet
dim_coords.pop(dim, None)
dims_sizes.pop(dim, None)
# case where concat dimension is a coordinate or data_var but not a dimension
if (dim in coord_names or dim in data_names) and dim not in dim_names:
datasets = [ds.expand_dims(dim) for ds in datasets]
    # determine which variables to concatenate
concat_over, equals, concat_dim_lengths = _calc_concat_over(
datasets, dim, dim_names, data_vars, coords, compat
)
# determine which variables to merge, and then merge them according to compat
variables_to_merge = (coord_names | data_names) - concat_over - dim_names
result_vars = {}
if variables_to_merge:
to_merge: Dict[Hashable, List[Variable]] = {
var: [] for var in variables_to_merge
}
for ds in datasets:
for var in variables_to_merge:
if var in ds:
to_merge[var].append(ds.variables[var])
for var in variables_to_merge:
result_vars[var] = unique_variable(
var, to_merge[var], compat=compat, equals=equals.get(var, None)
)
else:
result_vars = {}
result_vars.update(dim_coords)
# assign attrs and encoding from first dataset
result_attrs = merge_attrs([ds.attrs for ds in datasets], combine_attrs)
result_encoding = datasets[0].encoding
# check that global attributes are fixed across all datasets if necessary
for ds in datasets[1:]:
if compat == "identical" and not utils.dict_equiv(ds.attrs, result_attrs):
raise ValueError("Dataset global attributes not equal.")
# we've already verified everything is consistent; now, calculate
# shared dimension sizes so we can expand the necessary variables
def ensure_common_dims(vars):
# ensure each variable with the given name shares the same
# dimensions and the same shape for all of them except along the
# concat dimension
common_dims = tuple(pd.unique([d for v in vars for d in v.dims]))
if dim not in common_dims:
common_dims = (dim,) + common_dims
for var, dim_len in zip(vars, concat_dim_lengths):
if var.dims != common_dims:
common_shape = tuple(dims_sizes.get(d, dim_len) for d in common_dims)
var = var.set_dims(common_dims, common_shape)
yield var
# stack up each variable to fill-out the dataset (in order)
# n.b. this loop preserves variable order, needed for groupby.
for k in datasets[0].variables:
if k in concat_over:
try:
vars = ensure_common_dims([ds.variables[k] for ds in datasets])
except KeyError:
raise ValueError("%r is not present in all datasets." % k)
combined = concat_vars(vars, dim, positions)
assert isinstance(combined, Variable)
result_vars[k] = combined
elif k in result_vars:
# preserves original variable order
result_vars[k] = result_vars.pop(k)
result = Dataset(result_vars, attrs=result_attrs)
absent_coord_names = coord_names - set(result.variables)
if absent_coord_names:
raise ValueError(
"Variables %r are coordinates in some datasets but not others."
% absent_coord_names
)
result = result.set_coords(coord_names)
result.encoding = result_encoding
result = result.drop_vars(unlabeled_dims, errors="ignore")
if coord is not None:
# add concat dimension last to ensure that its in the final Dataset
result[coord.name] = coord
return result
def _dataarray_concat(
arrays: Iterable["DataArray"],
dim: Union[str, "DataArray", pd.Index],
data_vars: Union[str, List[str]],
coords: Union[str, List[str]],
compat: str,
positions: Optional[Iterable[int]],
fill_value: object = dtypes.NA,
join: str = "outer",
combine_attrs: str = "override",
) -> "DataArray":
arrays = list(arrays)
if data_vars != "all":
raise ValueError(
"data_vars is not a valid argument when concatenating DataArray objects"
)
datasets = []
for n, arr in enumerate(arrays):
if n == 0:
name = arr.name
elif name != arr.name:
if compat == "identical":
raise ValueError("array names not identical")
else:
arr = arr.rename(name)
datasets.append(arr._to_temp_dataset())
ds = _dataset_concat(
datasets,
dim,
data_vars,
coords,
compat,
positions,
fill_value=fill_value,
join=join,
combine_attrs="drop",
)
merged_attrs = merge_attrs([da.attrs for da in arrays], combine_attrs)
result = arrays[0]._from_temp_dataset(ds, name)
result.attrs = merged_attrs
return result
|
import os
import numpy as np
import pandas as pd
import xarray as xr
from . import randint, randn, requires_dask
nx = 3000
ny = 2000
nt = 1000
basic_indexes = {
"1slice": {"x": slice(0, 3)},
"1slice-1scalar": {"x": 0, "y": slice(None, None, 3)},
"2slicess-1scalar": {"x": slice(3, -3, 3), "y": 1, "t": slice(None, -3, 3)},
}
basic_assignment_values = {
"1slice": xr.DataArray(randn((3, ny), frac_nan=0.1), dims=["x", "y"]),
"1slice-1scalar": xr.DataArray(randn(int(ny / 3) + 1, frac_nan=0.1), dims=["y"]),
"2slicess-1scalar": xr.DataArray(
randn(int((nx - 6) / 3), frac_nan=0.1), dims=["x"]
),
}
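# Note (illustrative): each assignment value's shape matches the selection its
# key makes above, e.g. "1slice" selects 3 rows of var1 and so has shape (3, ny).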
outer_indexes = {
"1d": {"x": randint(0, nx, 400)},
"2d": {"x": randint(0, nx, 500), "y": randint(0, ny, 400)},
"2d-1scalar": {"x": randint(0, nx, 100), "y": 1, "t": randint(0, nt, 400)},
}
outer_assignment_values = {
"1d": xr.DataArray(randn((400, ny), frac_nan=0.1), dims=["x", "y"]),
"2d": xr.DataArray(randn((500, 400), frac_nan=0.1), dims=["x", "y"]),
"2d-1scalar": xr.DataArray(randn(100, frac_nan=0.1), dims=["x"]),
}
vectorized_indexes = {
"1-1d": {"x": xr.DataArray(randint(0, nx, 400), dims="a")},
"2-1d": {
"x": xr.DataArray(randint(0, nx, 400), dims="a"),
"y": xr.DataArray(randint(0, ny, 400), dims="a"),
},
"3-2d": {
"x": xr.DataArray(randint(0, nx, 400).reshape(4, 100), dims=["a", "b"]),
"y": xr.DataArray(randint(0, ny, 400).reshape(4, 100), dims=["a", "b"]),
"t": xr.DataArray(randint(0, nt, 400).reshape(4, 100), dims=["a", "b"]),
},
}
vectorized_assignment_values = {
"1-1d": xr.DataArray(randn((400, 2000)), dims=["a", "y"], coords={"a": randn(400)}),
"2-1d": xr.DataArray(randn(400), dims=["a"], coords={"a": randn(400)}),
"3-2d": xr.DataArray(
randn((4, 100)), dims=["a", "b"], coords={"a": randn(4), "b": randn(100)}
),
}
class Base:
def setup(self, key):
self.ds = xr.Dataset(
{
"var1": (("x", "y"), randn((nx, ny), frac_nan=0.1)),
"var2": (("x", "t"), randn((nx, nt))),
"var3": (("t",), randn(nt)),
},
coords={
"x": np.arange(nx),
"y": np.linspace(0, 1, ny),
"t": pd.date_range("1970-01-01", periods=nt, freq="D"),
"x_coords": ("x", np.linspace(1.1, 2.1, nx)),
},
)
class Indexing(Base):
def time_indexing_basic(self, key):
self.ds.isel(**basic_indexes[key]).load()
time_indexing_basic.param_names = ["key"]
time_indexing_basic.params = [list(basic_indexes.keys())]
def time_indexing_outer(self, key):
self.ds.isel(**outer_indexes[key]).load()
time_indexing_outer.param_names = ["key"]
time_indexing_outer.params = [list(outer_indexes.keys())]
def time_indexing_vectorized(self, key):
self.ds.isel(**vectorized_indexes[key]).load()
time_indexing_vectorized.param_names = ["key"]
time_indexing_vectorized.params = [list(vectorized_indexes.keys())]
class Assignment(Base):
def time_assignment_basic(self, key):
ind = basic_indexes[key]
val = basic_assignment_values[key]
self.ds["var1"][ind.get("x", slice(None)), ind.get("y", slice(None))] = val
time_assignment_basic.param_names = ["key"]
time_assignment_basic.params = [list(basic_indexes.keys())]
def time_assignment_outer(self, key):
ind = outer_indexes[key]
val = outer_assignment_values[key]
self.ds["var1"][ind.get("x", slice(None)), ind.get("y", slice(None))] = val
time_assignment_outer.param_names = ["key"]
time_assignment_outer.params = [list(outer_indexes.keys())]
def time_assignment_vectorized(self, key):
ind = vectorized_indexes[key]
val = vectorized_assignment_values[key]
self.ds["var1"][ind.get("x", slice(None)), ind.get("y", slice(None))] = val
time_assignment_vectorized.param_names = ["key"]
time_assignment_vectorized.params = [list(vectorized_indexes.keys())]
class IndexingDask(Indexing):
def setup(self, key):
requires_dask()
super().setup(key)
self.ds = self.ds.chunk({"x": 100, "y": 50, "t": 50})
class BooleanIndexing:
# https://github.com/pydata/xarray/issues/2227
def setup(self):
self.ds = xr.Dataset(
{"a": ("time", np.arange(10_000_000))},
coords={"time": np.arange(10_000_000)},
)
self.time_filter = self.ds.time > 50_000
def time_indexing(self):
self.ds.isel(time=self.time_filter)
class HugeAxisSmallSliceIndexing:
# https://github.com/pydata/xarray/pull/4560
def setup(self):
self.filepath = "test_indexing_huge_axis_small_slice.nc"
if not os.path.isfile(self.filepath):
xr.Dataset(
{"a": ("x", np.arange(10_000_000))},
coords={"x": np.arange(10_000_000)},
).to_netcdf(self.filepath, format="NETCDF4")
self.ds = xr.open_dataset(self.filepath)
def time_indexing(self):
self.ds.isel(x=slice(100))
def cleanup(self):
self.ds.close()
|
import logging
import requests
import rxv
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOUND_MODE,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PLAYING,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from .const import SERVICE_ENABLE_OUTPUT, SERVICE_SELECT_SCENE
_LOGGER = logging.getLogger(__name__)
ATTR_ENABLED = "enabled"
ATTR_PORT = "port"
ATTR_SCENE = "scene"
CONF_SOURCE_IGNORE = "source_ignore"
CONF_SOURCE_NAMES = "source_names"
CONF_ZONE_IGNORE = "zone_ignore"
CONF_ZONE_NAMES = "zone_names"
DATA_YAMAHA = "yamaha_known_receivers"
DEFAULT_NAME = "Yamaha Receiver"
SUPPORT_YAMAHA = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY
| SUPPORT_SELECT_SOUND_MODE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_SOURCE_IGNORE, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_ZONE_IGNORE, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_SOURCE_NAMES, default={}): {cv.string: cv.string},
vol.Optional(CONF_ZONE_NAMES, default={}): {cv.string: cv.string},
}
)
class YamahaConfigInfo:
"""Configuration Info for Yamaha Receivers."""
    def __init__(self, config, discovery_info):
"""Initialize the Configuration Info for Yamaha Receiver."""
self.name = config.get(CONF_NAME)
self.host = config.get(CONF_HOST)
self.ctrl_url = f"http://{self.host}:80/YamahaRemoteControl/ctrl"
self.source_ignore = config.get(CONF_SOURCE_IGNORE)
self.source_names = config.get(CONF_SOURCE_NAMES)
self.zone_ignore = config.get(CONF_ZONE_IGNORE)
self.zone_names = config.get(CONF_ZONE_NAMES)
self.from_discovery = False
if discovery_info is not None:
self.name = discovery_info.get("name")
self.model = discovery_info.get("model_name")
self.ctrl_url = discovery_info.get("control_url")
self.desc_url = discovery_info.get("description_url")
self.zone_ignore = []
self.from_discovery = True
def _discovery(config_info):
"""Discover receivers from configuration in the network."""
if config_info.from_discovery:
receivers = rxv.RXV(
config_info.ctrl_url,
model_name=config_info.model,
friendly_name=config_info.name,
unit_desc_url=config_info.desc_url,
).zone_controllers()
_LOGGER.debug("Receivers: %s", receivers)
elif config_info.host is None:
receivers = []
for recv in rxv.find():
receivers.extend(recv.zone_controllers())
else:
receivers = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers()
return receivers
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Yamaha platform."""
# Keep track of configured receivers so that we don't end up
# discovering a receiver dynamically that we have static config
    # for. Each device is keyed by its zone_id.
known_zones = hass.data.setdefault(DATA_YAMAHA, set())
# Get the Infos for configuration from config (YAML) or Discovery
config_info = YamahaConfigInfo(config=config, discovery_info=discovery_info)
# Async check if the Receivers are there in the network
receivers = await hass.async_add_executor_job(_discovery, config_info)
entities = []
for receiver in receivers:
if receiver.zone in config_info.zone_ignore:
continue
entity = YamahaDevice(
config_info.name,
receiver,
config_info.source_ignore,
config_info.source_names,
config_info.zone_names,
)
# Only add device if it's not already added
if entity.zone_id not in known_zones:
known_zones.add(entity.zone_id)
entities.append(entity)
else:
_LOGGER.debug("Ignoring duplicate receiver: %s", config_info.name)
async_add_entities(entities)
# Register Service 'select_scene'
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_SELECT_SCENE,
{vol.Required(ATTR_SCENE): cv.string},
"set_scene",
)
# Register Service 'enable_output'
platform.async_register_entity_service(
SERVICE_ENABLE_OUTPUT,
{vol.Required(ATTR_ENABLED): cv.boolean, vol.Required(ATTR_PORT): cv.string},
"enable_output",
)
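# Example service call (illustrative only; the entity id and scene name are
# made up, and this assumes SERVICE_SELECT_SCENE resolves to "select_scene"):
#
#   service: yamaha.select_scene
#   data:
#     entity_id: media_player.yamaha_receiver
#     scene: "TV Viewing"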
class YamahaDevice(MediaPlayerEntity):
"""Representation of a Yamaha device."""
def __init__(self, name, receiver, source_ignore, source_names, zone_names):
"""Initialize the Yamaha Receiver."""
self.receiver = receiver
self._muted = False
self._volume = 0
self._pwstate = STATE_OFF
self._current_source = None
self._sound_mode = None
self._sound_mode_list = None
self._source_list = None
self._source_ignore = source_ignore or []
self._source_names = source_names or {}
self._zone_names = zone_names or {}
self._reverse_mapping = None
self._playback_support = None
self._is_playback_supported = False
self._play_status = None
self._name = name
self._zone = receiver.zone
def update(self):
"""Get the latest details from the device."""
try:
self._play_status = self.receiver.play_status()
except requests.exceptions.ConnectionError:
_LOGGER.info("Receiver is offline: %s", self._name)
return
if self.receiver.on:
if self._play_status is None:
self._pwstate = STATE_ON
elif self._play_status.playing:
self._pwstate = STATE_PLAYING
else:
self._pwstate = STATE_IDLE
else:
self._pwstate = STATE_OFF
self._muted = self.receiver.mute
self._volume = (self.receiver.volume / 100) + 1
if self.source_list is None:
self.build_source_list()
current_source = self.receiver.input
self._current_source = self._source_names.get(current_source, current_source)
self._playback_support = self.receiver.get_playback_support()
self._is_playback_supported = self.receiver.is_playback_supported(
self._current_source
)
surround_programs = self.receiver.surround_programs()
if surround_programs:
self._sound_mode = self.receiver.surround_program
self._sound_mode_list = surround_programs
else:
self._sound_mode = None
self._sound_mode_list = None
def build_source_list(self):
"""Build the source list."""
self._reverse_mapping = {
alias: source for source, alias in self._source_names.items()
}
self._source_list = sorted(
self._source_names.get(source, source)
for source in self.receiver.inputs()
if source not in self._source_ignore
)
@property
def name(self):
"""Return the name of the device."""
name = self._name
zone_name = self._zone_names.get(self._zone, self._zone)
if zone_name != "Main_Zone":
# Zone will be one of Main_Zone, Zone_2, Zone_3
name += f" {zone_name.replace('_', ' ')}"
return name
@property
def state(self):
"""Return the state of the device."""
return self._pwstate
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def sound_mode(self):
"""Return the current sound mode."""
return self._sound_mode
@property
def sound_mode_list(self):
"""Return the current sound mode."""
return self._sound_mode_list
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
@property
def zone_id(self):
"""Return a zone_id to ensure 1 media player per zone."""
return f"{self.receiver.ctrl_url}:{self._zone}"
@property
def supported_features(self):
"""Flag media player features that are supported."""
supported_features = SUPPORT_YAMAHA
supports = self._playback_support
mapping = {
"play": (SUPPORT_PLAY | SUPPORT_PLAY_MEDIA),
"pause": SUPPORT_PAUSE,
"stop": SUPPORT_STOP,
"skip_f": SUPPORT_NEXT_TRACK,
"skip_r": SUPPORT_PREVIOUS_TRACK,
}
for attr, feature in mapping.items():
if getattr(supports, attr, False):
supported_features |= feature
return supported_features
def turn_off(self):
"""Turn off media player."""
self.receiver.on = False
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
receiver_vol = 100 - (volume * 100)
negative_receiver_vol = -receiver_vol
self.receiver.volume = negative_receiver_vol
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self.receiver.mute = mute
def turn_on(self):
"""Turn the media player on."""
self.receiver.on = True
self._volume = (self.receiver.volume / 100) + 1
def media_play(self):
"""Send play command."""
self._call_playback_function(self.receiver.play, "play")
def media_pause(self):
"""Send pause command."""
self._call_playback_function(self.receiver.pause, "pause")
def media_stop(self):
"""Send stop command."""
self._call_playback_function(self.receiver.stop, "stop")
def media_previous_track(self):
"""Send previous track command."""
self._call_playback_function(self.receiver.previous, "previous track")
def media_next_track(self):
"""Send next track command."""
self._call_playback_function(self.receiver.next, "next track")
def _call_playback_function(self, function, function_text):
try:
function()
except rxv.exceptions.ResponseException:
_LOGGER.warning("Failed to execute %s on %s", function_text, self._name)
def select_source(self, source):
"""Select input source."""
self.receiver.input = self._reverse_mapping.get(source, source)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from an ID.
        This exposes a pass-through for various input sources on the
        Yamaha to directly play certain kinds of media. media_type is
        treated as the input type that we are setting, and media_id is
        specific to it.
For the NET RADIO mediatype the format for ``media_id`` is a
"path" in your vtuner hierarchy. For instance:
``Bookmarks>Internet>Radio Paradise``. The separators are
``>`` and the parts of this are navigated by name behind the
scenes. There is a looping construct built into the yamaha
library to do this with a fallback timeout if the vtuner
service is unresponsive.
NOTE: this might take a while, because the only API interface
for setting the net radio station emulates button pressing and
navigating through the net radio menu hierarchy. And each sub
menu must be fetched by the receiver from the vtuner service.
"""
if media_type == "NET RADIO":
self.receiver.net_radio(media_id)
def enable_output(self, port, enabled):
"""Enable or disable an output port.."""
self.receiver.enable_output(port, enabled)
def set_scene(self, scene):
"""Set the current scene."""
try:
self.receiver.scene = scene
except AssertionError:
_LOGGER.warning("Scene '%s' does not exist!", scene)
def select_sound_mode(self, sound_mode):
"""Set Sound Mode for Receiver.."""
self.receiver.surround_program = sound_mode
@property
def media_artist(self):
"""Artist of current playing media."""
if self._play_status is not None:
return self._play_status.artist
@property
def media_album_name(self):
"""Album of current playing media."""
if self._play_status is not None:
return self._play_status.album
@property
def media_content_type(self):
"""Content type of current playing media."""
# Loose assumption that if playback is supported, we are playing music
if self._is_playback_supported:
return MEDIA_TYPE_MUSIC
return None
@property
def media_title(self):
"""Artist of current playing media."""
if self._play_status is not None:
song = self._play_status.song
station = self._play_status.station
            # If both song and station are available, print both, otherwise
# just the one we have.
if song and station:
return f"{station}: {song}"
return song or station
|
import asyncio
from collections import OrderedDict, deque
import logging
import queue
import re
import traceback
import voluptuous as vol
from homeassistant import __path__ as HOMEASSISTANT_PATH
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
CONF_MAX_ENTRIES = "max_entries"
CONF_FIRE_EVENT = "fire_event"
CONF_MESSAGE = "message"
CONF_LEVEL = "level"
CONF_LOGGER = "logger"
DATA_SYSTEM_LOG = "system_log"
DEFAULT_MAX_ENTRIES = 50
DEFAULT_FIRE_EVENT = False
DOMAIN = "system_log"
EVENT_SYSTEM_LOG = "system_log_event"
SERVICE_CLEAR = "clear"
SERVICE_WRITE = "write"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(
CONF_MAX_ENTRIES, default=DEFAULT_MAX_ENTRIES
): cv.positive_int,
vol.Optional(CONF_FIRE_EVENT, default=DEFAULT_FIRE_EVENT): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SERVICE_CLEAR_SCHEMA = vol.Schema({})
SERVICE_WRITE_SCHEMA = vol.Schema(
{
vol.Required(CONF_MESSAGE): cv.string,
vol.Optional(CONF_LEVEL, default="error"): vol.In(
["debug", "info", "warning", "error", "critical"]
),
vol.Optional(CONF_LOGGER): cv.string,
}
)
def _figure_out_source(record, call_stack, hass):
paths = [HOMEASSISTANT_PATH[0], hass.config.config_dir]
# If a stack trace exists, extract file names from the entire call stack.
# The other case is when a regular "log" is made (without an attached
# exception). In that case, just use the file where the log was made from.
if record.exc_info:
stack = [(x[0], x[1]) for x in traceback.extract_tb(record.exc_info[2])]
else:
index = -1
for i, frame in enumerate(call_stack):
if frame[0] == record.pathname:
index = i
break
if index == -1:
# For some reason we couldn't find pathname in the stack.
stack = [(record.pathname, record.lineno)]
else:
stack = call_stack[0 : index + 1]
# Iterate through the stack call (in reverse) and find the last call from
# a file in Home Assistant. Try to figure out where error happened.
paths_re = r"(?:{})/(.*)".format("|".join([re.escape(x) for x in paths]))
for pathname in reversed(stack):
# Try to match with a file within Home Assistant
match = re.match(paths_re, pathname[0])
if match:
return [match.group(1), pathname[1]]
# Ok, we don't know what this is
return (record.pathname, record.lineno)
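# For illustration (based on the return paths above): records raised from
# within Home Assistant come back as [relative_path, lineno], while anything
# outside the known paths falls back to (record.pathname, record.lineno).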
class LogEntry:
"""Store HA log entries."""
def __init__(self, record, stack, source):
"""Initialize a log entry."""
self.first_occurred = self.timestamp = record.created
self.name = record.name
self.level = record.levelname
self.message = deque([record.getMessage()], maxlen=5)
self.exception = ""
self.root_cause = None
if record.exc_info:
self.exception = "".join(traceback.format_exception(*record.exc_info))
_, _, tb = record.exc_info # pylint: disable=invalid-name
# Last line of traceback contains the root cause of the exception
if traceback.extract_tb(tb):
self.root_cause = str(traceback.extract_tb(tb)[-1])
self.source = source
self.count = 1
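        # Entries with the same logger name, source location and root cause are
        # treated as duplicates; DedupStore keys its records on this hash.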
self.hash = str([self.name, *self.source, self.root_cause])
def to_dict(self):
"""Convert object into dict to maintain backward compatibility."""
return {
"name": self.name,
"message": list(self.message),
"level": self.level,
"source": self.source,
"timestamp": self.timestamp,
"exception": self.exception,
"count": self.count,
"first_occurred": self.first_occurred,
}
class DedupStore(OrderedDict):
"""Data store to hold max amount of deduped entries."""
def __init__(self, maxlen=50):
"""Initialize a new DedupStore."""
super().__init__()
self.maxlen = maxlen
def add_entry(self, entry):
"""Add a new entry."""
key = entry.hash
if key in self:
# Update stored entry
existing = self[key]
existing.count += 1
existing.timestamp = entry.timestamp
if entry.message[0] not in existing.message:
existing.message.append(entry.message[0])
self.move_to_end(key)
else:
self[key] = entry
if len(self) > self.maxlen:
# Removes the first record which should also be the oldest
self.popitem(last=False)
def to_list(self):
"""Return reversed list of log entries - LIFO."""
return [value.to_dict() for value in reversed(self.values())]
class LogErrorQueueHandler(logging.handlers.QueueHandler):
"""Process the log in another thread."""
def emit(self, record):
"""Emit a log record."""
try:
self.enqueue(record)
except asyncio.CancelledError:
raise
except Exception: # pylint: disable=broad-except
self.handleError(record)
class LogErrorHandler(logging.Handler):
"""Log handler for error messages."""
def __init__(self, hass, maxlen, fire_event):
"""Initialize a new LogErrorHandler."""
super().__init__()
self.hass = hass
self.records = DedupStore(maxlen=maxlen)
self.fire_event = fire_event
def emit(self, record):
"""Save error and warning logs.
Everything logged with error or warning is saved in local buffer. A
default upper limit is set to 50 (older entries are discarded) but can
be changed if needed.
"""
stack = []
if not record.exc_info:
stack = [(f[0], f[1]) for f in traceback.extract_stack()]
entry = LogEntry(record, stack, _figure_out_source(record, stack, self.hass))
self.records.add_entry(entry)
if self.fire_event:
self.hass.bus.fire(EVENT_SYSTEM_LOG, entry.to_dict())
async def async_setup(hass, config):
"""Set up the logger component."""
conf = config.get(DOMAIN)
if conf is None:
conf = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN]
simple_queue = queue.SimpleQueue()
queue_handler = LogErrorQueueHandler(simple_queue)
queue_handler.setLevel(logging.WARN)
logging.root.addHandler(queue_handler)
handler = LogErrorHandler(hass, conf[CONF_MAX_ENTRIES], conf[CONF_FIRE_EVENT])
hass.data[DOMAIN] = handler
listener = logging.handlers.QueueListener(
simple_queue, handler, respect_handler_level=True
)
listener.start()
@callback
def _async_stop_queue_handler(_) -> None:
"""Cleanup handler."""
logging.root.removeHandler(queue_handler)
listener.stop()
del hass.data[DOMAIN]
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_stop_queue_handler)
hass.http.register_view(AllErrorsView(handler))
async def async_service_handler(service):
"""Handle logger services."""
if service.service == "clear":
handler.records.clear()
return
if service.service == "write":
logger = logging.getLogger(
service.data.get(CONF_LOGGER, f"{__name__}.external")
)
level = service.data[CONF_LEVEL]
getattr(logger, level)(service.data[CONF_MESSAGE])
async def async_shutdown_handler(event):
"""Remove logging handler when Home Assistant is shutdown."""
# This is needed as older logger instances will remain
logging.getLogger().removeHandler(handler)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_handler)
hass.services.async_register(
DOMAIN, SERVICE_CLEAR, async_service_handler, schema=SERVICE_CLEAR_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_WRITE, async_service_handler, schema=SERVICE_WRITE_SCHEMA
)
return True
class AllErrorsView(HomeAssistantView):
"""Get all logged errors and warnings."""
url = "/api/error/all"
name = "api:error:all"
def __init__(self, handler):
"""Initialize a new AllErrorsView."""
self.handler = handler
async def get(self, request):
"""Get all errors and warnings."""
return self.json(self.handler.records.to_list())
|
import abodepy.helpers.constants as CONST
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_WINDOW,
BinarySensorEntity,
)
from . import AbodeDevice
from .const import DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Abode binary sensor devices."""
data = hass.data[DOMAIN]
device_types = [
CONST.TYPE_CONNECTIVITY,
CONST.TYPE_MOISTURE,
CONST.TYPE_MOTION,
CONST.TYPE_OCCUPANCY,
CONST.TYPE_OPENING,
]
entities = []
for device in data.abode.get_devices(generic_type=device_types):
entities.append(AbodeBinarySensor(data, device))
async_add_entities(entities)
class AbodeBinarySensor(AbodeDevice, BinarySensorEntity):
"""A binary sensor implementation for Abode device."""
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._device.is_on
@property
def device_class(self):
"""Return the class of the binary sensor."""
if self._device.get_value("is_window") == "1":
return DEVICE_CLASS_WINDOW
return self._device.generic_type
|
from __future__ import absolute_import
import unittest
from copy import deepcopy
from .common_imports import etree, HelperTestCase
def summarize(elem):
return elem.tag
def summarize_list(seq):
return list(map(summarize, seq))
def normalize_crlf(tree):
for elem in tree.getiterator():
if elem.text: elem.text = elem.text.replace("\r\n", "\n")
if elem.tail: elem.tail = elem.tail.replace("\r\n", "\n")
class EtreeElementPathTestCase(HelperTestCase):
etree = etree
from lxml import _elementpath
def test_cache(self):
self._elementpath._cache.clear()
el = self.etree.XML(b'<a><b><c/><c/></b></a>')
self.assertFalse(self._elementpath._cache)
self.assertTrue(el.findall('b/c'))
self.assertEqual(1, len(self._elementpath._cache))
self.assertTrue(el.findall('b/c'))
self.assertEqual(1, len(self._elementpath._cache))
self.assertFalse(el.findall('xxx'))
self.assertEqual(2, len(self._elementpath._cache))
self.assertFalse(el.findall('xxx'))
self.assertEqual(2, len(self._elementpath._cache))
self.assertTrue(el.findall('b/c'))
self.assertEqual(2, len(self._elementpath._cache))
def _assert_tokens(self, tokens, path, namespaces=None):
self.assertEqual(tokens, list(self._elementpath.xpath_tokenizer(path, namespaces)))
def test_tokenizer(self):
assert_tokens = self._assert_tokens
assert_tokens(
[('/', '')],
'/',
)
assert_tokens(
[('.', ''), ('/', ''), ('', 'a'), ('/', ''), ('', 'b'), ('/', ''), ('', 'c')],
'./a/b/c',
)
assert_tokens(
[('/', ''), ('', 'a'), ('/', ''), ('', 'b'), ('/', ''), ('', 'c')],
'/a/b/c',
)
assert_tokens(
[('/', ''), ('', '{nsx}a'), ('/', ''), ('', '{nsy}b'), ('/', ''), ('', 'c')],
'/x:a/y:b/c',
{'x': 'nsx', 'y': 'nsy'},
)
assert_tokens(
[('/', ''), ('', '{nsx}a'), ('/', ''), ('', '{nsy}b'), ('/', ''), ('', '{nsnone}c')],
'/x:a/y:b/c',
{'x': 'nsx', 'y': 'nsy', None: 'nsnone'},
)
def test_tokenizer_predicates(self):
assert_tokens = self._assert_tokens
assert_tokens(
[('', 'a'), ('[', ''), ('', 'b'), (']', '')],
'a[b]',
)
assert_tokens(
[('', 'a'), ('[', ''), ('', 'b'), ('=', ''), ('"abc"', ''), (']', '')],
'a[b="abc"]',
)
assert_tokens(
[('', 'a'), ('[', ''), ('.', ''), ('', ''), ('=', ''), ('', ''), ('"abc"', ''), (']', '')],
'a[. = "abc"]',
)
def test_xpath_tokenizer(self):
# Test the XPath tokenizer. Copied from CPython's "test_xml_etree.py"
ElementPath = self._elementpath
def check(p, expected, namespaces=None):
self.assertEqual([op or tag
for op, tag in ElementPath.xpath_tokenizer(p, namespaces)],
expected)
# tests from the xml specification
check("*", ['*'])
check("text()", ['text', '()'])
check("@name", ['@', 'name'])
check("@*", ['@', '*'])
check("para[1]", ['para', '[', '1', ']'])
check("para[last()]", ['para', '[', 'last', '()', ']'])
check("*/para", ['*', '/', 'para'])
check("/doc/chapter[5]/section[2]",
['/', 'doc', '/', 'chapter', '[', '5', ']',
'/', 'section', '[', '2', ']'])
check("chapter//para", ['chapter', '//', 'para'])
check("//para", ['//', 'para'])
check("//olist/item", ['//', 'olist', '/', 'item'])
check(".", ['.'])
check(".//para", ['.', '//', 'para'])
check("..", ['..'])
check("../@lang", ['..', '/', '@', 'lang'])
check("chapter[title]", ['chapter', '[', 'title', ']'])
check("employee[@secretary and @assistant]", ['employee',
'[', '@', 'secretary', '', 'and', '', '@', 'assistant', ']'])
# additional tests
check("@{ns}attr", ['@', '{ns}attr'])
check("{http://spam}egg", ['{http://spam}egg'])
check("./spam.egg", ['.', '/', 'spam.egg'])
check(".//{http://spam}egg", ['.', '//', '{http://spam}egg'])
# wildcard tags
check("{ns}*", ['{ns}*'])
check("{}*", ['{}*'])
check("{*}tag", ['{*}tag'])
check("{*}*", ['{*}*'])
check(".//{*}tag", ['.', '//', '{*}tag'])
# namespace prefix resolution
check("./xsd:type", ['.', '/', '{http://www.w3.org/2001/XMLSchema}type'],
{'xsd': 'http://www.w3.org/2001/XMLSchema'})
check("type", ['{http://www.w3.org/2001/XMLSchema}type'],
{'': 'http://www.w3.org/2001/XMLSchema'})
check("@xsd:type", ['@', '{http://www.w3.org/2001/XMLSchema}type'],
{'xsd': 'http://www.w3.org/2001/XMLSchema'})
check("@type", ['@', 'type'],
{'': 'http://www.w3.org/2001/XMLSchema'})
check("@{*}type", ['@', '{*}type'],
{'': 'http://www.w3.org/2001/XMLSchema'})
check("@{ns}attr", ['@', '{ns}attr'],
{'': 'http://www.w3.org/2001/XMLSchema',
'ns': 'http://www.w3.org/2001/XMLSchema'})
def test_find(self):
"""
Test find methods (including xpath syntax).
Originally copied from 'selftest.py'.
"""
elem = etree.XML("""
<body>
<tag class='a'>text</tag>
<tag class='b' />
<section>
<tag class='b' id='inner'>subtext</tag>
</section>
</body>
""")
self.assertEqual(elem.find("tag").tag,
'tag')
self.assertEqual(etree.ElementTree(elem).find("tag").tag,
'tag')
self.assertEqual(elem.find("section/tag").tag,
'tag')
self.assertEqual(etree.ElementTree(elem).find("section/tag").tag,
'tag')
self.assertEqual(elem.findtext("tag"),
'text')
self.assertEqual(elem.findtext("tog"),
None)
self.assertEqual(elem.findtext("tog", "default"),
'default')
self.assertEqual(etree.ElementTree(elem).findtext("tag"),
'text')
self.assertEqual(elem.findtext("section/tag"),
'subtext')
self.assertEqual(etree.ElementTree(elem).findtext("section/tag"),
'subtext')
self.assertEqual(summarize_list(elem.findall("tag")),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall("*")),
['tag', 'tag', 'section'])
self.assertEqual(summarize_list(elem.findall(".//tag")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall("section/tag")),
['tag'])
self.assertEqual(summarize_list(elem.findall("section//tag")),
['tag'])
self.assertEqual(summarize_list(elem.findall("section/*")),
['tag'])
self.assertEqual(summarize_list(elem.findall("section//*")),
['tag'])
self.assertEqual(summarize_list(elem.findall("section/.//*")),
['tag'])
self.assertEqual(summarize_list(elem.findall("*/*")),
['tag'])
self.assertEqual(summarize_list(elem.findall("*//*")),
['tag'])
self.assertEqual(summarize_list(elem.findall("*/tag")),
['tag'])
self.assertEqual(summarize_list(elem.findall("*/./tag")),
['tag'])
self.assertEqual(summarize_list(elem.findall("./tag")),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall("././tag")),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class]")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[ @class]")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class ]")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[ @class ]")),
['tag', 'tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class='a']")),
['tag'])
self.assertEqual(summarize_list(elem.findall('.//tag[@class="a"]')),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class='b']")),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall('.//tag[@class="b"]')),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall('.//tag[@class = "b"]')),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@id]")),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class][@id]")),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//section[tag]")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//section[element]")),
[])
self.assertEqual(summarize_list(elem.findall(".//section[tag='subtext']")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//section[tag ='subtext']")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//section[tag= 'subtext']")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//section[tag = 'subtext']")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//section[ tag = 'subtext' ]")),
['section'])
self.assertEqual(summarize_list(elem.findall(".//tag[.='subtext']")),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[. ='subtext']")),
['tag'])
self.assertEqual(summarize_list(elem.findall('.//tag[.= "subtext"]')),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[. = 'subtext']")),
['tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[. = 'subtext ']")),
[])
self.assertEqual(summarize_list(elem.findall(".//tag[.= ' subtext']")),
[])
self.assertEqual(summarize_list(elem.findall("../tag")),
[])
self.assertEqual(summarize_list(elem.findall("section/../tag")),
['tag', 'tag'])
self.assertEqual(summarize_list(etree.ElementTree(elem).findall("./tag")),
['tag', 'tag'])
# FIXME: ET's Path module handles this case incorrectly; this gives
# a warning in 1.3, and the behaviour will be modified in 1.4.
self.assertEqual(summarize_list(etree.ElementTree(elem).findall("/tag")),
['tag', 'tag'])
# duplicate section => 2x tag matches
elem[1] = deepcopy(elem[2])
self.assertEqual(summarize_list(elem.findall(".//section[tag = 'subtext']")),
['section', 'section'])
self.assertEqual(summarize_list(elem.findall(".//tag[. = 'subtext']")),
['tag', 'tag'])
self.assertEqual(summarize_list(elem.findall(".//tag[@class][@id]")),
['tag', 'tag'])
#class ElementTreeElementPathTestCase(EtreeElementPathTestCase):
# import xml.etree.ElementTree as etree
# import xml.etree.ElementPath as _elementpath
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(EtreeElementPathTestCase)])
#suite.addTests([unittest.makeSuite(ElementTreeElementPathTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
from datetime import timedelta
import logging
from pybotvac.exceptions import NeatoRobotException
import voluptuous as vol
from homeassistant.components.vacuum import (
ATTR_STATUS,
STATE_CLEANING,
STATE_DOCKED,
STATE_ERROR,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_LOCATE,
SUPPORT_MAP,
SUPPORT_PAUSE,
SUPPORT_RETURN_HOME,
SUPPORT_START,
SUPPORT_STATE,
SUPPORT_STOP,
StateVacuumEntity,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_MODE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.service import extract_entity_ids
from .const import (
ACTION,
ALERTS,
ERRORS,
MODE,
NEATO_DOMAIN,
NEATO_LOGIN,
NEATO_MAP_DATA,
NEATO_PERSISTENT_MAPS,
NEATO_ROBOTS,
SCAN_INTERVAL_MINUTES,
SERVICE_NEATO_CUSTOM_CLEANING,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
SUPPORT_NEATO = (
SUPPORT_BATTERY
| SUPPORT_PAUSE
| SUPPORT_RETURN_HOME
| SUPPORT_STOP
| SUPPORT_START
| SUPPORT_CLEAN_SPOT
| SUPPORT_STATE
| SUPPORT_MAP
| SUPPORT_LOCATE
)
ATTR_CLEAN_START = "clean_start"
ATTR_CLEAN_STOP = "clean_stop"
ATTR_CLEAN_AREA = "clean_area"
ATTR_CLEAN_BATTERY_START = "battery_level_at_clean_start"
ATTR_CLEAN_BATTERY_END = "battery_level_at_clean_end"
ATTR_CLEAN_SUSP_COUNT = "clean_suspension_count"
ATTR_CLEAN_SUSP_TIME = "clean_suspension_time"
ATTR_CLEAN_PAUSE_TIME = "clean_pause_time"
ATTR_CLEAN_ERROR_TIME = "clean_error_time"
ATTR_LAUNCHED_FROM = "launched_from"
ATTR_NAVIGATION = "navigation"
ATTR_CATEGORY = "category"
ATTR_ZONE = "zone"
SERVICE_NEATO_CUSTOM_CLEANING_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(ATTR_MODE, default=2): cv.positive_int,
vol.Optional(ATTR_NAVIGATION, default=1): cv.positive_int,
vol.Optional(ATTR_CATEGORY, default=4): cv.positive_int,
vol.Optional(ATTR_ZONE): cv.string,
}
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Neato vacuum with config entry."""
dev = []
neato = hass.data.get(NEATO_LOGIN)
mapdata = hass.data.get(NEATO_MAP_DATA)
persistent_maps = hass.data.get(NEATO_PERSISTENT_MAPS)
for robot in hass.data[NEATO_ROBOTS]:
dev.append(NeatoConnectedVacuum(neato, robot, mapdata, persistent_maps))
if not dev:
return
_LOGGER.debug("Adding vacuums %s", dev)
async_add_entities(dev, True)
def neato_custom_cleaning_service(call):
"""Zone cleaning service that allows user to change options."""
for robot in service_to_entities(call):
if call.service == SERVICE_NEATO_CUSTOM_CLEANING:
mode = call.data.get(ATTR_MODE)
navigation = call.data.get(ATTR_NAVIGATION)
category = call.data.get(ATTR_CATEGORY)
zone = call.data.get(ATTR_ZONE)
try:
robot.neato_custom_cleaning(mode, navigation, category, zone)
except NeatoRobotException as ex:
_LOGGER.error("Neato vacuum connection error: %s", ex)
def service_to_entities(call):
"""Return the known devices that a service call mentions."""
entity_ids = extract_entity_ids(hass, call)
entities = [entity for entity in dev if entity.entity_id in entity_ids]
return entities
hass.services.async_register(
NEATO_DOMAIN,
SERVICE_NEATO_CUSTOM_CLEANING,
neato_custom_cleaning_service,
schema=SERVICE_NEATO_CUSTOM_CLEANING_SCHEMA,
)
class NeatoConnectedVacuum(StateVacuumEntity):
"""Representation of a Neato Connected Vacuum."""
def __init__(self, neato, robot, mapdata, persistent_maps):
"""Initialize the Neato Connected Vacuum."""
self.robot = robot
self._available = neato.logged_in if neato is not None else False
self._mapdata = mapdata
self._name = f"{self.robot.name}"
self._robot_has_map = self.robot.has_persistent_maps
self._robot_maps = persistent_maps
self._robot_serial = self.robot.serial
self._status_state = None
self._clean_state = None
self._state = None
self._clean_time_start = None
self._clean_time_stop = None
self._clean_area = None
self._clean_battery_start = None
self._clean_battery_end = None
self._clean_susp_charge_count = None
self._clean_susp_time = None
self._clean_pause_time = None
self._clean_error_time = None
self._launched_from = None
self._battery_level = None
self._robot_boundaries = []
self._robot_stats = None
def update(self):
"""Update the states of Neato Vacuums."""
_LOGGER.debug("Running Neato Vacuums update for '%s'", self.entity_id)
try:
if self._robot_stats is None:
self._robot_stats = self.robot.get_general_info().json().get("data")
except NeatoRobotException:
_LOGGER.warning("Couldn't fetch robot information of %s", self.entity_id)
try:
self._state = self.robot.state
except NeatoRobotException as ex:
if self._available: # print only once when available
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
self._state = None
self._available = False
return
self._available = True
_LOGGER.debug("self._state=%s", self._state)
if "alert" in self._state:
robot_alert = ALERTS.get(self._state["alert"])
else:
robot_alert = None
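# State codes reported by the robot, as handled by the branches below:
# 1 = idle/docked, 2 = cleaning, 3 = paused, 4 = error.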
if self._state["state"] == 1:
if self._state["details"]["isCharging"]:
self._clean_state = STATE_DOCKED
self._status_state = "Charging"
elif (
self._state["details"]["isDocked"]
and not self._state["details"]["isCharging"]
):
self._clean_state = STATE_DOCKED
self._status_state = "Docked"
else:
self._clean_state = STATE_IDLE
self._status_state = "Stopped"
if robot_alert is not None:
self._status_state = robot_alert
elif self._state["state"] == 2:
if robot_alert is None:
self._clean_state = STATE_CLEANING
self._status_state = (
f"{MODE.get(self._state['cleaning']['mode'])} "
f"{ACTION.get(self._state['action'])}"
)
if (
"boundary" in self._state["cleaning"]
and "name" in self._state["cleaning"]["boundary"]
):
self._status_state += (
f" {self._state['cleaning']['boundary']['name']}"
)
else:
self._status_state = robot_alert
elif self._state["state"] == 3:
self._clean_state = STATE_PAUSED
self._status_state = "Paused"
elif self._state["state"] == 4:
self._clean_state = STATE_ERROR
self._status_state = ERRORS.get(self._state["error"])
self._battery_level = self._state["details"]["charge"]
if not self._mapdata.get(self._robot_serial, {}).get("maps", []):
return
mapdata = self._mapdata[self._robot_serial]["maps"][0]
self._clean_time_start = (mapdata["start_at"].strip("Z")).replace("T", " ")
self._clean_time_stop = (mapdata["end_at"].strip("Z")).replace("T", " ")
self._clean_area = mapdata["cleaned_area"]
self._clean_susp_charge_count = mapdata["suspended_cleaning_charging_count"]
self._clean_susp_time = mapdata["time_in_suspended_cleaning"]
self._clean_pause_time = mapdata["time_in_pause"]
self._clean_error_time = mapdata["time_in_error"]
self._clean_battery_start = mapdata["run_charge_at_start"]
self._clean_battery_end = mapdata["run_charge_at_end"]
self._launched_from = mapdata["launched_from"]
if (
self._robot_has_map
and self._state["availableServices"]["maps"] != "basic-1"
and self._robot_maps[self._robot_serial]
):
allmaps = self._robot_maps[self._robot_serial]
_LOGGER.debug(
"Found the following maps for '%s': %s", self.entity_id, allmaps
)
self._robot_boundaries = [] # Reset boundaries before refreshing boundaries
for maps in allmaps:
try:
robot_boundaries = self.robot.get_map_boundaries(maps["id"]).json()
except NeatoRobotException as ex:
_LOGGER.error(
"Could not fetch map boundaries for '%s': %s",
self.entity_id,
ex,
)
return
_LOGGER.debug(
"Boundaries for robot '%s' in map '%s': %s",
self.entity_id,
maps["name"],
robot_boundaries,
)
if "boundaries" in robot_boundaries["data"]:
self._robot_boundaries += robot_boundaries["data"]["boundaries"]
_LOGGER.debug(
"List of boundaries for '%s': %s",
self.entity_id,
self._robot_boundaries,
)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def supported_features(self):
"""Flag vacuum cleaner robot features that are supported."""
return SUPPORT_NEATO
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
return self._battery_level
@property
def available(self):
"""Return if the robot is available."""
return self._available
@property
def icon(self):
"""Return neato specific icon."""
return "mdi:robot-vacuum-variant"
@property
def state(self):
"""Return the status of the vacuum cleaner."""
return self._clean_state
@property
def unique_id(self):
"""Return a unique ID."""
return self._robot_serial
@property
def device_state_attributes(self):
"""Return the state attributes of the vacuum cleaner."""
data = {}
if self._status_state is not None:
data[ATTR_STATUS] = self._status_state
if self._clean_time_start is not None:
data[ATTR_CLEAN_START] = self._clean_time_start
if self._clean_time_stop is not None:
data[ATTR_CLEAN_STOP] = self._clean_time_stop
if self._clean_area is not None:
data[ATTR_CLEAN_AREA] = self._clean_area
if self._clean_susp_charge_count is not None:
data[ATTR_CLEAN_SUSP_COUNT] = self._clean_susp_charge_count
if self._clean_susp_time is not None:
data[ATTR_CLEAN_SUSP_TIME] = self._clean_susp_time
if self._clean_pause_time is not None:
data[ATTR_CLEAN_PAUSE_TIME] = self._clean_pause_time
if self._clean_error_time is not None:
data[ATTR_CLEAN_ERROR_TIME] = self._clean_error_time
if self._clean_battery_start is not None:
data[ATTR_CLEAN_BATTERY_START] = self._clean_battery_start
if self._clean_battery_end is not None:
data[ATTR_CLEAN_BATTERY_END] = self._clean_battery_end
if self._launched_from is not None:
data[ATTR_LAUNCHED_FROM] = self._launched_from
return data
@property
def device_info(self):
"""Device info for neato robot."""
info = {"identifiers": {(NEATO_DOMAIN, self._robot_serial)}, "name": self._name}
if self._robot_stats:
info["manufacturer"] = self._robot_stats["battery"]["vendor"]
info["model"] = self._robot_stats["model"]
info["sw_version"] = self._robot_stats["firmware"]
return info
def start(self):
"""Start cleaning or resume cleaning."""
try:
if self._state["state"] == 1:
self.robot.start_cleaning()
elif self._state["state"] == 3:
self.robot.resume_cleaning()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def pause(self):
"""Pause the vacuum."""
try:
self.robot.pause_cleaning()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
try:
if self._clean_state == STATE_CLEANING:
self.robot.pause_cleaning()
self._clean_state = STATE_RETURNING
self.robot.send_to_base()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def stop(self, **kwargs):
"""Stop the vacuum cleaner."""
try:
self.robot.stop_cleaning()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def locate(self, **kwargs):
"""Locate the robot by making it emit a sound."""
try:
self.robot.locate()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def clean_spot(self, **kwargs):
"""Run a spot cleaning starting from the base."""
try:
self.robot.start_spot_cleaning()
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
def neato_custom_cleaning(self, mode, navigation, category, zone=None, **kwargs):
"""Zone cleaning service call."""
boundary_id = None
if zone is not None:
for boundary in self._robot_boundaries:
if zone in boundary["name"]:
boundary_id = boundary["id"]
if boundary_id is None:
_LOGGER.error(
"Zone '%s' was not found for the robot '%s'", zone, self.entity_id
)
return
self._clean_state = STATE_CLEANING
try:
self.robot.start_cleaning(mode, navigation, category, boundary_id)
except NeatoRobotException as ex:
_LOGGER.error(
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
)
|
import datetime
import six
from dogpile.cache import make_region
from dogpile.cache.util import function_key_generator
#: Expiration time for show caching
SHOW_EXPIRATION_TIME = datetime.timedelta(weeks=3).total_seconds()
#: Expiration time for episode caching
EPISODE_EXPIRATION_TIME = datetime.timedelta(days=3).total_seconds()
#: Expiration time for scraper searches
REFINER_EXPIRATION_TIME = datetime.timedelta(weeks=1).total_seconds()
def _to_native_str(value):
if six.PY2:
# In Python 2, the native string type is bytes
if isinstance(value, six.text_type): # unicode for Python 2
return value.encode('utf-8')
else:
return six.binary_type(value)
else:
# In Python 3, the native string type is unicode
if isinstance(value, six.binary_type): # bytes for Python 3
return value.decode('utf-8')
else:
return six.text_type(value)
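# A quick illustration of _to_native_str, derived from the branches above:
# on Python 3 both b'show' and u'show' become the native str 'show', while
# on Python 2 both become the native bytes string 'show'. This keeps the
# dogpile cache keys in the interpreter's native string type.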
def to_native_str_key_generator(namespace, fn, to_str=_to_native_str):
return function_key_generator(namespace, fn, to_str)
region = make_region(function_key_generator=to_native_str_key_generator)
|
import numpy as np
import pytest
from numpy.testing import assert_equal
from arctic._util import FwPointersCfg
from arctic.store._ndarray_store import NdarrayStore, _APPEND_COUNT
from arctic.store.version_store import register_versioned_storage
from tests.integration.store.test_version_store import FwPointersCtx
register_versioned_storage(NdarrayStore)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_append_simple_ndarray(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.ones(1000, dtype='int64')
library.write('MYARR', ndarr)
library.append('MYARR', np.ones(1000, dtype='int64'))
library.append('MYARR', np.ones(1000, dtype='int64'))
library.append('MYARR', np.ones(2005, dtype='int64'))
saved_arr = library.read('MYARR').data
assert np.all(np.ones(5005, dtype='int64') == saved_arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_append_simple_ndarray_promoting_types(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.ones(100, dtype='int64')
library.write('MYARR', ndarr)
library.append('MYARR', np.ones(100, dtype='float64'))
library.append('MYARR', np.ones(100, dtype='int64'))
library.append('MYARR', np.ones(205, dtype='float64'))
saved_arr = library.read('MYARR').data
assert np.all(np.ones(505, dtype='float64') == saved_arr)
def test_promote_types(library):
ndarr = np.empty(1000, dtype=[('abc', 'int64')])
library.write('MYARR', ndarr[:800])
library.append('MYARR', ndarr[-200:].astype([('abc', 'float64')]))
saved_arr = library.read('MYARR').data
assert np.all(ndarr.astype([('abc', 'float64')]) == saved_arr)
def test_promote_types2(library):
ndarr = np.array(np.arange(1000), dtype=[('abc', 'float64')])
library.write('MYARR', ndarr[:800])
library.append('MYARR', ndarr[-200:].astype([('abc', 'int64')]))
saved_arr = library.read('MYARR').data
assert np.all(ndarr.astype([('abc', np.promote_types('float64', 'int64'))]) == saved_arr)
def test_promote_types_smaller_sizes(library):
library.write('MYARR', np.ones(100, dtype='int64'))
library.append('MYARR', np.ones(100, dtype='int32'))
saved_arr = library.read('MYARR').data
assert np.all(np.ones(200, dtype='int64') == saved_arr)
def test_promote_types_larger_sizes(library):
library.write('MYARR', np.ones(100, dtype='int32'))
library.append('MYARR', np.ones(100, dtype='int64'))
saved_arr = library.read('MYARR').data
assert np.all(np.ones(200, dtype='int64') == saved_arr)
def test_promote_field_types_smaller_sizes(library):
arr = np.array([(3, 7)], dtype=[('a', '<i8'), ('b', '<i8')])
library.write('MYARR', arr)
arr = np.array([(9, 8)], dtype=[('a', '<i4'), ('b', '<i8')])
library.append('MYARR', arr)
saved_arr = library.read('MYARR').data
expected = np.array([(3, 7), (9, 8)], dtype=[('a', '<i8'), ('b', '<i8')])
assert np.all(saved_arr == expected)
def test_promote_field_types_larger_sizes(library):
arr = np.array([(3, 7)], dtype=[('a', '<i4'), ('b', '<i8')])
library.write('MYARR', arr)
arr = np.array([(9, 8)], dtype=[('a', '<i8'), ('b', '<i8')])
library.append('MYARR', arr)
saved_arr = library.read('MYARR').data
expected = np.array([(3, 7), (9, 8)], dtype=[('a', '<i8'), ('b', '<i8')])
assert np.all(saved_arr == expected)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_append_ndarray_with_field_shape(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.empty(10, dtype=[('A', 'int64'), ('B', 'float64', (2,))])
ndarr['A'] = 1
ndarr['B'] = 2
ndarr2 = np.empty(10, dtype=[('A', 'int64'), ('B', 'int64', (2,))])
ndarr2['A'] = 1
ndarr2['B'] = 2
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
ndarr3 = np.empty(20, dtype=[('A', 'int64'), ('B', 'float64', (2,))])
ndarr3['A'] = 1
ndarr3['B'] = 2
assert np.all(ndarr3 == saved_arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_append_read_large_ndarray(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(50 * 1024 * 1024 / dtype.itemsize).view(dtype=dtype)
assert len(ndarr.tostring()) > 16 * 1024 * 1024
library.write('MYARR1', ndarr)
# Exactly enough appends to trigger 2 re-compacts, so the result should be identical
# to writing the whole array at once
ndarr2 = np.arange(240).view(dtype=dtype)
for n in np.split(ndarr2, 120):
library.append('MYARR1', n)
saved_arr = library.read('MYARR1').data
assert np.all(np.concatenate([ndarr, ndarr2]) == saved_arr)
library.write('MYARR2', np.concatenate([ndarr, ndarr2]))
version1 = library._read_metadata('MYARR1')
version2 = library._read_metadata('MYARR2')
assert version1['append_count'] == version2['append_count']
assert version1['append_size'] == version2['append_size']
assert version1['segment_count'] == version2['segment_count']
assert version1['up_to'] == version2['up_to']
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_save_append_read_ndarray(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 * 1024 * 1024 / dtype.itemsize).view(dtype=dtype)
assert len(ndarr.tostring()) > 16 * 1024 * 1024
library.write('MYARR', ndarr)
sliver = np.arange(30).view(dtype=dtype)
library.append('MYARR', sliver)
saved_arr = library.read('MYARR').data
assert np.all(np.concatenate([ndarr, sliver]) == saved_arr)
library.append('MYARR', sliver)
saved_arr = library.read('MYARR').data
assert np.all(np.concatenate([ndarr, sliver, sliver]) == saved_arr)
def test_save_append_read_1row_ndarray(library):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 * 1024 * 1024 / dtype.itemsize).view(dtype=dtype)
assert len(ndarr.tostring()) > 16 * 1024 * 1024
library.write('MYARR', ndarr)
sliver = np.arange(1).view(dtype=dtype)
library.append('MYARR', sliver)
saved_arr = library.read('MYARR').data
assert np.all(np.concatenate([ndarr, sliver]) == saved_arr)
library.append('MYARR', sliver)
saved_arr = library.read('MYARR').data
assert np.all(np.concatenate([ndarr, sliver, sliver]) == saved_arr)
def test_append_too_large_ndarray(library):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 * 1024 * 1024 / dtype.itemsize).view(dtype=dtype)
assert len(ndarr.tostring()) > 16 * 1024 * 1024
library.write('MYARR', ndarr)
library.append('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(np.concatenate([ndarr, ndarr]) == saved_arr)
def test_empty_field_append_keeps_all_columns(library):
ndarr = np.array([(3, 5)], dtype=[('a', '<i'), ('b', '<i')])
ndarr2 = np.array([], dtype=[('a', '<i')])
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == np.array([(3, 5)], dtype=[('a', '<i'), ('b', '<i')]))
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_empty_append_promotes_dtype(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.array(["a", "b", "c"])
ndarr2 = np.array([])
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == ndarr)
def test_empty_append_promotes_dtype2(library):
ndarr = np.array([])
ndarr2 = np.array(["a", "b", "c"])
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == ndarr2)
def test_empty_append_promotes_dtype3(library):
ndarr = np.array([])
ndarr2 = np.array(["a", "b", "c"])
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
library.append('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == np.hstack((ndarr2, ndarr2)))
def test_convert_to_structured_array(library):
arr = np.ones(100, dtype='int64')
library.write('MYARR', arr)
arr = np.array([(6,)], dtype=[('a', '<i8')])
with pytest.raises(ValueError):
library.append('MYARR', arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_empty_append_concat_and_rewrite(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.array([])
ndarr2 = np.array(["a", "b", "c"])
library.write('MYARR', ndarr)
for _ in range(_APPEND_COUNT + 2):
library.append('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == ndarr2)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_empty_append_concat_and_rewrite_2(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr2 = np.array(["a", "b", "c"])
library.write('MYARR', ndarr2)
for _ in range(_APPEND_COUNT + 1):
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == np.hstack([ndarr2] * (_APPEND_COUNT + 2)))
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_empty_append_concat_and_rewrite_3(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.array([])
ndarr2 = np.array(["a", "b", "c"])
library.write('MYARR', ndarr2)
for _ in range(_APPEND_COUNT + 1):
library.append('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(saved_arr == ndarr2)
def test_append_with_extra_columns(library):
ndarr = np.array([(2.1, 1, "a")], dtype=[('C', np.float), ('B', np.int), ('A', 'S1')])
ndarr2 = np.array([("b", 2, 3.1, 'c', 4, 5.)], dtype=[('A', 'S1'), ('B', np.int), ('C', np.float),
('D', 'S1'), ('E', np.int), ('F', np.float)])
expected = np.array([("a", 1, 2.1, '', 0, np.nan),
("b", 2, 3.1, 'c', 4, 5.)],
dtype=np.dtype([('A', 'S1'), ('B', np.int), ('C', np.float),
('D', 'S1'), ('E', np.int), ('F', np.float)]))
library.write('MYARR', ndarr)
library.append('MYARR', ndarr2)
saved_arr = library.read('MYARR').data
assert expected.dtype == saved_arr.dtype
assert_equal(expected.tolist(), saved_arr.tolist())
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_save_append_delete_append(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 / dtype.itemsize).view(dtype=dtype)
v1 = library.write('MYARR', ndarr)
sliver = np.arange(30).view(dtype=dtype)
v2 = library.append('MYARR', sliver)
# intentionally leave an orphaned chunk lying around here
library._delete_version('MYARR', v2.version, do_cleanup=False)
sliver2 = np.arange(start=10, stop=40).view(dtype=dtype)
# we can't append here, as the latest version is now out of sync with version_nums.
# This gets translated to a do_append by the handler anyway.
v3 = library.write('MYARR', np.concatenate([ndarr, sliver2]))
assert np.all(ndarr == library.read('MYARR', as_of=v1.version).data)
# Check that we don't get the orphaned chunk from v2 back again.
assert np.all(np.concatenate([ndarr, sliver2]) == library.read('MYARR', as_of=v3.version).data)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_append_after_failed_append(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 / dtype.itemsize).view(dtype=dtype)
v1 = library.write('MYARR', ndarr)
sliver = np.arange(3, 4).view(dtype=dtype)
v2 = library.append('MYARR', sliver)
# simulate a failed append - intentionally leave an orphaned chunk lying around here
library._delete_version('MYARR', v2.version, do_cleanup=False)
sliver2 = np.arange(3, 5).view(dtype=dtype)
v3 = library.append('MYARR', sliver2)
assert np.all(ndarr == library.read('MYARR', as_of=v1.version).data)
assert np.all(np.concatenate([ndarr, sliver2]) == library.read('MYARR', as_of=v3.version).data)
def test_append_reorder_columns(library):
foo = np.array([(1, 2)], dtype=np.dtype([('a', 'u1'), ('b', 'u1')]))
library.write('MYARR', foo)
foo = np.array([(1, 2)], dtype=np.dtype([('b', 'u1'), ('a', 'u1')]))
library.append('MYARR', foo)
assert np.all(library.read('MYARR').data == np.array([(2, 1), (1, 2)], dtype=[('b', 'u1'), ('a', 'u1')]))
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from diamond.collector import Collector
from hadoop import HadoopCollector
import os
##########################################################################
class TestHadoopCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('HadoopCollector', {
'metrics': [os.path.dirname(__file__) + '/fixtures/*metrics.log'],
})
self.collector = HadoopCollector(config, {})
def test_import(self):
self.assertTrue(HadoopCollector)
@patch.object(Collector, 'publish_metric')
def test_should_work_with_real_data(self, publish_mock):
self.collector.collect()
metrics = self.getPickledResults('expected.pkl')
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMetricMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, HTTP_OK
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_IP_ADDRESS = "ip"
CONF_VERSION = "version"
DEFAULT_NAME = "Current Energy Usage"
DEFAULT_VERSION = 1
ICON = "mdi:flash"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.All(
vol.Coerce(int), vol.Any(1, 2)
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the DTE energy bridge sensor."""
name = config[CONF_NAME]
ip_address = config[CONF_IP_ADDRESS]
version = config[CONF_VERSION]
add_entities([DteEnergyBridgeSensor(ip_address, name, version)], True)
class DteEnergyBridgeSensor(Entity):
"""Implementation of the DTE Energy Bridge sensors."""
def __init__(self, ip_address, name, version):
"""Initialize the sensor."""
self._version = version
if self._version == 1:
self._url = f"http://{ip_address}/instantaneousdemand"
elif self._version == 2:
self._url = f"http://{ip_address}:8888/zigbee/se/instantaneousdemand"
self._name = name
self._unit_of_measurement = "kW"
self._state = None
@property
def name(self):
"""Return the name of th sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the energy usage data from the DTE energy bridge."""
try:
response = requests.get(self._url, timeout=5)
except (requests.exceptions.RequestException, ValueError):
_LOGGER.warning(
"Could not update status for DTE Energy Bridge (%s)", self._name
)
return
if response.status_code != HTTP_OK:
_LOGGER.warning(
"Invalid status_code from DTE Energy Bridge: %s (%s)",
response.status_code,
self._name,
)
return
response_split = response.text.split()
if len(response_split) != 2:
_LOGGER.warning(
'Invalid response from DTE Energy Bridge: "%s" (%s)',
response.text,
self._name,
)
return
val = float(response_split[0])
# A workaround for a bug in the DTE energy bridge.
# The returned value can randomly be in W or kW. Checking for a
# decimal point seems to be a reliable way to determine the units.
# Limiting this to version 1 because version 2 apparently always returns
# values in the format 000000.000 kW, but the scaling is in watts,
# NOT kilowatts.
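# For example (readings assumed for illustration): a version 1 reading of
# "0.245" contains a decimal point and is kept as 0.245 kW, while "245"
# (no decimal point, i.e. watts) is scaled down to 0.245 kW.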
if self._version == 1 and "." in response_split[0]:
self._state = val
else:
self._state = val / 1000
|
from io import BytesIO
from urllib.parse import urlsplit
from xml.dom import minidom
from zipfile import ZipFile
from django.conf import settings
from django.contrib.messages import get_messages
from django.contrib.messages.storage.fallback import FallbackStorage
from django.core import mail
from django.core.cache import cache
from django.core.management import call_command
from django.test.client import RequestFactory
from django.urls import reverse
from PIL import Image
from weblate.auth.models import Group, setup_project_groups
from weblate.lang.models import Language
from weblate.trans.models import Component, ComponentList, Project
from weblate.trans.tests.test_models import RepoTestCase
from weblate.trans.tests.utils import (
create_another_user,
create_test_user,
wait_for_celery,
)
from weblate.utils.hash import hash_to_checksum
class RegistrationTestMixin:
"""Helper to share code for registration testing."""
def assert_registration_mailbox(self, match=None):
if match is None:
match = "[Weblate] Your registration on Weblate"
# Check mailbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, match)
live_url = getattr(self, "live_server_url", None)
# Parse URL
for line in mail.outbox[0].body.splitlines():
if "verification_code" not in line:
continue
if "(" in line or ")" in line or "<" in line or ">" in line:
continue
if live_url and line.startswith(live_url):
return line + "&confirm=1"
if line.startswith("http://example.com/"):
return line[18:] + "&confirm=1"
self.fail("Confirmation URL not found")
return ""
def assert_notify_mailbox(self, sent_mail):
self.assertEqual(
sent_mail.subject, "[Weblate] Activity on your account at Weblate"
)
class ViewTestCase(RepoTestCase):
def setUp(self):
super().setUp()
# Many tests need access to the request factory.
self.factory = RequestFactory()
# Create user
self.user = create_test_user()
group = Group.objects.get(name="Users")
self.user.groups.add(group)
# Create another user
self.anotheruser = create_another_user()
self.anotheruser.groups.add(group)
# Create project to have some test base
self.component = self.create_component()
self.project = self.component.project
# Invalidate caches
self.project.stats.invalidate()
cache.clear()
# Login
self.client.login(username="testuser", password="testpassword")
# Prepopulate kwargs
self.kw_project = {"project": self.project.slug}
self.kw_component = {
"project": self.project.slug,
"component": self.component.slug,
}
self.kw_translation = {
"project": self.project.slug,
"component": self.component.slug,
"lang": "cs",
}
self.kw_lang_project = {"project": self.project.slug, "lang": "cs"}
# Store URL for testing
self.translation_url = self.get_translation().get_absolute_url()
self.project_url = self.project.get_absolute_url()
self.component_url = self.component.get_absolute_url()
def update_fulltext_index(self):
wait_for_celery()
def make_manager(self):
"""Make user a Manager."""
# Sitewide privileges
self.user.groups.add(Group.objects.get(name="Managers"))
# Project privileges
self.project.add_user(self.user, "@Administration")
def get_request(self, user=None):
"""Wrapper to get fake request object."""
request = self.factory.get("/")
request.user = user if user else self.user
request.session = "session"
messages = FallbackStorage(request)
request._messages = messages
return request
def get_translation(self, language="cs"):
return self.component.translation_set.get(language__code=language)
def get_unit(self, source="Hello, world!\n", language="cs"):
translation = self.get_translation(language)
return translation.unit_set.get(source__startswith=source)
def change_unit(self, target, source="Hello, world!\n", language="cs", user=None):
unit = self.get_unit(source, language)
unit.target = target
unit.save_backend(user or self.user)
def edit_unit(self, source, target, language="cs", **kwargs):
"""Do edit single unit using web interface."""
unit = self.get_unit(source, language)
params = {
"checksum": unit.checksum,
"contentsum": hash_to_checksum(unit.content_hash),
"translationsum": hash_to_checksum(unit.get_target_hash()),
"target_0": target,
"review": "20",
}
params.update(kwargs)
return self.client.post(
self.get_translation(language).get_translate_url(), params
)
def assert_redirects_offset(self, response, exp_path, exp_offset):
"""Assert that offset in response matches expected one."""
self.assertEqual(response.status_code, 302)
# We don't use all variables
# pylint: disable=unused-variable
scheme, netloc, path, query, fragment = urlsplit(response["Location"])
self.assertEqual(path, exp_path)
exp_offset = f"offset={exp_offset:d}"
self.assertTrue(exp_offset in query, f"Offset {exp_offset} not in {query}")
def assert_png(self, response):
"""Check whether response contains valid PNG image."""
# Check response status code
self.assertEqual(response.status_code, 200)
self.assert_png_data(response.content)
def assert_png_data(self, content):
"""Check whether data is PNG image."""
# Try to load PNG with PIL
image = Image.open(BytesIO(content))
self.assertEqual(image.format, "PNG")
def assert_zip(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response["Content-Type"], "application/zip")
with ZipFile(BytesIO(response.content), "r") as zipfile:
self.assertIsNone(zipfile.testzip())
def assert_svg(self, response):
"""Check whether response is a SVG image."""
# Check response status code
self.assertEqual(response.status_code, 200)
dom = minidom.parseString(response.content)
self.assertEqual(dom.firstChild.nodeName, "svg")
def assert_backend(self, expected_translated, language="cs"):
"""Check that backend has correct data."""
translation = self.get_translation(language)
translation.commit_pending("test", None)
store = translation.component.file_format_cls(translation.get_filename(), None)
messages = set()
translated = 0
for unit in store.content_units:
id_hash = unit.id_hash
self.assertFalse(
id_hash in messages, "Duplicate string in backend file!"
)
if unit.is_translated():
translated += 1
self.assertEqual(
translated,
expected_translated,
"Did not found expected number of translations ({} != {}).".format(
translated, expected_translated
),
)
def log_as_jane(self):
self.client.login(username="jane", password="testpassword")
class FixtureTestCase(ViewTestCase):
@classmethod
def setUpTestData(cls):
"""Manually load fixture."""
# Ensure there are no Language objects; we add
# them in a defined order in the fixture
Language.objects.all().delete()
# Stolen from setUpClass, we just need to do it
# after transaction checkpoint and deleting languages
for db_name in cls._databases_names(include_mirrors=False):
call_command(
"loaddata", "simple-project.json", verbosity=0, database=db_name
)
# Apply group project/language automation
for group in Group.objects.iterator():
group.save()
super().setUpTestData()
def clone_test_repos(self):
return
def create_project(self):
project = Project.objects.all()[0]
setup_project_groups(self, project)
return project
def create_component(self):
component = self.create_project().component_set.all()[0]
component.create_path()
return component
class TranslationManipulationTest(ViewTestCase):
def setUp(self):
super().setUp()
self.component.new_lang = "add"
self.component.save()
def create_component(self):
return self.create_po_new_base()
def test_model_add(self):
self.assertTrue(
self.component.add_new_language(
Language.objects.get(code="af"), self.get_request()
)
)
self.assertTrue(
self.component.translation_set.filter(language_code="af").exists()
)
def test_model_add_duplicate(self):
request = self.get_request()
self.assertFalse(get_messages(request))
self.assertTrue(
self.component.add_new_language(Language.objects.get(code="de"), request)
)
self.assertTrue(get_messages(request))
def test_model_add_disabled(self):
self.component.new_lang = "contact"
self.component.save()
self.assertFalse(
self.component.add_new_language(
Language.objects.get(code="af"), self.get_request()
)
)
def test_model_add_superuser(self):
self.component.new_lang = "contact"
self.component.save()
self.user.is_superuser = True
self.user.save()
self.assertTrue(
self.component.add_new_language(
Language.objects.get(code="af"), self.get_request()
)
)
def test_remove(self):
translation = self.component.translation_set.get(language_code="de")
translation.remove(self.user)
# Force scanning of the repository
self.component.create_translations()
self.assertFalse(
self.component.translation_set.filter(language_code="de").exists()
)
class BasicViewTest(ViewTestCase):
def test_view_project(self):
response = self.client.get(reverse("project", kwargs=self.kw_project))
self.assertContains(response, "test/test")
self.assertNotContains(response, "Spanish")
def test_view_project_ghost(self):
self.user.profile.languages.add(Language.objects.get(code="es"))
response = self.client.get(reverse("project", kwargs=self.kw_project))
self.assertContains(response, "Spanish")
def test_view_component(self):
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Test/Test")
self.assertNotContains(response, "Spanish")
def test_view_component_ghost(self):
self.user.profile.languages.add(Language.objects.get(code="es"))
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertContains(response, "Spanish")
def test_view_component_guide(self):
response = self.client.get(reverse("guide", kwargs=self.kw_component))
self.assertContains(response, "Test/Test")
def test_view_translation(self):
response = self.client.get(reverse("translation", kwargs=self.kw_translation))
self.assertContains(response, "Test/Test")
def test_view_translation_others(self):
other = Component.objects.create(
name="RESX component",
slug="resx",
project=self.project,
repo="weblate://test/test",
file_format="resx",
filemask="resx/*.resx",
template="resx/en.resx",
new_lang="add",
)
# Existing translation
response = self.client.get(reverse("translation", kwargs=self.kw_translation))
self.assertContains(response, other.name)
# Ghost translation
kwargs = {}
kwargs.update(self.kw_translation)
kwargs["lang"] = "it"
response = self.client.get(reverse("translation", kwargs=kwargs))
self.assertContains(response, other.name)
def test_view_redirect(self):
"""Test case insentivite lookups and aliases in middleware."""
# Non existing fails with 404
kwargs = {"project": "invalid"}
response = self.client.get(reverse("project", kwargs=kwargs))
self.assertEqual(response.status_code, 404)
# Different casing should redirect, MySQL always does case insensitive lookups
kwargs["project"] = self.project.slug.upper()
if settings.DATABASES["default"]["ENGINE"] != "django.db.backends.mysql":
response = self.client.get(reverse("project", kwargs=kwargs))
self.assertRedirects(
response, reverse("project", kwargs=self.kw_project), status_code=301
)
# Non existing fails with 404
kwargs["component"] = "invalid"
response = self.client.get(reverse("component", kwargs=kwargs))
self.assertEqual(response.status_code, 404)
# Different casing should redirect, MySQL always does case insensitive lookups
kwargs["component"] = self.component.slug.upper()
if settings.DATABASES["default"]["ENGINE"] != "django.db.backends.mysql":
response = self.client.get(reverse("component", kwargs=kwargs))
self.assertRedirects(
response,
reverse("component", kwargs=self.kw_component),
status_code=301,
)
# Non existing fails with 404
kwargs["lang"] = "cs-DE"
response = self.client.get(reverse("translation", kwargs=kwargs))
self.assertEqual(response.status_code, 404)
# Aliased language should redirect
kwargs["lang"] = "czech"
response = self.client.get(reverse("translation", kwargs=kwargs))
self.assertRedirects(
response,
reverse("translation", kwargs=self.kw_translation),
status_code=301,
)
def test_view_unit(self):
unit = self.get_unit()
response = self.client.get(unit.get_absolute_url())
self.assertContains(response, "Hello, world!")
def test_view_component_list(self):
clist = ComponentList.objects.create(name="TestCL", slug="testcl")
clist.components.add(self.component)
response = self.client.get(reverse("component-list", kwargs={"name": "testcl"}))
self.assertContains(response, "TestCL")
self.assertContains(response, self.component.name)
class BasicMonolingualViewTest(BasicViewTest):
def create_component(self):
return self.create_po_mono()
class SourceStringsTest(ViewTestCase):
def test_edit_priority(self):
# Need extra power
self.user.is_superuser = True
self.user.save()
source = self.get_unit().source_unit
response = self.client.post(
reverse("edit_context", kwargs={"pk": source.pk}),
{"extra_flags": "priority:60"},
)
self.assertRedirects(response, source.get_absolute_url())
unit = self.get_unit()
self.assertEqual(unit.priority, 60)
def test_edit_readonly(self):
# Need extra power
self.user.is_superuser = True
self.user.save()
unit = self.get_unit()
old_state = unit.state
source = unit.source_unit
response = self.client.post(
reverse("edit_context", kwargs={"pk": source.pk}),
{"extra_flags": "read-only"},
)
self.assertRedirects(response, source.get_absolute_url())
unit = self.get_unit()
self.assertTrue(unit.readonly)
self.assertNotEqual(unit.state, old_state)
response = self.client.post(
reverse("edit_context", kwargs={"pk": source.pk}), {"extra_flags": ""}
)
self.assertRedirects(response, source.get_absolute_url())
unit = self.get_unit()
self.assertFalse(unit.readonly)
self.assertEqual(unit.state, old_state)
def test_edit_context(self):
# Need extra power
self.user.is_superuser = True
self.user.save()
source = self.get_unit().source_unit
response = self.client.post(
reverse("edit_context", kwargs={"pk": source.pk}),
{"explanation": "Extra context"},
)
self.assertRedirects(response, source.get_absolute_url())
unit = self.get_unit().source_unit
self.assertEqual(unit.context, "")
self.assertEqual(unit.explanation, "Extra context")
def test_edit_check_flags(self):
# Need extra power
self.user.is_superuser = True
self.user.save()
source = self.get_unit().source_unit
response = self.client.post(
reverse("edit_context", kwargs={"pk": source.pk}),
{"extra_flags": "ignore-same"},
)
self.assertRedirects(response, source.get_absolute_url())
unit = self.get_unit().source_unit
self.assertEqual(unit.extra_flags, "ignore-same")
def test_view_source(self):
kwargs = {"lang": "en"}
kwargs.update(self.kw_component)
response = self.client.get(reverse("translation", kwargs=kwargs))
self.assertContains(response, "Test/Test")
def test_matrix(self):
response = self.client.get(reverse("matrix", kwargs=self.kw_component))
self.assertContains(response, "Czech")
def test_matrix_load(self):
response = self.client.get(
reverse("matrix-load", kwargs=self.kw_component) + "?offset=0&lang=cs"
)
self.assertContains(response, 'lang="cs"')
|
import string
from django.contrib.sites.models import Site
from django.urls import reverse
from zinnia.settings import PROTOCOL
BASE36_ALPHABET = string.digits + string.ascii_uppercase
def base36(value):
"""
Encode int to base 36.
"""
result = ''
while value:
value, i = divmod(value, 36)
result = BASE36_ALPHABET[i] + result
return result
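# Worked example (checked against the loop above): base36(12345) == '9IX',
# since 12345 = 9*36**2 + 18*36 + 33 and BASE36_ALPHABET[18] == 'I',
# BASE36_ALPHABET[33] == 'X'. Note that base36(0) returns '' because the
# while loop never runs.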
def backend(entry):
"""
Default URL shortener backend for Zinnia.
"""
return '%s://%s%s' % (
PROTOCOL, Site.objects.get_current().domain,
reverse('zinnia:entry_shortlink', args=[base36(entry.pk)]))
|
import logging
import CO2Signal
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_TOKEN,
ENERGY_KILO_WATT_HOUR,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
CONF_COUNTRY_CODE = "country_code"
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by CO2signal"
MSG_LOCATION = (
"Please use either coordinates or the country code. "
"For the coordinates, "
"you need to use both latitude and longitude."
)
CO2_INTENSITY_UNIT = f"CO2eq/{ENERGY_KILO_WATT_HOUR}"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_TOKEN): cv.string,
vol.Inclusive(CONF_LATITUDE, "coords", msg=MSG_LOCATION): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coords", msg=MSG_LOCATION): cv.longitude,
vol.Optional(CONF_COUNTRY_CODE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the CO2signal sensor."""
token = config[CONF_TOKEN]
lat = config.get(CONF_LATITUDE, hass.config.latitude)
lon = config.get(CONF_LONGITUDE, hass.config.longitude)
country_code = config.get(CONF_COUNTRY_CODE)
_LOGGER.debug("Setting up the sensor using the %s", country_code)
devs = []
devs.append(CO2Sensor(token, country_code, lat, lon))
add_entities(devs, True)
class CO2Sensor(Entity):
"""Implementation of the CO2Signal sensor."""
def __init__(self, token, country_code, lat, lon):
"""Initialize the sensor."""
self._token = token
self._country_code = country_code
self._latitude = lat
self._longitude = lon
self._data = None
if country_code is not None:
device_name = country_code
else:
device_name = f"{round(self._latitude, 2)}/{round(self._longitude, 2)}"
self._friendly_name = f"CO2 intensity - {device_name}"
@property
def name(self):
"""Return the name of the sensor."""
return self._friendly_name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return "mdi:molecule-co2"
@property
def state(self):
"""Return the state of the device."""
return self._data
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return CO2_INTENSITY_UNIT
@property
def device_state_attributes(self):
"""Return the state attributes of the last update."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Update data for %s", self._friendly_name)
if self._country_code is not None:
self._data = CO2Signal.get_latest_carbon_intensity(
self._token, country_code=self._country_code
)
else:
self._data = CO2Signal.get_latest_carbon_intensity(
self._token, latitude=self._latitude, longitude=self._longitude
)
self._data = round(self._data, 2)
|
import json
import logging
import mimetypes
import os
import pathlib
from typing import Any, Dict, Optional, Set, Tuple
from aiohttp import hdrs, web, web_urldispatcher
import jinja2
import voluptuous as vol
from yarl import URL
from homeassistant.components import websocket_api
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.config import async_hass_config_yaml
from homeassistant.const import CONF_NAME, EVENT_THEMES_UPDATED
from homeassistant.core import callback
from homeassistant.helpers import service
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.translation import async_get_translations
from homeassistant.loader import async_get_integration, bind_hass
from .storage import async_setup_frontend_storage
# mypy: allow-untyped-defs, no-check-untyped-defs
# Fix mimetypes for borked Windows machines
# https://github.com/home-assistant/home-assistant-polymer/issues/3336
mimetypes.add_type("text/css", ".css")
mimetypes.add_type("application/javascript", ".js")
DOMAIN = "frontend"
CONF_THEMES = "themes"
CONF_EXTRA_HTML_URL = "extra_html_url"
CONF_EXTRA_HTML_URL_ES5 = "extra_html_url_es5"
CONF_EXTRA_MODULE_URL = "extra_module_url"
CONF_EXTRA_JS_URL_ES5 = "extra_js_url_es5"
CONF_FRONTEND_REPO = "development_repo"
CONF_JS_VERSION = "javascript_version"
EVENT_PANELS_UPDATED = "panels_updated"
DEFAULT_THEME_COLOR = "#03A9F4"
MANIFEST_JSON = {
"background_color": "#FFFFFF",
"description": "Home automation platform that puts local control and privacy first.",
"dir": "ltr",
"display": "standalone",
"icons": [
{
"src": f"/static/icons/favicon-{size}x{size}.png",
"sizes": f"{size}x{size}",
"type": "image/png",
"purpose": "maskable any",
}
for size in (192, 384, 512, 1024)
],
"lang": "en-US",
"name": "Home Assistant",
"short_name": "Assistant",
"start_url": "/?homescreen=1",
"theme_color": DEFAULT_THEME_COLOR,
"prefer_related_applications": True,
"related_applications": [
{"platform": "play", "id": "io.homeassistant.companion.android"}
],
}
DATA_PANELS = "frontend_panels"
DATA_JS_VERSION = "frontend_js_version"
DATA_EXTRA_MODULE_URL = "frontend_extra_module_url"
DATA_EXTRA_JS_URL_ES5 = "frontend_extra_js_url_es5"
THEMES_STORAGE_KEY = f"{DOMAIN}_theme"
THEMES_STORAGE_VERSION = 1
THEMES_SAVE_DELAY = 60
DATA_THEMES_STORE = "frontend_themes_store"
DATA_THEMES = "frontend_themes"
DATA_DEFAULT_THEME = "frontend_default_theme"
DATA_DEFAULT_DARK_THEME = "frontend_default_dark_theme"
DEFAULT_THEME = "default"
VALUE_NO_THEME = "none"
PRIMARY_COLOR = "primary-color"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
vol.Optional(CONF_THEMES): vol.Schema(
{cv.string: {cv.string: cv.string}}
),
vol.Optional(CONF_EXTRA_MODULE_URL): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXTRA_JS_URL_ES5): vol.All(
cv.ensure_list, [cv.string]
),
# We no longer use these options.
vol.Optional(CONF_EXTRA_HTML_URL): cv.match_all,
vol.Optional(CONF_EXTRA_HTML_URL_ES5): cv.match_all,
vol.Optional(CONF_JS_VERSION): cv.match_all,
},
)
},
extra=vol.ALLOW_EXTRA,
)
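# Illustrative configuration.yaml snippet for the schema above (assumed usage):
#
# frontend:
#   themes:
#     happy:
#       primary-color: "#ff0000"
#   extra_module_url:
#     - /local/my-module.js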
SERVICE_SET_THEME = "set_theme"
SERVICE_RELOAD_THEMES = "reload_themes"
CONF_MODE = "mode"
class Panel:
"""Abstract class for panels."""
# Name of the webcomponent
component_name: Optional[str] = None
# Icon to show in the sidebar
sidebar_icon: Optional[str] = None
# Title to show in the sidebar
sidebar_title: Optional[str] = None
# Url to show the panel in the frontend
frontend_url_path: Optional[str] = None
# Config to pass to the webcomponent
config: Optional[Dict[str, Any]] = None
# If the panel should only be visible to admins
require_admin = False
def __init__(
self,
component_name,
sidebar_title,
sidebar_icon,
frontend_url_path,
config,
require_admin,
):
"""Initialize a built-in panel."""
self.component_name = component_name
self.sidebar_title = sidebar_title
self.sidebar_icon = sidebar_icon
self.frontend_url_path = frontend_url_path or component_name
self.config = config
self.require_admin = require_admin
@callback
def to_response(self):
"""Panel as dictionary."""
return {
"component_name": self.component_name,
"icon": self.sidebar_icon,
"title": self.sidebar_title,
"config": self.config,
"url_path": self.frontend_url_path,
"require_admin": self.require_admin,
}
@bind_hass
@callback
def async_register_built_in_panel(
hass,
component_name,
sidebar_title=None,
sidebar_icon=None,
frontend_url_path=None,
config=None,
require_admin=False,
*,
update=False,
):
"""Register a built-in panel."""
panel = Panel(
component_name,
sidebar_title,
sidebar_icon,
frontend_url_path,
config,
require_admin,
)
panels = hass.data.setdefault(DATA_PANELS, {})
if not update and panel.frontend_url_path in panels:
raise ValueError(f"Overwriting panel {panel.frontend_url_path}")
panels[panel.frontend_url_path] = panel
hass.bus.async_fire(EVENT_PANELS_UPDATED)
@bind_hass
@callback
def async_remove_panel(hass, frontend_url_path):
"""Remove a built-in panel."""
panel = hass.data.get(DATA_PANELS, {}).pop(frontend_url_path, None)
if panel is None:
_LOGGER.warning("Removing unknown panel %s", frontend_url_path)
hass.bus.async_fire(EVENT_PANELS_UPDATED)
def add_extra_js_url(hass, url, es5=False):
"""Register extra js or module url to load."""
key = DATA_EXTRA_JS_URL_ES5 if es5 else DATA_EXTRA_MODULE_URL
url_set = hass.data.get(key)
if url_set is None:
url_set = hass.data[key] = set()
url_set.add(url)
def add_manifest_json_key(key, val):
"""Add a keyval to the manifest.json."""
MANIFEST_JSON[key] = val
def _frontend_root(dev_repo_path):
"""Return root path to the frontend files."""
if dev_repo_path is not None:
return pathlib.Path(dev_repo_path) / "hass_frontend"
# Keep import here so that we can import frontend without installing reqs
# pylint: disable=import-outside-toplevel
import hass_frontend
return hass_frontend.where()
async def async_setup(hass, config):
"""Set up the serving of the frontend."""
await async_setup_frontend_storage(hass)
hass.components.websocket_api.async_register_command(websocket_get_panels)
hass.components.websocket_api.async_register_command(websocket_get_themes)
hass.components.websocket_api.async_register_command(websocket_get_translations)
hass.components.websocket_api.async_register_command(websocket_get_version)
hass.http.register_view(ManifestJSONView)
conf = config.get(DOMAIN, {})
for key in (CONF_EXTRA_HTML_URL, CONF_EXTRA_HTML_URL_ES5, CONF_JS_VERSION):
if key in conf:
_LOGGER.error(
"Please remove %s from your frontend config. It is no longer supported",
key,
)
repo_path = conf.get(CONF_FRONTEND_REPO)
is_dev = repo_path is not None
root_path = _frontend_root(repo_path)
for path, should_cache in (
("service_worker.js", False),
("robots.txt", False),
("onboarding.html", True),
("static", True),
("frontend_latest", True),
("frontend_es5", True),
):
hass.http.register_static_path(f"/{path}", str(root_path / path), should_cache)
hass.http.register_static_path(
"/auth/authorize", str(root_path / "authorize.html"), False
)
# https://wicg.github.io/change-password-url/
hass.http.register_redirect(
"/.well-known/change-password", "/profile", redirect_exc=web.HTTPFound
)
local = hass.config.path("www")
if os.path.isdir(local):
hass.http.register_static_path("/local", local, not is_dev)
hass.http.app.router.register_resource(IndexView(repo_path, hass))
async_register_built_in_panel(hass, "profile")
# To smooth transition to new urls, add redirects to new urls of dev tools
# Added June 27, 2019. Can be removed in 2021.
for panel in ("event", "service", "state", "template"):
hass.http.register_redirect(f"/dev-{panel}", f"/developer-tools/{panel}")
for panel in ("logs", "info", "mqtt"):
# Can be removed in 2021.
hass.http.register_redirect(f"/dev-{panel}", f"/config/{panel}")
# Added June 20 2020. Can be removed in 2022.
hass.http.register_redirect(f"/developer-tools/{panel}", f"/config/{panel}")
async_register_built_in_panel(
hass,
"developer-tools",
require_admin=True,
sidebar_title="developer_tools",
sidebar_icon="hass:hammer",
)
if DATA_EXTRA_MODULE_URL not in hass.data:
hass.data[DATA_EXTRA_MODULE_URL] = set()
for url in conf.get(CONF_EXTRA_MODULE_URL, []):
add_extra_js_url(hass, url)
if DATA_EXTRA_JS_URL_ES5 not in hass.data:
hass.data[DATA_EXTRA_JS_URL_ES5] = set()
for url in conf.get(CONF_EXTRA_JS_URL_ES5, []):
add_extra_js_url(hass, url, True)
await _async_setup_themes(hass, conf.get(CONF_THEMES))
return True
async def _async_setup_themes(hass, themes):
"""Set up themes data and services."""
hass.data[DATA_THEMES] = themes or {}
store = hass.data[DATA_THEMES_STORE] = hass.helpers.storage.Store(
THEMES_STORAGE_VERSION, THEMES_STORAGE_KEY
)
theme_data = await store.async_load() or {}
theme_name = theme_data.get(DATA_DEFAULT_THEME, DEFAULT_THEME)
dark_theme_name = theme_data.get(DATA_DEFAULT_DARK_THEME)
if theme_name == DEFAULT_THEME or theme_name in hass.data[DATA_THEMES]:
hass.data[DATA_DEFAULT_THEME] = theme_name
else:
hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
if dark_theme_name == DEFAULT_THEME or dark_theme_name in hass.data[DATA_THEMES]:
hass.data[DATA_DEFAULT_DARK_THEME] = dark_theme_name
@callback
def update_theme_and_fire_event():
"""Update theme_color in manifest."""
name = hass.data[DATA_DEFAULT_THEME]
themes = hass.data[DATA_THEMES]
MANIFEST_JSON["theme_color"] = DEFAULT_THEME_COLOR
if name != DEFAULT_THEME:
MANIFEST_JSON["theme_color"] = themes[name].get(
"app-header-background-color",
themes[name].get(PRIMARY_COLOR, DEFAULT_THEME_COLOR),
)
hass.bus.async_fire(EVENT_THEMES_UPDATED)
@callback
def set_theme(call):
"""Set backend-preferred theme."""
name = call.data[CONF_NAME]
mode = call.data.get("mode", "light")
if (
name not in (DEFAULT_THEME, VALUE_NO_THEME)
and name not in hass.data[DATA_THEMES]
):
_LOGGER.warning("Theme %s not found", name)
return
light_mode = mode == "light"
theme_key = DATA_DEFAULT_THEME if light_mode else DATA_DEFAULT_DARK_THEME
if name == VALUE_NO_THEME:
to_set = DEFAULT_THEME if light_mode else None
else:
_LOGGER.info("Theme %s set as default %s theme", name, mode)
to_set = name
hass.data[theme_key] = to_set
store.async_delay_save(
lambda: {
DATA_DEFAULT_THEME: hass.data[DATA_DEFAULT_THEME],
DATA_DEFAULT_DARK_THEME: hass.data.get(DATA_DEFAULT_DARK_THEME),
},
THEMES_SAVE_DELAY,
)
update_theme_and_fire_event()
async def reload_themes(_):
"""Reload themes."""
config = await async_hass_config_yaml(hass)
new_themes = config[DOMAIN].get(CONF_THEMES, {})
hass.data[DATA_THEMES] = new_themes
if hass.data[DATA_DEFAULT_THEME] not in new_themes:
hass.data[DATA_DEFAULT_THEME] = DEFAULT_THEME
if (
hass.data.get(DATA_DEFAULT_DARK_THEME)
and hass.data.get(DATA_DEFAULT_DARK_THEME) not in new_themes
):
hass.data[DATA_DEFAULT_DARK_THEME] = None
update_theme_and_fire_event()
service.async_register_admin_service(
hass,
DOMAIN,
SERVICE_SET_THEME,
set_theme,
vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_MODE): vol.Any("dark", "light"),
}
),
)
service.async_register_admin_service(
hass, DOMAIN, SERVICE_RELOAD_THEMES, reload_themes
)
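    # Illustrative service call matching the schema registered above (assumed
    # YAML usage):
    #   service: frontend.set_theme
    #   data:
    #     name: my_custom_theme
    #     mode: dark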
class IndexView(web_urldispatcher.AbstractResource):
"""Serve the frontend."""
def __init__(self, repo_path, hass):
"""Initialize the frontend view."""
super().__init__(name="frontend:index")
self.repo_path = repo_path
self.hass = hass
self._template_cache = None
@property
def canonical(self) -> str:
"""Return resource's canonical path."""
return "/"
@property
def _route(self):
"""Return the index route."""
return web_urldispatcher.ResourceRoute("GET", self.get, self)
def url_for(self, **kwargs: str) -> URL:
"""Construct url for resource with additional params."""
return URL("/")
async def resolve(
self, request: web.Request
) -> Tuple[Optional[web_urldispatcher.UrlMappingMatchInfo], Set[str]]:
"""Resolve resource.
Return (UrlMappingMatchInfo, allowed_methods) pair.
"""
if (
request.path != "/"
and request.url.parts[1] not in self.hass.data[DATA_PANELS]
):
return None, set()
if request.method != hdrs.METH_GET:
return None, {"GET"}
return web_urldispatcher.UrlMappingMatchInfo({}, self._route), {"GET"}
def add_prefix(self, prefix: str) -> None:
"""Add a prefix to processed URLs.
Required for subapplications support.
"""
def get_info(self):
"""Return a dict with additional info useful for introspection."""
return {"panels": list(self.hass.data[DATA_PANELS])}
def freeze(self) -> None:
"""Freeze the resource."""
def raw_match(self, path: str) -> bool:
"""Perform a raw match against path."""
def get_template(self):
"""Get template."""
tpl = self._template_cache
if tpl is None:
with open(str(_frontend_root(self.repo_path) / "index.html")) as file:
tpl = jinja2.Template(file.read())
# Cache template if not running from repository
if self.repo_path is None:
self._template_cache = tpl
return tpl
async def get(self, request: web.Request) -> web.Response:
"""Serve the index page for panel pages."""
hass = request.app["hass"]
if not hass.components.onboarding.async_is_onboarded():
return web.Response(status=302, headers={"location": "/onboarding.html"})
template = self._template_cache
if template is None:
template = await hass.async_add_executor_job(self.get_template)
return web.Response(
text=template.render(
theme_color=MANIFEST_JSON["theme_color"],
extra_modules=hass.data[DATA_EXTRA_MODULE_URL],
extra_js_es5=hass.data[DATA_EXTRA_JS_URL_ES5],
),
content_type="text/html",
)
def __len__(self) -> int:
"""Return length of resource."""
return 1
def __iter__(self):
"""Iterate over routes."""
return iter([self._route])
class ManifestJSONView(HomeAssistantView):
"""View to return a manifest.json."""
requires_auth = False
url = "/manifest.json"
name = "manifestjson"
@callback
def get(self, request): # pylint: disable=no-self-use
"""Return the manifest.json."""
msg = json.dumps(MANIFEST_JSON, sort_keys=True)
return web.Response(text=msg, content_type="application/manifest+json")
@callback
@websocket_api.websocket_command({"type": "get_panels"})
def websocket_get_panels(hass, connection, msg):
"""Handle get panels command."""
user_is_admin = connection.user.is_admin
panels = {
panel_key: panel.to_response()
for panel_key, panel in connection.hass.data[DATA_PANELS].items()
if user_is_admin or not panel.require_admin
}
connection.send_message(websocket_api.result_message(msg["id"], panels))
@callback
@websocket_api.websocket_command({"type": "frontend/get_themes"})
def websocket_get_themes(hass, connection, msg):
"""Handle get themes command."""
if hass.config.safe_mode:
connection.send_message(
websocket_api.result_message(
msg["id"],
{
"themes": {
"safe_mode": {
"primary-color": "#db4437",
"accent-color": "#ffca28",
}
},
"default_theme": "safe_mode",
},
)
)
return
connection.send_message(
websocket_api.result_message(
msg["id"],
{
"themes": hass.data[DATA_THEMES],
"default_theme": hass.data[DATA_DEFAULT_THEME],
"default_dark_theme": hass.data.get(DATA_DEFAULT_DARK_THEME),
},
)
)
@websocket_api.websocket_command(
{
"type": "frontend/get_translations",
vol.Required("language"): str,
vol.Required("category"): str,
vol.Optional("integration"): str,
vol.Optional("config_flow"): bool,
}
)
@websocket_api.async_response
async def websocket_get_translations(hass, connection, msg):
"""Handle get translations command."""
resources = await async_get_translations(
hass,
msg["language"],
msg["category"],
msg.get("integration"),
msg.get("config_flow"),
)
connection.send_message(
websocket_api.result_message(msg["id"], {"resources": resources})
)
@websocket_api.websocket_command({"type": "frontend/get_version"})
@websocket_api.async_response
async def websocket_get_version(hass, connection, msg):
"""Handle get version command."""
integration = await async_get_integration(hass, "frontend")
frontend = None
for req in integration.requirements:
if req.startswith("home-assistant-frontend=="):
frontend = req.split("==", 1)[1]
if frontend is None:
connection.send_error(msg["id"], "unknown_version", "Version not found")
else:
connection.send_result(msg["id"], {"version": frontend})
|
RESULTS = [(124863, 'CPython 3.4', 'blue'),
(137250, 'CPython 3.5', 'blue'),
(3927770, 'Pypy4', 'blue'),
(3739170, 'Pypy3', 'blue'),
(127957, 'PScript on Firefox', 'orange'),
(79517, 'PScript on Chrome', 'orange'),
(128325, 'PScript on MS Edge', 'orange'),
(2982, 'Brython', 'magenta'),
(2780, 'Skulpt', 'magenta'),
(268817, 'PypyJS', 'magenta'),
]
import sys
from time import time
import platform
# from test.pystone import main as pystone_main
# from test import pystone
from flexx import app, event
# Mark the pystone module to be transpiled as a whole. It uses globals
# a lot, which somehow causes infinite loops if it's transpiled in parts.
# pystone.__pscript__ = True
# Backend selection
BACKEND = 'firefox-app or chrome-app'
if sys.argv[1:]:
BACKEND = sys.argv[1]
def plot_results():
import matplotlib.pyplot as plt
plt.ion()
data = list(reversed(RESULTS))
plt.figure(1)
plt.clf()
ax = plt.subplot(111)
ax.barh([i for i in range(len(data))], [x[0] for x in data],
color=[x[2] for x in data])
ax.set_yticks([i+0.3 for i in range(len(data))])
ax.set_yticklabels([x[1] for x in data])
ax.set_xscale('log')
class window:
# Trick to be able to use the same code in JS and Python
@classmethod
def Float32Array(cls, n):
""" Factory function. """
return [0.0] * n
def convolve():
N = 400000
data = window.Float32Array(N)
support = 3
t0 = time()
for i in range(support, N-support):
for j in range(-support, support+1):
data[i] += data[i+j] * (1/support*2)
t1 = time()
print('convolution took %f s' % (t1-t0))
def bench_str():
    """ Micro-benchmarks for assignments, dict/list operations, strings and
    function calls. From http://brythonista.wordpress.com/2015/03/28
    """
print('String benchmarks:')
t0 = time()
for i in range(1000000):
a = 1
print(" assignment.py", time()-t0)
t0 = time()
a = 0
for i in range(1000000):
a += 1
print(" augm_assign.py", time()-t0)
t0 = time()
for i in range(1000000):
a = 1.0
print(" assignment_float.py", time()-t0)
t0 = time()
for i in range(1000000):
a = {0: 0}
print(" build_dict.py", time()-t0)
t0 = time()
a = {0: 0}
for i in range(1000000):
a[0] = i
assert a[0]==999999
print(" set_dict_item.py", time()-t0)
t0 = time()
for i in range(1000000):
a = [1, 2, 3]
print(" build_list.py", time()-t0)
t0 = time()
a = [0]
for i in range(1000000):
a[0] = i
print(" set_list_item.py", time()-t0)
t0 = time()
a, b, c = 1, 2, 3
for i in range(1000000):
a + b + c
print(" add_integers.py", time()-t0)
t0 = time()
a, b, c = 'a', 'b', 'c'
for i in range(1000000):
a + b + c
print(" add_strings.py", time()-t0)
t0 = time()
for _i in range(100000):
str(_i)
print(" str_of_int.py", time()-t0)
t0 = time()
for i in range(1000000):
def f():
pass
print(" create_function.py", time()-t0)
t0 = time()
def g(x):
return x
for i in range(1000000):
g(i)
print(" function_call.py", time()-t0)
class BenchmarkerPy(app.PyComponent):
@event.action
def benchmark(self):
print('\n==== Python %s %s =====\n' % (platform.python_implementation(),
platform.python_version()))
# pystone_main()
convolve()
bench_str()
class BenchmarkerJs(app.JsComponent):
@event.action
def benchmark(self):
print()
print('==== PScript on %s =====' % BACKEND)
print()
# pystone_main()
convolve()
bench_str()
b1 = app.launch(BenchmarkerPy, BACKEND)
with b1:
b2 = BenchmarkerJs()
b1.benchmark()
b2.benchmark()
app.run()
|
import numpy as np
import pandas as pd
from scipy.stats import multivariate_normal
from pgmpy.factors.base import BaseFactor
class LinearGaussianCPD(BaseFactor):
r"""
For, X -> Y the Linear Gaussian model assumes that the mean
of Y is a linear function of mean of X and the variance of Y does
not depend on X.
For example,
    .. math:: p(Y | X) = \mathcal{N}(-2x + 0.9; 1)
    Here, :math:`x` is the mean of the variable :math:`X`.
    Let :math:`Y` be a continuous variable with continuous parents
    :math:`X_1, X_2, \cdots, X_k`. We say that :math:`Y` has a linear Gaussian CPD
    if there are parameters :math:`\beta_0, \beta_1, \cdots, \beta_k`
    and :math:`\sigma^2` such that,
    .. math:: p(Y | x_1, x_2, \cdots, x_k) = \mathcal{N}(\beta_0 + \beta_1 x_1 + \cdots + \beta_k x_k; \sigma^2)
    In vector notation,
    .. math:: p(Y | \mathbf{x}) = \mathcal{N}(\beta_0 + \boldsymbol{\beta}^T \mathbf{x}; \sigma^2)
References
----------
.. [1] https://cedar.buffalo.edu/~srihari/CSE574/Chap8/Ch8-PGM-GaussianBNs/8.5%20GaussianBNs.pdf
"""
def __init__(
self, variable, evidence_mean, evidence_variance, evidence=[], beta=None
):
"""
Parameters
----------
variable: any hashable python object
The variable whose CPD is defined.
evidence_mean: Mean vector (numpy array) of the joint distribution, X
evidence_variance: int, float
The variance of the multivariate gaussian, X = ['x1', 'x2', ..., 'xn']
evidence: iterable of any hashable python objects
An iterable of the parents of the variable. None if there are no parents.
beta (optional): iterable of int or float
An iterable representing the coefficient vector of the linear equation.
The first term represents the constant term in the linear equation.
Examples
--------
# For P(Y| X1, X2, X3) = N(-2x1 + 3x2 + 7x3 + 0.2; 9.6)
>>> cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
>>> cpd.variable
'Y'
>>> cpd.evidence
        ['X1', 'X2', 'X3']
>>> cpd.beta_vector
[0.2, -2, 3, 7]
"""
self.variable = variable
self.mean = evidence_mean
self.variance = evidence_variance
self.evidence = evidence
self.sigma_yx = None
self.variables = [variable] + evidence
super(LinearGaussianCPD, self).__init__(
self.variables, pdf="gaussian", mean=self.mean, covariance=self.variance
)
def sum_of_product(self, xi, xj):
prod_xixj = xi * xj
return np.sum(prod_xixj)
def maximum_likelihood_estimator(self, data, states):
"""
Fit using MLE method.
Parameters
----------
data: pandas.DataFrame or 2D array
Dataframe of values containing samples from the conditional distribution, (Y|X)
and corresponding X values.
        states: iterable of column names for ``data``; must include the
            dependent column "(Y|X)" and the evidence variable names.
Returns
-------
beta, variance (tuple): Returns estimated betas and the variance.
"""
x_df = pd.DataFrame(data, columns=states)
x_len = len(self.evidence)
sym_coefs = []
for i in range(0, x_len):
sym_coefs.append("b" + str(i + 1) + "_coef")
sum_x = x_df.sum()
x = [sum_x["(Y|X)"]]
coef_matrix = pd.DataFrame(columns=sym_coefs)
# First we compute just the coefficients of beta_1 to beta_N.
# Later we compute beta_0 and append it.
for i in range(0, x_len):
x.append(self.sum_of_product(x_df["(Y|X)"], x_df[self.evidence[i]]))
for j in range(0, x_len):
coef_matrix.loc[i, sym_coefs[j]] = self.sum_of_product(
x_df[self.evidence[i]], x_df[self.evidence[j]]
)
coef_matrix.insert(0, "b0_coef", sum_x[self.evidence].values)
row_1 = np.append([len(x_df)], sum_x[self.evidence].values)
coef_matrix.loc[-1] = row_1
coef_matrix.index = coef_matrix.index + 1 # shifting index
coef_matrix.sort_index(inplace=True)
beta_coef_matrix = np.matrix(coef_matrix.values, dtype="float")
coef_inv = np.linalg.inv(beta_coef_matrix)
beta_est = np.array(np.matmul(coef_inv, np.transpose(x)))
self.beta = beta_est[0]
sigma_est = 0
x_len_df = len(x_df)
for i in range(0, x_len):
for j in range(0, x_len):
sigma_est += (
self.beta[i + 1]
* self.beta[j + 1]
* (
self.sum_of_product(
x_df[self.evidence[i]], x_df[self.evidence[j]]
)
/ x_len_df
- np.mean(x_df[self.evidence[i]])
* np.mean(x_df[self.evidence[j]])
)
)
sigma_est = np.sqrt(
self.sum_of_product(x_df["(Y|X)"], x_df["(Y|X)"]) / x_len_df
- np.mean(x_df["(Y|X)"]) * np.mean(x_df["(Y|X)"])
- sigma_est
)
self.sigma_yx = sigma_est
return self.beta, self.sigma_yx
def fit(self, data, states, estimator=None, complete_samples_only=True, **kwargs):
"""
Determine βs from data
Parameters
----------
data: pandas.DataFrame
Dataframe containing samples from the conditional distribution, p(Y|X)
estimator: 'MLE' or 'MAP'
        complete_samples_only: boolean (True or False)
            Whether to use only complete samples (rows without missing values).
            Defaults to True.
"""
if estimator == "MLE":
mean, variance = self.maximum_likelihood_estimator(data, states)
elif estimator == "MAP":
raise NotImplementedError(
"fit method has not been implemented using Maximum A-Priori (MAP)"
)
return mean, variance
@property
def pdf(self):
def _pdf(*args):
# The first element of args is the value of the variable on which CPD is defined
# and the rest of the elements give the mean values of the parent
# variables.
mean = (
                sum([arg * coeff for (arg, coeff) in zip(args[1:], self.mean[1:])])
+ self.mean[0]
)
return multivariate_normal.pdf(
args[0], np.array(mean), np.array([[self.variance]])
)
return _pdf
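    # Illustrative check of the pdf above, using the docstring example: with
    # beta = [0.2, -2, 3, 7] and parent values (1, 2, 3), the conditional mean
    # of Y is 0.2 + (-2)*1 + 3*2 + 7*3 = 25.2, while the variance stays fixed.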
def copy(self):
"""
Returns a copy of the distribution.
Returns
-------
LinearGaussianCPD: copy of the distribution
Examples
--------
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
>>> copy_cpd = cpd.copy()
>>> copy_cpd.variable
'Y'
>>> copy_cpd.evidence
['X1', 'X2', 'X3']
"""
copy_cpd = LinearGaussianCPD(
self.variable, self.beta, self.variance, list(self.evidence)
)
return copy_cpd
def __str__(self):
if self.evidence and list(self.mean):
# P(Y| X1, X2, X3) = N(-2*X1_mu + 3*X2_mu + 7*X3_mu; 0.2)
rep_str = "P({node} | {parents}) = N({mu} + {b_0}; {sigma})".format(
node=str(self.variable),
parents=", ".join([str(var) for var in self.evidence]),
mu=" + ".join(
[
f"{coeff}*{parent}"
                        for coeff, parent in zip(self.mean[1:], self.evidence)
]
),
b_0=str(self.mean[0]),
sigma=str(self.variance),
)
else:
# P(X) = N(1, 4)
rep_str = "P({X}) = N({beta_0}; {variance})".format(
X=str(self.variable),
beta_0=str(self.mean[0]),
variance=str(self.variance),
)
return rep_str
|
from __future__ import print_function
import argparse
import time
import sys
import fileinput
def tail_f(f, wait_sec):
while True:
l = f.readline()
if l:
yield l
else:
# print('!!READ NOTHING!!')
time.sleep(wait_sec)
_first_file = True
def write_header(fname):
global _first_file
header_fmt = '{}==> {} <==\n'
print(header_fmt.format('' if _first_file else '\n', fname), end='')
_first_file = False
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument(
"-c",
"--bytes",
default="",
type=str,
metavar='K',
help="""output the last K bytes; or -c +K starting with the Kth"""
)
p.add_argument("-f", "--follow", action="store_true", help="""follow specified files""")
p.add_argument(
"-n",
"--lines",
default="10",
type=str,
metavar='K',
help="""print the last K lines instead of 10;
or use -n +K to print lines starting with the Kth"""
)
p.add_argument("-q", "--quiet", "--silent", action='store_true', help="never print headers for each file")
p.add_argument("-v", "--verbose", action='store_true', help="always print headers for each file")
p.add_argument(
"-s",
"--sleep-interval",
type=float,
default=1.0,
help="with -f, sleep for approximately N seconds (default 1.0) between iterations."
)
p.add_argument("files", action="store", nargs="*", help="files to print")
ns = p.parse_args(args)
status = 0
if len(ns.files) == 0:
ns.files = ['-']
if ns.follow and '-' in ns.files:
print('tail: warning: following stdin indefinitely is ineffective')
if ns.bytes:
use_bytes = True
if ns.bytes[0] == '+':
from_start = True
else:
from_start = False
        count = abs(int(ns.bytes))  # '-c -3' is equivalent to '-c 3'
else:
use_bytes = False
if ns.lines[0] == '+':
from_start = True
else:
from_start = False
count = abs(int(ns.lines)) # '-n -3' is equivalent to '-n 3'
try:
for i, fname in enumerate(ns.files):
if ns.verbose or (len(ns.files) > 1 and not ns.quiet):
write_header(fname if fname != '-' else 'standard input')
try:
if fname == '-':
f = sys.stdin
else:
f = open(fname)
buf = []
j = -1
while True:
j += 1
if use_bytes:
l = f.read(1)
else:
l = f.readline()
if not l:
break
buf.append(l)
if from_start:
if j >= count - 1: break
elif len(buf) > count:
del buf[0]
for item in buf:
print(item, end='')
if i == len(ns.files) - 1 and ns.follow:
for l in tail_f(f, ns.sleep_interval):
print(l, end='')
sys.stdout.flush()
finally:
if fname != '-':
f.close()
except Exception as e:
        print('tail: %s' % str(e))
status = 1
finally:
fileinput.close()
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
|
import errno
from contextlib import contextmanager
from plumbum.path.base import Path, FSUser
from plumbum.lib import _setdoc, six
from plumbum.commands import shquote, ProcessExecutionError
import os
import sys
try: # Py3
import urllib.request as urllib
except ImportError:
import urllib # type: ignore
class StatRes(object):
"""POSIX-like stat result"""
def __init__(self, tup):
self._tup = tuple(tup)
def __getitem__(self, index):
return self._tup[index]
st_mode = mode = property(lambda self: self[0])
st_ino = ino = property(lambda self: self[1])
st_dev = dev = property(lambda self: self[2])
st_nlink = nlink = property(lambda self: self[3])
st_uid = uid = property(lambda self: self[4])
st_gid = gid = property(lambda self: self[5])
st_size = size = property(lambda self: self[6])
st_atime = atime = property(lambda self: self[7])
st_mtime = mtime = property(lambda self: self[8])
st_ctime = ctime = property(lambda self: self[9])
class RemotePath(Path):
"""The class implementing remote-machine paths"""
def __new__(cls, remote, *parts):
if not parts:
raise TypeError("At least one path part is required (none given)")
windows = (remote.uname.lower() == "windows")
normed = []
        parts = tuple(map(str, parts))  # force the paths into strings, so indexing works properly
        # If the first part is relative, prepend the remote's current working directory
if parts[0] and parts[0][0] not in ("/", "\\"):
cwd = (remote._cwd if hasattr(remote, '_cwd') else
remote._session.run("pwd")[1].strip())
parts = (cwd, ) + parts
for p in parts:
if windows:
plist = str(p).replace("\\", "/").split("/")
else:
plist = str(p).split("/")
if not plist[0]:
plist.pop(0)
del normed[:]
for item in plist:
if item == "" or item == ".":
continue
if item == "..":
if normed:
normed.pop(-1)
else:
normed.append(item)
if windows:
self = super(RemotePath, cls).__new__(cls, "\\".join(normed))
self.CASE_SENSITIVE = False # On this object only
else:
self = super(RemotePath, cls).__new__(cls, "/" + "/".join(normed))
self.CASE_SENSITIVE = True
self.remote = remote
return self
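    # Illustrative result of the normalisation above, assuming a POSIX remote:
    # RemotePath(rem, "/home", "user", "../foo") yields "/home/foo" -- empty and
    # "." segments are dropped and ".." pops the previous segment.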
def _form(self, *parts):
return RemotePath(self.remote, *parts)
@property
def _path(self):
return str(self)
@property # type: ignore
@_setdoc(Path)
def name(self):
if not "/" in str(self):
return str(self)
return str(self).rsplit("/", 1)[1]
@property # type: ignore
@_setdoc(Path)
def dirname(self):
if not "/" in str(self):
return str(self)
return self.__class__(self.remote, str(self).rsplit("/", 1)[0])
@property # type: ignore
@_setdoc(Path)
def suffix(self):
return '.' + self.name.rsplit('.', 1)[1]
@property # type: ignore
@_setdoc(Path)
def suffixes(self):
name = self.name
exts = []
while '.' in name:
name, ext = name.rsplit('.', 1)
exts.append('.' + ext)
return list(reversed(exts))
@property # type: ignore
@_setdoc(Path)
def uid(self):
uid, name = self.remote._path_getuid(self)
return FSUser(int(uid), name)
@property # type: ignore
@_setdoc(Path)
def gid(self):
gid, name = self.remote._path_getgid(self)
return FSUser(int(gid), name)
def _get_info(self):
return (self.remote, self._path)
@_setdoc(Path)
def join(self, *parts):
return RemotePath(self.remote, self, *parts)
@_setdoc(Path)
def list(self):
if not self.is_dir():
return []
return [self.join(fn) for fn in self.remote._path_listdir(self)]
@_setdoc(Path)
def iterdir(self):
if not self.is_dir():
return ()
return (self.join(fn) for fn in self.remote._path_listdir(self))
@_setdoc(Path)
def is_dir(self):
res = self.remote._path_stat(self)
if not res:
return False
return res.text_mode == "directory"
@_setdoc(Path)
def is_file(self):
res = self.remote._path_stat(self)
if not res:
return False
return res.text_mode in ("regular file", "regular empty file")
@_setdoc(Path)
def is_symlink(self):
res = self.remote._path_stat(self)
if not res:
return False
return res.text_mode == "symbolic link"
@_setdoc(Path)
def exists(self):
return self.remote._path_stat(self) is not None
@_setdoc(Path)
def stat(self):
res = self.remote._path_stat(self)
if res is None:
raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), '')
return res
@_setdoc(Path)
def with_name(self, name):
return self.__class__(self.remote, self.dirname) / name
@_setdoc(Path)
def with_suffix(self, suffix, depth=1):
if (suffix and not suffix.startswith('.') or suffix == '.'):
raise ValueError("Invalid suffix %r" % (suffix))
name = self.name
depth = len(self.suffixes) if depth is None else min(
depth, len(self.suffixes))
for i in range(depth):
name, ext = name.rsplit('.', 1)
return self.__class__(self.remote, self.dirname) / (name + suffix)
@_setdoc(Path)
def glob(self, pattern):
fn = lambda pat: [RemotePath(self.remote, m) for m in self.remote._path_glob(self, pat)]
return self._glob(pattern, fn)
@_setdoc(Path)
def delete(self):
if not self.exists():
return
self.remote._path_delete(self)
unlink = delete
@_setdoc(Path)
def move(self, dst):
if isinstance(dst, RemotePath):
if dst.remote is not self.remote:
raise TypeError("dst points to a different remote machine")
elif not isinstance(dst, six.string_types):
raise TypeError(
"dst must be a string or a RemotePath (to the same remote machine), "
"got %r" % (dst, ))
self.remote._path_move(self, dst)
@_setdoc(Path)
def copy(self, dst, override=False):
if isinstance(dst, RemotePath):
if dst.remote is not self.remote:
raise TypeError("dst points to a different remote machine")
elif not isinstance(dst, six.string_types):
raise TypeError(
"dst must be a string or a RemotePath (to the same remote machine), "
"got %r" % (dst, ))
if override:
if isinstance(dst, six.string_types):
dst = RemotePath(self.remote, dst)
dst.delete()
else:
if isinstance(dst, six.string_types):
dst = RemotePath(self.remote, dst)
if dst.exists():
raise TypeError("Override not specified and dst exists")
self.remote._path_copy(self, dst)
@_setdoc(Path)
def mkdir(self, mode=None, parents=True, exist_ok=True):
if parents and exist_ok:
self.remote._path_mkdir(self, mode=mode, minus_p=True)
else:
if parents and len(self.parts) > 1:
self.remote._path_mkdir(self.parent, mode=mode, minus_p=True)
try:
self.remote._path_mkdir(self, mode=mode, minus_p=False)
except ProcessExecutionError:
_, ex, _ = sys.exc_info()
if "File exists" in ex.stderr:
if not exist_ok:
raise OSError(
errno.EEXIST, "File exists (on remote end)",
str(self))
else:
raise
@_setdoc(Path)
def read(self, encoding=None):
data = self.remote._path_read(self)
if encoding:
data = data.decode(encoding)
return data
@_setdoc(Path)
def write(self, data, encoding=None):
if encoding:
data = data.encode(encoding)
self.remote._path_write(self, data)
@_setdoc(Path)
def touch(self):
self.remote._path_touch(str(self))
@_setdoc(Path)
def chown(self, owner=None, group=None, recursive=None):
self.remote._path_chown(
self, owner, group,
self.is_dir() if recursive is None else recursive)
@_setdoc(Path)
def chmod(self, mode):
self.remote._path_chmod(mode, self)
@_setdoc(Path)
def access(self, mode=0):
mode = self._access_mode_to_flags(mode)
res = self.remote._path_stat(self)
if res is None:
return False
mask = res.st_mode & 0x1ff
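        # 0x1ff keeps the nine rwx permission bits; the owner bits sit at >> 6
        # and the group bits at >> 3 (the "other" bits are not consulted here).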
return ((mask >> 6) & mode) or ((mask >> 3) & mode)
@_setdoc(Path)
def link(self, dst):
if isinstance(dst, RemotePath):
if dst.remote is not self.remote:
raise TypeError("dst points to a different remote machine")
elif not isinstance(dst, six.string_types):
raise TypeError(
"dst must be a string or a RemotePath (to the same remote machine), "
"got %r" % (dst, ))
self.remote._path_link(self, dst, False)
@_setdoc(Path)
def symlink(self, dst):
if isinstance(dst, RemotePath):
if dst.remote is not self.remote:
raise TypeError("dst points to a different remote machine")
elif not isinstance(dst, six.string_types):
raise TypeError(
"dst must be a string or a RemotePath (to the same remote machine), "
"got %r" % (dst, ))
self.remote._path_link(self, dst, True)
def open(self, mode="r", bufsize=-1):
"""
Opens this path as a file.
Only works for ParamikoMachine-associated paths for now.
"""
if hasattr(self.remote, "sftp") and hasattr(self.remote.sftp, "open"):
return self.remote.sftp.open(self, mode, bufsize)
else:
raise NotImplementedError(
"RemotePath.open only works for ParamikoMachine-associated "
"paths for now")
@_setdoc(Path)
def as_uri(self, scheme='ssh'):
return '{0}://{1}{2}'.format(scheme, self.remote._fqhost,
urllib.pathname2url(str(self)))
@property # type: ignore
@_setdoc(Path)
def stem(self):
return self.name.rsplit('.')[0]
@property # type: ignore
@_setdoc(Path)
def root(self):
return '/'
@property # type: ignore
@_setdoc(Path)
def drive(self):
return ''
class RemoteWorkdir(RemotePath):
"""Remote working directory manipulator"""
def __new__(cls, remote):
self = super(RemoteWorkdir, cls).__new__(
cls, remote,
remote._session.run("pwd")[1].strip())
return self
def __hash__(self):
raise TypeError("unhashable type")
def chdir(self, newdir):
"""Changes the current working directory to the given one"""
self.remote._session.run("cd %s" % (shquote(newdir), ))
if hasattr(self.remote, '_cwd'):
del self.remote._cwd
return self.__class__(self.remote)
def getpath(self):
"""Returns the current working directory as a
`remote path <plumbum.path.remote.RemotePath>` object"""
return RemotePath(self.remote, self)
@contextmanager
def __call__(self, newdir):
"""A context manager used to ``chdir`` into a directory and then ``chdir`` back to
the previous location; much like ``pushd``/``popd``.
        :param newdir: The destination directory (a string or a
:class:`RemotePath <plumbum.path.remote.RemotePath>`)
"""
prev = self._path
changed_dir = self.chdir(newdir)
try:
yield changed_dir
finally:
self.chdir(prev)
|
import mock
import pytest
from mock import sentinel
from paasta_tools.monitoring.check_capacity import calc_percent_usage
from paasta_tools.monitoring.check_capacity import get_check_from_overrides
from paasta_tools.monitoring.check_capacity import run_capacity_check
overrides = [
{
"groupings": {"foo": "bar"},
"crit": {"cpus": 90, "mem": 95, "disk": 99},
"warn": {"cpus": 80, "mem": 85, "disk": 89},
}
]
check_types = ["cpus", "mem", "disk"]
def test_calc_percent_usage():
item = {
"cpus": {"free": 10, "total": 20, "used": 10},
"mem": {"free": 100, "total": 200, "used": 100},
"disk": {"free": 1000, "total": 2000, "used": 1000},
}
for v in check_types:
assert calc_percent_usage(item, v) == 50
item = {"cpus": {"free": 0, "total": 0, "used": 0}}
assert calc_percent_usage(item, "cpus") == 0
def test_get_check_from_overrides_default():
default_check = sentinel.default
groupings = {"foo": "baz"}
assert (
get_check_from_overrides(overrides, default_check, groupings) == default_check
)
def test_get_check_from_overrides_override():
default_check = sentinel.default
groupings = {"foo": "bar"}
assert get_check_from_overrides(overrides, default_check, groupings) == overrides[0]
def test_get_check_from_overrides_error():
default_check = sentinel.default_check
bad_overrides = overrides + [{"groupings": {"foo": "bar"}}]
groupings = {"foo": "bar"}
with pytest.raises(SystemExit) as error:
get_check_from_overrides(bad_overrides, default_check, groupings)
assert error.value.code == 3
def test_capacity_check_ok(capfd):
mock_api_client = mock.MagicMock()
    mock_api_client.resources.resources.return_value.value = [
        {
            "groupings": {"foo": "baz"},
"cpus": {"total": 2, "free": 1, "used": 1},
"mem": {"total": 2, "free": 1, "used": 1},
"disk": {"total": 2, "free": 1, "used": 1},
}
]
for t in check_types:
options = mock.MagicMock()
options.type = t
options.overrides = None
options.cluster = "fake_cluster"
options.attributes = "foo"
options.warn = 80
options.crit = 90
with mock.patch(
"paasta_tools.monitoring.check_capacity.parse_capacity_check_options",
autospec=True,
return_value=options,
), mock.patch(
"paasta_tools.monitoring.check_capacity.load_system_paasta_config",
autospec=True,
), mock.patch(
"paasta_tools.monitoring.check_capacity.get_paasta_oapi_client",
autospec=True,
return_value=mock_api_client,
):
with pytest.raises(SystemExit) as error:
run_capacity_check()
out, err = capfd.readouterr()
assert error.value.code == 0
assert "OK" in out
assert "fake_cluster" in out
assert t in out
def test_capacity_check_warn(capfd):
mock_api_client = mock.MagicMock()
mock_api_client.resources.resources.return_value.value = [
{
"groupings": {"foo": "baz"},
"cpus": {"total": 2, "free": 1, "used": 1},
"mem": {"total": 2, "free": 1, "used": 1},
"disk": {"total": 2, "free": 1, "used": 1},
}
]
for t in check_types:
options = mock.MagicMock()
options.type = t
options.overrides = None
options.cluster = "fake_cluster"
options.attributes = "foo"
options.warn = 45
options.crit = 80
with mock.patch(
"paasta_tools.monitoring.check_capacity.parse_capacity_check_options",
autospec=True,
return_value=options,
), mock.patch(
"paasta_tools.monitoring.check_capacity.load_system_paasta_config",
autospec=True,
), mock.patch(
"paasta_tools.monitoring.check_capacity.get_paasta_oapi_client",
autospec=True,
return_value=mock_api_client,
):
with pytest.raises(SystemExit) as error:
run_capacity_check()
out, err = capfd.readouterr()
assert error.value.code == 1, out
assert "WARNING" in out
assert "fake_cluster" in out
assert t in out
def test_capacity_check_crit(capfd):
mock_api_client = mock.MagicMock()
mock_api_client.resources.resources.return_value.value = [
{
"groupings": {"foo": "baz"},
"cpus": {"total": 2, "free": 1, "used": 1},
"mem": {"total": 2, "free": 1, "used": 1},
"disk": {"total": 2, "free": 1, "used": 1},
}
]
for t in check_types:
options = mock.MagicMock()
options.type = t
options.overrides = None
options.cluster = "fake_cluster"
options.attributes = "foo"
options.warn = 45
options.crit = 49
with mock.patch(
"paasta_tools.monitoring.check_capacity.parse_capacity_check_options",
autospec=True,
return_value=options,
), mock.patch(
"paasta_tools.monitoring.check_capacity.load_system_paasta_config",
autospec=True,
), mock.patch(
"paasta_tools.monitoring.check_capacity.get_paasta_oapi_client",
autospec=True,
return_value=mock_api_client,
):
with pytest.raises(SystemExit) as error:
run_capacity_check()
out, err = capfd.readouterr()
assert error.value.code == 2, out
assert "CRITICAL" in out
assert "fake_cluster" in out
assert t in out
def test_capacity_check_overrides(capfd):
mock_api_client = mock.MagicMock()
mock_api_client.resources.resources.return_value.value = [
{
"groupings": {"foo": "bar"},
"cpus": {"total": 2, "free": 1, "used": 1},
"mem": {"total": 2, "free": 1, "used": 1},
"disk": {"total": 2, "free": 1, "used": 1},
},
{
"groupings": {"foo": "baz"},
"cpus": {"total": 2, "free": 1, "used": 1},
"mem": {"total": 2, "free": 1, "used": 1},
"disk": {"total": 2, "free": 1, "used": 1},
},
]
mock_overrides = [
{
"groupings": {"foo": "bar"},
"warn": {"cpus": 99, "mem": 99, "disk": 99},
"crit": {"cpus": 10, "mem": 10, "disk": 10},
}
]
for t in check_types:
options = mock.MagicMock()
options.type = t
options.overrides = "/fake/file.json"
options.cluster = "fake_cluster"
options.attributes = "foo"
options.warn = 99
options.crit = 99
with mock.patch(
"paasta_tools.monitoring.check_capacity.parse_capacity_check_options",
autospec=True,
return_value=options,
), mock.patch(
"paasta_tools.monitoring.check_capacity.load_system_paasta_config",
autospec=True,
), mock.patch(
"paasta_tools.monitoring.check_capacity.get_paasta_oapi_client",
autospec=True,
return_value=mock_api_client,
), mock.patch(
"paasta_tools.monitoring.check_capacity.read_overrides",
autospec=True,
return_value=mock_overrides,
):
with pytest.raises(SystemExit) as error:
run_capacity_check()
out, err = capfd.readouterr()
assert error.value.code == 2, out
assert "CRITICAL" in out
assert "fake_cluster" in out
assert t in out
assert "baz" not in out
|
import pandas as pd
from scattertext.termranking.TermRanker import TermRanker
class OncePerDocFrequencyRanker(TermRanker):
def get_ranks(self, label_append=' freq'):
mat = self._term_doc_matrix.get_term_count_mat()
return self.get_ranks_from_mat(mat, label_append)
def get_ranks_from_mat(self, mat, label_append=' freq'):
return pd.DataFrame(mat,
index=pd.Series(self._term_doc_matrix.get_terms(), name='term'),
columns=[str(c) + label_append for c
in self._term_doc_matrix.get_categories()])
|
from __future__ import print_function
import sys
import argparse
import threading
def main(args):
ap = argparse.ArgumentParser()
ap.parse_args(args)
current_worker = threading.currentThread()
_stash = globals()['_stash']
""":type : StaSh"""
for worker in _stash.get_workers():
if worker.job_id != current_worker.job_id:
print(worker)
if __name__ == '__main__':
main(sys.argv[1:])
|
import unittest
import mock
from docker_registry.lib import index
class TestIndex(unittest.TestCase):
def setUp(self):
self.index = index.Index()
def test_cover_passed_methods(self):
self.index._handle_repository_created(None, None, None, None)
self.index._handle_repository_updated(None, None, None, None)
self.index._handle_repository_deleted(None, None, None)
def test_results(self):
self.assertRaises(NotImplementedError, self.index.results, None)
class TestLoad(unittest.TestCase):
@mock.patch('docker_registry.lib.config.load')
def test_search_backend(self, load):
load.return_value = mock.MagicMock(search_backend='x')
self.assertRaises(NotImplementedError, index.load)
|
from time import time
from .common import setup_platform
from tests.async_mock import patch
async def test_binary_sensor(hass, requests_mock):
"""Test the Ring binary sensors."""
with patch(
"ring_doorbell.Ring.active_alerts",
return_value=[
{
"kind": "motion",
"doorbot_id": 987654,
"state": "ringing",
"now": time(),
"expires_in": 180,
}
],
):
await setup_platform(hass, "binary_sensor")
motion_state = hass.states.get("binary_sensor.front_door_motion")
assert motion_state is not None
assert motion_state.state == "on"
assert motion_state.attributes["device_class"] == "motion"
ding_state = hass.states.get("binary_sensor.front_door_ding")
assert ding_state is not None
assert ding_state.state == "off"
|
import asyncio
import functools
import logging
from typing import List, Optional
from zigpy.zcl.foundation import Status
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DEVICE_CLASS_DAMPER,
DEVICE_CLASS_SHADE,
DOMAIN,
CoverEntity,
)
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core import discovery
from .core.const import (
CHANNEL_COVER,
CHANNEL_LEVEL,
CHANNEL_ON_OFF,
CHANNEL_SHADE,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
SIGNAL_SET_LEVEL,
)
from .core.registries import ZHA_ENTITIES
from .core.typing import ChannelType, ZhaDeviceType
from .entity import ZhaEntity
_LOGGER = logging.getLogger(__name__)
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation cover from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
@STRICT_MATCH(channel_names=CHANNEL_COVER)
class ZhaCover(ZhaEntity, CoverEntity):
"""Representation of a ZHA cover."""
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Init this sensor."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._cover_channel = self.cluster_channels.get(CHANNEL_COVER)
self._current_position = None
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._cover_channel, SIGNAL_ATTR_UPDATED, self.async_set_position
)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._state = last_state.state
if "current_position" in last_state.attributes:
self._current_position = last_state.attributes["current_position"]
@property
def is_closed(self):
"""Return if the cover is closed."""
if self.current_cover_position is None:
return None
return self.current_cover_position == 0
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._state == STATE_OPENING
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._state == STATE_CLOSING
@property
def current_cover_position(self):
"""Return the current position of ZHA cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._current_position
@callback
def async_set_position(self, attr_id, attr_name, value):
"""Handle position update from channel."""
_LOGGER.debug("setting position: %s", value)
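        # The channel reports 100 for a fully closed cover, while Home Assistant
        # treats 100 as fully open, so the value is inverted here.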
self._current_position = 100 - value
if self._current_position == 0:
self._state = STATE_CLOSED
elif self._current_position == 100:
self._state = STATE_OPEN
self.async_write_ha_state()
@callback
def async_update_state(self, state):
"""Handle state update from channel."""
_LOGGER.debug("state=%s", state)
self._state = state
self.async_write_ha_state()
async def async_open_cover(self, **kwargs):
"""Open the window cover."""
res = await self._cover_channel.up_open()
if isinstance(res, list) and res[1] is Status.SUCCESS:
self.async_update_state(STATE_OPENING)
async def async_close_cover(self, **kwargs):
"""Close the window cover."""
res = await self._cover_channel.down_close()
if isinstance(res, list) and res[1] is Status.SUCCESS:
self.async_update_state(STATE_CLOSING)
async def async_set_cover_position(self, **kwargs):
"""Move the roller shutter to a specific position."""
new_pos = kwargs[ATTR_POSITION]
res = await self._cover_channel.go_to_lift_percentage(100 - new_pos)
if isinstance(res, list) and res[1] is Status.SUCCESS:
self.async_update_state(
STATE_CLOSING if new_pos < self._current_position else STATE_OPENING
)
async def async_stop_cover(self, **kwargs):
"""Stop the window cover."""
res = await self._cover_channel.stop()
if isinstance(res, list) and res[1] is Status.SUCCESS:
self._state = STATE_OPEN if self._current_position > 0 else STATE_CLOSED
self.async_write_ha_state()
async def async_update(self):
"""Attempt to retrieve the open/close state of the cover."""
await super().async_update()
await self.async_get_state()
async def async_get_state(self, from_cache=True):
"""Fetch the current state."""
_LOGGER.debug("polling current state")
if self._cover_channel:
pos = await self._cover_channel.get_attribute_value(
"current_position_lift_percentage", from_cache=from_cache
)
_LOGGER.debug("read pos=%s", pos)
if pos is not None:
self._current_position = 100 - pos
self._state = (
STATE_OPEN if self.current_cover_position > 0 else STATE_CLOSED
)
else:
self._current_position = None
self._state = None
@STRICT_MATCH(channel_names={CHANNEL_LEVEL, CHANNEL_ON_OFF, CHANNEL_SHADE})
class Shade(ZhaEntity, CoverEntity):
"""ZHA Shade."""
def __init__(
self,
unique_id: str,
zha_device: ZhaDeviceType,
channels: List[ChannelType],
**kwargs,
):
"""Initialize the ZHA light."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._on_off_channel = self.cluster_channels[CHANNEL_ON_OFF]
self._level_channel = self.cluster_channels[CHANNEL_LEVEL]
self._position = None
self._is_open = None
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._position
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_SHADE
@property
def is_closed(self) -> Optional[bool]:
"""Return True if shade is closed."""
if self._is_open is None:
return None
return not self._is_open
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_open_closed
)
self.async_accept_signal(
self._level_channel, SIGNAL_SET_LEVEL, self.async_set_level
)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._is_open = last_state.state == STATE_OPEN
if ATTR_CURRENT_POSITION in last_state.attributes:
self._position = last_state.attributes[ATTR_CURRENT_POSITION]
@callback
def async_set_open_closed(self, attr_id: int, attr_name: str, value: bool) -> None:
"""Set open/closed state."""
self._is_open = bool(value)
self.async_write_ha_state()
@callback
def async_set_level(self, value: int) -> None:
"""Set the reported position."""
value = max(0, min(255, value))
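        # Convert the 0-255 level reported by the channel to a 0-100 percentage.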
self._position = int(value * 100 / 255)
self.async_write_ha_state()
async def async_open_cover(self, **kwargs):
"""Open the window cover."""
res = await self._on_off_channel.on()
if not isinstance(res, list) or res[1] != Status.SUCCESS:
self.debug("couldn't open cover: %s", res)
return
self._is_open = True
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Close the window cover."""
res = await self._on_off_channel.off()
if not isinstance(res, list) or res[1] != Status.SUCCESS:
            self.debug("couldn't close cover: %s", res)
return
self._is_open = False
self.async_write_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the roller shutter to a specific position."""
new_pos = kwargs[ATTR_POSITION]
res = await self._level_channel.move_to_level_with_on_off(
new_pos * 255 / 100, 1
)
if not isinstance(res, list) or res[1] != Status.SUCCESS:
self.debug("couldn't set cover's position: %s", res)
return
self._position = new_pos
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the cover."""
res = await self._level_channel.stop()
if not isinstance(res, list) or res[1] != Status.SUCCESS:
self.debug("couldn't stop cover: %s", res)
return
@STRICT_MATCH(
channel_names={CHANNEL_LEVEL, CHANNEL_ON_OFF}, manufacturers="Keen Home Inc"
)
class KeenVent(Shade):
"""Keen vent cover."""
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_DAMPER
async def async_open_cover(self, **kwargs):
"""Open the cover."""
position = self._position or 100
tasks = [
self._level_channel.move_to_level_with_on_off(position * 255 / 100, 1),
self._on_off_channel.on(),
]
results = await asyncio.gather(*tasks, return_exceptions=True)
if any([isinstance(result, Exception) for result in results]):
self.debug("couldn't open cover")
return
self._is_open = True
self._position = position
self.async_write_ha_state()
|
from datetime import datetime
import logging
import time
from homeassistant.const import DEVICE_CLASS_TIMESTAMP
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from . import REPETIER_API, SENSOR_TYPES, UPDATE_SIGNAL
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the available Repetier Server sensors."""
if discovery_info is None:
return
sensor_map = {
"bed_temperature": RepetierTempSensor,
"extruder_temperature": RepetierTempSensor,
"chamber_temperature": RepetierTempSensor,
"current_state": RepetierSensor,
"current_job": RepetierJobSensor,
"job_end": RepetierJobEndSensor,
"job_start": RepetierJobStartSensor,
}
entities = []
for info in discovery_info:
printer_name = info["printer_name"]
api = hass.data[REPETIER_API][printer_name]
printer_id = info["printer_id"]
sensor_type = info["sensor_type"]
temp_id = info["temp_id"]
name = f"{info['name']}{SENSOR_TYPES[sensor_type][3]}"
if temp_id is not None:
_LOGGER.debug("%s Temp_id: %s", sensor_type, temp_id)
name = f"{name}{temp_id}"
sensor_class = sensor_map[sensor_type]
entity = sensor_class(api, temp_id, name, printer_id, sensor_type)
entities.append(entity)
add_entities(entities, True)
class RepetierSensor(Entity):
"""Class to create and populate a Repetier Sensor."""
def __init__(self, api, temp_id, name, printer_id, sensor_type):
"""Init new sensor."""
self._api = api
self._attributes = {}
self._available = False
self._temp_id = temp_id
self._name = name
self._printer_id = printer_id
self._sensor_type = sensor_type
self._state = None
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def device_state_attributes(self):
"""Return sensor attributes."""
return self._attributes
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return SENSOR_TYPES[self._sensor_type][1]
@property
def icon(self):
"""Icon to use in the frontend."""
return SENSOR_TYPES[self._sensor_type][2]
@property
def should_poll(self):
"""Return False as entity is updated from the component."""
return False
@property
def state(self):
"""Return sensor state."""
return self._state
@callback
def update_callback(self):
"""Get new data and update state."""
self.async_schedule_update_ha_state(True)
async def async_added_to_hass(self):
"""Connect update callbacks."""
self.async_on_remove(
async_dispatcher_connect(self.hass, UPDATE_SIGNAL, self.update_callback)
)
def _get_data(self):
"""Return new data from the api cache."""
data = self._api.get_data(self._printer_id, self._sensor_type, self._temp_id)
if data is None:
_LOGGER.debug(
"Data not found for %s and %s", self._sensor_type, self._temp_id
)
self._available = False
return None
self._available = True
return data
def update(self):
"""Update the sensor."""
data = self._get_data()
if data is None:
return
state = data.pop("state")
_LOGGER.debug("Printer %s State %s", self._name, state)
self._attributes.update(data)
self._state = state
class RepetierTempSensor(RepetierSensor):
"""Represent a Repetier temp sensor."""
@property
def state(self):
"""Return sensor state."""
if self._state is None:
return None
return round(self._state, 2)
def update(self):
"""Update the sensor."""
data = self._get_data()
if data is None:
return
state = data.pop("state")
temp_set = data["temp_set"]
_LOGGER.debug("Printer %s Setpoint: %s, Temp: %s", self._name, temp_set, state)
self._attributes.update(data)
self._state = state
class RepetierJobSensor(RepetierSensor):
"""Represent a Repetier job sensor."""
@property
def state(self):
"""Return sensor state."""
if self._state is None:
return None
return round(self._state, 2)
class RepetierJobEndSensor(RepetierSensor):
"""Class to create and populate a Repetier Job End timestamp Sensor."""
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
def update(self):
"""Update the sensor."""
data = self._get_data()
if data is None:
return
job_name = data["job_name"]
start = data["start"]
print_time = data["print_time"]
from_start = data["from_start"]
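        # End-of-job timestamp: job start (epoch seconds) plus the estimated total print time.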
time_end = start + round(print_time, 0)
self._state = datetime.utcfromtimestamp(time_end).isoformat()
remaining = print_time - from_start
remaining_secs = int(round(remaining, 0))
_LOGGER.debug(
"Job %s remaining %s",
job_name,
time.strftime("%H:%M:%S", time.gmtime(remaining_secs)),
)
class RepetierJobStartSensor(RepetierSensor):
"""Class to create and populate a Repetier Job Start timestamp Sensor."""
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
def update(self):
"""Update the sensor."""
data = self._get_data()
if data is None:
return
job_name = data["job_name"]
start = data["start"]
from_start = data["from_start"]
self._state = datetime.utcfromtimestamp(start).isoformat()
elapsed_secs = int(round(from_start, 0))
_LOGGER.debug(
"Job %s elapsed %s",
job_name,
time.strftime("%H:%M:%S", time.gmtime(elapsed_secs)),
)
|
from datetime import timedelta
import functools as ft
import logging
from typing import Any, Iterable, cast
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.loader import bind_hass
# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_ACTIVITY = "activity"
ATTR_COMMAND = "command"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE = "device"
ATTR_NUM_REPEATS = "num_repeats"
ATTR_DELAY_SECS = "delay_secs"
ATTR_HOLD_SECS = "hold_secs"
ATTR_ALTERNATIVE = "alternative"
ATTR_TIMEOUT = "timeout"
DOMAIN = "remote"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
SERVICE_SEND_COMMAND = "send_command"
SERVICE_LEARN_COMMAND = "learn_command"
SERVICE_DELETE_COMMAND = "delete_command"
SERVICE_SYNC = "sync"
DEFAULT_NUM_REPEATS = 1
DEFAULT_DELAY_SECS = 0.4
DEFAULT_HOLD_SECS = 0
SUPPORT_LEARN_COMMAND = 1
SUPPORT_DELETE_COMMAND = 2
REMOTE_SERVICE_ACTIVITY_SCHEMA = make_entity_service_schema(
{vol.Optional(ATTR_ACTIVITY): cv.string}
)
@bind_hass
def is_on(hass: HomeAssistantType, entity_id: str) -> bool:
"""Return if the remote is on based on the statemachine."""
return hass.states.is_state(entity_id, STATE_ON)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Track states and offer events for remotes."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(
SERVICE_TURN_OFF, REMOTE_SERVICE_ACTIVITY_SCHEMA, "async_turn_off"
)
component.async_register_entity_service(
SERVICE_TURN_ON, REMOTE_SERVICE_ACTIVITY_SCHEMA, "async_turn_on"
)
component.async_register_entity_service(
SERVICE_TOGGLE, REMOTE_SERVICE_ACTIVITY_SCHEMA, "async_toggle"
)
component.async_register_entity_service(
SERVICE_SEND_COMMAND,
{
vol.Required(ATTR_COMMAND): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_DEVICE): cv.string,
vol.Optional(
ATTR_NUM_REPEATS, default=DEFAULT_NUM_REPEATS
): cv.positive_int,
vol.Optional(ATTR_DELAY_SECS): vol.Coerce(float),
vol.Optional(ATTR_HOLD_SECS, default=DEFAULT_HOLD_SECS): vol.Coerce(float),
},
"async_send_command",
)
component.async_register_entity_service(
SERVICE_LEARN_COMMAND,
{
vol.Optional(ATTR_DEVICE): cv.string,
vol.Optional(ATTR_COMMAND): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_COMMAND_TYPE): cv.string,
vol.Optional(ATTR_ALTERNATIVE): cv.boolean,
vol.Optional(ATTR_TIMEOUT): cv.positive_int,
},
"async_learn_command",
)
component.async_register_entity_service(
SERVICE_DELETE_COMMAND,
{
vol.Required(ATTR_COMMAND): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_DEVICE): cv.string,
},
"async_delete_command",
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await cast(EntityComponent, hass.data[DOMAIN]).async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await cast(EntityComponent, hass.data[DOMAIN]).async_unload_entry(entry)
class RemoteEntity(ToggleEntity):
"""Representation of a remote."""
@property
def supported_features(self) -> int:
"""Flag supported features."""
return 0
def send_command(self, command: Iterable[str], **kwargs: Any) -> None:
"""Send commands to a device."""
raise NotImplementedError()
async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
"""Send commands to a device."""
assert self.hass is not None
await self.hass.async_add_executor_job(
ft.partial(self.send_command, command, **kwargs)
)
def learn_command(self, **kwargs: Any) -> None:
"""Learn a command from a device."""
raise NotImplementedError()
async def async_learn_command(self, **kwargs: Any) -> None:
"""Learn a command from a device."""
assert self.hass is not None
await self.hass.async_add_executor_job(ft.partial(self.learn_command, **kwargs))
def delete_command(self, **kwargs: Any) -> None:
"""Delete commands from the database."""
raise NotImplementedError()
async def async_delete_command(self, **kwargs: Any) -> None:
"""Delete commands from the database."""
assert self.hass is not None
await self.hass.async_add_executor_job(
ft.partial(self.delete_command, **kwargs)
)
class RemoteDevice(RemoteEntity):
"""Representation of a remote (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"RemoteDevice is deprecated, modify %s to extend RemoteEntity",
cls.__name__,
)
|
import sys
from pytest import raises
from pygal._compat import _ellipsis, u
from pygal.util import (
_swap_curly, majorize, mergextend, minify_css, round_to_float,
round_to_int, template, truncate
)
def test_round_to_int():
"""Test round to int function"""
assert round_to_int(154231, 1000) == 154000
assert round_to_int(154231, 10) == 154230
assert round_to_int(154231, 100000) == 200000
assert round_to_int(154231, 50000) == 150000
assert round_to_int(154231, 500) == 154000
assert round_to_int(154231, 200) == 154200
assert round_to_int(154361, 200) == 154400
def test_round_to_float():
"""Test round to float function"""
assert round_to_float(12.01934, .01) == 12.02
assert round_to_float(12.01134, .01) == 12.01
assert round_to_float(12.1934, .1) == 12.2
assert round_to_float(12.1134, .1) == 12.1
assert round_to_float(12.1134, .001) == 12.113
assert round_to_float(12.1134, .00001) == 12.1134
assert round_to_float(12.1934, .5) == 12.0
assert round_to_float(12.2934, .5) == 12.5
def test_swap_curly():
"""Test swap curly function"""
    for string in ('foo', u('foo foo foo bar'), 'foo béè b¡ð/ijə˘©þß®~¯æ',
                   u('foo béè b¡ð/ijə˘©þß®~¯æ')):
        assert _swap_curly(string) == string
assert _swap_curly('foo{bar}baz') == 'foo{{bar}}baz'
assert _swap_curly('foo{{bar}}baz') == 'foo{bar}baz'
assert _swap_curly('{foo}{{bar}}{baz}') == '{{foo}}{bar}{{baz}}'
assert _swap_curly('{foo}{{{bar}}}{baz}') == '{{foo}}{{{bar}}}{{baz}}'
assert _swap_curly('foo{ bar }baz') == 'foo{{ bar }}baz'
assert _swap_curly('foo{ bar}baz') == 'foo{{ bar}}baz'
assert _swap_curly('foo{bar }baz') == 'foo{{bar }}baz'
assert _swap_curly('foo{{ bar }}baz') == 'foo{bar}baz'
assert _swap_curly('foo{{bar }}baz') == 'foo{bar}baz'
assert _swap_curly('foo{{ bar}}baz') == 'foo{bar}baz'
def test_format():
"""Test format function"""
assert template('foo {{ baz }}', baz='bar') == 'foo bar'
with raises(KeyError):
assert template('foo {{ baz }}') == 'foo baz'
class Object(object):
pass
obj = Object()
obj.a = 1
obj.b = True
obj.c = '3'
assert template('foo {{ o.a }} {{o.b}}-{{o.c}}', o=obj) == 'foo 1 True-3'
def test_truncate():
"""Test truncate function"""
assert truncate('1234567890', 50) == '1234567890'
assert truncate('1234567890', 5) == u('1234…')
assert truncate('1234567890', 1) == u('…')
assert truncate('1234567890', 9) == u('12345678…')
assert truncate('1234567890', 10) == '1234567890'
assert truncate('1234567890', 0) == '1234567890'
assert truncate('1234567890', -1) == '1234567890'
def test_minify_css():
"""Test css minifier function"""
css = '''
/*
* Font-sizes from config, override with care
*/
.title {
font-family: sans;
font-size: 12 ;
}
.legends .legend text {
font-family: monospace;
font-size: 14 ;}
'''
assert minify_css(css) == (
'.title{font-family:sans;font-size:12}'
'.legends .legend text{font-family:monospace;font-size:14}'
)
def test_majorize():
"""Test majorize function"""
assert majorize(()) == []
assert majorize((0, )) == []
assert majorize((0, 1)) == []
assert majorize((0, 1, 2)) == []
assert majorize((-1, 0, 1, 2)) == [0]
assert majorize((0, .1, .2, .3, .4, .5, .6, .7, .8, .9, 1)) == [0, .5, 1]
assert majorize((0, .2, .4, .6, .8, 1)) == [0, 1]
assert majorize((-.4, -.2, 0, .2, .4, .6, .8, 1)) == [0, 1]
assert majorize((-1, -.8, -.6, -.4, -.2, 0, .2, .4, .6, .8,
1)) == [-1, 0, 1]
assert majorize((0, .2, .4, .6, .8, 1, 1.2, 1.4, 1.6)) == [0, 1]
assert majorize((0, .2, .4, .6, .8, 1, 1.2, 1.4, 1.6, 1.8, 2)) == [0, 1, 2]
assert majorize((0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110,
120)) == [0, 50, 100]
assert majorize((
0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36
)) == [0, 10, 20, 30]
assert majorize((0, 1, 2, 3, 4, 5)) == [0, 5]
assert majorize((-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)) == [-5, 0, 5]
assert majorize((-5, 5, -4, 4, 0, 1, -1, 3, -2, 2, -3)) == [-5, 0, 5]
assert majorize((0, 1, 2, 3, 4)) == [0]
assert majorize((3, 4, 5, 6)) == [5]
assert majorize((0, 1, 2, 3, 4, 5, 6, 7, 8)) == [0, 5]
assert majorize((-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)) == [-5, 0, 5]
assert majorize((-6, -5, -4, -3, -2, -1, 0, 1, 2, 3)) == [-5, 0]
assert majorize((-6, -5, -4, -3)) == [-5]
assert majorize((1, 10, 100, 1000, 10000, 100000)) == []
assert majorize(range(30, 70, 5)) == [30, 40, 50, 60]
assert majorize(range(20, 55, 2)) == [20, 30, 40, 50]
assert majorize(range(21, 83, 3)) == [30, 45, 60, 75]
# TODO: handle crazy cases
# assert majorize(range(20, 83, 3)) == [20, 35, 50, 65, 80]
def test_mergextend():
"""Test mergextend function"""
assert mergextend(['a', 'b'], ['c', 'd']) == ['a', 'b']
assert mergextend([], ['c', 'd']) == []
assert mergextend(['a', 'b'], []) == ['a', 'b']
assert mergextend([_ellipsis], ['c', 'd']) == ['c', 'd']
assert mergextend([_ellipsis, 'b'], ['c', 'd']) == ['c', 'd', 'b']
assert mergextend(['a', _ellipsis], ['c', 'd']) == ['a', 'c', 'd']
assert mergextend(['a', _ellipsis, 'b'],
['c', 'd']) == ['a', 'c', 'd', 'b']
if sys.version_info[0] >= 3:
# For @#! sake it's 2016 now
assert eval("mergextend(['a', ..., 'b'], ['c', 'd'])") == [
'a', 'c', 'd', 'b'
]
|
import mock
import pytest
from paasta_tools import check_kubernetes_services_replication
from paasta_tools import check_services_replication_tools
from paasta_tools.utils import compose_job_id
check_kubernetes_services_replication.log = mock.Mock()
check_services_replication_tools.log = mock.Mock()
@pytest.fixture
def instance_config():
service = "fake_service"
instance = "fake_instance"
job_id = compose_job_id(service, instance)
mock_instance_config = mock.Mock(
service=service,
instance=instance,
cluster="fake_cluster",
soa_dir="fake_soa_dir",
job_id=job_id,
config_dict={},
)
mock_instance_config.get_replication_crit_percentage.return_value = 90
mock_instance_config.get_registrations.return_value = [job_id]
return mock_instance_config
def test_check_service_replication_for_normal_smartstack(instance_config):
instance_config.get_instances.return_value = 100
all_pods = []
with mock.patch(
"paasta_tools.check_kubernetes_services_replication.get_proxy_port_for_instance",
autospec=True,
return_value=666,
), mock.patch(
"paasta_tools.monitoring_tools.check_replication_for_instance", autospec=True,
) as mock_check_replication_for_service:
check_kubernetes_services_replication.check_kubernetes_pod_replication(
instance_config=instance_config,
all_tasks_or_pods=all_pods,
replication_checker=None,
)
mock_check_replication_for_service.assert_called_once_with(
instance_config=instance_config,
expected_count=100,
replication_checker=None,
)
def test_check_service_replication_for_smartstack_with_different_namespace(
instance_config,
):
instance_config.get_instances.return_value = 100
all_pods = []
with mock.patch(
"paasta_tools.check_kubernetes_services_replication.get_proxy_port_for_instance",
autospec=True,
return_value=666,
), mock.patch(
"paasta_tools.monitoring_tools.check_replication_for_instance", autospec=True,
) as mock_check_replication_for_service, mock.patch(
"paasta_tools.check_kubernetes_services_replication.check_healthy_kubernetes_tasks_for_service_instance",
autospec=True,
) as mock_check_healthy_kubernetes_tasks:
instance_config.get_registrations.return_value = ["some-random-other-namespace"]
check_kubernetes_services_replication.check_kubernetes_pod_replication(
instance_config=instance_config,
all_tasks_or_pods=all_pods,
replication_checker=None,
)
assert not mock_check_replication_for_service.called
mock_check_healthy_kubernetes_tasks.assert_called_once_with(
instance_config=instance_config, expected_count=100, all_pods=[]
)
def test_check_service_replication_for_non_smartstack(instance_config):
instance_config.get_instances.return_value = 100
with mock.patch(
"paasta_tools.check_kubernetes_services_replication.get_proxy_port_for_instance",
autospec=True,
return_value=None,
), mock.patch(
"paasta_tools.check_kubernetes_services_replication.check_healthy_kubernetes_tasks_for_service_instance",
autospec=True,
) as mock_check_healthy_kubernetes_tasks:
check_kubernetes_services_replication.check_kubernetes_pod_replication(
instance_config=instance_config,
all_tasks_or_pods=[],
replication_checker=None,
)
mock_check_healthy_kubernetes_tasks.assert_called_once_with(
instance_config=instance_config, expected_count=100, all_pods=[]
)
def test_check_healthy_kubernetes_tasks_for_service_instance():
with mock.patch(
"paasta_tools.check_kubernetes_services_replication.filter_pods_by_service_instance",
autospec=True,
) as mock_filter_pods_by_service_instance, mock.patch(
"paasta_tools.check_kubernetes_services_replication.is_pod_ready",
autospec=True,
side_effect=[True, False],
), mock.patch(
"paasta_tools.monitoring_tools.send_replication_event_if_under_replication",
autospec=True,
) as mock_send_replication_event_if_under_replication:
mock_instance_config = mock.Mock()
mock_pods = mock.Mock()
mock_pod_1 = mock.Mock()
mock_pod_2 = mock.Mock()
mock_filter_pods_by_service_instance.return_value = [mock_pod_1, mock_pod_2]
check_kubernetes_services_replication.check_healthy_kubernetes_tasks_for_service_instance(
mock_instance_config, 5, mock_pods
)
mock_filter_pods_by_service_instance.assert_called_with(
pod_list=mock_pods,
service=mock_instance_config.service,
instance=mock_instance_config.instance,
)
mock_send_replication_event_if_under_replication.assert_called_with(
instance_config=mock_instance_config, expected_count=5, num_available=1
)
|
import os
from behave import then
from behave import when
from itest_utils import get_service_connection_string
from itest_utils import update_context_marathon_config
from paasta_tools.utils import _run
from paasta_tools.utils import decompose_job_id
@when('we delete a marathon app called "{job_id}" from "{cluster_name}" soa configs')
def delete_apps(context, job_id, cluster_name):
context.job_id = job_id
(service, instance, _, __) = decompose_job_id(job_id)
context.service = service
context.instance = instance
context.zk_hosts = "%s/mesos-testcluster" % get_service_connection_string(
"zookeeper"
)
update_context_marathon_config(context)
context.app_id = context.marathon_complete_config["id"]
os.remove(f"{context.soa_dir}/{service}/marathon-{cluster_name}.yaml")
os.remove(f"{context.soa_dir}/{service}/deployments.json")
os.rmdir(f"{context.soa_dir}/{service}")
@then(
'we run cleanup_marathon_apps{flags} which exits with return code "{expected_return_code}"'
)
def run_cleanup_marathon_job(context, flags, expected_return_code):
cmd = f"python -m paasta_tools.cleanup_marathon_jobs --soa-dir {context.soa_dir} {flags}"
print("Running cmd %s" % (cmd))
exit_code, output = _run(cmd)
print(output)
assert exit_code == int(expected_return_code)
|
import asyncio
from collections import defaultdict
import datetime as dt
import logging
from types import ModuleType, TracebackType
from typing import Any, Dict, Iterable, List, Optional, Type, Union
from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON
from homeassistant.const import (
STATE_CLOSED,
STATE_HOME,
STATE_LOCKED,
STATE_NOT_HOME,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import Context, State
from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass
import homeassistant.util.dt as dt_util
from .typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
class AsyncTrackStates:
"""
Record the time when the with-block is entered.
Add all states that have changed since the start time to the return list
when with-block is exited.
Must be run within the event loop.
"""
def __init__(self, hass: HomeAssistantType) -> None:
"""Initialize a TrackStates block."""
self.hass = hass
self.states: List[State] = []
# pylint: disable=attribute-defined-outside-init
def __enter__(self) -> List[State]:
"""Record time from which to track changes."""
self.now = dt_util.utcnow()
return self.states
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
"""Add changes states to changes list."""
self.states.extend(get_changed_since(self.hass.states.async_all(), self.now))
def get_changed_since(
states: Iterable[State], utc_point_in_time: dt.datetime
) -> List[State]:
"""Return list of states that have been changed since utc_point_in_time."""
return [state for state in states if state.last_updated >= utc_point_in_time]
@bind_hass
async def async_reproduce_state(
hass: HomeAssistantType,
states: Union[State, Iterable[State]],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a list of states on multiple domains."""
if isinstance(states, State):
states = [states]
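    # Group states by domain so each integration's reproduce_state platform is called once.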
to_call: Dict[str, List[State]] = defaultdict(list)
for state in states:
to_call[state.domain].append(state)
async def worker(domain: str, states_by_domain: List[State]) -> None:
try:
integration = await async_get_integration(hass, domain)
except IntegrationNotFound:
_LOGGER.warning(
"Trying to reproduce state for unknown integration: %s", domain
)
return
try:
platform: Optional[ModuleType] = integration.get_platform("reproduce_state")
except ImportError:
_LOGGER.warning("Integration %s does not support reproduce state", domain)
return
await platform.async_reproduce_states( # type: ignore
hass, states_by_domain, context=context, reproduce_options=reproduce_options
)
if to_call:
# run all domains in parallel
await asyncio.gather(
*(worker(domain, data) for domain, data in to_call.items())
)
def state_as_number(state: State) -> float:
"""
Try to coerce our state to a number.
Raises ValueError if this is not possible.
"""
if state.state in (
STATE_ON,
STATE_LOCKED,
STATE_ABOVE_HORIZON,
STATE_OPEN,
STATE_HOME,
):
return 1
if state.state in (
STATE_OFF,
STATE_UNLOCKED,
STATE_UNKNOWN,
STATE_BELOW_HORIZON,
STATE_CLOSED,
STATE_NOT_HOME,
):
return 0
return float(state.state)
|
from datetime import timedelta
from aiohttp import WSMsgType
import pytest
from homeassistant.components.websocket_api import const, http
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed
@pytest.fixture
def mock_low_queue():
"""Mock a low queue."""
with patch("homeassistant.components.websocket_api.http.MAX_PENDING_MSG", 5):
yield
@pytest.fixture
def mock_low_peak():
"""Mock a low queue."""
with patch("homeassistant.components.websocket_api.http.PENDING_MSG_PEAK", 5):
yield
async def test_pending_msg_overflow(hass, mock_low_queue, websocket_client):
"""Test get_panels command."""
for idx in range(10):
await websocket_client.send_json({"id": idx + 1, "type": "ping"})
msg = await websocket_client.receive()
assert msg.type == WSMsgType.close
async def test_pending_msg_peak(hass, mock_low_peak, hass_ws_client, caplog):
"""Test pending msg overflow command."""
orig_handler = http.WebSocketHandler
instance = None
def instantiate_handler(*args):
nonlocal instance
instance = orig_handler(*args)
return instance
with patch(
"homeassistant.components.websocket_api.http.WebSocketHandler",
instantiate_handler,
):
websocket_client = await hass_ws_client()
# Kill writer task and fill queue past peak
for _ in range(5):
instance._to_write.put_nowait(None)
# Trigger the peak check
instance._send_message({})
async_fire_time_changed(
hass, utcnow() + timedelta(seconds=const.PENDING_MSG_PEAK_TIME + 1)
)
msg = await websocket_client.receive()
assert msg.type == WSMsgType.close
assert "Client unable to keep up with pending messages" in caplog.text
async def test_non_json_message(hass, websocket_client, caplog):
"""Test trying to serialze non JSON objects."""
bad_data = object()
hass.states.async_set("test_domain.entity", "testing", {"bad": bad_data})
await websocket_client.send_json({"id": 5, "type": "get_states"})
msg = await websocket_client.receive_json()
assert msg["id"] == 5
assert msg["type"] == const.TYPE_RESULT
assert not msg["success"]
assert (
f"Unable to serialize to JSON. Bad data found at $.result[0](state: test_domain.entity).attributes.bad={bad_data}(<class 'object'>"
in caplog.text
)
|
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_ALLOWED_BANDWIDTH,
ATTR_AUTO_BACKUPS,
ATTR_COST_PER_MONTH,
ATTR_CREATED_AT,
ATTR_DISK,
ATTR_IPV4_ADDRESS,
ATTR_IPV6_ADDRESS,
ATTR_MEMORY,
ATTR_OS,
ATTR_REGION,
ATTR_SUBSCRIPTION_ID,
ATTR_SUBSCRIPTION_NAME,
ATTR_VCPUS,
CONF_SUBSCRIPTION,
DATA_VULTR,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_DEVICE_CLASS = "power"
DEFAULT_NAME = "Vultr {}"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SUBSCRIPTION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Vultr subscription (server) binary sensor."""
vultr = hass.data[DATA_VULTR]
subscription = config.get(CONF_SUBSCRIPTION)
name = config.get(CONF_NAME)
if subscription not in vultr.data:
_LOGGER.error("Subscription %s not found", subscription)
return
add_entities([VultrBinarySensor(vultr, subscription, name)], True)
class VultrBinarySensor(BinarySensorEntity):
"""Representation of a Vultr subscription sensor."""
def __init__(self, vultr, subscription, name):
"""Initialize a new Vultr binary sensor."""
self._vultr = vultr
self._name = name
self.subscription = subscription
self.data = None
@property
def name(self):
"""Return the name of the sensor."""
try:
return self._name.format(self.data["label"])
except (KeyError, TypeError):
return self._name
@property
def icon(self):
"""Return the icon of this server."""
return "mdi:server" if self.is_on else "mdi:server-off"
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.data["power_status"] == "running"
@property
def device_class(self):
"""Return the class of this sensor."""
return DEFAULT_DEVICE_CLASS
@property
def device_state_attributes(self):
"""Return the state attributes of the Vultr subscription."""
return {
ATTR_ALLOWED_BANDWIDTH: self.data.get("allowed_bandwidth_gb"),
ATTR_AUTO_BACKUPS: self.data.get("auto_backups"),
ATTR_COST_PER_MONTH: self.data.get("cost_per_month"),
ATTR_CREATED_AT: self.data.get("date_created"),
ATTR_DISK: self.data.get("disk"),
ATTR_IPV4_ADDRESS: self.data.get("main_ip"),
ATTR_IPV6_ADDRESS: self.data.get("v6_main_ip"),
ATTR_MEMORY: self.data.get("ram"),
ATTR_OS: self.data.get("os"),
ATTR_REGION: self.data.get("location"),
ATTR_SUBSCRIPTION_ID: self.data.get("SUBID"),
ATTR_SUBSCRIPTION_NAME: self.data.get("label"),
ATTR_VCPUS: self.data.get("vcpu_count"),
}
def update(self):
"""Update state of sensor."""
self._vultr.update()
self.data = self._vultr.data[self.subscription]
|
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_SEEK_POSITION,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
DOMAIN,
SERVICE_CLEAR_PLAYLIST,
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOURCE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ENTITY_MATCH_ALL,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_MEDIA_SEEK,
SERVICE_MEDIA_STOP,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_DOWN,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
SERVICE_VOLUME_UP,
)
from homeassistant.loader import bind_hass
async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn on specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
@bind_hass
def turn_on(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn on specified media player or all."""
hass.add_job(async_turn_on, hass, entity_id)
async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn off specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
@bind_hass
def turn_off(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn off specified media player or all."""
hass.add_job(async_turn_off, hass, entity_id)
async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL):
"""Toggle specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True)
@bind_hass
def toggle(hass, entity_id=ENTITY_MATCH_ALL):
"""Toggle specified media player or all."""
hass.add_job(async_toggle, hass, entity_id)
async def async_volume_up(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for volume up."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_VOLUME_UP, data, blocking=True)
@bind_hass
def volume_up(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for volume up."""
hass.add_job(async_volume_up, hass, entity_id)
async def async_volume_down(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for volume down."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_VOLUME_DOWN, data, blocking=True)
@bind_hass
def volume_down(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for volume down."""
hass.add_job(async_volume_down, hass, entity_id)
async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for muting the volume."""
data = {ATTR_MEDIA_VOLUME_MUTED: mute}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_VOLUME_MUTE, data, blocking=True)
@bind_hass
def mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for muting the volume."""
hass.add_job(async_mute_volume, hass, mute, entity_id)
async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for setting the volume."""
data = {ATTR_MEDIA_VOLUME_LEVEL: volume}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_VOLUME_SET, data, blocking=True)
@bind_hass
def set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for setting the volume."""
hass.add_job(async_set_volume_level, hass, volume, entity_id)
async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for play/pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(
DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, data, blocking=True
)
@bind_hass
def media_play_pause(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for play/pause."""
hass.add_job(async_media_play_pause, hass, entity_id)
async def async_media_play(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for play/pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PLAY, data, blocking=True)
@bind_hass
def media_play(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for play/pause."""
hass.add_job(async_media_play, hass, entity_id)
async def async_media_pause(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PAUSE, data, blocking=True)
@bind_hass
def media_pause(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for pause."""
hass.add_job(async_media_pause, hass, entity_id)
async def async_media_stop(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for stop."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_MEDIA_STOP, data, blocking=True)
@bind_hass
def media_stop(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for stop."""
hass.add_job(async_media_stop, hass, entity_id)
async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for next track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(
DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data, blocking=True
)
@bind_hass
def media_next_track(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for next track."""
hass.add_job(async_media_next_track, hass, entity_id)
async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for prev track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(
DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data, blocking=True
)
@bind_hass
def media_previous_track(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for prev track."""
hass.add_job(async_media_previous_track, hass, entity_id)
async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command to seek in current playing media."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
data[ATTR_MEDIA_SEEK_POSITION] = position
await hass.services.async_call(DOMAIN, SERVICE_MEDIA_SEEK, data, blocking=True)
@bind_hass
def media_seek(hass, position, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command to seek in current playing media."""
hass.add_job(async_media_seek, hass, position, entity_id)
async def async_play_media(
hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None
):
"""Send the media player the command for playing media."""
data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
if enqueue:
data[ATTR_MEDIA_ENQUEUE] = enqueue
await hass.services.async_call(DOMAIN, SERVICE_PLAY_MEDIA, data, blocking=True)
@bind_hass
def play_media(hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None):
"""Send the media player the command for playing media."""
hass.add_job(async_play_media, hass, media_type, media_id, entity_id, enqueue)
async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command to select input source."""
data = {ATTR_INPUT_SOURCE: source}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SELECT_SOURCE, data, blocking=True)
@bind_hass
def select_source(hass, source, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command to select input source."""
hass.add_job(async_select_source, hass, source, entity_id)
async def async_clear_playlist(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for clear playlist."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_CLEAR_PLAYLIST, data, blocking=True)
@bind_hass
def clear_playlist(hass, entity_id=ENTITY_MATCH_ALL):
"""Send the media player the command for clear playlist."""
hass.add_job(async_clear_playlist, hass, entity_id)
|
from typing import Any, Dict
from unittest.mock import MagicMock, patch
from pyHS100 import SmartBulb, SmartDevice, SmartDeviceException, SmartPlug
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components import tplink
from homeassistant.components.tplink.common import (
CONF_DIMMER,
CONF_DISCOVERY,
CONF_LIGHT,
CONF_SWITCH,
)
from homeassistant.const import CONF_HOST
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, mock_coro
async def test_creating_entry_tries_discover(hass):
"""Test setting up does discovery."""
with patch(
"homeassistant.components.tplink.async_setup_entry",
return_value=mock_coro(True),
) as mock_setup, patch(
"homeassistant.components.tplink.common.Discover.discover",
return_value={"host": 1234},
):
result = await hass.config_entries.flow.async_init(
tplink.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
# Confirmation form
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
async def test_configuring_tplink_causes_discovery(hass):
"""Test that specifying empty config does discovery."""
with patch("homeassistant.components.tplink.common.Discover.discover") as discover:
discover.return_value = {"host": 1234}
await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}})
await hass.async_block_till_done()
assert len(discover.mock_calls) == 1
@pytest.mark.parametrize(
"name,cls,platform",
[
("pyHS100.SmartPlug", SmartPlug, "switch"),
("pyHS100.SmartBulb", SmartBulb, "light"),
],
)
@pytest.mark.parametrize("count", [1, 2, 3])
async def test_configuring_device_types(hass, name, cls, platform, count):
"""Test that light or switch platform list is filled correctly."""
with patch(
"homeassistant.components.tplink.common.Discover.discover"
) as discover, patch(
"homeassistant.components.tplink.common.SmartDevice._query_helper"
), patch(
"homeassistant.components.tplink.light.async_setup_entry",
return_value=True,
):
discovery_data = {
f"123.123.123.{c}": cls("123.123.123.123") for c in range(count)
}
discover.return_value = discovery_data
await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}})
await hass.async_block_till_done()
assert len(discover.mock_calls) == 1
assert len(hass.data[tplink.DOMAIN][platform]) == count
class UnknownSmartDevice(SmartDevice):
"""Dummy class for testing."""
@property
def has_emeter(self) -> bool:
"""Do nothing."""
pass
def turn_off(self) -> None:
"""Do nothing."""
pass
def turn_on(self) -> None:
"""Do nothing."""
pass
@property
def is_on(self) -> bool:
"""Do nothing."""
pass
@property
def state_information(self) -> Dict[str, Any]:
"""Do nothing."""
pass
async def test_configuring_devices_from_multiple_sources(hass):
"""Test static and discover devices are not duplicated."""
with patch(
"homeassistant.components.tplink.common.Discover.discover"
) as discover, patch(
"homeassistant.components.tplink.common.SmartDevice._query_helper"
), patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup"
):
discover_device_fail = SmartPlug("123.123.123.123")
discover_device_fail.get_sysinfo = MagicMock(side_effect=SmartDeviceException())
discover.return_value = {
"123.123.123.1": SmartBulb("123.123.123.1"),
"123.123.123.2": SmartPlug("123.123.123.2"),
"123.123.123.3": SmartBulb("123.123.123.3"),
"123.123.123.4": SmartPlug("123.123.123.4"),
"123.123.123.123": discover_device_fail,
"123.123.123.124": UnknownSmartDevice("123.123.123.124"),
}
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_LIGHT: [{CONF_HOST: "123.123.123.1"}],
CONF_SWITCH: [{CONF_HOST: "123.123.123.2"}],
CONF_DIMMER: [{CONF_HOST: "123.123.123.22"}],
}
},
)
await hass.async_block_till_done()
assert len(discover.mock_calls) == 1
assert len(hass.data[tplink.DOMAIN][CONF_LIGHT]) == 3
assert len(hass.data[tplink.DOMAIN][CONF_SWITCH]) == 2
async def test_is_dimmable(hass):
"""Test that is_dimmable switches are correctly added as lights."""
with patch(
"homeassistant.components.tplink.common.Discover.discover"
) as discover, patch(
"homeassistant.components.tplink.light.async_setup_entry",
return_value=mock_coro(True),
) as setup, patch(
"homeassistant.components.tplink.common.SmartDevice._query_helper"
), patch(
"homeassistant.components.tplink.common.SmartPlug.is_dimmable", True
):
dimmable_switch = SmartPlug("123.123.123.123")
discover.return_value = {"host": dimmable_switch}
await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}})
await hass.async_block_till_done()
assert len(discover.mock_calls) == 1
assert len(setup.mock_calls) == 1
assert len(hass.data[tplink.DOMAIN][CONF_LIGHT]) == 1
assert not hass.data[tplink.DOMAIN][CONF_SWITCH]
async def test_configuring_discovery_disabled(hass):
"""Test that discover does not get called when disabled."""
with patch(
"homeassistant.components.tplink.async_setup_entry",
return_value=mock_coro(True),
) as mock_setup, patch(
"homeassistant.components.tplink.common.Discover.discover", return_value=[]
) as discover:
await async_setup_component(
hass, tplink.DOMAIN, {tplink.DOMAIN: {tplink.CONF_DISCOVERY: False}}
)
await hass.async_block_till_done()
assert discover.call_count == 0
assert mock_setup.call_count == 1
async def test_platforms_are_initialized(hass):
"""Test that platforms are initialized per configuration array."""
config = {
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_LIGHT: [{CONF_HOST: "123.123.123.123"}],
CONF_SWITCH: [{CONF_HOST: "321.321.321.321"}],
}
}
with patch(
"homeassistant.components.tplink.common.Discover.discover"
) as discover, patch(
"homeassistant.components.tplink.common.SmartDevice._query_helper"
), patch(
"homeassistant.components.tplink.light.async_setup_entry",
return_value=mock_coro(True),
) as light_setup, patch(
"homeassistant.components.tplink.switch.async_setup_entry",
return_value=mock_coro(True),
) as switch_setup, patch(
"homeassistant.components.tplink.common.SmartPlug.is_dimmable", False
):
        # Patching is_dimmable is necessary to avoid misdetection as a light.
await async_setup_component(hass, tplink.DOMAIN, config)
await hass.async_block_till_done()
assert discover.call_count == 0
assert light_setup.call_count == 1
assert switch_setup.call_count == 1
async def test_no_config_creates_no_entry(hass):
"""Test for when there is no tplink in config."""
with patch(
"homeassistant.components.tplink.async_setup_entry",
return_value=mock_coro(True),
) as mock_setup:
await async_setup_component(hass, tplink.DOMAIN, {})
await hass.async_block_till_done()
assert mock_setup.call_count == 0
@pytest.mark.parametrize("platform", ["switch", "light"])
async def test_unload(hass, platform):
"""Test that the async_unload_entry works."""
    # As we currently have no configuration, we just need to pass the domain here.
entry = MockConfigEntry(domain=tplink.DOMAIN)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.tplink.common.SmartDevice._query_helper"
), patch(
f"homeassistant.components.tplink.{platform}.async_setup_entry",
return_value=mock_coro(True),
) as light_setup:
config = {
tplink.DOMAIN: {
platform: [{CONF_HOST: "123.123.123.123"}],
CONF_DISCOVERY: False,
}
}
assert await async_setup_component(hass, tplink.DOMAIN, config)
await hass.async_block_till_done()
assert len(light_setup.mock_calls) == 1
assert tplink.DOMAIN in hass.data
assert await tplink.async_unload_entry(hass, entry)
assert not hass.data[tplink.DOMAIN]
|
from aiohttp.hdrs import X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO
import pytest
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_get(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.get(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.get(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_post(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.post(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.post(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_put(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.put(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.put(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_delete(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.delete(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.delete(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_patch(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.patch(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.patch(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ping?index=1"),
("core", "index.html"),
("local", "panel/config"),
("jk_921", "editor.php?idx=3&ping=5"),
("fsadjf10312", ""),
],
)
async def test_ingress_request_options(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.options(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1]),
text="test",
)
resp = await hassio_client.options(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we got right response
assert resp.status == 200
body = await resp.text()
assert body == "test"
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
@pytest.mark.parametrize(
"build_type",
[
("a3_vl", "test/beer/ws"),
("core", "ws.php"),
("local", "panel/config/stream"),
("jk_921", "hulk"),
("demo", "ws/connection?id=9&token=SJAKWS283"),
],
)
async def test_ingress_websocket(hassio_client, build_type, aioclient_mock):
"""Test no auth needed for ."""
aioclient_mock.get(
"http://127.0.0.1/ingress/{}/{}".format(build_type[0], build_type[1])
)
    # Ignore errors because we cannot set up a full IO infrastructure here.
await hassio_client.ws_connect(
"/api/hassio_ingress/{}/{}".format(build_type[0], build_type[1]),
headers={"X-Test-Header": "beer"},
)
# Check we forwarded command
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
assert aioclient_mock.mock_calls[-1][3][
"X-Ingress-Path"
] == "/api/hassio_ingress/{}".format(build_type[0])
assert aioclient_mock.mock_calls[-1][3]["X-Test-Header"] == "beer"
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_FOR]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_HOST]
assert aioclient_mock.mock_calls[-1][3][X_FORWARDED_PROTO]
|
from flask import Flask, jsonify
from flasgger import Swagger
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Colors API',
'uiversion': 2
}
Swagger(app, template_file='colors_template.yaml')
@app.route('/colors/<palette>/')
def colors(palette):
"""
Example using a dictionary as specification
This is the description
You can also set 'summary' and 'description' in
specs_dict
---
# values here overrides the specs dict
deprecated: true
"""
all_colors = {
'cmyk': ['cian', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
if __name__ == "__main__":
app.run(debug=True)
|
import asyncio
import logging
from pyinsteon import devices
from pyinsteon.address import Address
from pyinsteon.constants import ALDBStatus
from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT
from pyinsteon.managers.link_manager import (
async_enter_linking_mode,
async_enter_unlinking_mode,
)
from pyinsteon.managers.scene_manager import (
async_trigger_scene_off,
async_trigger_scene_on,
)
from pyinsteon.managers.x10_manager import (
async_x10_all_lights_off,
async_x10_all_lights_on,
async_x10_all_units_off,
)
from pyinsteon.x10_address import create as create_x10_address
from homeassistant.const import (
CONF_ADDRESS,
CONF_ENTITY_ID,
CONF_PLATFORM,
ENTITY_MATCH_ALL,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
dispatcher_send,
)
from .const import (
CONF_CAT,
CONF_DIM_STEPS,
CONF_HOUSECODE,
CONF_SUBCAT,
CONF_UNITCODE,
DOMAIN,
EVENT_CONF_BUTTON,
EVENT_GROUP_OFF,
EVENT_GROUP_OFF_FAST,
EVENT_GROUP_ON,
EVENT_GROUP_ON_FAST,
ON_OFF_EVENTS,
SIGNAL_ADD_DEFAULT_LINKS,
SIGNAL_ADD_DEVICE_OVERRIDE,
SIGNAL_ADD_ENTITIES,
SIGNAL_ADD_X10_DEVICE,
SIGNAL_LOAD_ALDB,
SIGNAL_PRINT_ALDB,
SIGNAL_REMOVE_DEVICE_OVERRIDE,
SIGNAL_REMOVE_ENTITY,
SIGNAL_REMOVE_X10_DEVICE,
SIGNAL_SAVE_DEVICES,
SRV_ADD_ALL_LINK,
SRV_ADD_DEFAULT_LINKS,
SRV_ALL_LINK_GROUP,
SRV_ALL_LINK_MODE,
SRV_CONTROLLER,
SRV_DEL_ALL_LINK,
SRV_HOUSECODE,
SRV_LOAD_ALDB,
SRV_LOAD_DB_RELOAD,
SRV_PRINT_ALDB,
SRV_PRINT_IM_ALDB,
SRV_SCENE_OFF,
SRV_SCENE_ON,
SRV_X10_ALL_LIGHTS_OFF,
SRV_X10_ALL_LIGHTS_ON,
SRV_X10_ALL_UNITS_OFF,
)
from .ipdb import get_device_platforms, get_platform_groups
from .schemas import (
ADD_ALL_LINK_SCHEMA,
ADD_DEFAULT_LINKS_SCHEMA,
DEL_ALL_LINK_SCHEMA,
LOAD_ALDB_SCHEMA,
PRINT_ALDB_SCHEMA,
TRIGGER_SCENE_SCHEMA,
X10_HOUSECODE_SCHEMA,
)
_LOGGER = logging.getLogger(__name__)
def add_on_off_event_device(hass, device):
"""Register an Insteon device as an on/off event device."""
@callback
def async_fire_group_on_off_event(name, address, group, button):
# Firing an event when a button is pressed.
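        # Buttons named like "<name>_<x>" carry the button id as their final character.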
if button and button[-2] == "_":
button_id = button[-1].lower()
else:
button_id = None
schema = {CONF_ADDRESS: address}
if button_id:
schema[EVENT_CONF_BUTTON] = button_id
if name == ON_EVENT:
event = EVENT_GROUP_ON
if name == OFF_EVENT:
event = EVENT_GROUP_OFF
if name == ON_FAST_EVENT:
event = EVENT_GROUP_ON_FAST
if name == OFF_FAST_EVENT:
event = EVENT_GROUP_OFF_FAST
_LOGGER.debug("Firing event %s with %s", event, schema)
hass.bus.async_fire(event, schema)
for group in device.events:
if isinstance(group, int):
for event in device.events[group]:
if event in [
OFF_EVENT,
ON_EVENT,
OFF_FAST_EVENT,
ON_FAST_EVENT,
]:
_LOGGER.debug(
"Registering on/off event for %s %d %s",
str(device.address),
group,
event,
)
device.events[group][event].subscribe(
async_fire_group_on_off_event, force_strong_ref=True
)
def register_new_device_callback(hass):
"""Register callback for new Insteon device."""
@callback
def async_new_insteon_device(address=None):
"""Detect device from transport to be delegated to platform."""
hass.async_create_task(async_create_new_entities(address))
async def async_create_new_entities(address):
_LOGGER.debug(
"Adding new INSTEON device to Home Assistant with address %s", address
)
await devices.async_save(workdir=hass.config.config_dir)
device = devices[address]
await device.async_status()
platforms = get_device_platforms(device)
for platform in platforms:
if platform == ON_OFF_EVENTS:
add_on_off_event_device(hass, device)
else:
signal = f"{SIGNAL_ADD_ENTITIES}_{platform}"
dispatcher_send(hass, signal, {"address": device.address})
devices.subscribe(async_new_insteon_device, force_strong_ref=True)
@callback
def async_register_services(hass):
"""Register services used by insteon component."""
save_lock = asyncio.Lock()
async def async_srv_add_all_link(service):
"""Add an INSTEON All-Link between two devices."""
group = service.data.get(SRV_ALL_LINK_GROUP)
mode = service.data.get(SRV_ALL_LINK_MODE)
link_mode = mode.lower() == SRV_CONTROLLER
await async_enter_linking_mode(link_mode, group)
async def async_srv_del_all_link(service):
"""Delete an INSTEON All-Link between two devices."""
group = service.data.get(SRV_ALL_LINK_GROUP)
await async_enter_unlinking_mode(group)
async def async_srv_load_aldb(service):
"""Load the device All-Link database."""
entity_id = service.data[CONF_ENTITY_ID]
reload = service.data[SRV_LOAD_DB_RELOAD]
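        # ENTITY_MATCH_ALL loads every device's ALDB sequentially; otherwise signal only that entity.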
if entity_id.lower() == ENTITY_MATCH_ALL:
await async_srv_load_aldb_all(reload)
else:
signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
async_dispatcher_send(hass, signal, reload)
async def async_srv_load_aldb_all(reload):
"""Load the All-Link database for all devices."""
# Cannot be done concurrently due to issues with the underlying protocol.
for address in devices:
device = devices[address]
if device != devices.modem and device.cat != 0x03:
await device.aldb.async_load(
refresh=reload, callback=async_srv_save_devices
)
async def async_srv_save_devices():
"""Write the Insteon device configuration to file."""
async with save_lock:
_LOGGER.debug("Saving Insteon devices")
await devices.async_save(hass.config.config_dir)
def print_aldb(service):
"""Print the All-Link Database for a device."""
# For now this sends logs to the log file.
# Future direction is to create an INSTEON control panel.
entity_id = service.data[CONF_ENTITY_ID]
signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
dispatcher_send(hass, signal)
def print_im_aldb(service):
"""Print the All-Link Database for a device."""
# For now this sends logs to the log file.
# Future direction is to create an INSTEON control panel.
print_aldb_to_log(devices.modem.aldb)
async def async_srv_x10_all_units_off(service):
"""Send the X10 All Units Off command."""
housecode = service.data.get(SRV_HOUSECODE)
await async_x10_all_units_off(housecode)
async def async_srv_x10_all_lights_off(service):
"""Send the X10 All Lights Off command."""
housecode = service.data.get(SRV_HOUSECODE)
await async_x10_all_lights_off(housecode)
async def async_srv_x10_all_lights_on(service):
"""Send the X10 All Lights On command."""
housecode = service.data.get(SRV_HOUSECODE)
await async_x10_all_lights_on(housecode)
async def async_srv_scene_on(service):
"""Trigger an INSTEON scene ON."""
group = service.data.get(SRV_ALL_LINK_GROUP)
await async_trigger_scene_on(group)
async def async_srv_scene_off(service):
"""Trigger an INSTEON scene ON."""
group = service.data.get(SRV_ALL_LINK_GROUP)
await async_trigger_scene_off(group)
@callback
def async_add_default_links(service):
"""Add the default All-Link entries to a device."""
entity_id = service.data[CONF_ENTITY_ID]
signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
async_dispatcher_send(hass, signal)
async def async_add_device_override(override):
"""Remove an Insten device and associated entities."""
address = Address(override[CONF_ADDRESS])
await async_remove_device(address)
devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
await async_srv_save_devices()
async def async_remove_device_override(address):
"""Remove an Insten device and associated entities."""
address = Address(address)
await async_remove_device(address)
devices.set_id(address, None, None, None)
await devices.async_identify_device(address)
await async_srv_save_devices()
@callback
def async_add_x10_device(x10_config):
"""Add X10 device."""
housecode = x10_config[CONF_HOUSECODE]
unitcode = x10_config[CONF_UNITCODE]
platform = x10_config[CONF_PLATFORM]
steps = x10_config.get(CONF_DIM_STEPS, 22)
x10_type = "on_off"
if platform == "light":
x10_type = "dimmable"
elif platform == "binary_sensor":
x10_type = "sensor"
_LOGGER.debug(
"Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
)
# This must be run in the event loop
devices.add_x10_device(housecode, unitcode, x10_type, steps)
async def async_remove_x10_device(housecode, unitcode):
"""Remove an X10 device and associated entities."""
address = create_x10_address(housecode, unitcode)
devices.pop(address)
await async_remove_device(address)
async def async_remove_device(address):
"""Remove the device and all entities from hass."""
signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
async_dispatcher_send(hass, signal)
dev_registry = await hass.helpers.device_registry.async_get_registry()
device = dev_registry.async_get_device(
identifiers={(DOMAIN, str(address))}, connections=set()
)
if device:
dev_registry.async_remove_device(device.id)
hass.services.async_register(
DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
)
hass.services.async_register(
DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
)
hass.services.async_register(
DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
)
hass.services.async_register(
DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
)
hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
hass.services.async_register(
DOMAIN,
SRV_X10_ALL_UNITS_OFF,
async_srv_x10_all_units_off,
schema=X10_HOUSECODE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SRV_X10_ALL_LIGHTS_OFF,
async_srv_x10_all_lights_off,
schema=X10_HOUSECODE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SRV_X10_ALL_LIGHTS_ON,
async_srv_x10_all_lights_on,
schema=X10_HOUSECODE_SCHEMA,
)
hass.services.async_register(
DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
)
hass.services.async_register(
DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
)
hass.services.async_register(
DOMAIN,
SRV_ADD_DEFAULT_LINKS,
async_add_default_links,
schema=ADD_DEFAULT_LINKS_SCHEMA,
)
async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
async_dispatcher_connect(
hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
)
async_dispatcher_connect(
hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
)
async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
_LOGGER.debug("Insteon Services registered")
def print_aldb_to_log(aldb):
"""Print the All-Link Database to the log file."""
logger = logging.getLogger(f"{__name__}.links")
logger.info("%s ALDB load status is %s", aldb.address, aldb.status.name)
if aldb.status not in [ALDBStatus.LOADED, ALDBStatus.PARTIAL]:
_LOGGER.warning("All-Link database not loaded")
logger.info("RecID In Use Mode HWM Group Address Data 1 Data 2 Data 3")
logger.info("----- ------ ---- --- ----- -------- ------ ------ ------")
for mem_addr in aldb:
rec = aldb[mem_addr]
# For now we write this to the log
# Roadmap is to create a configuration panel
in_use = "Y" if rec.is_in_use else "N"
mode = "C" if rec.is_controller else "R"
hwm = "Y" if rec.is_high_water_mark else "N"
log_msg = (
f" {rec.mem_addr:04x} {in_use:s} {mode:s} {hwm:s} "
f"{rec.group:3d} {str(rec.target):s} {rec.data1:3d} "
f"{rec.data2:3d} {rec.data3:3d}"
)
logger.info(log_msg)
@callback
def async_add_insteon_entities(
hass, platform, entity_type, async_add_entities, discovery_info
):
"""Add Insteon devices to a platform."""
new_entities = []
device_list = [discovery_info.get("address")] if discovery_info else devices
for address in device_list:
device = devices[address]
groups = get_platform_groups(device, platform)
for group in groups:
new_entities.append(entity_type(device, group))
if new_entities:
async_add_entities(new_entities)
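# Illustrative sketch, not part of the original module: a platform module
# (names such as InsteonLightEntity and the "light" platform are assumptions)
# would typically connect a dispatcher listener and delegate to
# async_add_insteon_entities roughly like this:
#
#   async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
#       @callback
#       def async_add_lights(discovery_info=None):
#           async_add_insteon_entities(
#               hass, "light", InsteonLightEntity, async_add_entities, discovery_info
#           )
#
#       async_dispatcher_connect(hass, f"{SIGNAL_ADD_ENTITIES}_light", async_add_lights)
#       async_add_lights(discovery_info)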
|
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
from scrapy_redis.spiders import RedisCrawlSpider
class MyCrawler(RedisCrawlSpider):
"""Spider that reads urls from redis queue (myspider:start_urls)."""
name = 'mycrawler_redis'
redis_key = 'mycrawler:start_urls'
rules = (
# follow all links
Rule(LinkExtractor(), callback='parse_page', follow=True),
)
def __init__(self, *args, **kwargs):
# Dynamically define the allowed domains list.
domain = kwargs.pop('domain', '')
# filter() returns a lazy iterator on Python 3; materialise it so the list of
# allowed domains can be re-checked on every request.
self.allowed_domains = list(filter(None, domain.split(',')))
super(MyCrawler, self).__init__(*args, **kwargs)
def parse_page(self, response):
return {
'name': response.css('title::text').extract_first(),
'url': response.url,
}
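# Illustrative sketch, not part of the original spider: the crawl is seeded by
# pushing a start URL onto the configured redis key (host/port and URL are
# assumptions; requires the `redis` package and a running Redis server).
#
#   import redis
#   r = redis.StrictRedis(host='localhost', port=6379)
#   r.lpush('mycrawler:start_urls', 'http://example.com')
#
# Then run the spider, e.g.: scrapy crawl mycrawler_redis -a domain=example.com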
|
from kalliope.core.Models.settings.SettingsEntry import SettingsEntry
class Player(SettingsEntry):
"""
This class represents a Player with its name and parameters.
.. note:: must be defined in the settings.yml
"""
def __init__(self, name=None, parameters=None):
super(Player, self).__init__(name=name)
self.parameters = parameters
def __str__(self):
return str(self.serialize())
def serialize(self):
return {
'name': self.name,
'parameters': self.parameters
}
def __eq__(self, other):
"""
Compare two Player objects.
:param other: the Player to compare against
:return: True if both players are equal, False otherwise
"""
return self.__dict__ == other.__dict__
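# Illustrative usage sketch, not part of the original module (player name and
# parameters below are assumptions):
#
#   player = Player(name="mplayer", parameters={"path": "/usr/bin/mplayer"})
#   print(player)  # -> {'name': 'mplayer', 'parameters': {'path': '/usr/bin/mplayer'}}
#   player == Player(name="mplayer", parameters={"path": "/usr/bin/mplayer"})  # True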
|
import mock
import numpy as np
import os
import six
import tempfile
import unittest
import chainer
from chainer.datasets import TupleDataset
from chainer.iterators import SerialIterator
from chainer.testing import attr
from chainercv.extensions import DetectionVisReport
from chainercv.utils import generate_random_bbox
from chainercv.utils import testing
try:
import matplotlib # NOQA
_available = True
except ImportError:
_available = False
class _RandomDetectionStubLink(chainer.Link):
def predict(self, imgs):
bboxes = []
labels = []
scores = []
for _ in imgs:
n_bbox = np.random.randint(0, 10)
bboxes.append(generate_random_bbox(
n_bbox, (48, 32), 4, 12))
labels.append(np.random.randint(0, 19, size=n_bbox))
scores.append(np.random.uniform(0, 1, size=n_bbox))
return bboxes, labels, scores
@testing.parameterize(
{
'filename': None,
'filename_func': lambda iter_, idx:
'detection_iter={:d}_idx={:d}.jpg'.format(iter_, idx)},
{
'filename': 'result_no_{index}_iter_{iteration}.png',
'filename_func': lambda iter_, idx:
'result_no_{:d}_iter_{:d}.png'.format(idx, iter_)},
{
'filename': 'detection_iter={iteration}.jpg',
'filename_func': lambda iter_, _:
'detection_iter={:d}.jpg'.format(iter_)},
{
'filename': 'detection_idx={index}.jpg',
'filename_func': lambda _, idx:
'detection_idx={:d}.jpg'.format(idx)},
)
class TestDetectionVisReport(unittest.TestCase):
def setUp(self):
self.trainer = mock.MagicMock()
self.trainer.out = tempfile.mkdtemp()
self.link = _RandomDetectionStubLink()
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
np.random.uniform(size=(10, 5, 4)),
np.random.randint(0, 19, size=(10, 5)))
self.iterator = SerialIterator(
self.dataset, 10, repeat=False, shuffle=False)
def test_available(self):
self.extension = DetectionVisReport(self.dataset, self.link)
self.assertEqual(self.extension.available(), _available)
def _check(self):
if self.filename is None:
extension = DetectionVisReport(self.iterator, self.link)
else:
extension = DetectionVisReport(
self.iterator, self.link, filename=self.filename)
if not _available:
return
for iter_ in range(3):
self.trainer.updater.iteration = iter_
extension(self.trainer)
for idx in six.moves.range(len(self.dataset)):
out_file = os.path.join(
self.trainer.out, self.filename_func(iter_, idx))
self.assertTrue(os.path.exists(out_file))
def test_cpu(self):
self._check()
@attr.gpu
def test_gpu(self):
self.link.to_gpu()
self._check()
testing.run_module(__name__, __file__)
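# Illustrative sketch, not part of the original test: in training code the
# extension is normally attached to a trainer (the trigger interval is an assumption):
#
#   trainer.extend(
#       DetectionVisReport(iterator, model, filename='detection_iter={iteration}.jpg'),
#       trigger=(1, 'epoch'))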
|
import argparse
import glob
import os
import struct
import sys
def clamp_to_min_max(value, min_value, max_value):
# Parameters renamed to avoid shadowing the built-in min/max functions.
if value > max_value:
value = max_value
elif value < min_value:
value = min_value
return value
def clamp_to_u8(value):
return clamp_to_min_max(value, 0, 255)
def parse_args():
parser = argparse.ArgumentParser(description="Set the reactive effect")
parser.add_argument('-d', '--device', type=str, help="Device string like \"0003:1532:0045.000C\"")
parser.add_argument('--speed', required=True, type=int, help="Reactive effect speed (1-3)")
parser.add_argument('--colour', required=True, nargs=3, metavar=("R", "G", "B"), type=int, help="Reactive effect colour as R G B (0-255 each)")
args = parser.parse_args()
return args
def run():
args = parse_args()
if args.device is None:
mouse_dirs = glob.glob(os.path.join('/sys/bus/hid/drivers/razermouse/', "*:*:*.*"))
if len(mouse_dirs) > 1:
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
if len(mouse_dirs) < 1:
print("No mouse directories found. Make sure the driver is binded", file=sys.stderr)
sys.exit(1)
mouse_dir = mouse_dirs[0]
else:
mouse_dir = os.path.join('/sys/bus/hid/drivers/razermouse/', args.device)
if not os.path.isdir(mouse_dir):
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
speed = clamp_to_min_max(args.speed, 1, 3)
values = map(clamp_to_u8, args.colour)
byte_string = struct.pack(">BBBB", speed, *values)
reactive_mode_filepath = os.path.join(mouse_dir, "mode_reactive")
with open(reactive_mode_filepath, 'wb') as reactive_mode_file:
reactive_mode_file.write(byte_string)
print("Done")
if __name__ == '__main__':
run()
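# Illustrative invocation, not part of the original script (the script name and
# device string are assumptions):
#   python set_reactive_effect.py -d 0003:1532:0045.000C --speed 2 --colour 0 255 0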
|
import io
from pkgutil import walk_packages
from setuptools import setup
def find_packages(path):
# This method returns packages and subpackages as well.
return [name for _, name, is_pkg in walk_packages([path]) if is_pkg]
def read_file(filename):
with io.open(filename) as fp:
return fp.read().strip()
def read_rst(filename):
# Ignore unsupported directives by pypi.
content = read_file(filename)
return ''.join(line for line in io.StringIO(content)
if not line.startswith('.. comment::'))
def read_requirements(filename):
return [line.strip() for line in read_file(filename).splitlines()
if not line.startswith('#')]
setup(
name='scrapy-redis',
version=read_file('VERSION'),
description="Redis-based components for Scrapy.",
long_description=read_rst('README.rst') + '\n\n' + read_rst('HISTORY.rst'),
author="Rolando Espinoza",
author_email='[email protected]',
url='https://github.com/rolando/scrapy-redis',
packages=list(find_packages('src')),
package_dir={'': 'src'},
setup_requires=read_requirements('requirements-setup.txt'),
install_requires=read_requirements('requirements-install.txt'),
include_package_data=True,
license="MIT",
keywords='scrapy-redis',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
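# Illustrative usage, not part of the original setup.py:
#   pip install -e .         # editable install driven by this file
#   python setup.py sdist    # build a source distribution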
|
import json
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import network
from perfkitbenchmarker import smb_service
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import azure_network
from perfkitbenchmarker.providers.azure import util
FLAGS = flags.FLAGS
class AzureSmbService(smb_service.BaseSmbService):
"""An Azure SMB resource.
Creates the Azure Files file system and mount point for use with SMB clients.
See
https://docs.microsoft.com/en-us/azure/storage/files/storage-files-introduction
"""
CLOUD = azure.CLOUD
SMB_TIERS = ('Standard_LRS', 'Premium_LRS')
# TODO(spencerkim): Add smb tier and version to metadata
DEFAULT_SMB_VERSION = '3.0'
DEFAULT_TIER = 'Standard_LRS'
def __init__(self, disk_spec, zone):
super(AzureSmbService, self).__init__(disk_spec, zone)
self.name = 'azure-smb-fs-%s' % FLAGS.run_uri
self.location = util.GetLocationFromZone(self.zone)
self.resource_group = azure_network.GetResourceGroup(self.location)
@property
def network(self):
network_spec = network.BaseNetworkSpec(self.zone)
return azure_network.AzureNetwork.GetNetworkFromNetworkSpec(network_spec)
def GetRemoteAddress(self):
logging.debug('Calling GetRemoteAddress on SMB server %s', self.name)
if self.name is None:
raise errors.Resource.RetryableGetError('Filer not created')
return '//{storage}.file.core.windows.net/{name}'.format(
storage=self.storage_account_name, name=self.name)
def GetStorageAccountAndKey(self):
logging.debug('Calling GetStorageAccountAndKey on SMB server %s', self.name)
if self.name is None:
raise errors.Resource.RetryableGetError('Filer not created')
return {'user': self.storage_account_name, 'pw': self.storage_account_key}
def _Create(self):
"""Creates an Azure Files share.
For Standard Files, see
https://docs.microsoft.com/en-us/azure/storage/files/storage-how-to-create-file-share#create-file-share-through-command-line-interface-cli
and for Premium Files, see
https://docs.microsoft.com/en-us/azure/storage/files/storage-how-to-create-premium-fileshare#create-a-premium-file-share-using-azure-cli
"""
logging.info('Creating SMB server %s', self.name)
if FLAGS.smb_tier == 'Standard_LRS':
storage_account_number = azure_network.AzureStorageAccount.total_storage_accounts - 1
self.storage_account_name = 'pkb%s' % FLAGS.run_uri + 'storage' + str(
storage_account_number)
elif FLAGS.smb_tier == 'Premium_LRS':
storage_account_number = (
azure_network.AzureStorageAccount.total_storage_accounts)
self.storage_account_name = 'pkb%s' % FLAGS.run_uri + 'filestorage' + str(
storage_account_number)
# Premium Files uses a different storage account kind from Standard Files.
# See links in description for more details.
self.storage_account = azure_network.AzureStorageAccount(
storage_type='Premium_LRS',
location=FLAGS.zone[0] or 'westus2',
name=self.storage_account_name,
kind='FileStorage',
resource_group=self.resource_group,
use_existing=False)
self.storage_account.Create()
self.connection_args = util.GetAzureStorageConnectionArgs(
self.storage_account_name, self.resource_group.args)
self.storage_account_key = util.GetAzureStorageAccountKey(
self.storage_account_name, self.resource_group.args)
self._AzureSmbCommand('create')
def _Delete(self):
logging.info('Deleting SMB server %s', self.name)
self._AzureSmbCommand('delete')
def _Exists(self):
logging.debug('Calling Exists on SMB server %s', self.name)
return self._AzureSmbCommand('exists')['exists']
def _IsReady(self):
logging.debug('Calling IsReady on SMB server %s', self.name)
return self._Exists()
def _Describe(self):
logging.debug('Calling Describe on SMB server %s', self.name)
output = self._AzureSmbCommand('show')
return output
def _AzureSmbCommand(self, verb):
cmd = [azure.AZURE_PATH, 'storage', 'share', verb, '--output', 'json']
cmd += ['--name', self.name]
if verb == 'create':
cmd += ['--quota', str(FLAGS.data_disk_size)]
cmd += self.connection_args
stdout, stderr, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode:
raise errors.Error('Error running command %s : %s' % (verb, stderr))
return json.loads(stdout)
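# Illustrative usage sketch, not part of the original module (zone, mount point
# and mount options are assumptions; `vm` stands for a hypothetical client VM):
#
#   service = AzureSmbService(disk_spec, zone='westus2-1')
#   service.Create()
#   creds = service.GetStorageAccountAndKey()
#   remote = service.GetRemoteAddress()
#   mount_cmd = ('sudo mount -t cifs {remote} /mnt/smb '
#                '-o vers=3.0,username={user},password={pw}').format(
#                    remote=remote, user=creds['user'], pw=creds['pw'])
#   vm.RemoteCommand(mount_cmd)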
|
from __future__ import print_function
from __future__ import absolute_import
from pyspark.ml.linalg import Vectors
import numpy as np
import random
from pyspark import SparkContext, SparkConf
from pyspark.sql import SQLContext
from pyspark.ml.feature import StringIndexer, StandardScaler
from pyspark.ml import Pipeline
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from elephas.ml_model import ElephasEstimator
data_path = "../"
# Spark contexts
conf = SparkConf().setAppName('Otto_Spark_ML_Pipeline').setMaster('local[8]')
sc = SparkContext(conf=conf)
sql_context = SQLContext(sc)
# Data loader
def shuffle_csv(csv_file):
# Use context managers so the file handles are closed deterministically.
with open(csv_file) as csv_in:
lines = csv_in.readlines()
random.shuffle(lines)
with open(csv_file, 'w') as csv_out:
csv_out.writelines(lines)
def load_data_rdd(csv_file, shuffle=True, train=True):
if shuffle:
shuffle_csv(data_path + csv_file)
data = sc.textFile(data_path + csv_file)
data = data.filter(lambda x: x.split(',')[0] != 'id').map(lambda line: line.split(','))
if train:
data = data.map(
lambda line: (Vectors.dense(np.asarray(line[1:-1]).astype(np.float32)),
str(line[-1]).replace('Class_', '')))
else:
data = data.map(lambda line: (Vectors.dense(np.asarray(line[1:]).astype(np.float32)), "1"))
return data
# Define Data frames
train_df = sql_context.createDataFrame(load_data_rdd("train.csv"), ['features', 'category'])
test_df = sql_context.createDataFrame(load_data_rdd("test.csv", shuffle=False, train=False), ['features', 'category'])
# Preprocessing steps
string_indexer = StringIndexer(inputCol="category", outputCol="index_category")
scaler = StandardScaler(inputCol="features", outputCol="scaled_features", withStd=True, withMean=True)
# Keras model
nb_classes = train_df.select("category").distinct().count()
input_dim = len(train_df.select("features").first()[0])
model = Sequential()
model.add(Dense(512, input_shape=(input_dim,)))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam')
# Note: the compile-time optimizer above only finalises the model definition;
# the optimizer actually used for training is the serialized SGD configuration
# passed to the ElephasEstimator below.
sgd = optimizers.SGD(lr=0.01)
sgd_conf = optimizers.serialize(sgd)
# Initialize Elephas Spark ML Estimator
estimator = ElephasEstimator()
estimator.set_keras_model_config(model.to_yaml())
estimator.set_optimizer_config(sgd_conf)
estimator.set_mode("synchronous")
estimator.set_loss("categorical_crossentropy")
estimator.set_metrics(['acc'])
estimator.setFeaturesCol("scaled_features")
estimator.setLabelCol("index_category")
estimator.set_epochs(10)
estimator.set_batch_size(128)
estimator.set_num_workers(1)
estimator.set_verbosity(0)
estimator.set_validation_split(0.15)
estimator.set_categorical_labels(True)
estimator.set_nb_classes(nb_classes)
# Fitting a model returns a Transformer
pipeline = Pipeline(stages=[string_indexer, scaler, estimator])
fitted_pipeline = pipeline.fit(train_df)
# Evaluate Spark model
prediction = fitted_pipeline.transform(train_df)
pnl = prediction.select("index_category", "prediction")
pnl.show(100)
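# Optional follow-up, illustrative only: score the fitted pipeline with Spark's
# multiclass evaluator (reporting "accuracy" here is an assumption about the
# metric of interest).
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
evaluator = MulticlassClassificationEvaluator(labelCol="index_category", predictionCol="prediction", metricName="accuracy")
print("Train accuracy: %.4f" % evaluator.evaluate(prediction))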
|
import os
import shutil
import tempfile
import unittest
import mock
from perfkitbenchmarker import data
class FileResourceLoaderTestCase(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp(prefix='pkb-test-')
self.instance = data.FileResourceLoader(self.temp_dir)
def tearDown(self):
shutil.rmtree(self.temp_dir)
def _Create(self, file_name, content=''):
file_path = os.path.join(self.temp_dir, file_name)
with open(file_path, 'w') as fp:
fp.write(content)
return file_path
def testResourcePath_NonExistantResource(self):
self.assertListEqual([], os.listdir(self.temp_dir))
self.assertRaises(data.ResourceNotFound,
self.instance.ResourcePath,
'fake.txt')
def testResourcePath_ExtantResource(self):
file_name = 'test.txt'
file_path = self._Create(file_name)
self.assertEqual(file_path, self.instance.ResourcePath(file_name))
def testResourceExists_NonExistantResource(self):
self.assertFalse(self.instance.ResourceExists('fake.txt'))
def testResourceExists_ExtantResource(self):
file_name = 'test.txt'
self._Create(file_name)
self.assertTrue(self.instance.ResourceExists(file_name))
class PackageResourceLoaderTestCase(unittest.TestCase):
def setUp(self):
self.instance = data.PackageResourceLoader(data.__name__)
def testResourcePath_NonExistantResource(self):
self.assertRaises(data.ResourceNotFound,
self.instance.ResourcePath,
'fake.txt')
def testResourcePath_ExtantResource(self):
file_name = '__init__.py'
path = self.instance.ResourcePath(file_name)
self.assertEqual(file_name, os.path.basename(path))
self.assertTrue(os.path.exists(path))
def testResourceExists_NonExistantResource(self):
self.assertFalse(self.instance.ResourceExists('fake.txt'))
def testResourceExists_ExtantResource(self):
file_name = '__init__.py'
self.assertTrue(self.instance.ResourceExists(file_name))
class ResourcePathTestCase(unittest.TestCase):
def setUp(self):
mock_found_loader = mock.MagicMock(spec=data.ResourceLoader)
mock_found_loader.ResourceExists.return_value = True
mock_found_loader.ResourcePath.return_value = '/found'
mock_not_found_loader = mock.MagicMock(spec=data.ResourceLoader)
mock_not_found_loader.ResourceExists.return_value = False
p = mock.patch(data.__name__ + '._GetResourceLoaders',
return_value=[mock_found_loader])
self.mock_get_resource_loaders = p.start()
self.addCleanup(p.stop)
p = mock.patch(data.__name__ + '.DEFAULT_RESOURCE_LOADERS',
[mock_not_found_loader])
p.start()
self.addCleanup(p.stop)
def testSearchUserPaths(self):
r = data.ResourcePath('resource', True)
self.assertEqual('/found', r)
def testDoNotSearchUserPaths(self):
with self.assertRaises(data.ResourceNotFound):
data.ResourcePath('resource', False)
if __name__ == '__main__':
unittest.main()
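# Illustrative, not part of the original tests: the module can also be run
# through a discovery-based runner, e.g.
#   python -m unittest discover -s tests -p "*data*test*.py"   (paths assumed)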
|