from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from diamond.collector import Collector
from eventstoreprojections import EventstoreProjectionsCollector
##########################################################################
class TestEventstoreProjectionsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('EventstoreProjectionsCollector', {})
self.collector = EventstoreProjectionsCollector(config, None)
def test_import(self):
self.assertTrue(EventstoreProjectionsCollector)
@patch('urllib2.urlopen')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock, urlopen_mock):
returns = [self.getFixture('projections')]
urlopen_mock.side_effect = lambda *args: returns.pop(0)
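        # urlopen is patched so each call pops the next canned fixture from
        # `returns` (exactly one HTTP response per collect() call here).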
self.collector.collect()
metrics = {
'projections.all-reports.eventsProcessedAfterRestart': 88,
'projections.all-reports.bufferedEvents': 0,
'projections.all-reports.coreProcessingTime': 46,
'projections.all-reports.epoch': -1,
'projections.all-reports.version': 1,
'projections.all-reports.progress': 100.0,
'projections.all-reports.status': 1,
'projections.all-reports.writePendingEventsBeforeCheckpoint': 0,
'projections.all-reports.partitionsCached': 1,
'projections.all-reports.writesInProgress': 0,
'projections.all-reports.readsInProgress': 0,
'projections.all-reports.writePendingEventsAfterCheckpoint': 0,
'projections._by_event_type.eventsProcessedAfterRestart': 0,
'projections._by_event_type.bufferedEvents': 0,
'projections._by_event_type.coreProcessingTime': 0,
'projections._by_event_type.epoch': -1,
'projections._by_event_type.version': 0,
'projections._by_event_type.progress': -1.0,
'projections._by_event_type.status': 0,
'projections._by_event_type.writePendingEventsBeforeCheckpoint': 0,
'projections._by_event_type.partitionsCached': 1,
'projections._by_event_type.writesInProgress': 0,
'projections._by_event_type.readsInProgress': 0,
'projections._by_event_type.writePendingEventsAfterCheckpoint': 0,
'projections._by_category.eventsProcessedAfterRestart': 886,
'projections._by_category.bufferedEvents': 0,
'projections._by_category.coreProcessingTime': 10,
'projections._by_category.epoch': -1,
'projections._by_category.version': 1,
'projections._by_category.progress': 100.0,
'projections._by_category.status': 1,
'projections._by_category.writePendingEventsBeforeCheckpoint': 0,
'projections._by_category.partitionsCached': 1,
'projections._by_category.writesInProgress': 0,
'projections._by_category.readsInProgress': 0,
'projections._by_category.writePendingEventsAfterCheckpoint': 0,
'projections._stream_by_cat.eventsProcessedAfterRestart': 0,
'projections._stream_by_cat.bufferedEvents': 0,
'projections._stream_by_cat.coreProcessingTime': 0,
'projections._stream_by_cat.epoch': -1,
'projections._stream_by_cat.version': 0,
'projections._stream_by_cat.progress': -1.0,
'projections._stream_by_cat.status': 0,
'projections._stream_by_cat.writePendingEventsBeforeCheckpoint': 0,
'projections._stream_by_cat.partitionsCached': 1,
'projections._stream_by_cat.writesInProgress': 0,
'projections._stream_by_cat.readsInProgress': 0,
'projections._stream_by_cat.writePendingEventsAfterCheckpoint': 0,
'projections._streams.eventsProcessedAfterRestart': 0,
'projections._streams.bufferedEvents': 0,
'projections._streams.coreProcessingTime': 0,
'projections._streams.epoch': -1,
'projections._streams.version': 0,
'projections._streams.progress': -1.0,
'projections._streams.status': 0,
'projections._streams.writePendingEventsBeforeCheckpoint': 0,
'projections._streams.partitionsCached': 1,
'projections._streams.writesInProgress': 0,
'projections._streams.readsInProgress': 0,
'projections._streams.writePendingEventsAfterCheckpoint': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics)
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
##########################################################################
import asyncio
import logging
from pyenvisalink import EnvisalinkAlarmPanel
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_TIMEOUT, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "envisalink"
DATA_EVL = "envisalink"
CONF_CODE = "code"
CONF_EVL_KEEPALIVE = "keepalive_interval"
CONF_EVL_PORT = "port"
CONF_EVL_VERSION = "evl_version"
CONF_PANEL_TYPE = "panel_type"
CONF_PANIC = "panic_type"
CONF_PARTITIONNAME = "name"
CONF_PARTITIONS = "partitions"
CONF_PASS = "password"
CONF_USERNAME = "user_name"
CONF_ZONEDUMP_INTERVAL = "zonedump_interval"
CONF_ZONENAME = "name"
CONF_ZONES = "zones"
CONF_ZONETYPE = "type"
DEFAULT_PORT = 4025
DEFAULT_EVL_VERSION = 3
DEFAULT_KEEPALIVE = 60
DEFAULT_ZONEDUMP_INTERVAL = 30
DEFAULT_ZONETYPE = "opening"
DEFAULT_PANIC = "Police"
DEFAULT_TIMEOUT = 10
SIGNAL_ZONE_UPDATE = "envisalink.zones_updated"
SIGNAL_PARTITION_UPDATE = "envisalink.partition_updated"
SIGNAL_KEYPAD_UPDATE = "envisalink.keypad_updated"
ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONENAME): cv.string,
vol.Optional(CONF_ZONETYPE, default=DEFAULT_ZONETYPE): cv.string,
}
)
PARTITION_SCHEMA = vol.Schema({vol.Required(CONF_PARTITIONNAME): cv.string})
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PANEL_TYPE): vol.All(
cv.string, vol.In(["HONEYWELL", "DSC"])
),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASS): cv.string,
vol.Optional(CONF_CODE): cv.string,
vol.Optional(CONF_PANIC, default=DEFAULT_PANIC): cv.string,
vol.Optional(CONF_ZONES): {vol.Coerce(int): ZONE_SCHEMA},
vol.Optional(CONF_PARTITIONS): {vol.Coerce(int): PARTITION_SCHEMA},
vol.Optional(CONF_EVL_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_EVL_VERSION, default=DEFAULT_EVL_VERSION): vol.All(
vol.Coerce(int), vol.Range(min=3, max=4)
),
vol.Optional(CONF_EVL_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(
vol.Coerce(int), vol.Range(min=15)
),
vol.Optional(
CONF_ZONEDUMP_INTERVAL, default=DEFAULT_ZONEDUMP_INTERVAL
): vol.Coerce(int),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(int),
}
)
},
extra=vol.ALLOW_EXTRA,
)
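# A minimal sketch of a mapping that CONFIG_SCHEMA accepts; the host,
# credentials and names below are placeholders, and the schema fills in the
# defaults (port 4025, EVL version 3, keepalive, zone-dump interval, timeout).
def _example_config():
    return CONFIG_SCHEMA(
        {
            DOMAIN: {
                CONF_HOST: "192.168.0.10",
                CONF_PANEL_TYPE: "DSC",
                CONF_USERNAME: "user",
                CONF_PASS: "secret",
                CONF_ZONES: {1: {CONF_ZONENAME: "Front Door"}},
                CONF_PARTITIONS: {1: {CONF_PARTITIONNAME: "House"}},
            }
        }
    )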
SERVICE_CUSTOM_FUNCTION = "invoke_custom_function"
ATTR_CUSTOM_FUNCTION = "pgm"
ATTR_PARTITION = "partition"
SERVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CUSTOM_FUNCTION): cv.string,
vol.Required(ATTR_PARTITION): cv.string,
}
)
async def async_setup(hass, config):
"""Set up for Envisalink devices."""
conf = config.get(DOMAIN)
host = conf.get(CONF_HOST)
port = conf.get(CONF_EVL_PORT)
code = conf.get(CONF_CODE)
panel_type = conf.get(CONF_PANEL_TYPE)
panic_type = conf.get(CONF_PANIC)
version = conf.get(CONF_EVL_VERSION)
user = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASS)
keep_alive = conf.get(CONF_EVL_KEEPALIVE)
zone_dump = conf.get(CONF_ZONEDUMP_INTERVAL)
zones = conf.get(CONF_ZONES)
partitions = conf.get(CONF_PARTITIONS)
connection_timeout = conf.get(CONF_TIMEOUT)
sync_connect = asyncio.Future()
controller = EnvisalinkAlarmPanel(
host,
port,
panel_type,
version,
user,
password,
zone_dump,
keep_alive,
hass.loop,
connection_timeout,
)
hass.data[DATA_EVL] = controller
@callback
def login_fail_callback(data):
"""Handle when the evl rejects our login."""
_LOGGER.error("The Envisalink rejected your credentials")
if not sync_connect.done():
sync_connect.set_result(False)
@callback
def connection_fail_callback(data):
"""Network failure callback."""
        _LOGGER.error(
            "Could not establish a connection with the Envisalink - retrying..."
        )
if not sync_connect.done():
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_envisalink)
sync_connect.set_result(True)
@callback
def connection_success_callback(data):
"""Handle a successful connection."""
_LOGGER.info("Established a connection with the Envisalink")
if not sync_connect.done():
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_envisalink)
sync_connect.set_result(True)
@callback
def zones_updated_callback(data):
"""Handle zone timer updates."""
_LOGGER.debug("Envisalink sent a zone update event. Updating zones...")
async_dispatcher_send(hass, SIGNAL_ZONE_UPDATE, data)
@callback
def alarm_data_updated_callback(data):
"""Handle non-alarm based info updates."""
_LOGGER.debug("Envisalink sent new alarm info. Updating alarms...")
async_dispatcher_send(hass, SIGNAL_KEYPAD_UPDATE, data)
@callback
def partition_updated_callback(data):
"""Handle partition changes thrown by evl (including alarms)."""
_LOGGER.debug("The envisalink sent a partition update event")
async_dispatcher_send(hass, SIGNAL_PARTITION_UPDATE, data)
@callback
def stop_envisalink(event):
"""Shutdown envisalink connection and thread on exit."""
_LOGGER.info("Shutting down Envisalink")
controller.stop()
async def handle_custom_function(call):
"""Handle custom/PGM service."""
custom_function = call.data.get(ATTR_CUSTOM_FUNCTION)
partition = call.data.get(ATTR_PARTITION)
controller.command_output(code, partition, custom_function)
controller.callback_zone_timer_dump = zones_updated_callback
controller.callback_zone_state_change = zones_updated_callback
controller.callback_partition_state_change = partition_updated_callback
controller.callback_keypad_update = alarm_data_updated_callback
controller.callback_login_failure = login_fail_callback
controller.callback_login_timeout = connection_fail_callback
controller.callback_login_success = connection_success_callback
_LOGGER.info("Start envisalink")
controller.start()
result = await sync_connect
if not result:
return False
# Load sub-components for Envisalink
if partitions:
hass.async_create_task(
async_load_platform(
hass,
"alarm_control_panel",
"envisalink",
{CONF_PARTITIONS: partitions, CONF_CODE: code, CONF_PANIC: panic_type},
config,
)
)
hass.async_create_task(
async_load_platform(
hass,
"sensor",
"envisalink",
{CONF_PARTITIONS: partitions, CONF_CODE: code},
config,
)
)
if zones:
hass.async_create_task(
async_load_platform(
hass, "binary_sensor", "envisalink", {CONF_ZONES: zones}, config
)
)
hass.services.async_register(
DOMAIN, SERVICE_CUSTOM_FUNCTION, handle_custom_function, schema=SERVICE_SCHEMA
)
return True
class EnvisalinkDevice(Entity):
"""Representation of an Envisalink device."""
def __init__(self, name, info, controller):
"""Initialize the device."""
self._controller = controller
self._info = info
self._name = name
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
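# A hypothetical sketch (the real platforms live in alarm_control_panel,
# binary_sensor and sensor) of how an entity builds on EnvisalinkDevice:
# subscribe to the dispatcher signal wired up in async_setup and refresh
# whenever the Envisalink reports zone changes.
class _ExampleZoneEntity(EnvisalinkDevice):
    """Illustrative entity driven by SIGNAL_ZONE_UPDATE."""

    async def async_added_to_hass(self):
        """Register for zone update signals."""
        from homeassistant.helpers.dispatcher import async_dispatcher_connect

        async_dispatcher_connect(
            self.hass, SIGNAL_ZONE_UPDATE, self._update_callback
        )

    @callback
    def _update_callback(self, data):
        """Refresh Home Assistant state on zone updates."""
        self.async_schedule_update_ha_state()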
##########################################################################
from typing import Any, Union, Callable, Optional, Sequence, Collection, Text
from typing import Tuple, Set, List, Type
import numpy as np
from tensornetwork.network_components import AbstractNode, Node, Edge, connect
from tensornetwork.network_components import CopyNode
from tensornetwork.network_operations import get_all_nodes, copy, reachable
from tensornetwork.network_operations import get_subgraph_dangling, remove_node
from tensornetwork.contractors import greedy
Tensor = Any
def quantum_constructor(
out_edges: Sequence[Edge],
in_edges: Sequence[Edge],
ref_nodes: Optional[Collection[AbstractNode]] = None,
ignore_edges: Optional[Collection[Edge]] = None) -> "QuOperator":
"""Constructs an appropriately specialized QuOperator.
  If there are no edges, creates a QuScalar. If there are only output (input)
edges, creates a QuVector (QuAdjointVector). Otherwise creates a
QuOperator.
Args:
out_edges: output edges.
in_edges: in edges.
ref_nodes: reference nodes for the tensor network (needed if there is a
scalar component).
ignore_edges: edges to ignore when checking the dimensionality of the
tensor network.
Returns:
    The new QuScalar, QuVector, QuAdjointVector or QuOperator, as appropriate.
"""
if len(out_edges) == 0 and len(in_edges) == 0:
return QuScalar(ref_nodes, ignore_edges)
if len(out_edges) == 0:
return QuAdjointVector(in_edges, ref_nodes, ignore_edges)
if len(in_edges) == 0:
return QuVector(out_edges, ref_nodes, ignore_edges)
return QuOperator(out_edges, in_edges, ref_nodes, ignore_edges)
def identity(space: Sequence[int],
backend: Optional[Text] = None,
dtype: Type[np.number] = np.float64) -> "QuOperator":
"""Construct a `QuOperator` representing the identity on a given space.
Internally, this is done by constructing `CopyNode`s for each edge, with
dimension according to `space`.
Args:
space: A sequence of integers for the dimensions of the tensor product
factors of the space (the edges in the tensor network).
backend: Optionally specify the backend to use for computations.
dtype: The data type (for conversion to dense).
Returns:
The desired identity operator.
"""
nodes = [CopyNode(2, d, backend=backend, dtype=dtype) for d in space]
out_edges = [n[0] for n in nodes]
in_edges = [n[1] for n in nodes]
return quantum_constructor(out_edges, in_edges)
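# A minimal sketch (numpy backend assumed): tracing the identity on a [2, 3]
# space contracts to its total dimension.
def _identity_trace_example():
  ident = identity([2, 3], backend="numpy")
  return ident.trace().eval()  # == 6.0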
def check_spaces(edges_1: Sequence[Edge], edges_2: Sequence[Edge]) -> None:
"""Check the vector spaces represented by two lists of edges are compatible.
The number of edges must be the same and the dimensions of each pair of edges
must match. Otherwise, an exception is raised.
Args:
edges_1: List of edges representing a many-body Hilbert space.
edges_2: List of edges representing a many-body Hilbert space.
"""
if len(edges_1) != len(edges_2):
raise ValueError("Hilbert-space mismatch: Cannot connect {} subsystems "
"with {} subsystems.".format(len(edges_1), len(edges_2)))
for (i, (e1, e2)) in enumerate(zip(edges_1, edges_2)):
if e1.dimension != e2.dimension:
raise ValueError("Hilbert-space mismatch on subsystems {}: Input "
"dimension {} != output dimension {}.".format(
i, e1.dimension, e2.dimension))
def eliminate_identities(nodes: Collection[AbstractNode]) -> Tuple[dict, dict]:
"""Eliminates any connected CopyNodes that are identity matrices.
This will modify the network represented by `nodes`.
Only identities that are connected to other nodes are eliminated.
Args:
nodes: Collection of nodes to search.
Returns:
nodes_dict: Dictionary mapping remaining Nodes to any replacements.
dangling_edges_dict: Dictionary specifying all dangling-edge replacements.
"""
nodes_dict = {}
dangling_edges_dict = {}
for n in nodes:
if isinstance(
n, CopyNode) and n.get_rank() == 2 and not (n[0].is_dangling() and
n[1].is_dangling()):
old_edges = [n[0], n[1]]
_, new_edges = remove_node(n)
if 0 in new_edges and 1 in new_edges:
e = connect(new_edges[0], new_edges[1])
elif 0 in new_edges: # 1 was dangling
dangling_edges_dict[old_edges[1]] = new_edges[0]
elif 1 in new_edges: # 0 was dangling
dangling_edges_dict[old_edges[0]] = new_edges[1]
else:
# Trace of identity, so replace with a scalar node!
d = n.get_dimension(0)
# NOTE: Assume CopyNodes have numpy dtypes.
nodes_dict[n] = Node(np.array(d, dtype=n.dtype), backend=n.backend)
else:
for e in n.get_all_dangling():
dangling_edges_dict[e] = e
nodes_dict[n] = n
return nodes_dict, dangling_edges_dict
class QuOperator():
"""Represents a linear operator via a tensor network.
To interpret a tensor network as a linear operator, some of the dangling
edges must be designated as `out_edges` (output edges) and the rest as
`in_edges` (input edges).
Considered as a matrix, the `out_edges` represent the row index and the
`in_edges` represent the column index.
The (right) action of the operator on another then consists of connecting
the `in_edges` of the first operator to the `out_edges` of the second.
Can be used to do simple linear algebra with tensor networks.
"""
__array_priority__ = 100.0 # for correct __rmul__ with scalar ndarrays
def __init__(self,
out_edges: Sequence[Edge],
in_edges: Sequence[Edge],
ref_nodes: Optional[Collection[AbstractNode]] = None,
ignore_edges: Optional[Collection[Edge]] = None) -> None:
"""Creates a new `QuOperator` from a tensor network.
This encapsulates an existing tensor network, interpreting it as a linear
operator.
The network is checked for consistency: All dangling edges must either be
in `out_edges`, `in_edges`, or `ignore_edges`.
Args:
out_edges: The edges of the network to be used as the output edges.
in_edges: The edges of the network to be used as the input edges.
ref_nodes: Nodes used to refer to parts of the tensor network that are
not connected to any input or output edges (for example: a scalar
factor).
ignore_edges: Optional collection of dangling edges to ignore when
performing consistency checks.
"""
# TODO: Decide whether the user must also supply all nodes involved.
# This would enable extra error checking and is probably clearer
# than `ref_nodes`.
if len(in_edges) == 0 and len(out_edges) == 0 and not ref_nodes:
raise ValueError("At least one reference node is required to specify a "
"scalar. None provided!")
self.out_edges = list(out_edges)
self.in_edges = list(in_edges)
self.ignore_edges = set(ignore_edges) if ignore_edges else set()
self.ref_nodes = set(ref_nodes) if ref_nodes else set()
self.check_network()
@classmethod
def from_tensor(cls,
tensor: Tensor,
out_axes: Sequence[int],
in_axes: Sequence[int],
backend: Optional[Text] = None) -> "QuOperator":
"""Construct a `QuOperator` directly from a single tensor.
This first wraps the tensor in a `Node`, then constructs the `QuOperator`
from that `Node`.
Args:
tensor: The tensor.
out_axes: The axis indices of `tensor` to use as `out_edges`.
in_axes: The axis indices of `tensor` to use as `in_edges`.
backend: Optionally specify the backend to use for computations.
Returns:
The new operator.
"""
n = Node(tensor, backend=backend)
out_edges = [n[i] for i in out_axes]
in_edges = [n[i] for i in in_axes]
return cls(out_edges, in_edges, set([n]))
@property
def nodes(self) -> Set[AbstractNode]:
"""All tensor-network nodes involved in the operator."""
return reachable(
get_all_nodes(self.out_edges + self.in_edges) | self.ref_nodes)
@property
def in_space(self) -> List[int]:
return [e.dimension for e in self.in_edges]
@property
def out_space(self) -> List[int]:
return [e.dimension for e in self.out_edges]
def is_scalar(self) -> bool:
return len(self.out_edges) == 0 and len(self.in_edges) == 0
def is_vector(self) -> bool:
return len(self.out_edges) > 0 and len(self.in_edges) == 0
def is_adjoint_vector(self) -> bool:
return len(self.out_edges) == 0 and len(self.in_edges) > 0
def check_network(self) -> None:
"""Check that the network has the expected dimensionality.
This checks that all input and output edges are dangling and that
there are no other dangling edges (except any specified in
`ignore_edges`). If not, an exception is raised.
"""
for (i, e) in enumerate(self.out_edges):
if not e.is_dangling():
raise ValueError("Output edge {} is not dangling!".format(i))
for (i, e) in enumerate(self.in_edges):
if not e.is_dangling():
raise ValueError("Input edge {} is not dangling!".format(i))
for e in self.ignore_edges:
if not e.is_dangling():
raise ValueError("ignore_edges contains non-dangling edge: {}".format(
str(e)))
known_edges = set(self.in_edges) | set(self.out_edges) | self.ignore_edges
all_dangling_edges = get_subgraph_dangling(self.nodes)
if known_edges != all_dangling_edges:
raise ValueError("The network includes unexpected dangling edges (that "
"are not members of ignore_edges).")
def adjoint(self) -> "QuOperator":
"""The adjoint of the operator.
This creates a new `QuOperator` with complex-conjugate copies of all
tensors in the network and with the input and output edges switched.
"""
nodes_dict, edge_dict = copy(self.nodes, True)
out_edges = [edge_dict[e] for e in self.in_edges]
in_edges = [edge_dict[e] for e in self.out_edges]
ref_nodes = [nodes_dict[n] for n in self.ref_nodes]
ignore_edges = [edge_dict[e] for e in self.ignore_edges]
return quantum_constructor(out_edges, in_edges, ref_nodes, ignore_edges)
def trace(self) -> "QuOperator":
"""The trace of the operator."""
return self.partial_trace(range(len(self.in_edges)))
def norm(self) -> "QuOperator":
"""The norm of the operator.
This is the 2-norm (also known as the Frobenius or Hilbert-Schmidt
norm).
"""
return (self.adjoint() @ self).trace()
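  # norm(), like trace(), returns a scalar-valued network (a QuScalar);
  # call .eval() on the result to obtain the actual scalar tensor.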
def partial_trace(self,
subsystems_to_trace_out: Collection[int]) -> "QuOperator":
"""The partial trace of the operator.
Subsystems to trace out are supplied as indices, so that dangling edges
    are connected to each other as:
`out_edges[i] ^ in_edges[i] for i in subsystems_to_trace_out`
This does not modify the original network. The original ordering of the
remaining subsystems is maintained.
Args:
subsystems_to_trace_out: Indices of subsystems to trace out.
Returns:
A new QuOperator or QuScalar representing the result.
"""
out_edges_trace = [self.out_edges[i] for i in subsystems_to_trace_out]
in_edges_trace = [self.in_edges[i] for i in subsystems_to_trace_out]
check_spaces(in_edges_trace, out_edges_trace)
nodes_dict, edge_dict = copy(self.nodes, False)
for (e1, e2) in zip(out_edges_trace, in_edges_trace):
edge_dict[e1] = edge_dict[e1] ^ edge_dict[e2]
# get leftover edges in the original order
out_edges_trace = set(out_edges_trace)
in_edges_trace = set(in_edges_trace)
out_edges = [
edge_dict[e] for e in self.out_edges if e not in out_edges_trace
]
in_edges = [edge_dict[e] for e in self.in_edges if e not in in_edges_trace]
ref_nodes = [n for _, n in nodes_dict.items()]
ignore_edges = [edge_dict[e] for e in self.ignore_edges]
return quantum_constructor(out_edges, in_edges, ref_nodes, ignore_edges)
def __matmul__(self, other: "QuOperator") -> "QuOperator":
"""The action of this operator on another.
Given `QuOperator`s `A` and `B`, produces a new `QuOperator` for `A @ B`,
where `A @ B` means: "the action of A, as a linear operator, on B".
Under the hood, this produces copies of the tensor networks defining `A`
and `B` and then connects the copies by hooking up the `in_edges` of
`A.copy()` to the `out_edges` of `B.copy()`.
"""
check_spaces(self.in_edges, other.out_edges)
# Copy all nodes involved in the two operators.
    # We must do this separately for self and other, in case self and other
    # are defined via the same network components (e.g. if self is other).
nodes_dict1, edges_dict1 = copy(self.nodes, False)
nodes_dict2, edges_dict2 = copy(other.nodes, False)
# connect edges to create network for the result
for (e1, e2) in zip(self.in_edges, other.out_edges):
_ = edges_dict1[e1] ^ edges_dict2[e2]
in_edges = [edges_dict2[e] for e in other.in_edges]
out_edges = [edges_dict1[e] for e in self.out_edges]
ref_nodes = ([n for _, n in nodes_dict1.items()] +
[n for _, n in nodes_dict2.items()])
ignore_edges = ([edges_dict1[e] for e in self.ignore_edges] +
[edges_dict2[e] for e in other.ignore_edges])
return quantum_constructor(out_edges, in_edges, ref_nodes, ignore_edges)
def __mul__(self, other: Union["QuOperator", AbstractNode,
Tensor]) -> "QuOperator":
"""Scalar multiplication of operators.
    Given two operators `A` and `B`, one of which is a scalar (it has no
input or output edges), `A * B` produces a new operator representing the
scalar multiplication of `A` and `B`.
For convenience, one of `A` or `B` may be a number or scalar-valued tensor
or `Node` (it will automatically be wrapped in a `QuScalar`).
Note: This is a special case of `tensor_product()`.
"""
if not isinstance(other, QuOperator):
if isinstance(other, AbstractNode):
node = other
else:
node = Node(other, backend=self.nodes.pop().backend)
if node.shape:
raise ValueError("Cannot perform elementwise multiplication by a "
"non-scalar tensor.")
other = QuScalar([node])
if self.is_scalar() or other.is_scalar():
return self.tensor_product(other)
raise ValueError("Elementwise multiplication is only supported if at "
"least one of the arguments is a scalar.")
def __rmul__(
self, other: Union["QuOperator", AbstractNode, Tensor]) -> "QuOperator":
"""Scalar multiplication of operators.
See `.__mul__()`.
"""
return self.__mul__(other)
def tensor_product(self, other: "QuOperator") -> "QuOperator":
"""Tensor product with another operator.
Given two operators `A` and `B`, produces a new operator `AB` representing
    `A` ⊗ `B`. The `out_edges` (`in_edges`) of `AB` are simply the
concatenation of the `out_edges` (`in_edges`) of `A.copy()` with that of
`B.copy()`:
`new_out_edges = [*out_edges_A_copy, *out_edges_B_copy]`
`new_in_edges = [*in_edges_A_copy, *in_edges_B_copy]`
Args:
other: The other operator (`B`).
Returns:
The result (`AB`).
"""
nodes_dict1, edges_dict1 = copy(self.nodes, False)
nodes_dict2, edges_dict2 = copy(other.nodes, False)
in_edges = ([edges_dict1[e] for e in self.in_edges] +
[edges_dict2[e] for e in other.in_edges])
out_edges = ([edges_dict1[e] for e in self.out_edges] +
[edges_dict2[e] for e in other.out_edges])
ref_nodes = ([n for _, n in nodes_dict1.items()] +
[n for _, n in nodes_dict2.items()])
ignore_edges = ([edges_dict1[e] for e in self.ignore_edges] +
[edges_dict2[e] for e in other.ignore_edges])
return quantum_constructor(out_edges, in_edges, ref_nodes, ignore_edges)
def contract(
self,
contractor: Callable = greedy,
final_edge_order: Optional[Sequence[Edge]] = None) -> "QuOperator":
"""Contract the tensor network in place.
This modifies the tensor network representation of the operator (or vector,
or scalar), reducing it to a single tensor, without changing the value.
Args:
contractor: A function that performs the contraction. Defaults to
`greedy`, which uses the greedy algorithm from `opt_einsum` to
determine a contraction order.
final_edge_order: Manually specify the axis ordering of the final tensor.
Returns:
The present object.
"""
nodes_dict, dangling_edges_dict = eliminate_identities(self.nodes)
self.in_edges = [dangling_edges_dict[e] for e in self.in_edges]
self.out_edges = [dangling_edges_dict[e] for e in self.out_edges]
self.ignore_edges = set(dangling_edges_dict[e] for e in self.ignore_edges)
self.ref_nodes = set(
nodes_dict[n] for n in self.ref_nodes if n in nodes_dict)
self.check_network()
if final_edge_order:
final_edge_order = [dangling_edges_dict[e] for e in final_edge_order]
self.ref_nodes = set(
[contractor(self.nodes, output_edge_order=final_edge_order)])
else:
self.ref_nodes = set([contractor(self.nodes, ignore_edge_order=True)])
return self
def eval(self,
contractor: Callable = greedy,
final_edge_order: Optional[Sequence[Edge]] = None) -> Tensor:
"""Contracts the tensor network in place and returns the final tensor.
Note that this modifies the tensor network representing the operator.
The default ordering for the axes of the final tensor is:
`*out_edges, *in_edges`.
If there are any "ignored" edges, their axes come first:
`*ignored_edges, *out_edges, *in_edges`.
Args:
contractor: A function that performs the contraction. Defaults to
`greedy`, which uses the greedy algorithm from `opt_einsum` to
determine a contraction order.
final_edge_order: Manually specify the axis ordering of the final tensor.
The default ordering is determined by `out_edges` and `in_edges` (see
above).
Returns:
The final tensor representing the operator.
"""
if not final_edge_order:
final_edge_order = (
list(self.ignore_edges) + self.out_edges + self.in_edges)
self.contract(contractor, final_edge_order)
nodes = self.nodes
if len(nodes) != 1:
raise ValueError("Node count '{}' > 1 after contraction!".format(
len(nodes)))
return list(nodes)[0].tensor
class QuVector(QuOperator):
"""Represents a (column) vector via a tensor network."""
def __init__(self,
subsystem_edges: Sequence[Edge],
ref_nodes: Optional[Collection[AbstractNode]] = None,
ignore_edges: Optional[Collection[Edge]] = None) -> None:
"""Constructs a new `QuVector` from a tensor network.
This encapsulates an existing tensor network, interpreting it as a (column)
vector.
Args:
subsystem_edges: The edges of the network to be used as the output edges.
ref_nodes: Nodes used to refer to parts of the tensor network that are
not connected to any input or output edges (for example: a scalar
factor).
ignore_edges: Optional collection of edges to ignore when performing
consistency checks.
"""
super().__init__(subsystem_edges, [], ref_nodes, ignore_edges)
@classmethod
def from_tensor(cls,
tensor: Tensor,
subsystem_axes: Optional[Sequence[int]] = None,
backend: Optional[Text] = None) -> "QuVector":
"""Construct a `QuVector` directly from a single tensor.
This first wraps the tensor in a `Node`, then constructs the `QuVector`
from that `Node`.
Args:
tensor: The tensor.
subsystem_axes: Sequence of integer indices specifying the order in which
to interpret the axes as subsystems (output edges). If not specified,
the axes are taken in ascending order.
backend: Optionally specify the backend to use for computations.
Returns:
The new operator.
"""
n = Node(tensor, backend=backend)
if subsystem_axes is not None:
subsystem_edges = [n[i] for i in subsystem_axes]
else:
subsystem_edges = n.get_all_edges()
return cls(subsystem_edges)
@property
def subsystem_edges(self) -> List[Edge]:
return self.out_edges
@property
def space(self) -> List[int]:
return self.out_space
def projector(self) -> "QuOperator":
return self @ self.adjoint()
def reduced_density(self,
subsystems_to_trace_out: Collection[int]) -> "QuOperator":
rho = self.projector()
return rho.partial_trace(subsystems_to_trace_out)
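# A short sketch (numpy backend assumed) tying the pieces together: wrap dense
# tensors as a QuOperator and a QuVector, apply one to the other with `@`, and
# contract the result back to a dense tensor with eval().
def _apply_example():
  flip = QuOperator.from_tensor(np.array([[0.0, 1.0], [1.0, 0.0]]), [0], [1])
  ket = QuVector.from_tensor(np.array([1.0, 0.0]))
  return (flip @ ket).eval()  # array([0., 1.])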
class QuAdjointVector(QuOperator):
"""Represents an adjoint (row) vector via a tensor network."""
def __init__(self,
subsystem_edges: Sequence[Edge],
ref_nodes: Optional[Collection[AbstractNode]] = None,
ignore_edges: Optional[Collection[Edge]] = None) -> None:
"""Constructs a new `QuAdjointVector` from a tensor network.
This encapsulates an existing tensor network, interpreting it as an adjoint
vector (row vector).
Args:
subsystem_edges: The edges of the network to be used as the input edges.
ref_nodes: Nodes used to refer to parts of the tensor network that are
not connected to any input or output edges (for example: a scalar
factor).
ignore_edges: Optional collection of edges to ignore when performing
consistency checks.
"""
super().__init__([], subsystem_edges, ref_nodes, ignore_edges)
@classmethod
def from_tensor(cls,
tensor: Tensor,
subsystem_axes: Optional[Sequence[int]] = None,
backend: Optional[Text] = None) -> "QuAdjointVector":
"""Construct a `QuAdjointVector` directly from a single tensor.
This first wraps the tensor in a `Node`, then constructs the
`QuAdjointVector` from that `Node`.
Args:
tensor: The tensor.
subsystem_axes: Sequence of integer indices specifying the order in which
to interpret the axes as subsystems (input edges). If not specified,
the axes are taken in ascending order.
backend: Optionally specify the backend to use for computations.
Returns:
The new operator.
"""
n = Node(tensor, backend=backend)
if subsystem_axes is not None:
subsystem_edges = [n[i] for i in subsystem_axes]
else:
subsystem_edges = n.get_all_edges()
return cls(subsystem_edges)
@property
def subsystem_edges(self) -> List[Edge]:
return self.in_edges
@property
def space(self) -> List[int]:
return self.in_space
def projector(self) -> "QuOperator":
return self.adjoint() @ self
def reduced_density(self,
subsystems_to_trace_out: Collection[int]) -> "QuOperator":
rho = self.projector()
return rho.partial_trace(subsystems_to_trace_out)
class QuScalar(QuOperator):
"""Represents a scalar via a tensor network."""
def __init__(self,
ref_nodes: Collection[AbstractNode],
ignore_edges: Optional[Collection[Edge]] = None) -> None:
"""Constructs a new `QuScalar` from a tensor network.
This encapsulates an existing tensor network, interpreting it as a scalar.
Args:
ref_nodes: Nodes used to refer to the tensor network (need not be
exhaustive - one node from each disconnected subnetwork is sufficient).
ignore_edges: Optional collection of edges to ignore when performing
consistency checks.
"""
super().__init__([], [], ref_nodes, ignore_edges)
@classmethod
def from_tensor(cls,
tensor: Tensor,
backend: Optional[Text] = None) -> "QuScalar":
"""Construct a `QuScalar` directly from a single tensor.
This first wraps the tensor in a `Node`, then constructs the
`QuScalar` from that `Node`.
Args:
tensor: The tensor.
backend: Optionally specify the backend to use for computations.
Returns:
The new operator.
"""
n = Node(tensor, backend=backend)
return cls(set([n]))
##########################################################################
import re
import collections
from string import Template
from itertools import product
import numpy as np
from joblib import Parallel, delayed
from pyparsing import (
Word,
alphanums,
Suppress,
Optional,
CharsNotIn,
Group,
nums,
ZeroOrMore,
OneOrMore,
cppStyleComment,
printables,
)
from pgmpy.models import BayesianModel
from pgmpy.factors.discrete import TabularCPD
class BIFReader(object):
"""
    Base class for reading a network file in BIF format
"""
def __init__(self, path=None, string=None, include_properties=False, n_jobs=-1):
"""
Initializes a BIFReader object.
Parameters
----------
path : file or str
File of bif data
string : str
String of bif data
include_properties: boolean
        If True, reads the property tags from the file and stores them in the
        graph's properties.
n_jobs: int (default: -1)
Number of jobs to run in parallel. `-1` means use all processors.
Examples
--------
# dog-problem.bif file is present at
# http://www.cs.cmu.edu/~javabayes/Examples/DogProblem/dog-problem.bif
>>> from pgmpy.readwrite import BIFReader
        >>> reader = BIFReader("bif_test.bif")
        >>> reader
        <pgmpy.readwrite.BIF.BIFReader object at 0x7f2375621cf8>
"""
if path:
with open(path, "r") as network:
self.network = network.read()
elif string:
self.network = string
else:
raise ValueError("Must specify either path or string")
self.n_jobs = n_jobs
self.include_properties = include_properties
if '"' in self.network:
# Replacing quotes by spaces to remove case sensitivity like:
# "Dog-Problem" and Dog-problem
# or "true""false" and "true" "false" and true false
self.network = self.network.replace('"', " ")
if "/*" in self.network or "//" in self.network:
self.network = cppStyleComment.suppress().transformString(
self.network
) # removing comments from the file
(
self.name_expr,
self.state_expr,
self.property_expr,
) = self.get_variable_grammar()
self.probability_expr, self.cpd_expr = self.get_probability_grammar()
self.network_name = self.get_network_name()
self.variable_names = self.get_variables()
self.variable_states = self.get_states()
if self.include_properties:
self.variable_properties = self.get_property()
self.variable_parents = self.get_parents()
self.variable_cpds = self.get_values()
self.variable_edges = self.get_edges()
def get_variable_grammar(self):
"""
A method that returns variable grammar
"""
        # Defining an expression for a valid word
word_expr = Word(alphanums + "_" + "-")
word_expr2 = Word(initChars=printables, excludeChars=["{", "}", ",", " "])
name_expr = Suppress("variable") + word_expr + Suppress("{")
state_expr = ZeroOrMore(word_expr2 + Optional(Suppress(",")))
# Defining a variable state expression
variable_state_expr = (
Suppress("type")
+ Suppress(word_expr)
+ Suppress("[")
+ Suppress(Word(nums))
+ Suppress("]")
+ Suppress("{")
+ Group(state_expr)
+ Suppress("}")
+ Suppress(";")
)
        # variable states are of the form: type <desc> [ <args> ] { val1, val2 };
        # (the commas may or may not be present)
        property_expr = (
            Suppress("property") + CharsNotIn(";") + Suppress(";")
        )  # Creating an expr to find the property tag
return name_expr, variable_state_expr, property_expr
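    # For reference, a hypothetical variable block the above expressions match:
    #     variable light-on {
    #         type discrete [ 2 ] { true, false };
    #         property position = (218, 195) ;
    #     }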
def get_probability_grammar(self):
"""
A method that returns probability grammar
"""
        # Creating a valid word expression for probability; it has the form
        # var1 | var2, var3 or var1 var2 var3 or simply var1
word_expr = (
Word(alphanums + "-" + "_")
+ Suppress(Optional("|"))
+ Suppress(Optional(","))
)
word_expr2 = Word(
initChars=printables, excludeChars=[",", ")", " ", "("]
) + Suppress(Optional(","))
        # creating an expression for valid numbers, e.g. 1.00, 1, 0.001 or 9.8e-5
num_expr = Word(nums + "-" + "+" + "e" + "E" + ".") + Suppress(Optional(","))
probability_expr = (
Suppress("probability")
+ Suppress("(")
+ OneOrMore(word_expr)
+ Suppress(")")
)
optional_expr = Suppress("(") + OneOrMore(word_expr2) + Suppress(")")
probab_attributes = optional_expr | Suppress("table")
cpd_expr = probab_attributes + OneOrMore(num_expr)
return probability_expr, cpd_expr
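    # For reference, hypothetical probability blocks the above expressions match:
    #     probability ( light-on | family-out ) {
    #         (true) 0.6, 0.4;
    #         (false) 0.05, 0.95;
    #     }
    #     probability ( family-out ) {
    #         table 0.15, 0.85;
    #     }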
def variable_block(self):
start = re.finditer("variable", self.network)
for index in start:
end = self.network.find("}\n", index.start())
yield self.network[index.start() : end]
def probability_block(self):
start = re.finditer("probability", self.network)
for index in start:
end = self.network.find("}\n", index.start())
yield self.network[index.start() : end]
def get_network_name(self):
"""
Returns the name of the network
Example
---------------
>>> from pgmpy.readwrite import BIFReader
        >>> reader = BIFReader("bif_test.bif")
        >>> reader.network_name
'Dog-Problem'
"""
start = self.network.find("network")
end = self.network.find("}\n", start)
# Creating a network attribute
network_attribute = Suppress("network") + Word(alphanums + "_" + "-") + "{"
network_name = network_attribute.searchString(self.network[start:end])[0][0]
return network_name
def get_variables(self):
"""
Returns list of variables of the network
Example
-------------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_variables()
['light-on','bowel_problem','dog-out','hear-bark','family-out']
"""
variable_names = []
for block in self.variable_block():
name = self.name_expr.searchString(block)[0][0]
variable_names.append(name)
return variable_names
def get_states(self):
"""
Returns the states of variables present in the network
Example
-----------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_states()
{'bowel-problem': ['true','false'],
'dog-out': ['true','false'],
'family-out': ['true','false'],
'hear-bark': ['true','false'],
'light-on': ['true','false']}
"""
variable_states = {}
for block in self.variable_block():
name = self.name_expr.searchString(block)[0][0]
variable_states[name] = list(self.state_expr.searchString(block)[0][0])
return variable_states
def get_property(self):
"""
Returns the property of the variable
Example
-------------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_property()
{'bowel-problem': ['position = (335, 99)'],
'dog-out': ['position = (300, 195)'],
'family-out': ['position = (257, 99)'],
'hear-bark': ['position = (296, 268)'],
'light-on': ['position = (218, 195)']}
"""
variable_properties = {}
for block in self.variable_block():
name = self.name_expr.searchString(block)[0][0]
properties = self.property_expr.searchString(block)
variable_properties[name] = [y.strip() for x in properties for y in x]
return variable_properties
def get_parents(self):
"""
Returns the parents of the variables present in the network
Example
--------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_parents()
{'bowel-problem': [],
'dog-out': ['family-out', 'bowel-problem'],
'family-out': [],
'hear-bark': ['dog-out'],
'light-on': ['family-out']}
"""
variable_parents = {}
for block in self.probability_block():
names = self.probability_expr.searchString(block.split("\n")[0])[0]
variable_parents[names[0]] = names[1:]
return variable_parents
def _get_values_from_block(self, block):
names = self.probability_expr.searchString(block)
var_name, parents = names[0][0], names[0][1:]
cpds = self.cpd_expr.searchString(block)
# Check if the block is a table.
if bool(re.search(".*\\n[ ]*table .*\n.*", block)):
arr = np.array([float(j) for i in cpds for j in i])
arr = arr.reshape(
(
len(self.variable_states[var_name]),
arr.size // len(self.variable_states[var_name]),
)
)
else:
arr_length = np.prod([len(self.variable_states[var]) for var in parents])
arr = np.zeros((len(self.variable_states[var_name]), arr_length))
values_dict = {}
for prob_line in cpds:
states = prob_line[: len(parents)]
vals = [float(i) for i in prob_line[len(parents) :]]
values_dict[tuple(states)] = vals
for index, combination in enumerate(
product(*[self.variable_states[var] for var in parents])
):
arr[:, index] = values_dict[combination]
return var_name, arr
def get_values(self):
"""
Returns the CPD of the variables present in the network
Example
--------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_values()
{'bowel-problem': np.array([[0.01],
[0.99]]),
'dog-out': np.array([[0.99, 0.97, 0.9, 0.3],
[0.01, 0.03, 0.1, 0.7]]),
'family-out': np.array([[0.15],
[0.85]]),
'hear-bark': np.array([[0.7, 0.01],
[0.3, 0.99]]),
'light-on': np.array([[0.6, 0.05],
[0.4, 0.95]])}
"""
cpd_values = Parallel(n_jobs=self.n_jobs)(
delayed(self._get_values_from_block)(block)
for block in self.probability_block()
)
variable_cpds = {}
for var_name, arr in cpd_values:
variable_cpds[var_name] = arr
return variable_cpds
def get_edges(self):
"""
Returns the edges of the network
Example
--------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_edges()
[['family-out', 'light-on'],
['family-out', 'dog-out'],
['bowel-problem', 'dog-out'],
['dog-out', 'hear-bark']]
"""
edges = [
[value, key]
for key in self.variable_parents.keys()
for value in self.variable_parents[key]
]
return edges
def get_model(self, state_name_type=str):
"""
Returns the Bayesian Model read from the file/str.
Parameters
----------
state_name_type: int, str or bool (default: str)
The data type to which to convert the state names of the variables.
Example
----------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIFReader("bif_test.bif")
>>> reader.get_model()
<pgmpy.models.BayesianModel.BayesianModel object at 0x7f20af154320>
"""
try:
model = BayesianModel()
model.add_nodes_from(self.variable_names)
model.add_edges_from(self.variable_edges)
model.name = self.network_name
tabular_cpds = []
for var in sorted(self.variable_cpds.keys()):
values = self.variable_cpds[var]
sn = {
p_var: list(map(state_name_type, self.variable_states[p_var]))
for p_var in self.variable_parents[var]
}
sn[var] = list(map(state_name_type, self.variable_states[var]))
cpd = TabularCPD(
var,
len(self.variable_states[var]),
values,
evidence=self.variable_parents[var],
evidence_card=[
len(self.variable_states[evidence_var])
for evidence_var in self.variable_parents[var]
],
state_names=sn,
)
tabular_cpds.append(cpd)
model.add_cpds(*tabular_cpds)
if self.include_properties:
for node, properties in self.variable_properties.items():
for prop in properties:
prop_name, prop_value = map(
lambda t: t.strip(), prop.split("=")
)
model.nodes[node][prop_name] = prop_value
return model
except AttributeError:
raise AttributeError(
"First get states of variables, edges, parents and network name"
)
class BIFWriter(object):
"""
Base class for writing BIF network file format
"""
def __init__(self, model):
"""
Initialise a BIFWriter Object
Parameters
----------
model: BayesianModel Instance
Examples
---------
>>> from pgmpy.readwrite import BIFWriter
>>> writer = BIFWriter(model)
>>> writer
<writer_BIF.BIFWriter at 0x7f05e5ea27b8>
"""
if not isinstance(model, BayesianModel):
raise TypeError("model must be an instance of BayesianModel")
self.model = model
if not self.model.name:
self.network_name = "unknown"
else:
self.network_name = self.model.name
self.variable_states = self.get_states()
self.property_tag = self.get_properties()
self.variable_parents = self.get_parents()
self.tables = self.get_cpds()
def BIF_templates(self):
"""
Create template for writing in BIF format
"""
network_template = Template("network $name {\n}\n")
        # The property tag may or may not be present in the model, and a variable
        # can have more than one property, so each is substituted according to the
        # format below (or an empty string when absent)
variable_template = Template(
"""variable $name {
type discrete [ $no_of_states ] { $states };
$properties}\n"""
)
property_template = Template(" property $prop ;\n")
        # $variable_ is the variable's name; the trailing underscore keeps the
        # template placeholders unambiguous
probability_template = Template(
"""probability ( $variable_$seprator_$parents ) {
table $values ;
}\n"""
)
return (
network_template,
variable_template,
property_template,
probability_template,
)
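    # With the templates above, a rendered variable block looks roughly like:
    #     variable light-on {
    #         type discrete [ 2 ] { light-on_0, light-on_1 };
    #     }
    # and a rendered probability block like:
    #     probability ( light-on | family-out ) {
    #         table 0.6, 0.05, 0.4, 0.95 ;
    #     }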
def __str__(self):
"""
Returns the BIF format as string
"""
(
network_template,
variable_template,
property_template,
probability_template,
) = self.BIF_templates()
network = ""
network += network_template.substitute(name=self.network_name)
variables = self.model.nodes()
for var in sorted(variables):
no_of_states = str(len(self.variable_states[var]))
states = ", ".join(self.variable_states[var])
            properties = ""
            for prop_val in self.property_tag[var]:
                properties += property_template.substitute(prop=prop_val)
network += variable_template.substitute(
name=var,
no_of_states=no_of_states,
states=states,
properties=properties,
)
for var in sorted(variables):
if not self.variable_parents[var]:
parents = ""
seprator = ""
else:
parents = ", ".join(self.variable_parents[var])
seprator = " | "
cpd = ", ".join(map(str, self.tables[var]))
network += probability_template.substitute(
variable_=var, seprator_=seprator, parents=parents, values=cpd
)
return network
def get_variables(self):
"""
Add variables to BIF
Returns
-------
list: a list containing names of variable
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.get_variables()
['bowel-problem', 'family-out', 'hear-bark', 'light-on', 'dog-out']
"""
variables = self.model.nodes()
return variables
def get_states(self):
"""
Add states to variable of BIF
Returns
-------
dict: dict of type {variable: a list of states}
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.get_states()
{'bowel-problem': ['bowel-problem_0', 'bowel-problem_1'],
'dog-out': ['dog-out_0', 'dog-out_1'],
'family-out': ['family-out_0', 'family-out_1'],
'hear-bark': ['hear-bark_0', 'hear-bark_1'],
'light-on': ['light-on_0', 'light-on_1']}
"""
variable_states = {}
cpds = self.model.get_cpds()
for cpd in cpds:
variable = cpd.variable
variable_states[variable] = []
for state in cpd.state_names[variable]:
variable_states[variable].append(str(state))
return variable_states
def get_properties(self):
"""
Add property to variables in BIF
Returns
-------
dict: dict of type {variable: list of properties }
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.get_properties()
{'bowel-problem': ['position = (335, 99)'],
'dog-out': ['position = (300, 195)'],
'family-out': ['position = (257, 99)'],
'hear-bark': ['position = (296, 268)'],
'light-on': ['position = (218, 195)']}
"""
variables = self.model.nodes()
property_tag = {}
for variable in sorted(variables):
properties = self.model.nodes[variable]
properties = collections.OrderedDict(sorted(properties.items()))
property_tag[variable] = []
for prop, val in properties.items():
property_tag[variable].append(str(prop) + " = " + str(val))
return property_tag
def get_parents(self):
"""
Add the parents to BIF
Returns
-------
dict: dict of type {variable: a list of parents}
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.get_parents()
{'bowel-problem': [],
'dog-out': ['bowel-problem', 'family-out'],
'family-out': [],
'hear-bark': ['dog-out'],
'light-on': ['family-out']}
"""
cpds = self.model.get_cpds()
variable_parents = {}
for cpd in cpds:
variable_parents[cpd.variable] = cpd.variables[1:]
return variable_parents
def get_cpds(self):
"""
Adds tables to BIF
Returns
-------
dict: dict of type {variable: array}
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.get_cpds()
{'bowel-problem': array([ 0.01, 0.99]),
'dog-out': array([ 0.99, 0.97, 0.9 , 0.3 , 0.01, 0.03, 0.1 , 0.7 ]),
'family-out': array([ 0.15, 0.85]),
'hear-bark': array([ 0.7 , 0.01, 0.3 , 0.99]),
'light-on': array([ 0.6 , 0.05, 0.4 , 0.95])}
"""
cpds = self.model.get_cpds()
tables = {}
for cpd in cpds:
tables[cpd.variable] = cpd.values.ravel()
return tables
def write_bif(self, filename):
"""
Writes the BIF data into a file
Parameters
----------
filename : Name of the file
Example
-------
>>> from pgmpy.readwrite import BIFReader, BIFWriter
>>> model = BIFReader('dog-problem.bif').get_model()
>>> writer = BIFWriter(model)
>>> writer.write_bif(filename='test_file.bif')
"""
writer = self.__str__()
with open(filename, "w") as fout:
fout.write(writer)
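# A hypothetical round trip (reader -> model -> writer), assuming a local
# dog-problem.bif file:
#     model = BIFReader("dog-problem.bif").get_model()
#     BIFWriter(model).write_bif(filename="dog-problem-copy.bif")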
##########################################################################
GRADE_CHART = {
100: 'A+',
95: 'A',
90: 'A',
85: 'A-',
80: 'B+',
75: 'B',
70: 'B',
65: 'B-',
60: 'C+',
55: 'C',
50: 'C',
45: 'C-',
40: 'D+',
35: 'D',
30: 'D',
25: 'D-',
20: 'F',
15: 'F',
10: 'F',
5: 'F',
0: 'F'
}
# See https://wiki.mozilla.org/Security/Standard_Levels for a definition of the risk levels.
# We cannot make an accurate decision on HIGH and MAXIMUM risk likelihood indicators with the
# current checks, so the likelihood indicator is currently MEDIUM at best (or worst). Grade
# modifiers (A-, A+, B+, B-, ...) are normalized to their base grade (A, B, ...) in the
# calling function.
LIKELIHOOD_INDICATOR_CHART = {
'A': 'LOW',
'B': 'MEDIUM',
'C': 'MEDIUM',
'D': 'MEDIUM',
'F': 'MEDIUM'
}
# The minimum required score to receive extra credit
MINIMUM_SCORE_FOR_EXTRA_CREDIT = 90
GRADES = set(GRADE_CHART.values())
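# A hypothetical helper (not part of this module's API) illustrating how
# GRADE_CHART is keyed: clamp the score to [0, 100], then round down to the
# nearest multiple of 5.
def _grade_for_score(score):
    return GRADE_CHART[min(max(score - score % 5, 0), 100)]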
SCORE_TABLE = {
# contribute.json
'contribute-json-with-required-keys': {
'description': 'Contribute.json implemented with the required contact information',
'modifier': 0,
},
'contribute-json-only-required-on-mozilla-properties': {
'description': 'Contribute.json isn\'t required on websites that don\'t belong to Mozilla',
'modifier': 0,
},
'contribute-json-missing-required-keys': {
'description': 'Contribute.json exists, but is missing some of the required keys',
'modifier': -5,
},
'contribute-json-not-implemented': {
'description': 'Contribute.json file missing from root of website',
'modifier': -5,
},
'contribute-json-invalid-json': {
'description': 'Contribute.json file cannot be parsed',
'modifier': -10,
},
# CSP
'csp-implemented-with-no-unsafe-default-src-none': {
'description': 'Content Security Policy (CSP) implemented with default-src \'none\' and no \'unsafe\'',
'modifier': 10,
},
'csp-implemented-with-no-unsafe': {
'description': 'Content Security Policy (CSP) implemented without \'unsafe-inline\' or \'unsafe-eval\'',
'modifier': 5,
},
'csp-implemented-with-unsafe-inline-in-style-src-only': {
'description': ('Content Security Policy (CSP) implemented with unsafe sources inside style-src. '
'This includes \'unsafe-inline\', data: or overly broad sources such as https:.'),
'modifier': 0,
},
'csp-implemented-with-insecure-scheme-in-passive-content-only': {
'description': ('Content Security Policy (CSP) implemented, '
'but secure site allows images or media to be loaded over HTTP'),
'modifier': -10,
},
'csp-implemented-with-unsafe-eval': {
'description': 'Content Security Policy (CSP) implemented, but allows \'unsafe-eval\'',
'modifier': -10,
},
'csp-implemented-with-unsafe-inline': {
'description': ('Content Security Policy (CSP) implemented unsafely. '
'This includes \'unsafe-inline\' or data: inside script-src, '
'overly broad sources such as https: inside object-src or script-src, '
'or not restricting the sources for object-src or script-src.'),
'modifier': -20,
},
'csp-implemented-with-insecure-scheme': {
'description': ('Content Security Policy (CSP) implemented, '
'but secure site allows resources to be loaded over HTTP'),
'modifier': -20,
},
'csp-header-invalid': {
'description': 'Content Security Policy (CSP) header cannot be parsed successfully',
'modifier': -25,
},
'csp-not-implemented': {
'description': 'Content Security Policy (CSP) header not implemented',
'modifier': -25,
},
# Cookies
'cookies-secure-with-httponly-sessions-and-samesite': {
'description': ('All cookies use the Secure flag, session cookies use the HttpOnly flag, and cross-origin '
'restrictions are in place via the SameSite flag'),
'modifier': 5,
},
'cookies-secure-with-httponly-sessions': {
'description': 'All cookies use the Secure flag and all session cookies use the HttpOnly flag',
'modifier': 0,
},
'cookies-not-found': {
'description': 'No cookies detected',
'modifier': 0,
},
'cookies-without-secure-flag-but-protected-by-hsts': {
'description': 'Cookies set without using the Secure flag, but transmission over HTTP prevented by HSTS',
'modifier': -5,
},
'cookies-session-without-secure-flag-but-protected-by-hsts': {
'description': 'Session cookie set without the Secure flag, but transmission over HTTP prevented by HSTS',
'modifier': -10,
},
'cookies-without-secure-flag': {
'description': 'Cookies set without using the Secure flag or set over HTTP',
'modifier': -20,
},
'cookies-samesite-flag-invalid': {
'description': 'Cookies use SameSite flag, but set to something other than Strict or Lax',
'modifier': -20,
},
'cookies-anticsrf-without-samesite-flag': {
'description': 'Anti-CSRF tokens set without using the SameSite flag',
'modifier': -20,
},
'cookies-session-without-httponly-flag': {
'description': 'Session cookie set without using the HttpOnly flag',
'modifier': -30,
},
'cookies-session-without-secure-flag': {
'description': 'Session cookie set without using the Secure flag or set over HTTP',
'modifier': -40,
},
# Cross-origin resource sharing
'cross-origin-resource-sharing-not-implemented': {
'description': 'Content is not visible via cross-origin resource sharing (CORS) files or headers',
'modifier': 0,
},
'cross-origin-resource-sharing-implemented-with-public-access': {
'description': ('Public content is visible via cross-origin resource sharing (CORS) '
'Access-Control-Allow-Origin header'),
'modifier': 0,
},
'cross-origin-resource-sharing-implemented-with-restricted-access': {
'description': ('Content is visible via cross-origin resource sharing (CORS) files or headers, '
'but is restricted to specific domains'),
'modifier': 0,
},
'cross-origin-resource-sharing-implemented-with-universal-access': {
'description': 'Content is visible via cross-origin resource sharing (CORS) file or headers',
'modifier': -50,
},
# Public Key Pinning
'hpkp-preloaded': {
'description': 'Preloaded via the HTTP Public Key Pinning (HPKP) preloading process',
'modifier': 0,
},
'hpkp-implemented-max-age-at-least-fifteen-days': {
'description': 'HTTP Public Key Pinning (HPKP) header set to a minimum of 15 days (1296000)',
'modifier': 0,
},
'hpkp-implemented-max-age-less-than-fifteen-days': {
'description': 'HTTP Public Key Pinning (HPKP) header set to less than 15 days (1296000)',
'modifier': 0,
},
'hpkp-not-implemented': {
'description': 'HTTP Public Key Pinning (HPKP) header not implemented',
'modifier': 0,
},
'hpkp-not-implemented-no-https': {
'description': 'HTTP Public Key Pinning (HPKP) header can\'t be implemented without HTTPS',
'modifier': 0,
},
'hpkp-invalid-cert': {
'description': ('HTTP Public Key Pinning (HPKP) header cannot be set, '
'as site contains an invalid certificate chain'),
'modifier': 0,
},
'hpkp-header-invalid': {
'description': 'HTTP Public Key Pinning (HPKP) header cannot be recognized',
'modifier': -5,
},
# Redirection
'redirection-all-redirects-preloaded': {
'description': 'All hosts redirected to are in the HTTP Strict Transport Security (HSTS) preload list',
'modifier': 0,
},
'redirection-to-https': {
'description': 'Initial redirection is to HTTPS on same host, final destination is HTTPS',
'modifier': 0,
},
'redirection-not-needed-no-http': {
'description': 'Not able to connect via HTTP, so no redirection necessary',
'modifier': 0,
},
'redirection-off-host-from-http': {
'description': 'Initial redirection from HTTP to HTTPS is to a different host, preventing HSTS',
'modifier': -5,
},
'redirection-not-to-https-on-initial-redirection': {
'description': 'Redirects to HTTPS eventually, but initial redirection is to another HTTP URL',
'modifier': -10,
},
'redirection-not-to-https': {
'description': 'Redirects, but final destination is not an HTTPS URL',
'modifier': -20,
},
'redirection-missing': {
'description': 'Does not redirect to an HTTPS site',
'modifier': -20,
},
'redirection-invalid-cert': {
'description': 'Invalid certificate chain encountered during redirection',
'modifier': -20,
},
# Referrer Policy
'referrer-policy-private': {
'description': ('Referrer-Policy header set to "no-referrer", "same-origin", "strict-origin" or '
'"strict-origin-when-cross-origin"'),
'modifier': 5,
},
'referrer-policy-no-referrer-when-downgrade': {
'description': 'Referrer-Policy header set to "no-referrer-when-downgrade"',
'modifier': 0,
},
'referrer-policy-not-implemented': {
'description': 'Referrer-Policy header not implemented',
'modifier': 0,
},
'referrer-policy-unsafe': {
'description': 'Referrer-Policy header set unsafely to "origin", "origin-when-cross-origin", or "unsafe-url"',
'modifier': -5,
},
'referrer-policy-header-invalid': {
'description': 'Referrer-Policy header cannot be recognized',
'modifier': -5,
},
# Strict Transport Security (HSTS)
'hsts-preloaded': {
'description': 'Preloaded via the HTTP Strict Transport Security (HSTS) preloading process',
'modifier': 5,
},
'hsts-implemented-max-age-at-least-six-months': {
'description': 'HTTP Strict Transport Security (HSTS) header set to a minimum of six months (15768000)',
'modifier': 0,
},
'hsts-implemented-max-age-less-than-six-months': {
'description': 'HTTP Strict Transport Security (HSTS) header set to less than six months (15768000)',
'modifier': -10,
},
'hsts-not-implemented': {
'description': 'HTTP Strict Transport Security (HSTS) header not implemented',
'modifier': -20,
},
'hsts-header-invalid': {
'description': 'HTTP Strict Transport Security (HSTS) header cannot be recognized',
'modifier': -20,
},
'hsts-not-implemented-no-https': {
'description': 'HTTP Strict Transport Security (HSTS) header cannot be set for sites not available over HTTPS',
'modifier': -20,
},
'hsts-invalid-cert': {
'description': ('HTTP Strict Transport Security (HSTS) header cannot be set, '
'as site contains an invalid certificate chain'),
'modifier': -20,
},
# Subresource Integrity (SRI)
'sri-implemented-and-all-scripts-loaded-securely': {
'description': 'Subresource Integrity (SRI) is implemented and all scripts are loaded from a similar origin',
'modifier': 5,
},
'sri-implemented-and-external-scripts-loaded-securely': {
'description': 'Subresource Integrity (SRI) is implemented and all scripts are loaded securely',
'modifier': 5,
},
'sri-not-implemented-response-not-html': {
        'description': 'Subresource Integrity (SRI) is only needed for HTML resources',
'modifier': 0,
},
'sri-not-implemented-but-no-scripts-loaded': {
'description': 'Subresource Integrity (SRI) is not needed since site contains no script tags',
'modifier': 0,
},
'sri-not-implemented-but-all-scripts-loaded-from-secure-origin': {
'description': 'Subresource Integrity (SRI) not implemented, but all scripts are loaded from a similar origin',
'modifier': 0,
},
'sri-not-implemented-but-external-scripts-loaded-securely': {
'description': 'Subresource Integrity (SRI) not implemented, but all external scripts are loaded over HTTPS',
'modifier': -5,
},
'sri-implemented-but-external-scripts-not-loaded-securely': {
'description': ('Subresource Integrity (SRI) implemented, but external scripts are loaded over HTTP or use '
'protocol-relative URLs via src="//..."'),
'modifier': -20,
},
'sri-not-implemented-and-external-scripts-not-loaded-securely': {
'description': ('Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or '
'use protocol-relative URLs via src="//..."'),
'modifier': -50,
},
# X-Content-Type-Options
'x-content-type-options-nosniff': {
'description': 'X-Content-Type-Options header set to "nosniff"',
'modifier': 0,
},
'x-content-type-options-not-implemented': {
'description': 'X-Content-Type-Options header not implemented',
'modifier': -5,
},
'x-content-type-options-header-invalid': {
'description': 'X-Content-Type-Options header cannot be recognized',
'modifier': -5,
},
# X-Frame-Options
'x-frame-options-implemented-via-csp': {
'description': 'X-Frame-Options (XFO) implemented via the CSP frame-ancestors directive',
'modifier': 5,
},
'x-frame-options-sameorigin-or-deny': {
'description': 'X-Frame-Options (XFO) header set to SAMEORIGIN or DENY',
'modifier': 0,
},
'x-frame-options-allow-from-origin': {
'description': 'X-Frame-Options (XFO) header uses ALLOW-FROM uri directive',
'modifier': 0,
},
'x-frame-options-not-implemented': {
'description': 'X-Frame-Options (XFO) header not implemented',
'modifier': -20,
},
'x-frame-options-header-invalid': {
'description': 'X-Frame-Options (XFO) header cannot be recognized',
'modifier': -20,
},
# X-XSS-Protection
'x-xss-protection-enabled-mode-block': {
'description': 'X-XSS-Protection header set to "1; mode=block"',
'modifier': 0,
},
'x-xss-protection-enabled': {
'description': 'X-XSS-Protection header set to "1"',
'modifier': 0,
},
'x-xss-protection-not-needed-due-to-csp': {
'description': 'X-XSS-Protection header not needed due to strong Content Security Policy (CSP) header',
'modifier': 0,
},
'x-xss-protection-disabled': {
'description': 'X-XSS-Protection header set to "0" (disabled)',
'modifier': -10,
},
'x-xss-protection-not-implemented': {
'description': 'X-XSS-Protection header not implemented',
'modifier': -10,
},
'x-xss-protection-header-invalid': {
'description': 'X-XSS-Protection header cannot be recognized',
'modifier': -10,
},
# Generic results
'html-not-parsable': {
        'description': 'Claims to be HTML, but cannot be parsed',
'modifier': -20, # can't run an SRI check if the HTML isn't parsable
},
'request-did-not-return-status-code-200': {
'description': 'Site did not return a status code of 200',
'modifier': -5, # can't run an SRI check on pages that don't return a 200 (deprecated)
},
'xml-not-parsable': {
        'description': 'Claims to be XML, but cannot be parsed',
'modifier': -20, # can't run an ACAO check if the xml files can't be parsed
}
}
def get_grade_and_likelihood_for_score(score: int) -> tuple:
"""
:param score: raw score based on all of the tests
:return: the overall test score, grade and likelihood_indicator
"""
score = max(score, 0) # can't have scores below 0
# If it's >100, just use the grade for 100, otherwise round down to the nearest multiple of 5
grade = GRADE_CHART[min(score - score % 5, 100)]
# If GRADE_CHART and LIKELIHOOD_INDICATOR_CHART are not synchronized during
# manual code updates, then default to UNKNOWN
likelihood_indicator = LIKELIHOOD_INDICATOR_CHART.get(grade[0], 'UNKNOWN')
return score, grade, likelihood_indicator
def get_score_description(result) -> str:
return SCORE_TABLE[result]['description']
def get_score_modifier(result) -> int:
return SCORE_TABLE[result]['modifier']
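# A minimal usage sketch (assuming GRADE_CHART maps the multiples of five from
# 0 to 100 to letter grades and LIKELIHOOD_INDICATOR_CHART keys on the grade's
# first character, as the lookups above imply):
#
#     score, grade, likelihood = get_grade_and_likelihood_for_score(73)
#     # 73 - 73 % 5 == 70, so grade == GRADE_CHART[70]
#     # likelihood == LIKELIHOOD_INDICATOR_CHART.get(grade[0], 'UNKNOWN')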
|
import mock
import pytest
from kubernetes.client.rest import ApiException
from paasta_tools.kubernetes.application.controller_wrappers import Application
from paasta_tools.kubernetes.application.controller_wrappers import DeploymentWrapper
from paasta_tools.kubernetes_tools import KubernetesDeploymentConfig
@pytest.fixture
def mock_pdr_for_service_instance():
with mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.pod_disruption_budget_for_service_instance",
autospec=True,
) as mock_pdr_for_service_instance:
yield mock_pdr_for_service_instance
@pytest.fixture
def mock_load_system_paasta_config():
with mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.load_system_paasta_config",
autospec=True,
) as mock_load_system_paasta_config:
yield mock_load_system_paasta_config
def test_brutal_bounce(mock_load_system_paasta_config):
# mock the new client used to brutal bounce in the background using threading.
mock_cloned_client = mock.MagicMock()
with mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.KubeClient",
return_value=mock_cloned_client,
autospec=True,
):
with mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.threading.Thread",
autospec=True,
) as mock_deep_delete_and_create:
mock_client = mock.MagicMock()
# we do NOT call deep_delete_and_create
app = setup_app({}, True)
DeploymentWrapper.update(self=app, kube_client=mock_client)
assert mock_deep_delete_and_create.call_count == 0
            # we DO call deep_delete_and_create when bounce_method is brutal
config_dict = {"instances": 1, "bounce_method": "brutal"}
app = setup_app(config_dict, True)
app.update(kube_client=mock_client)
mock_deep_delete_and_create.assert_called_once_with(
target=app.deep_delete_and_create, args=[mock_cloned_client]
)
@pytest.mark.parametrize("bounce_margin_factor_set", [True, False])
def test_ensure_pod_disruption_budget_create(
bounce_margin_factor_set,
mock_pdr_for_service_instance,
mock_load_system_paasta_config,
):
mock_load_system_paasta_config.return_value.get_pdb_max_unavailable.return_value = 3
mock_req_pdr = mock.Mock()
mock_req_pdr.spec.max_unavailable = 10 if bounce_margin_factor_set else 3
mock_pdr_for_service_instance.return_value = mock_req_pdr
mock_client = mock.MagicMock()
mock_client.policy.read_namespaced_pod_disruption_budget.side_effect = ApiException(
status=404
)
app = mock.MagicMock()
if bounce_margin_factor_set:
app.soa_config.config_dict = {"bounce_margin_factor": 0.1}
app.soa_config.get_bounce_margin_factor.return_value = 0.1
app.kube_deployment.service.return_value = "fake_service"
app.kube_deployment.instance.return_value = "fake_instance"
Application.ensure_pod_disruption_budget(self=app, kube_client=mock_client)
mock_client.policy.create_namespaced_pod_disruption_budget.assert_called_once_with(
namespace="paasta", body=mock_req_pdr
)
def test_ensure_pod_disruption_budget_replaces_outdated(
mock_pdr_for_service_instance, mock_load_system_paasta_config
):
mock_req_pdr = mock.Mock()
mock_req_pdr.spec.max_unavailable = 10
mock_pdr_for_service_instance.return_value = mock_req_pdr
mock_client = mock.MagicMock()
mock_pdr = mock.Mock()
mock_pdr.spec.max_unavailable = 5
mock_pdr.spec.min_available = None
mock_client.policy.read_namespaced_pod_disruption_budget.return_value = mock_pdr
app = mock.MagicMock()
app.soa_config.get_bounce_margin_factor.return_value = 0.1
app.kube_deployment.service.return_value = "fake_service"
app.kube_deployment.instance.return_value = "fake_instance"
Application.ensure_pod_disruption_budget(self=app, kube_client=mock_client)
mock_client.policy.patch_namespaced_pod_disruption_budget.assert_called_once_with(
name=mock_req_pdr.metadata.name,
namespace=mock_req_pdr.metadata.namespace,
body=mock_req_pdr,
)
def test_ensure_pod_disruption_budget_noop_when_min_available_is_set(
mock_pdr_for_service_instance, mock_load_system_paasta_config
):
mock_req_pdr = mock.Mock()
mock_req_pdr.spec.max_unavailable = 10
mock_pdr_for_service_instance.return_value = mock_req_pdr
mock_client = mock.MagicMock()
mock_pdr = mock.Mock()
mock_pdr.spec.max_unavailable = 5
mock_pdr.spec.min_available = 5
mock_client.policy.read_namespaced_pod_disruption_budget.return_value = mock_pdr
app = mock.MagicMock()
app.soa_config.get_bounce_margin_factor.return_value = 0.1
app.kube_deployment.service.return_value = "fake_service"
app.kube_deployment.instance.return_value = "fake_instance"
Application.ensure_pod_disruption_budget(self=app, kube_client=mock_client)
mock_client.policy.patch_namespaced_pod_disruption_budget.assert_not_called()
def setup_app(config_dict, exists_hpa):
item = mock.MagicMock()
item.metadata.name = "fake_name"
item.metadata.namespace = "faasta"
app = DeploymentWrapper(item=item)
app.soa_config = KubernetesDeploymentConfig(
service="service",
cluster="cluster",
instance="instance",
config_dict=config_dict,
branch_dict=None,
)
app.exists_hpa = mock.Mock(return_value=exists_hpa)
app.delete_horizontal_pod_autoscaler = mock.Mock(return_value=None)
return app
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_no_autoscaling(mock_autoscaling_is_paused):
mock_client = mock.MagicMock()
# Do nothing
config_dict = {"instances": 1}
app = setup_app(config_dict, False)
    mock_autoscaling_is_paused.return_value = False
    app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 0
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_delete_hpa_when_no_autoscaling(
mock_autoscaling_is_paused,
):
mock_client = mock.MagicMock()
# old HPA got removed so delete
config_dict = {"instances": 1}
app = setup_app(config_dict, True)
mock_autoscaling_is_paused.return_value = False
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 1
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_when_autoscaling_is_paused(
mock_autoscaling_is_paused,
):
mock_client = mock.MagicMock()
config_dict = {"max_instances": 3, "min_instances": 1}
app = setup_app(config_dict, True)
app.item.spec.replicas = 2
mock_autoscaling_is_paused.return_value = True
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 1
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_when_autoscaling_is_resumed(
mock_autoscaling_is_paused,
):
mock_client = mock.MagicMock()
config_dict = {"max_instances": 3, "min_instances": 1}
app = setup_app(config_dict, True)
app.item.spec.replicas = 2
mock_autoscaling_is_paused.return_value = False
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 1
)
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_create_hpa(mock_autoscaling_is_paused):
mock_client = mock.MagicMock()
# Create
config_dict = {"max_instances": 3}
app = setup_app(config_dict, False)
mock_autoscaling_is_paused.return_value = False
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 0
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.assert_called_once_with(
namespace="faasta",
body=app.soa_config.get_autoscaling_metric_spec(
"fake_name", "cluster", mock_client, namespace="faasta",
),
pretty=True,
)
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
return_value=False,
)
def test_sync_horizontal_pod_autoscaler_do_not_create_hpa_bespoke(
mock_autoscaling_is_paused,
):
mock_client = mock.MagicMock()
# Create
config_dict = {"max_instances": 3, "autoscaling": {"decision_policy": "bespoke"}}
app = setup_app(config_dict, False)
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 0
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
@mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.autoscaling_is_paused",
autospec=True,
)
def test_sync_horizontal_pod_autoscaler_update_hpa(mock_autoscaling_is_paused):
mock_client = mock.MagicMock()
# Update
config_dict = {"max_instances": 3}
app = setup_app(config_dict, True)
mock_autoscaling_is_paused.return_value = False
app.sync_horizontal_pod_autoscaler(kube_client=mock_client)
assert (
mock_client.autoscaling.create_namespaced_horizontal_pod_autoscaler.call_count
== 0
)
assert app.delete_horizontal_pod_autoscaler.call_count == 0
mock_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler.assert_called_once_with(
namespace="faasta",
name="fake_name",
body=app.soa_config.get_autoscaling_metric_spec(
"fake_name", "cluster", mock_client, namespace="faasta",
),
pretty=True,
)
|
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.components.switch import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .common import TEST_PASSWORD, TEST_USER_ID
async def test_valve_switches(hass, config_entry, aioclient_mock_fixture):
"""Test Flo by Moen valve switches."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
entity_id = "switch.shutoff_valve"
assert hass.states.get(entity_id).state == STATE_ON
await hass.services.async_call(
DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
)
assert hass.states.get(entity_id).state == STATE_OFF
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
)
assert hass.states.get(entity_id).state == STATE_ON
|
from datetime import datetime
from datetime import timedelta
import tests
from pyVim import connect
from pyVmomi.Iso8601 import TZManager
from pyVmomi import SoapAdapter
from vcr.stubs import VCRHTTPSConnection
from vcr import config
class Iso8601Tests(tests.VCRTestBase):
@tests.VCRTestBase.my_vcr.use_cassette('test_vm_config_iso8601.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once')
def test_vm_config_iso8601(self):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "5001ad1b-c78d-179e-ecd7-1cc0e1cf1b96"
vm = search_index.FindByUuid(None, uuid, True, True)
boot_time = vm.runtime.bootTime
        # NOTE (hartsock): assertIsNotNone is not available in Python 2.6
self.assertTrue(boot_time is not None)
# 2014-08-05T17:50:20.594958Z
expected_time = datetime(2014, 8, 5, 17, 50, 20, 594958,
boot_time.tzinfo)
self.assertEqual(expected_time, boot_time)
def test_iso8601_set_datetime(self):
# NOTE (hartsock): This test is an example of how to register
# a fixture based test to compare the XML document that pyVmomi
# is transmitting. We needed to invent a set of tools to effectively
# compare logical XML documents to each other. In this case we are
# only interested in the 'soapenv:Body' tag and its children.
now_string = "2014-08-19T04:29:36.070918-04:00"
# NOTE (hartsock): the strptime formatter has a bug in python 2.x
# http://bugs.python.org/issue6641 so we're building the date time
# using the constructor arguments instead of parsing it.
now = datetime(2014, 8, 19, 4, 29, 36, 70918,
TZManager.GetTZInfo(
tzname='EDT',
utcOffset=timedelta(hours=-4, minutes=0)))
def has_tag(doc):
if doc is None:
return False
return '<dateTime>' in doc.decode("utf-8")
def correct_time_string(doc):
return '<dateTime>{0}</dateTime>'.format(now_string) in doc.decode("utf-8")
def check_date_time_value(r1, r2):
for r in [r1, r2]:
if has_tag(r.body):
if not correct_time_string(r.body):
return False
return True
my_vcr = config.VCR(
custom_patches=(
(SoapAdapter, '_HTTPSConnection', VCRHTTPSConnection),))
my_vcr.register_matcher('document', check_date_time_value)
        # NOTE (hartsock): the `match_on` option is altered to use the custom
        # 'document' matcher, which inspects the XML body sent to the server
with my_vcr.use_cassette('iso8601_set_datetime.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once',
match_on=['method', 'scheme', 'host', 'port',
'path', 'query', 'document']):
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
search_index = si.content.searchIndex
uuid = "4c4c4544-0043-4d10-8056-b1c04f4c5331"
host = search_index.FindByUuid(None, uuid, False)
date_time_system = host.configManager.dateTimeSystem
# NOTE (hartsock): sending the date time 'now' to host.
date_time_system.UpdateDateTime(now)
|
import pytest
from homeassistant.components import frontend
from homeassistant.setup import async_setup_component
@pytest.mark.parametrize(
"config_to_try",
(
{"invalid space": {"url": "https://home-assistant.io"}},
{"router": {"url": "not-a-url"}},
),
)
async def test_wrong_config(hass, config_to_try):
"""Test setup with wrong configuration."""
assert not await async_setup_component(
hass, "panel_iframe", {"panel_iframe": config_to_try}
)
async def test_correct_config(hass):
"""Test correct config."""
assert await async_setup_component(
hass,
"panel_iframe",
{
"panel_iframe": {
"router": {
"icon": "mdi:network-wireless",
"title": "Router",
"url": "http://192.168.1.1",
"require_admin": True,
},
"weather": {
"icon": "mdi:weather",
"title": "Weather",
"url": "https://www.wunderground.com/us/ca/san-diego",
"require_admin": True,
},
"api": {"icon": "mdi:weather", "title": "Api", "url": "/api"},
"ftp": {
"icon": "mdi:weather",
"title": "FTP",
"url": "ftp://some/ftp",
},
}
},
)
panels = hass.data[frontend.DATA_PANELS]
assert panels.get("router").to_response() == {
"component_name": "iframe",
"config": {"url": "http://192.168.1.1"},
"icon": "mdi:network-wireless",
"title": "Router",
"url_path": "router",
"require_admin": True,
}
assert panels.get("weather").to_response() == {
"component_name": "iframe",
"config": {"url": "https://www.wunderground.com/us/ca/san-diego"},
"icon": "mdi:weather",
"title": "Weather",
"url_path": "weather",
"require_admin": True,
}
assert panels.get("api").to_response() == {
"component_name": "iframe",
"config": {"url": "/api"},
"icon": "mdi:weather",
"title": "Api",
"url_path": "api",
"require_admin": False,
}
assert panels.get("ftp").to_response() == {
"component_name": "iframe",
"config": {"url": "ftp://some/ftp"},
"icon": "mdi:weather",
"title": "FTP",
"url_path": "ftp",
"require_admin": False,
}
|
import json
import hashlib
import requests
from optional_django.serializers import JSONEncoder
from .exceptions import ReactRenderingError
from . import conf
from .exceptions import RenderServerError
class RenderedComponent(object):
def __init__(self, markup, props, data):
self.markup = markup
self.props = props
self.data = data
def __str__(self):
return self.markup
def __unicode__(self):
return unicode(self.markup)
class RenderServer(object):
def render(self, path, props=None, to_static_markup=False, request_headers=None, timeout=None, url=None):
if not url:
url = conf.settings.RENDER_URL
if props is not None:
serialized_props = json.dumps(props, cls=JSONEncoder)
else:
serialized_props = None
if not conf.settings.RENDER:
return RenderedComponent('', serialized_props, {})
options = {
'path': path,
'serializedProps': serialized_props,
'toStaticMarkup': to_static_markup
}
serialized_options = json.dumps(options)
options_hash = hashlib.sha1(serialized_options.encode('utf-8')).hexdigest()
all_request_headers = {'content-type': 'application/json'}
        # Add additional request headers if the request_headers dictionary is specified
if request_headers is not None:
all_request_headers.update(request_headers)
        # Fall back to a default send/receive timeout if a valid one was not specified
if not isinstance(timeout, (tuple, int, float)):
timeout = 5.0
try:
res = requests.post(
url,
data=serialized_options,
headers=all_request_headers,
params={'hash': options_hash},
timeout=timeout
)
except requests.ConnectionError:
raise RenderServerError('Could not connect to render server at {}'.format(url))
if res.status_code != 200:
raise RenderServerError(
'Unexpected response from render server at {} - {}: {}'.format(url, res.status_code, res.text)
)
obj = res.json()
markup = obj.pop('markup', None)
err = obj.pop('error', None)
data = obj
if err:
if 'message' in err and 'stack' in err:
raise ReactRenderingError(
'Message: {}\n\nStack trace: {}'.format(err['message'], err['stack'])
)
raise ReactRenderingError(err)
if markup is None:
raise ReactRenderingError('Render server failed to return markup. Returned: {}'.format(obj))
return RenderedComponent(markup, serialized_props, data)
render_server = RenderServer()
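# A minimal usage sketch (the component path and props are hypothetical, and a
# render server is assumed to be listening at conf.settings.RENDER_URL):
#
#     component = render_server.render(
#         'src/components/Greeting.js',
#         props={'name': 'World'},
#         to_static_markup=False,
#     )
#     html = str(component)  # RenderedComponent.__str__ returns the markup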
|
import hashlib
import logging
import re
import pylast as lastfm
from pylast import WSError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_LAST_PLAYED = "last_played"
ATTR_PLAY_COUNT = "play_count"
ATTR_TOP_PLAYED = "top_played"
ATTRIBUTION = "Data provided by Last.fm"
STATE_NOT_SCROBBLING = "Not Scrobbling"
CONF_USERS = "users"
ICON = "mdi:radio-fm"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_USERS, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Last.fm sensor platform."""
api_key = config[CONF_API_KEY]
users = config.get(CONF_USERS)
lastfm_api = lastfm.LastFMNetwork(api_key=api_key)
entities = []
for username in users:
try:
lastfm_api.get_user(username).get_image()
entities.append(LastfmSensor(username, lastfm_api))
except WSError as error:
_LOGGER.error(error)
return
add_entities(entities, True)
class LastfmSensor(Entity):
"""A class for the Last.fm account."""
def __init__(self, user, lastfm_api):
"""Initialize the sensor."""
self._unique_id = hashlib.sha256(user.encode("utf-8")).hexdigest()
self._user = lastfm_api.get_user(user)
self._name = user
self._lastfm = lastfm_api
self._state = "Not Scrobbling"
self._playcount = None
self._lastplayed = None
self._topplayed = None
self._cover = None
@property
def unique_id(self):
"""Return the unique ID of the sensor."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Update device state."""
self._cover = self._user.get_image()
self._playcount = self._user.get_playcount()
recent_tracks = self._user.get_recent_tracks(limit=2)
if recent_tracks:
last = recent_tracks[0]
self._lastplayed = f"{last.track.artist} - {last.track.title}"
top_tracks = self._user.get_top_tracks(limit=1)
if top_tracks:
top = top_tracks[0]
toptitle = re.search("', '(.+?)',", str(top))
topartist = re.search("'(.+?)',", str(top))
self._topplayed = f"{topartist.group(1)} - {toptitle.group(1)}"
now_playing = self._user.get_now_playing()
if now_playing is None:
self._state = STATE_NOT_SCROBBLING
return
self._state = f"{now_playing.artist} - {now_playing.title}"
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_LAST_PLAYED: self._lastplayed,
ATTR_PLAY_COUNT: self._playcount,
ATTR_TOP_PLAYED: self._topplayed,
}
@property
def entity_picture(self):
"""Avatar of the user."""
return self._cover
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON
|
from enum import Enum
from os import path
import pytest
from .fixture import make
DiffResult = Enum('DiffResult', 'Same SameFiltered DodgySame DodgyDifferent Different FileError')
@pytest.fixture
def different_dirs():
make()
def abspath(*args):
d = path.dirname(__file__)
return list(path.join(d, arg) for arg in args)
cmp_args = {
'shallow-comparison': False,
'time-resolution': 10000000000,
'ignore_blank_lines': True,
'apply-text-filters': True
}
no_ignore_args = dict(cmp_args)
no_ignore_args['ignore_blank_lines'] = False
no_ignore_args['apply-text-filters'] = False
dodgy_args = dict(cmp_args)
dodgy_args['shallow-comparison'] = True
@pytest.mark.parametrize('files, regexes, comparison_args, expected', [
# empty file list
((), [], cmp_args, DiffResult.Same),
# dirs are same
(('diffs/a', 'diffs/b'), [], cmp_args, DiffResult.Same),
    # dir and file are different
(('diffs/a', 'diffs/b/b.txt'), [], cmp_args, DiffResult.Different),
# shallow equal (time + size)
(('diffs/a/d/d.txt', 'diffs/b/d/d.1.txt'), [], dodgy_args, DiffResult.DodgySame),
    # empty files (fastest equal, won't read files)
(('diffs/a/c/c.txt', 'diffs/b/c/c.txt'), [], cmp_args, DiffResult.Same),
# 4.1kb vs 4.1kb file (slow equal, read both until end)
(('diffs/a/d/d.txt', 'diffs/b/d/d.txt'), [], cmp_args, DiffResult.Same),
# 4.1kb vs 4.1kb file (fast different, first chunk diff)
(('diffs/a/d/d.txt', 'diffs/b/d/d.1.txt'), [], cmp_args, DiffResult.Different),
# 4.1kb vs 4.1kb file (slow different, read both until end)
(('diffs/a/d/d.txt', 'diffs/b/d/d.2.txt'), [], cmp_args, DiffResult.Different),
# empty vs 1b file (fast different, first chunk diff)
(('diffs/a/e/g/g.txt', 'diffs/b/e/g/g.txt'), [], cmp_args, DiffResult.Different),
# CRLF vs CRLF with trailing, ignoring blank lines
(('diffs/a/crlf.txt', 'diffs/a/crlftrailing.txt'), [], cmp_args, DiffResult.SameFiltered),
# CRLF vs CRLF with trailing, not ignoring blank lines
(('diffs/a/crlf.txt', 'diffs/a/crlftrailing.txt'), [], no_ignore_args, DiffResult.Different),
# LF vs LF with trailing, ignoring blank lines
(('diffs/b/lf.txt', 'diffs/b/lftrailing.txt'), [], cmp_args, DiffResult.SameFiltered),
# LF vs LF with trailing, not ignoring blank lines
(('diffs/b/lf.txt', 'diffs/b/lftrailing.txt'), [], no_ignore_args, DiffResult.Different),
# CRLF vs LF, ignoring blank lines
(('diffs/a/crlf.txt', 'diffs/b/lf.txt'), [], cmp_args, DiffResult.SameFiltered),
# CRLF vs LF, not ignoring blank lines
(('diffs/a/crlf.txt', 'diffs/b/lf.txt'), [], no_ignore_args, DiffResult.Different),
# CRLF with trailing vs LF with trailing, ignoring blank lines
(('diffs/a/crlftrailing.txt', 'diffs/b/lftrailing.txt'), [], cmp_args, DiffResult.SameFiltered),
# CRLF with trailing vs LF with trailing, not ignoring blank lines
(('diffs/a/crlftrailing.txt', 'diffs/b/lftrailing.txt'), [], no_ignore_args, DiffResult.Different),
])
def test_files_same(files, regexes, comparison_args, expected, different_dirs):
from meld.dirdiff import _files_same
files_path = abspath(*files)
result = _files_same(files_path, regexes, comparison_args)
actual = DiffResult(result + 1)
assert actual == expected
|
import functools
from typing import List
from zigpy.exceptions import ZigbeeException
import zigpy.zcl.clusters.hvac as hvac
from homeassistant.components.fan import (
DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import State, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core import discovery
from .core.const import (
CHANNEL_FAN,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
)
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity, ZhaGroupEntity
# Additional speeds in zigbee's ZCL
# Spec is unclear as to what this value means. On King Of Fans HBUniversal
# receiver, this means Very High.
SPEED_ON = "on"
# The fan speed is self-regulated
SPEED_AUTO = "auto"
# When the heated/cooled space is occupied, the fan is always on
SPEED_SMART = "smart"
SPEED_LIST = [
SPEED_OFF,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_HIGH,
SPEED_ON,
SPEED_AUTO,
SPEED_SMART,
]
VALUE_TO_SPEED = dict(enumerate(SPEED_LIST))
SPEED_TO_VALUE = {speed: i for i, speed in enumerate(SPEED_LIST)}
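# For example, SPEED_TO_VALUE["off"] == 0 and VALUE_TO_SPEED[3] == SPEED_HIGH,
# mirroring the ZCL fan_mode attribute values in SPEED_LIST order.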
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, DOMAIN)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation fan from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
class BaseFan(FanEntity):
"""Base representation of a ZHA fan."""
def __init__(self, *args, **kwargs):
"""Initialize the fan."""
super().__init__(*args, **kwargs)
self._state = None
self._fan_channel = None
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return SPEED_LIST
@property
def speed(self) -> str:
"""Return the current speed."""
return self._state
@property
def is_on(self) -> bool:
"""Return true if entity is on."""
if self._state is None:
return False
return self._state != SPEED_OFF
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_SET_SPEED
async def async_turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn the entity on."""
if speed is None:
speed = SPEED_MEDIUM
await self.async_set_speed(speed)
async def async_turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
await self.async_set_speed(SPEED_OFF)
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
await self._fan_channel.async_set_speed(SPEED_TO_VALUE[speed])
self.async_set_state(0, "fan_mode", speed)
@callback
def async_set_state(self, attr_id, attr_name, value):
"""Handle state update from channel."""
@STRICT_MATCH(channel_names=CHANNEL_FAN)
class ZhaFan(BaseFan, ZhaEntity):
"""Representation of a ZHA fan."""
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Init this sensor."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._fan_channel = self.cluster_channels.get(CHANNEL_FAN)
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._fan_channel, SIGNAL_ATTR_UPDATED, self.async_set_state
)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._state = VALUE_TO_SPEED.get(last_state.state, last_state.state)
@callback
def async_set_state(self, attr_id, attr_name, value):
"""Handle state update from channel."""
self._state = VALUE_TO_SPEED.get(value, self._state)
self.async_write_ha_state()
async def async_update(self):
"""Attempt to retrieve on off state from the fan."""
await super().async_update()
if self._fan_channel:
state = await self._fan_channel.get_attribute_value("fan_mode")
if state is not None:
self._state = VALUE_TO_SPEED.get(state, self._state)
@GROUP_MATCH()
class FanGroup(BaseFan, ZhaGroupEntity):
"""Representation of a fan group."""
def __init__(
self, entity_ids: List[str], unique_id: str, group_id: int, zha_device, **kwargs
) -> None:
"""Initialize a fan group."""
super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs)
self._available: bool = False
group = self.zha_device.gateway.get_group(self._group_id)
self._fan_channel = group.endpoint[hvac.Fan.cluster_id]
        # Hack: monkey-patch a group-level speed setter onto the channel; revisit this.
async def async_set_speed(value) -> None:
"""Set the speed of the fan."""
try:
await self._fan_channel.write_attributes({"fan_mode": value})
except ZigbeeException as ex:
self.error("Could not set speed: %s", ex)
return
self._fan_channel.async_set_speed = async_set_speed
async def async_update(self):
"""Attempt to retrieve on off state from the fan."""
all_states = [self.hass.states.get(x) for x in self._entity_ids]
states: List[State] = list(filter(None, all_states))
on_states: List[State] = [state for state in states if state.state != SPEED_OFF]
self._available = any(state.state != STATE_UNAVAILABLE for state in states)
        # for now just use the first non-off state since it's kind of arbitrary
        if not on_states:
            self._state = SPEED_OFF
        else:
            self._state = on_states[0].state
|
import os
import posixpath
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import linux_packages
from six.moves.urllib.parse import urlparse
flags.DEFINE_string('maven_version', '3.6.3',
'The version of maven')
flags.DEFINE_string('maven_mirror_url', None,
'If specified, this URL will be used as a Maven mirror')
FLAGS = flags.FLAGS
MVN_URL = 'https://archive.apache.org/dist/maven/maven-{0}/{1}/binaries/apache-maven-{1}-bin.tar.gz'
MVN_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'maven')
MVN_ENV_PATH = '/etc/profile.d/maven.sh'
MVN_ENV = '''
export JAVA_HOME={java_home}
export M2_HOME={maven_home}
export MAVEN_HOME={maven_home}
export PATH={maven_home}/bin:$PATH
'''
PACKAGE_NAME = 'maven'
PREPROVISIONED_DATA = {
'apache-maven-{0}-bin.tar.gz'.format('3.6.1'):
'2528c35a99c30f8940cc599ba15d34359d58bec57af58c1075519b8cd33b69e7',
'apache-maven-{0}-bin.tar.gz'.format('3.6.3'):
'26ad91d751b3a9a53087aefa743f4e16a17741d3915b219cf74112bf87a438c5'
}
PACKAGE_DATA_URL = {
'apache-maven-{0}-bin.tar.gz'.format('3.6.1'): MVN_URL.format('3', '3.6.1'),
'apache-maven-{0}-bin.tar.gz'.format('3.6.3'): MVN_URL.format('3', '3.6.3')
}
def GetRunCommand(arguments):
"""Return Maven run command including proxy settings."""
command = 'source {} && mvn {}'.format(MVN_ENV_PATH, arguments)
if FLAGS['http_proxy'].present:
parsed_url = urlparse(FLAGS.http_proxy)
http_proxy_params = ' -Dhttp.proxyHost={host} -Dhttp.proxyPort={port}'
command += http_proxy_params.format(
host=parsed_url.hostname, port=parsed_url.port)
if FLAGS['https_proxy'].present:
parsed_url = urlparse(FLAGS.https_proxy)
https_proxy_params = ' -Dhttps.proxyHost={host} -Dhttps.proxyPort={port}'
command += https_proxy_params.format(
host=parsed_url.hostname, port=parsed_url.port)
return command
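# For example, with --http_proxy=http://proxy.example.com:3128 (hypothetical
# value), GetRunCommand('clean install') returns:
#     source /etc/profile.d/maven.sh && mvn clean install -Dhttp.proxyHost=proxy.example.com -Dhttp.proxyPort=3128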
def _GetJavaHome(vm):
out, _ = vm.RemoteCommand("java -XshowSettings:properties 2>&1 > /dev/null "
"| awk '/java.home/{print $3}'")
out = out.strip()
if '/jre' in out:
return out[:out.index('/jre')]
else:
return out
def AptInstall(vm):
_Install(vm)
def YumInstall(vm):
vm.InstallPackages('which')
_Install(vm)
def _Install(vm):
"""Install maven package."""
vm.Install('openjdk')
vm.Install('curl')
# Download and extract maven
maven_full_ver = FLAGS.maven_version
maven_major_ver = maven_full_ver[:maven_full_ver.index('.')]
maven_url = MVN_URL.format(maven_major_ver, maven_full_ver)
maven_tar = maven_url.split('/')[-1]
  # Versions without a known checksum only work with --preprovision_ignore_checksum
if maven_tar not in PREPROVISIONED_DATA:
PREPROVISIONED_DATA[maven_tar] = ''
PACKAGE_DATA_URL[maven_tar] = maven_url
maven_remote_path = posixpath.join(linux_packages.INSTALL_DIR, maven_tar)
vm.InstallPreprovisionedPackageData(PACKAGE_NAME, [maven_tar],
linux_packages.INSTALL_DIR)
vm.RemoteCommand(('mkdir -p {0} && '
'tar -C {0} --strip-components=1 -xzf {1}').format(
MVN_DIR, maven_remote_path))
java_home = _GetJavaHome(vm)
# Set env variables for maven
maven_env = MVN_ENV.format(java_home=java_home, maven_home=MVN_DIR)
cmd = 'echo "{0}" | sudo tee -a {1}'.format(maven_env, MVN_ENV_PATH)
vm.RemoteCommand(cmd)
if FLAGS.maven_mirror_url:
settings_local_path = data.ResourcePath(os.path.join(
'maven', 'settings.xml.j2'))
settings_remote_path = '~/.m2/settings.xml'
context = {
'maven_mirror_url': FLAGS.maven_mirror_url
}
vm.RemoteCommand('mkdir -p ~/.m2')
vm.RenderTemplate(settings_local_path, settings_remote_path, context)
def Uninstall(vm):
vm.Uninstall('openjdk')
vm.RemoteCommand('rm -rf {0}'.format(MVN_DIR), ignore_failure=True)
vm.RemoteCommand('sudo rm -f {0}'.format(MVN_ENV_PATH), ignore_failure=True)
|
import pytest
from voluptuous.error import MultipleInvalid
from homeassistant.components.eafm import const
from tests.async_mock import patch
async def test_flow_no_discovered_stations(hass, mock_get_stations):
"""Test config flow discovers no station."""
mock_get_stations.return_value = []
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "abort"
assert result["reason"] == "no_stations"
async def test_flow_invalid_station(hass, mock_get_stations):
"""Test config flow errors on invalid station."""
mock_get_stations.return_value = [
{"label": "My station", "stationReference": "L12345"}
]
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
with pytest.raises(MultipleInvalid):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"station": "My other station"}
)
async def test_flow_works(hass, mock_get_stations, mock_get_station):
"""Test config flow discovers no station."""
mock_get_stations.return_value = [
{"label": "My station", "stationReference": "L12345"}
]
mock_get_station.return_value = [
{"label": "My station", "stationReference": "L12345"}
]
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
with patch("homeassistant.components.eafm.async_setup_entry", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"station": "My station"}
)
assert result["type"] == "create_entry"
assert result["title"] == "My station"
assert result["data"] == {
"station": "L12345",
}
|
import logging
import os
import pykira
import voluptuous as vol
from voluptuous.error import Error as VoluptuousError
import yaml
from homeassistant.const import (
CONF_CODE,
CONF_DEVICE,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_SENSORS,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
STATE_UNKNOWN,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
DOMAIN = "kira"
_LOGGER = logging.getLogger(__name__)
DEFAULT_HOST = "0.0.0.0"
DEFAULT_PORT = 65432
CONF_REPEAT = "repeat"
CONF_REMOTES = "remotes"
CONF_SENSOR = "sensor"
CONF_REMOTE = "remote"
CODES_YAML = f"{DOMAIN}_codes.yaml"
CODE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_CODE): cv.string,
vol.Optional(CONF_TYPE): cv.string,
vol.Optional(CONF_DEVICE): cv.string,
vol.Optional(CONF_REPEAT): cv.positive_int,
}
)
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "sensors"),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
REMOTE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "remotes"),
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SENSORS): [SENSOR_SCHEMA],
vol.Optional(CONF_REMOTES): [REMOTE_SCHEMA],
}
)
},
extra=vol.ALLOW_EXTRA,
)
def load_codes(path):
"""Load KIRA codes from specified file."""
codes = []
if os.path.exists(path):
with open(path) as code_file:
data = yaml.safe_load(code_file) or []
for code in data:
try:
codes.append(CODE_SCHEMA(code))
except VoluptuousError as exception:
# keep going
_LOGGER.warning("KIRA code invalid data: %s", exception)
else:
with open(path, "w") as code_file:
code_file.write("")
return codes
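# A minimal example entry for kira_codes.yaml, matching CODE_SCHEMA above
# (the code string and device name are hypothetical):
#
#     - name: "TV Power"
#       code: "K 23 00 86 ..."
#       device: "television"
#       repeat: 2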
def setup(hass, config):
"""Set up the KIRA component."""
sensors = config.get(DOMAIN, {}).get(CONF_SENSORS, [])
remotes = config.get(DOMAIN, {}).get(CONF_REMOTES, [])
# If no sensors or remotes were specified, add a sensor
if not (sensors or remotes):
sensors.append({})
codes = load_codes(hass.config.path(CODES_YAML))
hass.data[DOMAIN] = {CONF_SENSOR: {}, CONF_REMOTE: {}}
def load_module(platform, idx, module_conf):
"""Set up the KIRA module and load platform."""
# note: module_name is not the HA device name. it's just a unique name
# to ensure the component and platform can share information
module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN
device_name = module_conf.get(CONF_NAME, DOMAIN)
port = module_conf.get(CONF_PORT, DEFAULT_PORT)
host = module_conf.get(CONF_HOST, DEFAULT_HOST)
if platform == CONF_SENSOR:
module = pykira.KiraReceiver(host, port)
module.start()
else:
module = pykira.KiraModule(host, port)
hass.data[DOMAIN][platform][module_name] = module
for code in codes:
code_tuple = (code.get(CONF_NAME), code.get(CONF_DEVICE, STATE_UNKNOWN))
module.registerCode(code_tuple, code.get(CONF_CODE))
discovery.load_platform(
hass, platform, DOMAIN, {"name": module_name, "device": device_name}, config
)
for idx, module_conf in enumerate(sensors):
load_module(CONF_SENSOR, idx, module_conf)
for idx, module_conf in enumerate(remotes):
load_module(CONF_REMOTE, idx, module_conf)
def _stop_kira(_event):
"""Stop the KIRA receiver."""
for receiver in hass.data[DOMAIN][CONF_SENSOR].values():
receiver.stop()
_LOGGER.info("Terminated receivers")
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira)
return True
|
import logging
from typing import Optional
from pyvolumio import CannotConnectError, Volumio
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.const import CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_HOST): str, vol.Required(CONF_PORT, default=3000): int}
)
async def validate_input(hass, host, port):
"""Validate the user input allows us to connect."""
volumio = Volumio(host, port, async_get_clientsession(hass))
try:
return await volumio.get_system_info()
except CannotConnectError as error:
raise CannotConnect from error
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Volumio."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize flow."""
self._host: Optional[str] = None
self._port: Optional[int] = None
self._name: Optional[str] = None
self._uuid: Optional[str] = None
@callback
def _async_get_entry(self):
return self.async_create_entry(
title=self._name,
data={
CONF_NAME: self._name,
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_ID: self._uuid,
},
)
async def _set_uid_and_abort(self):
await self.async_set_unique_id(self._uuid)
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_NAME: self._name,
}
)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
info = None
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
try:
info = await validate_input(self.hass, self._host, self._port)
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if info is not None:
self._name = info.get("name", self._host)
self._uuid = info.get("id")
if self._uuid is not None:
await self._set_uid_and_abort()
return self._async_get_entry()
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_zeroconf(self, discovery_info: DiscoveryInfoType):
"""Handle zeroconf discovery."""
self._host = discovery_info["host"]
self._port = int(discovery_info["port"])
self._name = discovery_info["properties"]["volumioName"]
self._uuid = discovery_info["properties"]["UUID"]
await self._set_uid_and_abort()
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(self, user_input=None):
"""Handle user-confirmation of discovered node."""
if user_input is not None:
try:
await validate_input(self.hass, self._host, self._port)
return self._async_get_entry()
except CannotConnect:
return self.async_abort(reason="cannot_connect")
return self.async_show_form(
step_id="discovery_confirm", description_placeholders={"name": self._name}
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
|
import os
import textwrap
from gi.repository import Gio, GObject, Gtk, Pango
from meld.conf import _
from meld.settings import get_meld_settings, settings
@Gtk.Template(resource_path='/org/gnome/meld/ui/commit-dialog.ui')
class CommitDialog(Gtk.Dialog):
__gtype_name__ = "CommitDialog"
break_commit_message = GObject.Property(type=bool, default=False)
changedfiles = Gtk.Template.Child()
textview = Gtk.Template.Child()
scrolledwindow1 = Gtk.Template.Child()
previousentry = Gtk.Template.Child()
def __init__(self, parent):
super().__init__()
self.set_transient_for(parent.get_toplevel())
selected = parent._get_selected_files()
try:
to_commit = parent.vc.get_files_to_commit(selected)
topdir = parent.vc.root
if to_commit:
to_commit = ["\t" + s for s in to_commit]
else:
to_commit = ["\t" + _("No files will be committed")]
except NotImplementedError:
topdir = os.path.dirname(os.path.commonprefix(selected))
to_commit = ["\t" + s[len(topdir) + 1:] for s in selected]
self.changedfiles.set_text("(in %s)\n%s" %
(topdir, "\n".join(to_commit)))
font = get_meld_settings().font
self.textview.modify_font(font)
commit_prefill = parent.vc.get_commit_message_prefill()
if commit_prefill:
buf = self.textview.get_buffer()
buf.set_text(commit_prefill)
buf.place_cursor(buf.get_start_iter())
# Try and make the textview wide enough for a standard 80-character
# commit message.
context = self.textview.get_pango_context()
metrics = context.get_metrics(None, None)
char_width = metrics.get_approximate_char_width() / Pango.SCALE
width_request, height_request = self.scrolledwindow1.get_size_request()
self.scrolledwindow1.set_size_request(80 * char_width, height_request)
settings.bind('vc-show-commit-margin', self.textview,
'show-right-margin', Gio.SettingsBindFlags.DEFAULT)
settings.bind('vc-commit-margin', self.textview,
'right-margin-position', Gio.SettingsBindFlags.DEFAULT)
settings.bind('vc-break-commit-message', self,
'break-commit-message', Gio.SettingsBindFlags.DEFAULT)
self.show_all()
def run(self):
self.previousentry.set_active(-1)
self.textview.grab_focus()
response = super().run()
msg = None
if response == Gtk.ResponseType.OK:
show_margin = self.textview.get_show_right_margin()
margin = self.textview.get_right_margin_position()
buf = self.textview.get_buffer()
msg = buf.get_text(*buf.get_bounds(), include_hidden_chars=False)
# This is a dependent option because of the margin column
if show_margin and self.props.break_commit_message:
paragraphs = msg.split("\n\n")
msg = "\n\n".join(textwrap.fill(p, margin) for p in paragraphs)
if msg.strip():
self.previousentry.prepend_history(msg)
self.destroy()
return response, msg
@Gtk.Template.Callback()
def on_previousentry_activate(self, gentry):
idx = gentry.get_active()
if idx != -1:
model = gentry.get_model()
buf = self.textview.get_buffer()
buf.set_text(model[idx][1])
@Gtk.Template(resource_path='/org/gnome/meld/ui/push-dialog.ui')
class PushDialog(Gtk.MessageDialog):
__gtype_name__ = "PushDialog"
def __init__(self, parent):
super().__init__()
self.set_transient_for(parent.get_toplevel())
self.show_all()
def run(self):
# TODO: Ask the VC for a more informative label for what will happen.
# In git, this is probably the parsed output of push --dry-run.
response = super().run()
self.destroy()
return response
|
from typing import Callable, TypeVar, Union
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
class DictRegistry(dict):
"""Dict Registry of items."""
def register(
self, name: Union[int, str], item: Union[str, CALLABLE_T] = None
) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Return decorator to register item with a specific name."""
def decorator(channel: CALLABLE_T) -> CALLABLE_T:
"""Register decorated channel or item."""
if item is None:
self[name] = channel
else:
self[name] = item
return channel
return decorator
class SetRegistry(set):
"""Set Registry of items."""
def register(self, name: Union[int, str]) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Return decorator to register item with a specific name."""
def decorator(channel: CALLABLE_T) -> CALLABLE_T:
"""Register decorated channel or item."""
self.add(name)
return channel
return decorator
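# A minimal usage sketch (registry and class names are hypothetical):
#
#     MATCHERS = DictRegistry()
#
#     @MATCHERS.register("on_off")
#     class OnOffChannel:
#         """Now reachable as MATCHERS["on_off"]."""
#
#     CLUSTERS = SetRegistry()
#
#     @CLUSTERS.register(0x0006)
#     class OnOffCluster:
#         """0x0006 is added to the set; the class itself is returned unchanged."""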
|
from datetime import date, datetime, time, timedelta
from pygal._compat import is_str, timestamp
from pygal.adapters import positive
from pygal.graph.xy import XY
def datetime_to_timestamp(x):
"""Convert a datetime into a utc float timestamp"""
if isinstance(x, datetime):
return timestamp(x)
return x
def datetime_to_time(x):
"""Convert a datetime into a time"""
if isinstance(x, datetime):
return x.time()
return x
def date_to_datetime(x):
"""Convert a date into a datetime"""
if not isinstance(x, datetime) and isinstance(x, date):
return datetime.combine(x, time())
return x
def time_to_datetime(x):
"""Convert a time into a datetime"""
if isinstance(x, time):
return datetime.combine(date(1970, 1, 1), x)
return x
def timedelta_to_seconds(x):
"""Convert a timedelta into an amount of seconds"""
if isinstance(x, timedelta):
return x.total_seconds()
return x
def time_to_seconds(x):
"""Convert a time in a seconds sum"""
if isinstance(x, time):
return ((((x.hour * 60) + x.minute) * 60 + x.second) * 10**6 +
x.microsecond) / 10**6
if is_str(x):
return x
# Clamp to valid time
return x and max(0, min(x, 24 * 3600 - 10**-6))
def seconds_to_time(x):
"""Convert a number of second into a time"""
t = int(x * 10**6)
ms = t % 10**6
t = t // 10**6
s = t % 60
t = t // 60
m = t % 60
t = t // 60
h = t
return time(h, m, s, ms)
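# For example, seconds_to_time(3661.5) == time(1, 1, 1, 500000): the float is
# scaled to microseconds, then microseconds, seconds, minutes and hours are
# peeled off in turn.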
class DateTimeLine(XY):
"""DateTime abscissa xy graph class"""
_x_adapters = [datetime_to_timestamp, date_to_datetime]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def datetime_to_str(x):
dt = datetime.utcfromtimestamp(x)
return self.x_value_formatter(dt)
return datetime_to_str
class DateLine(DateTimeLine):
"""Date abscissa xy graph class"""
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def date_to_str(x):
d = datetime.utcfromtimestamp(x).date()
return self.x_value_formatter(d)
return date_to_str
class TimeLine(DateTimeLine):
"""Time abscissa xy graph class"""
_x_adapters = [positive, time_to_seconds, datetime_to_time]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def date_to_str(x):
t = seconds_to_time(x)
return self.x_value_formatter(t)
return date_to_str
class TimeDeltaLine(XY):
"""TimeDelta abscissa xy graph class"""
_x_adapters = [timedelta_to_seconds]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def timedelta_to_str(x):
td = timedelta(seconds=x)
return self.x_value_formatter(td)
return timedelta_to_str
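# A minimal usage sketch (the series values are made up):
#
#     chart = DateTimeLine(x_value_formatter=lambda dt: dt.strftime('%Y-%m-%d %H:%M'))
#     chart.add('Visits', [(datetime(2015, 1, 1, 12, 0), 1),
#                          (datetime(2015, 1, 2, 6, 30), 3)])
#     svg = chart.render()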
|
from homeassistant.const import CONF_HOST
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .server import RoonServer
async def async_setup(hass, config):
"""Set up the Roon platform."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass, entry):
"""Set up a roonserver from a config entry."""
host = entry.data[CONF_HOST]
roonserver = RoonServer(hass, entry)
if not await roonserver.async_setup():
return False
hass.data[DOMAIN][entry.entry_id] = roonserver
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, entry.entry_id)},
manufacturer="Roonlabs",
name=host,
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
roonserver = hass.data[DOMAIN].pop(entry.entry_id)
return await roonserver.async_reset()
|
from flask import Flask, jsonify
from flasgger import Swagger
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Colors API'
}
swagger_config = Swagger.DEFAULT_CONFIG
swagger_config['swagger_ui_bundle_js'] = '//unpkg.com/swagger-ui-dist@3/swagger-ui-bundle.js'
swagger_config['swagger_ui_standalone_preset_js'] = '//unpkg.com/swagger-ui-dist@3/swagger-ui-standalone-preset.js'
swagger_config['jquery_js'] = '//unpkg.com/[email protected]/dist/jquery.min.js'
swagger_config['swagger_ui_css'] = '//unpkg.com/swagger-ui-dist@3/swagger-ui.css'
Swagger(app, config=swagger_config)
@app.route('/colors/<palette>/')
def colors(palette):
"""Example endpoint return a list of colors by palette
This is using docstring for specifications
---
tags:
- colors
parameters:
- name: palette
in: path
type: string
enum: ['all', 'rgb', 'cmyk']
required: true
default: all
description: Which palette to filter?
operationId: get_colors
consumes:
- application/json
produces:
- application/json
security:
colors_auth:
- 'write:colors'
- 'read:colors'
schemes: ['http', 'https']
deprecated: false
externalDocs:
description: Project repository
url: http://github.com/rochacbruno/flasgger
definitions:
Palette:
type: object
properties:
palette_name:
type: array
items:
$ref: '#/definitions/Color'
Color:
type: string
responses:
200:
description: A list of colors (may be filtered by palette)
schema:
$ref: '#/definitions/Palette'
examples:
rgb: ['red', 'green', 'blue']
"""
all_colors = {
        'cmyk': ['cyan', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
for url, spec in specs_data.items():
assert 'Palette' in spec['definitions']
assert 'Color' in spec['definitions']
assert 'colors' in spec['paths']['/colors/{palette}/']['get']['tags']
if __name__ == "__main__":
app.run(debug=True)
|
from datetime import timedelta
import logging
from volkszaehler import Volkszaehler
from volkszaehler.exceptions import VolkszaehlerApiConnectionError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PORT,
ENERGY_WATT_HOUR,
POWER_WATT,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_UUID = "uuid"
DEFAULT_HOST = "localhost"
DEFAULT_NAME = "Volkszaehler"
DEFAULT_PORT = 80
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
SENSOR_TYPES = {
"average": ["Average", POWER_WATT, "mdi:power-off"],
"consumption": ["Consumption", ENERGY_WATT_HOUR, "mdi:power-plug"],
"max": ["Max", POWER_WATT, "mdi:arrow-up"],
"min": ["Min", POWER_WATT, "mdi:arrow-down"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_UUID): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["average"]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Volkszaehler sensors."""
host = config[CONF_HOST]
name = config[CONF_NAME]
port = config[CONF_PORT]
uuid = config[CONF_UUID]
conditions = config[CONF_MONITORED_CONDITIONS]
session = async_get_clientsession(hass)
vz_api = VolkszaehlerData(
Volkszaehler(hass.loop, session, uuid, host=host, port=port)
)
await vz_api.async_update()
if vz_api.api.data is None:
raise PlatformNotReady
dev = []
for condition in conditions:
dev.append(VolkszaehlerSensor(vz_api, name, condition))
async_add_entities(dev, True)
class VolkszaehlerSensor(Entity):
"""Implementation of a Volkszaehler sensor."""
def __init__(self, vz_api, name, sensor_type):
"""Initialize the Volkszaehler sensor."""
self.vz_api = vz_api
self._name = name
self.type = sensor_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._name, SENSOR_TYPES[self.type][0])
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return SENSOR_TYPES[self.type][1]
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.vz_api.available
@property
def state(self):
"""Return the state of the resources."""
return self._state
async def async_update(self):
"""Get the latest data from REST API."""
await self.vz_api.async_update()
if self.vz_api.api.data is not None:
self._state = round(getattr(self.vz_api.api, self.type), 2)
class VolkszaehlerData:
"""The class for handling the data retrieval from the Volkszaehler API."""
def __init__(self, api):
"""Initialize the data object."""
self.api = api
self.available = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from the Volkszaehler REST API."""
try:
await self.api.get_data()
self.available = True
except VolkszaehlerApiConnectionError:
_LOGGER.error("Unable to fetch data from the Volkszaehler API")
self.available = False
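# Hedged configuration sketch (standard Home Assistant YAML for this platform,
# mirroring PLATFORM_SCHEMA above; host and uuid are placeholders):
#
#   sensor:
#     - platform: volkszaehler
#       host: demo.volkszaehler.org
#       uuid: 57acbef0-88a9-11e4-934f-6b0f9ecd95a8
#       monitored_conditions:
#         - average
#         - max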
|
from __future__ import division
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.fpn import bbox_head_loss_post
from chainercv.links.model.fpn import bbox_head_loss_pre
from chainercv.links.model.fpn import BboxHead
def _random_array(xp, shape):
return xp.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
@testing.parameterize(
{'n_class': 1 + 1},
{'n_class': 5 + 1},
{'n_class': 20 + 1},
)
class TestBboxHead(unittest.TestCase):
def setUp(self):
self.link = BboxHead(
n_class=self.n_class, scales=(1 / 2, 1 / 4, 1 / 8))
def _check_call(self):
hs = [
chainer.Variable(_random_array(self.link.xp, (2, 64, 32, 32))),
chainer.Variable(_random_array(self.link.xp, (2, 64, 16, 16))),
chainer.Variable(_random_array(self.link.xp, (2, 64, 8, 8))),
]
rois = [
self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32),
self.link.xp.array(
((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32),
self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32),
]
roi_indices = [
self.link.xp.array((0,), dtype=np.int32),
self.link.xp.array((1, 0), dtype=np.int32),
self.link.xp.array((1,), dtype=np.int32),
]
locs, confs = self.link(hs, rois, roi_indices)
self.assertIsInstance(locs, chainer.Variable)
self.assertIsInstance(locs.array, self.link.xp.ndarray)
self.assertEqual(locs.shape, (4, self.n_class, 4))
self.assertIsInstance(confs, chainer.Variable)
self.assertIsInstance(confs.array, self.link.xp.ndarray)
self.assertEqual(confs.shape, (4, self.n_class))
def test_call_cpu(self):
self._check_call()
@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self._check_call()
def _check_distribute(self):
rois = self.link.xp.array((
(0, 0, 10, 10),
(0, 1000, 0, 1000),
(0, 0, 224, 224),
(100, 100, 224, 224),
), dtype=np.float32)
roi_indices = self.link.xp.array((0, 1, 0, 0), dtype=np.int32)
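        # distribute() assigns each RoI to one of the three feature levels by
        # its size, so the per-level counts checked below must sum back to the
        # original 4 RoIs.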
rois, roi_indices = self.link.distribute(rois, roi_indices)
self.assertEqual(len(rois), 3)
self.assertEqual(len(roi_indices), 3)
for l in range(3):
self.assertIsInstance(rois[l], self.link.xp.ndarray)
self.assertIsInstance(roi_indices[l], self.link.xp.ndarray)
self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0])
self.assertEqual(rois[l].shape[1:], (4,))
self.assertEqual(roi_indices[l].shape[1:], ())
self.assertEqual(sum(rois[l].shape[0] for l in range(3)), 4)
def test_distribute_cpu(self):
self._check_distribute()
@attr.gpu
def test_distribute_gpu(self):
self.link.to_gpu()
self._check_distribute()
def _check_decode(self):
rois = [
self.link.xp.array(((4, 1, 6, 3),), dtype=np.float32),
self.link.xp.array(
((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32),
self.link.xp.array(((10, 4, 12, 10),), dtype=np.float32),
]
roi_indices = [
self.link.xp.array((0,), dtype=np.int32),
self.link.xp.array((1, 0), dtype=np.int32),
self.link.xp.array((1,), dtype=np.int32),
]
locs = chainer.Variable(_random_array(
self.link.xp, (4, self.n_class, 4)))
confs = chainer.Variable(_random_array(
self.link.xp, (4, self.n_class)))
bboxes, labels, scores = self.link.decode(
rois, roi_indices,
locs, confs,
(0.4, 0.2), ((100, 100), (200, 200)),
0.5, 0.1)
self.assertEqual(len(bboxes), 2)
self.assertEqual(len(labels), 2)
self.assertEqual(len(scores), 2)
for n in range(2):
self.assertIsInstance(bboxes[n], self.link.xp.ndarray)
self.assertIsInstance(labels[n], self.link.xp.ndarray)
self.assertIsInstance(scores[n], self.link.xp.ndarray)
self.assertEqual(bboxes[n].shape[0], labels[n].shape[0])
self.assertEqual(bboxes[n].shape[0], scores[n].shape[0])
self.assertEqual(bboxes[n].shape[1:], (4,))
self.assertEqual(labels[n].shape[1:], ())
self.assertEqual(scores[n].shape[1:], ())
def test_decode_cpu(self):
self._check_decode()
@attr.gpu
def test_decode_gpu(self):
self.link.to_gpu()
self._check_decode()
class TestBboxLoss(unittest.TestCase):
def _check_bbox_head_loss_pre(self, xp):
rois = [
xp.array(((4, 1, 6, 3),), dtype=np.float32),
xp.array(
((0, 1, 2, 3), (5, 4, 10, 6)), dtype=np.float32),
xp.array(((10, 4, 12, 10),), dtype=np.float32),
]
roi_indices = [
xp.array((0,), dtype=np.int32),
xp.array((1, 0), dtype=np.int32),
xp.array((1,), dtype=np.int32),
]
bboxes = [
xp.array(((2, 4, 6, 7), (1, 12, 3, 30)), dtype=np.float32),
xp.array(((10, 2, 12, 12),), dtype=np.float32),
]
labels = [
xp.array((10, 4), dtype=np.float32),
xp.array((1,), dtype=np.float32),
]
rois, roi_indices, gt_locs, gt_labels = bbox_head_loss_pre(
rois, roi_indices, (0.1, 0.2), bboxes, labels)
self.assertEqual(len(rois), 3)
self.assertEqual(len(roi_indices), 3)
self.assertEqual(len(gt_locs), 3)
self.assertEqual(len(gt_labels), 3)
for l in range(3):
self.assertIsInstance(rois[l], xp.ndarray)
self.assertIsInstance(roi_indices[l], xp.ndarray)
self.assertIsInstance(gt_locs[l], xp.ndarray)
self.assertIsInstance(gt_labels[l], xp.ndarray)
self.assertEqual(rois[l].shape[0], roi_indices[l].shape[0])
self.assertEqual(rois[l].shape[0], gt_locs[l].shape[0])
self.assertEqual(rois[l].shape[0], gt_labels[l].shape[0])
self.assertEqual(rois[l].shape[1:], (4,))
self.assertEqual(roi_indices[l].shape[1:], ())
self.assertEqual(gt_locs[l].shape[1:], (4,))
self.assertEqual(gt_labels[l].shape[1:], ())
def test_bbox_head_loss_pre_cpu(self):
self._check_bbox_head_loss_pre(np)
@attr.gpu
def test_bbox_head_loss_pre_gpu(self):
import cupy
self._check_bbox_head_loss_pre(cupy)
def _check_bbox_head_loss_post(self, xp):
locs = chainer.Variable(_random_array(xp, (20, 81, 4)))
confs = chainer.Variable(_random_array(xp, (20, 81)))
roi_indices = [
xp.random.randint(0, 2, size=5).astype(np.int32),
xp.random.randint(0, 2, size=7).astype(np.int32),
xp.random.randint(0, 2, size=8).astype(np.int32),
]
gt_locs = [
_random_array(xp, (5, 4)),
_random_array(xp, (7, 4)),
_random_array(xp, (8, 4)),
]
gt_labels = [
xp.random.randint(0, 80, size=5).astype(np.int32),
xp.random.randint(0, 80, size=7).astype(np.int32),
xp.random.randint(0, 80, size=8).astype(np.int32),
]
loc_loss, conf_loss = bbox_head_loss_post(
locs, confs, roi_indices, gt_locs, gt_labels, 2)
self.assertIsInstance(loc_loss, chainer.Variable)
self.assertIsInstance(loc_loss.array, xp.ndarray)
self.assertEqual(loc_loss.shape, ())
self.assertIsInstance(conf_loss, chainer.Variable)
self.assertIsInstance(conf_loss.array, xp.ndarray)
self.assertEqual(conf_loss.shape, ())
def test_bbox_head_loss_post_cpu(self):
self._check_bbox_head_loss_post(np)
@attr.gpu
def test_bbox_head_loss_post_gpu(self):
import cupy
self._check_bbox_head_loss_post(cupy)
testing.run_module(__name__, __file__)
|
import asyncio
import cProfile
import time
from guppy import hpy
from pyprof2calltree import convert
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
SERVICE_START = "start"
SERVICE_MEMORY = "memory"
CONF_SECONDS = "seconds"
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the profiler component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Profiler from a config entry."""
lock = asyncio.Lock()
async def _async_run_profile(call: ServiceCall):
async with lock:
await _async_generate_profile(hass, call)
async def _async_run_memory_profile(call: ServiceCall):
async with lock:
await _async_generate_memory_profile(hass, call)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_START,
_async_run_profile,
schema=vol.Schema(
{vol.Optional(CONF_SECONDS, default=60.0): vol.Coerce(float)}
),
)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_MEMORY,
_async_run_memory_profile,
schema=vol.Schema(
{vol.Optional(CONF_SECONDS, default=60.0): vol.Coerce(float)}
),
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
hass.services.async_remove(domain=DOMAIN, service=SERVICE_START)
return True
async def _async_generate_profile(hass: HomeAssistant, call: ServiceCall):
start_time = int(time.time() * 1000000)
hass.components.persistent_notification.async_create(
"The profile has started. This notification will be updated when it is complete.",
title="Profile Started",
notification_id=f"profiler_{start_time}",
)
profiler = cProfile.Profile()
profiler.enable()
await asyncio.sleep(float(call.data[CONF_SECONDS]))
profiler.disable()
cprofile_path = hass.config.path(f"profile.{start_time}.cprof")
callgrind_path = hass.config.path(f"callgrind.out.{start_time}")
await hass.async_add_executor_job(
_write_profile, profiler, cprofile_path, callgrind_path
)
hass.components.persistent_notification.async_create(
f"Wrote cProfile data to {cprofile_path} and callgrind data to {callgrind_path}",
title="Profile Complete",
notification_id=f"profiler_{start_time}",
)
async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall):
start_time = int(time.time() * 1000000)
hass.components.persistent_notification.async_create(
"The memory profile has started. This notification will be updated when it is complete.",
title="Profile Started",
notification_id=f"memory_profiler_{start_time}",
)
heap_profiler = hpy()
heap_profiler.setref()
await asyncio.sleep(float(call.data[CONF_SECONDS]))
heap = heap_profiler.heap()
heap_path = hass.config.path(f"heap_profile.{start_time}.hpy")
await hass.async_add_executor_job(_write_memory_profile, heap, heap_path)
hass.components.persistent_notification.async_create(
f"Wrote heapy memory profile to {heap_path}",
title="Profile Complete",
notification_id=f"memory_profiler_{start_time}",
)
def _write_profile(profiler, cprofile_path, callgrind_path):
profiler.create_stats()
profiler.dump_stats(cprofile_path)
convert(profiler.getstats(), callgrind_path)
def _write_memory_profile(heap, heap_path):
heap.byrcs.dump(heap_path)
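# Hedged usage sketch: the two admin services registered above are invoked with
# service data matching the vol.Schema declared in this module, e.g.
#
#   service: profiler.start
#   data:
#     seconds: 30.0
#
#   service: profiler.memory
#   data:
#     seconds: 30.0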
|
import logging
import socket
from maxcube.device import (
MAX_DEVICE_MODE_AUTOMATIC,
MAX_DEVICE_MODE_BOOST,
MAX_DEVICE_MODE_MANUAL,
MAX_DEVICE_MODE_VACATION,
)
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
PRESET_NONE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from . import DATA_KEY
_LOGGER = logging.getLogger(__name__)
ATTR_VALVE_POSITION = "valve_position"
PRESET_ON = "on"
# There are two magic temperature values, which indicate:
# Off (valve fully closed)
OFF_TEMPERATURE = 4.5
# On (valve fully open)
ON_TEMPERATURE = 30.5
# Lowest temperature that can be set without turning the valve off
MIN_TEMPERATURE = 5.0
# Highest temperature that can be set without fully opening the valve
MAX_TEMPERATURE = 30.0
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
HASS_PRESET_TO_MAX_MODE = {
PRESET_AWAY: MAX_DEVICE_MODE_VACATION,
PRESET_BOOST: MAX_DEVICE_MODE_BOOST,
PRESET_NONE: MAX_DEVICE_MODE_AUTOMATIC,
PRESET_ON: MAX_DEVICE_MODE_MANUAL,
}
MAX_MODE_TO_HASS_PRESET = {
MAX_DEVICE_MODE_AUTOMATIC: PRESET_NONE,
MAX_DEVICE_MODE_BOOST: PRESET_BOOST,
MAX_DEVICE_MODE_MANUAL: PRESET_NONE,
MAX_DEVICE_MODE_VACATION: PRESET_AWAY,
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Iterate through all MAX! Devices and add thermostats."""
devices = []
for handler in hass.data[DATA_KEY].values():
cube = handler.cube
for device in cube.devices:
name = f"{cube.room_by_id(device.room_id).name} {device.name}"
if cube.is_thermostat(device) or cube.is_wallthermostat(device):
devices.append(MaxCubeClimate(handler, name, device.rf_address))
if devices:
add_entities(devices)
class MaxCubeClimate(ClimateEntity):
"""MAX! Cube ClimateEntity."""
def __init__(self, handler, name, rf_address):
"""Initialize MAX! Cube ClimateEntity."""
self._name = name
self._rf_address = rf_address
self._cubehandle = handler
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def should_poll(self):
"""Return the polling state."""
return True
@property
def name(self):
"""Return the name of the climate device."""
return self._name
@property
def min_temp(self):
"""Return the minimum temperature."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
if device.min_temperature is None:
return MIN_TEMPERATURE
return device.min_temperature
@property
def max_temp(self):
"""Return the maximum temperature."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
if device.max_temperature is None:
return MAX_TEMPERATURE
return device.max_temperature
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
return device.actual_temperature
@property
def hvac_mode(self):
"""Return current operation mode."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
if device.mode in [MAX_DEVICE_MODE_AUTOMATIC, MAX_DEVICE_MODE_BOOST]:
return HVAC_MODE_AUTO
if (
device.mode == MAX_DEVICE_MODE_MANUAL
and device.target_temperature == OFF_TEMPERATURE
):
return HVAC_MODE_OFF
return HVAC_MODE_HEAT
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return [HVAC_MODE_OFF, HVAC_MODE_AUTO, HVAC_MODE_HEAT]
def set_hvac_mode(self, hvac_mode: str):
"""Set new target hvac mode."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
temp = device.target_temperature
mode = MAX_DEVICE_MODE_MANUAL
if hvac_mode == HVAC_MODE_OFF:
temp = OFF_TEMPERATURE
elif hvac_mode != HVAC_MODE_HEAT:
            # Reset the temperature to a sane value.
            # Ideally, we would send 0 and the device would set its
            # temperature according to the schedule. However, the current
            # version of the library has a bug which raises an
            # exception when setting values below 8.
if temp in [OFF_TEMPERATURE, ON_TEMPERATURE]:
temp = device.eco_temperature
mode = MAX_DEVICE_MODE_AUTOMATIC
cube = self._cubehandle.cube
with self._cubehandle.mutex:
try:
cube.set_temperature_mode(device, temp, mode)
except (socket.timeout, OSError):
_LOGGER.error("Setting HVAC mode failed")
return
@property
def hvac_action(self):
"""Return the current running hvac operation if supported."""
cube = self._cubehandle.cube
device = cube.device_by_rf(self._rf_address)
valve = 0
if cube.is_thermostat(device):
valve = device.valve_position
elif cube.is_wallthermostat(device):
for device in cube.devices_by_room(cube.room_by_id(device.room_id)):
if cube.is_thermostat(device) and device.valve_position > 0:
valve = device.valve_position
break
else:
return None
# Assume heating when valve is open
if valve > 0:
return CURRENT_HVAC_HEAT
return (
CURRENT_HVAC_OFF if self.hvac_mode == HVAC_MODE_OFF else CURRENT_HVAC_IDLE
)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
if (
device.target_temperature is None
or device.target_temperature < self.min_temp
or device.target_temperature > self.max_temp
):
return None
return device.target_temperature
def set_temperature(self, **kwargs):
"""Set new target temperatures."""
if kwargs.get(ATTR_TEMPERATURE) is None:
return False
target_temperature = kwargs.get(ATTR_TEMPERATURE)
device = self._cubehandle.cube.device_by_rf(self._rf_address)
cube = self._cubehandle.cube
with self._cubehandle.mutex:
try:
cube.set_target_temperature(device, target_temperature)
except (socket.timeout, OSError):
_LOGGER.error("Setting target temperature failed")
return False
@property
def preset_mode(self):
"""Return the current preset mode."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
if self.hvac_mode == HVAC_MODE_OFF:
return PRESET_NONE
if device.mode == MAX_DEVICE_MODE_MANUAL:
if device.target_temperature == device.comfort_temperature:
return PRESET_COMFORT
if device.target_temperature == device.eco_temperature:
return PRESET_ECO
if device.target_temperature == ON_TEMPERATURE:
return PRESET_ON
return PRESET_NONE
return MAX_MODE_TO_HASS_PRESET[device.mode]
@property
def preset_modes(self):
"""Return available preset modes."""
return [
PRESET_NONE,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
PRESET_AWAY,
PRESET_ON,
]
def set_preset_mode(self, preset_mode):
"""Set new operation mode."""
device = self._cubehandle.cube.device_by_rf(self._rf_address)
temp = device.target_temperature
mode = MAX_DEVICE_MODE_AUTOMATIC
if preset_mode in [PRESET_COMFORT, PRESET_ECO, PRESET_ON]:
mode = MAX_DEVICE_MODE_MANUAL
if preset_mode == PRESET_COMFORT:
temp = device.comfort_temperature
elif preset_mode == PRESET_ECO:
temp = device.eco_temperature
else:
temp = ON_TEMPERATURE
else:
mode = HASS_PRESET_TO_MAX_MODE[preset_mode] or MAX_DEVICE_MODE_AUTOMATIC
with self._cubehandle.mutex:
try:
self._cubehandle.cube.set_temperature_mode(device, temp, mode)
except (socket.timeout, OSError):
_LOGGER.error("Setting operation mode failed")
return
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
cube = self._cubehandle.cube
device = cube.device_by_rf(self._rf_address)
if not cube.is_thermostat(device):
return {}
return {ATTR_VALVE_POSITION: device.valve_position}
def update(self):
"""Get latest data from MAX! Cube."""
self._cubehandle.update()
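# Hedged illustration (not part of the integration): the hvac_action decision
# above reduces to a pure function over valve position and HVAC mode.
def _hvac_action_for(valve_position, hvac_mode):
    """Mirror the valve-based branch of MaxCubeClimate.hvac_action."""
    if valve_position > 0:
        return CURRENT_HVAC_HEAT
    return CURRENT_HVAC_OFF if hvac_mode == HVAC_MODE_OFF else CURRENT_HVAC_IDLE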
|
import datetime
import json
import time
import OpenSSL.crypto
import josepy as jose
import dns.resolver
from acme import challenges, errors, messages
from acme.client import BackwardsCompatibleClientV2, ClientNetwork
from acme.errors import TimeoutError
from acme.messages import Error as AcmeError
from flask import current_app
from lemur.common.utils import generate_private_key
from lemur.dns_providers import service as dns_provider_service
from lemur.exceptions import InvalidAuthority, UnknownProvider, InvalidConfiguration
from lemur.extensions import metrics, sentry
from lemur.plugins.lemur_acme import cloudflare, dyn, route53, ultradns, powerdns
from lemur.authorities import service as authorities_service
from retrying import retry
class AuthorizationRecord(object):
def __init__(self, domain, target_domain, authz, dns_challenge, change_id, cname_delegation):
self.domain = domain
self.target_domain = target_domain
self.authz = authz
self.dns_challenge = dns_challenge
self.change_id = change_id
self.cname_delegation = cname_delegation
class AcmeHandler(object):
def reuse_account(self, authority):
if not authority.options:
raise InvalidAuthority("Invalid authority. Options not set")
existing_key = False
existing_regr = False
for option in json.loads(authority.options):
if option["name"] == "acme_private_key" and option["value"]:
existing_key = True
if option["name"] == "acme_regr" and option["value"]:
existing_regr = True
if not existing_key and current_app.config.get("ACME_PRIVATE_KEY"):
existing_key = True
if not existing_regr and current_app.config.get("ACME_REGR"):
existing_regr = True
if existing_key and existing_regr:
return True
else:
return False
def strip_wildcard(self, host):
"""Removes the leading *. and returns Host and whether it was removed or not (True/False)"""
prefix = "*."
if host.startswith(prefix):
return host[len(prefix):], True
return host, False
def maybe_add_extension(self, host, dns_provider_options):
if dns_provider_options and dns_provider_options.get(
"acme_challenge_extension"
):
host = host + dns_provider_options.get("acme_challenge_extension")
return host
def request_certificate(self, acme_client, authorizations, order):
for authorization in authorizations:
for authz in authorization.authz:
authorization_resource, _ = acme_client.poll(authz)
deadline = datetime.datetime.now() + datetime.timedelta(seconds=360)
try:
orderr = acme_client.poll_and_finalize(order, deadline)
except (AcmeError, TimeoutError):
sentry.captureException(extra={"order_url": str(order.uri)})
metrics.send("request_certificate_error", "counter", 1, metric_tags={"uri": order.uri})
current_app.logger.error(
f"Unable to resolve Acme order: {order.uri}", exc_info=True
)
raise
except errors.ValidationError:
if order.fullchain_pem:
orderr = order
else:
raise
metrics.send("request_certificate_success", "counter", 1, metric_tags={"uri": order.uri})
current_app.logger.info(
f"Successfully resolved Acme order: {order.uri}", exc_info=True
)
pem_certificate, pem_certificate_chain = self.extract_cert_and_chain(orderr.fullchain_pem)
current_app.logger.debug(
"{0} {1}".format(type(pem_certificate), type(pem_certificate_chain))
)
return pem_certificate, pem_certificate_chain
def extract_cert_and_chain(self, fullchain_pem):
pem_certificate = OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, fullchain_pem
),
).decode()
if current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA", False) \
and datetime.datetime.now() < datetime.datetime.strptime(
current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA_EXPIRATION_DATE", "17/03/21"), '%d/%m/%y'):
pem_certificate_chain = current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA")
else:
pem_certificate_chain = fullchain_pem[len(pem_certificate):].lstrip()
return pem_certificate, pem_certificate_chain
@retry(stop_max_attempt_number=5, wait_fixed=5000)
def setup_acme_client(self, authority):
if not authority.options:
raise InvalidAuthority("Invalid authority. Options not set")
options = {}
for option in json.loads(authority.options):
options[option["name"]] = option.get("value")
email = options.get("email", current_app.config.get("ACME_EMAIL"))
tel = options.get("telephone", current_app.config.get("ACME_TEL"))
directory_url = options.get(
"acme_url", current_app.config.get("ACME_DIRECTORY_URL")
)
existing_key = options.get(
"acme_private_key", current_app.config.get("ACME_PRIVATE_KEY")
)
existing_regr = options.get("acme_regr", current_app.config.get("ACME_REGR"))
if existing_key and existing_regr:
current_app.logger.debug("Reusing existing ACME account")
# Reuse the same account for each certificate issuance
key = jose.JWK.json_loads(existing_key)
regr = messages.RegistrationResource.json_loads(existing_regr)
current_app.logger.debug(
"Connecting with directory at {0}".format(directory_url)
)
net = ClientNetwork(key, account=regr)
client = BackwardsCompatibleClientV2(net, key, directory_url)
return client, {}
else:
# Create an account for each certificate issuance
key = jose.JWKRSA(key=generate_private_key("RSA2048"))
current_app.logger.debug("Creating a new ACME account")
current_app.logger.debug(
"Connecting with directory at {0}".format(directory_url)
)
net = ClientNetwork(key, account=None, timeout=3600)
client = BackwardsCompatibleClientV2(net, key, directory_url)
registration = client.new_account_and_tos(
messages.NewRegistration.from_data(email=email)
)
# if store_account is checked, add the private_key and registration resources to the options
        if options.get("store_account"):
new_options = json.loads(authority.options)
# the key returned by fields_to_partial_json is missing the key type, so we add it manually
key_dict = key.fields_to_partial_json()
key_dict["kty"] = "RSA"
acme_private_key = {
"name": "acme_private_key",
"value": json.dumps(key_dict)
}
new_options.append(acme_private_key)
acme_regr = {
"name": "acme_regr",
"value": json.dumps({"body": {}, "uri": registration.uri})
}
new_options.append(acme_regr)
authorities_service.update_options(authority.id, options=json.dumps(new_options))
current_app.logger.debug("Connected: {0}".format(registration.uri))
return client, registration
def get_domains(self, options):
"""
Fetches all domains currently requested
:param options:
:return:
"""
current_app.logger.debug("Fetching domains")
domains = [options["common_name"]]
if options.get("extensions"):
for dns_name in options["extensions"]["sub_alt_names"]["names"]:
if dns_name.value not in domains:
domains.append(dns_name.value)
current_app.logger.debug("Got these domains: {0}".format(domains))
return domains
def revoke_certificate(self, certificate, crl_reason=0):
if not self.reuse_account(certificate.authority):
raise InvalidConfiguration("There is no ACME account saved, unable to revoke the certificate.")
acme_client, _ = self.setup_acme_client(certificate.authority)
fullchain_com = jose.ComparableX509(
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, certificate.body))
try:
acme_client.revoke(fullchain_com, crl_reason) # revocation reason as int (per RFC 5280 section 5.3.1)
except (errors.ConflictError, errors.ClientError, errors.Error) as e:
# Certificate already revoked.
current_app.logger.error("Certificate revocation failed with message: " + e.detail)
metrics.send("acme_revoke_certificate_failure", "counter", 1)
return False
current_app.logger.warning("Certificate succesfully revoked: " + certificate.name)
metrics.send("acme_revoke_certificate_success", "counter", 1)
return True
class AcmeDnsHandler(AcmeHandler):
def __init__(self):
self.dns_providers_for_domain = {}
try:
self.all_dns_providers = dns_provider_service.get_all_dns_providers()
except Exception as e:
metrics.send("AcmeHandler_init_error", "counter", 1)
sentry.captureException()
current_app.logger.error(f"Unable to fetch DNS Providers: {e}")
self.all_dns_providers = []
def get_all_zones(self, dns_provider):
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
return dns_provider_plugin.get_zones(account_number=account_number)
def get_dns_challenges(self, host, authorizations):
"""Get dns challenges for provided domain"""
domain_to_validate, is_wildcard = self.strip_wildcard(host)
dns_challenges = []
for authz in authorizations:
if not authz.body.identifier.value.lower() == domain_to_validate.lower():
continue
if is_wildcard and not authz.body.wildcard:
continue
if not is_wildcard and authz.body.wildcard:
continue
for combo in authz.body.challenges:
if isinstance(combo.chall, challenges.DNS01):
dns_challenges.append(combo)
return dns_challenges
def get_dns_provider(self, type):
provider_types = {
"cloudflare": cloudflare,
"dyn": dyn,
"route53": route53,
"ultradns": ultradns,
"powerdns": powerdns
}
provider = provider_types.get(type)
if not provider:
raise UnknownProvider("No such DNS provider: {}".format(type))
return provider
def start_dns_challenge(
self,
acme_client,
account_number,
domain,
target_domain,
dns_provider,
order,
dns_provider_options,
):
current_app.logger.debug(f"Starting DNS challenge for {domain} using target domain {target_domain}.")
change_ids = []
cname_delegation = domain != target_domain
dns_challenges = self.get_dns_challenges(domain, order.authorizations)
host_to_validate, _ = self.strip_wildcard(target_domain)
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options)
if not dns_challenges:
sentry.captureException()
metrics.send("start_dns_challenge_error_no_dns_challenges", "counter", 1)
raise Exception("Unable to determine DNS challenges from authorizations")
for dns_challenge in dns_challenges:
if not cname_delegation:
host_to_validate = dns_challenge.validation_domain_name(host_to_validate)
change_id = dns_provider.create_txt_record(
host_to_validate,
dns_challenge.validation(acme_client.client.net.key),
account_number,
)
change_ids.append(change_id)
return AuthorizationRecord(
domain, target_domain, order.authorizations, dns_challenges, change_ids, cname_delegation
)
def complete_dns_challenge(self, acme_client, authz_record):
current_app.logger.debug(
"Finalizing DNS challenge for {0}".format(
authz_record.authz[0].body.identifier.value
)
)
dns_providers = self.dns_providers_for_domain.get(authz_record.target_domain)
if not dns_providers:
metrics.send("complete_dns_challenge_error_no_dnsproviders", "counter", 1)
raise Exception(
"No DNS providers found for domain: {}".format(authz_record.target_domain)
)
for dns_provider in dns_providers:
# Grab account number (For Route53)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
for change_id in authz_record.change_id:
try:
dns_provider_plugin.wait_for_dns_change(
change_id, account_number=account_number
)
except Exception:
metrics.send("complete_dns_challenge_error", "counter", 1)
sentry.captureException()
current_app.logger.debug(
f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: "
f"{account_number}",
exc_info=True,
)
raise
for dns_challenge in authz_record.dns_challenge:
response = dns_challenge.response(acme_client.client.net.key)
verified = response.simple_verify(
dns_challenge.chall,
authz_record.target_domain,
acme_client.client.net.key.public_key(),
)
if not verified:
metrics.send("complete_dns_challenge_verification_error", "counter", 1)
raise ValueError("Failed verification")
time.sleep(5)
res = acme_client.answer_challenge(dns_challenge, response)
current_app.logger.debug(f"answer_challenge response: {res}")
def get_authorizations(self, acme_client, order, order_info):
authorizations = []
for domain in order_info.domains:
# If CNAME exists, set host to the target address
target_domain = domain
if current_app.config.get("ACME_ENABLE_DELEGATED_CNAME", False):
cname_result, _ = self.strip_wildcard(domain)
cname_result = challenges.DNS01().validation_domain_name(cname_result)
cname_result = self.get_cname(cname_result)
if cname_result:
target_domain = cname_result
self.autodetect_dns_providers(target_domain)
metrics.send(
"get_authorizations_cname_delegation_for_domain", "counter", 1, metric_tags={"domain": domain}
)
if not self.dns_providers_for_domain.get(target_domain):
metrics.send(
"get_authorizations_no_dns_provider_for_domain", "counter", 1
)
raise Exception("No DNS providers found for domain: {}".format(target_domain))
for dns_provider in self.dns_providers_for_domain[target_domain]:
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
authz_record = self.start_dns_challenge(
acme_client,
account_number,
domain,
target_domain,
dns_provider_plugin,
order,
dns_provider.options,
)
authorizations.append(authz_record)
return authorizations
def autodetect_dns_providers(self, domain):
"""
Get DNS providers associated with a domain when it has not been provided for certificate creation.
:param domain:
:return: dns_providers: List of DNS providers that have the correct zone.
"""
self.dns_providers_for_domain[domain] = []
match_length = 0
for dns_provider in self.all_dns_providers:
if not dns_provider.domains:
continue
for name in dns_provider.domains:
if name == domain or domain.endswith("." + name):
if len(name) > match_length:
self.dns_providers_for_domain[domain] = [dns_provider]
match_length = len(name)
elif len(name) == match_length:
self.dns_providers_for_domain[domain].append(dns_provider)
return self.dns_providers_for_domain
def finalize_authorizations(self, acme_client, authorizations):
for authz_record in authorizations:
self.complete_dns_challenge(acme_client, authz_record)
for authz_record in authorizations:
dns_challenges = authz_record.dns_challenge
for dns_challenge in dns_challenges:
dns_providers = self.dns_providers_for_domain.get(authz_record.target_domain)
for dns_provider in dns_providers:
# Grab account number (For Route53)
dns_provider_plugin = self.get_dns_provider(
dns_provider.provider_type
)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
host_to_validate, _ = self.strip_wildcard(authz_record.target_domain)
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options)
if not authz_record.cname_delegation:
host_to_validate = challenges.DNS01().validation_domain_name(host_to_validate)
dns_provider_plugin.delete_txt_record(
authz_record.change_id,
account_number,
host_to_validate,
dns_challenge.validation(acme_client.client.net.key),
)
return authorizations
def cleanup_dns_challenges(self, acme_client, authorizations):
"""
Best effort attempt to delete DNS challenges that may not have been deleted previously. This is usually called
on an exception
:param acme_client:
:param account_number:
:param dns_provider:
:param authorizations:
:param dns_provider_options:
:return:
"""
for authz_record in authorizations:
dns_providers = self.dns_providers_for_domain.get(authz_record.target_domain)
for dns_provider in dns_providers:
# Grab account number (For Route53)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
dns_challenges = authz_record.dns_challenge
host_to_validate, _ = self.strip_wildcard(authz_record.target_domain)
host_to_validate = self.maybe_add_extension(
host_to_validate, dns_provider_options
)
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
for dns_challenge in dns_challenges:
if not authz_record.cname_delegation:
host_to_validate = dns_challenge.validation_domain_name(host_to_validate)
try:
dns_provider_plugin.delete_txt_record(
authz_record.change_id,
account_number,
host_to_validate,
dns_challenge.validation(acme_client.client.net.key),
)
                    except Exception:
                        # If this fails, it's most likely because the record
                        # doesn't exist (it was already cleaned up) or we're
                        # not authorized to modify it.
                        metrics.send("cleanup_dns_challenges_error", "counter", 1)
                        sentry.captureException()
def get_cname(self, domain):
"""
:param domain: Domain name to look up a CNAME for.
:return: First CNAME target or False if no CNAME record exists.
"""
        try:
            result = dns.resolver.query(domain, 'CNAME')
            if len(result) > 0:
                return str(result[0].target).rstrip('.')
        except dns.exception.DNSException:
            pass
        return False
|
from garminconnect import (
GarminConnectAuthenticationError,
GarminConnectConnectionError,
GarminConnectTooManyRequestsError,
)
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.garmin_connect.const import DOMAIN
from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
MOCK_CONF = {
CONF_ID: "First Lastname",
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "mypassw0rd",
}
@pytest.fixture(name="mock_garmin_connect")
def mock_garmin():
"""Mock Garmin."""
with patch(
"homeassistant.components.garmin_connect.config_flow.Garmin",
) as garmin:
garmin.return_value.get_full_name.return_value = MOCK_CONF[CONF_ID]
yield garmin.return_value
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_user(hass, mock_garmin_connect):
"""Test registering an integration and finishing flow works."""
with patch(
"homeassistant.components.garmin_connect.async_setup_entry", return_value=True
), patch("homeassistant.components.garmin_connect.async_setup", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == MOCK_CONF
async def test_connection_error(hass, mock_garmin_connect):
"""Test for connection error."""
mock_garmin_connect.login.side_effect = GarminConnectConnectionError("errormsg")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_authentication_error(hass, mock_garmin_connect):
"""Test for authentication error."""
mock_garmin_connect.login.side_effect = GarminConnectAuthenticationError("errormsg")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
async def test_toomanyrequest_error(hass, mock_garmin_connect):
"""Test for toomanyrequests error."""
mock_garmin_connect.login.side_effect = GarminConnectTooManyRequestsError(
"errormsg"
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "too_many_requests"}
async def test_unknown_error(hass, mock_garmin_connect):
"""Test for unknown error."""
mock_garmin_connect.login.side_effect = Exception
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "unknown"}
async def test_abort_if_already_setup(hass, mock_garmin_connect):
"""Test abort if already setup."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_ID])
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
|
import argparse
import pytest
from paasta_tools.cli.cli import get_argparser
from paasta_tools.cli.cli import main
def each_command():
parser = get_argparser()
    # We're doing some wacky inspection here; let's make sure things are sane
(subparsers,) = [
action
for action in parser._actions
if isinstance(action, argparse._SubParsersAction)
]
    # Remove our dummy help command; `paasta help --help` is nonsense
choices = tuple(set(subparsers.choices) - {"help"})
assert choices
assert "local-run" in choices
return choices
@pytest.mark.parametrize("cmd", each_command())
def test_help(cmd, capfd):
# Should pass and produce something
with pytest.raises(SystemExit) as excinfo:
main((cmd, "--help"))
assert excinfo.value.code == 0
assert cmd in capfd.readouterr()[0]
def test_invalid_arguments_returns_non_zero():
with pytest.raises(SystemExit) as excinfo:
main(("get-latest-deployment", "--herp"))
assert excinfo.value.code == 1
|
import logging
import pytest
from homeassistant.components.counter import (
ATTR_EDITABLE,
ATTR_INITIAL,
ATTR_MAXIMUM,
ATTR_MINIMUM,
ATTR_STEP,
CONF_ICON,
CONF_INITIAL,
CONF_NAME,
CONF_RESTORE,
CONF_STEP,
DEFAULT_INITIAL,
DEFAULT_STEP,
DOMAIN,
)
from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_ICON, ATTR_NAME
from homeassistant.core import Context, CoreState, State
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
from tests.common import mock_restore_cache
from tests.components.counter.common import (
async_decrement,
async_increment,
async_reset,
)
_LOGGER = logging.getLogger(__name__)
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
"id": "from_storage",
"initial": 10,
"name": "from storage",
"maximum": 100,
"minimum": 3,
"step": 2,
"restore": False,
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
async def test_config(hass):
"""Test config."""
invalid_configs = [None, 1, {}, {"name with space": None}]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_config_options(hass):
"""Test configuration options."""
count_start = len(hass.states.async_entity_ids())
_LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())
config = {
DOMAIN: {
"test_1": {},
"test_2": {
CONF_NAME: "Hello World",
CONF_ICON: "mdi:work",
CONF_INITIAL: 10,
CONF_RESTORE: False,
CONF_STEP: 5,
},
"test_3": None,
}
}
assert await async_setup_component(hass, "counter", config)
await hass.async_block_till_done()
_LOGGER.debug("ENTITIES: %s", hass.states.async_entity_ids())
assert count_start + 3 == len(hass.states.async_entity_ids())
await hass.async_block_till_done()
state_1 = hass.states.get("counter.test_1")
state_2 = hass.states.get("counter.test_2")
state_3 = hass.states.get("counter.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is not None
assert 0 == int(state_1.state)
assert ATTR_ICON not in state_1.attributes
assert ATTR_FRIENDLY_NAME not in state_1.attributes
assert 10 == int(state_2.state)
assert "Hello World" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work" == state_2.attributes.get(ATTR_ICON)
assert DEFAULT_INITIAL == state_3.attributes.get(ATTR_INITIAL)
assert DEFAULT_STEP == state_3.attributes.get(ATTR_STEP)
async def test_methods(hass):
"""Test increment, decrement, and reset methods."""
config = {DOMAIN: {"test_1": {}}}
assert await async_setup_component(hass, "counter", config)
entity_id = "counter.test_1"
state = hass.states.get(entity_id)
assert 0 == int(state.state)
async_increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 1 == int(state.state)
async_increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 2 == int(state.state)
async_decrement(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 1 == int(state.state)
async_reset(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 0 == int(state.state)
async def test_methods_with_config(hass):
"""Test increment, decrement, and reset methods with configuration."""
config = {
DOMAIN: {"test": {CONF_NAME: "Hello World", CONF_INITIAL: 10, CONF_STEP: 5}}
}
assert await async_setup_component(hass, "counter", config)
entity_id = "counter.test"
state = hass.states.get(entity_id)
assert 10 == int(state.state)
async_increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 15 == int(state.state)
async_increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 20 == int(state.state)
async_decrement(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 15 == int(state.state)
async def test_initial_state_overrules_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass, (State("counter.test1", "11"), State("counter.test2", "-22"))
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test1": {CONF_RESTORE: False},
"test2": {CONF_INITIAL: 10, CONF_RESTORE: False},
}
},
)
state = hass.states.get("counter.test1")
assert state
assert int(state.state) == 0
state = hass.states.get("counter.test2")
assert state
assert int(state.state) == 10
async def test_restore_state_overrules_initial_state(hass):
"""Ensure states are restored on startup."""
attr = {"initial": 6, "minimum": 1, "maximum": 8, "step": 2}
mock_restore_cache(
hass,
(
State("counter.test1", "11"),
State("counter.test2", "-22"),
State("counter.test3", "5", attr),
),
)
hass.state = CoreState.starting
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test1": {}, "test2": {CONF_INITIAL: 10}, "test3": {}}}
)
state = hass.states.get("counter.test1")
assert state
assert int(state.state) == 11
state = hass.states.get("counter.test2")
assert state
assert int(state.state) == -22
state = hass.states.get("counter.test3")
assert state
assert int(state.state) == 5
assert state.attributes.get("initial") == 6
assert state.attributes.get("minimum") == 1
assert state.attributes.get("maximum") == 8
assert state.attributes.get("step") == 2
async def test_no_initial_state_and_no_restore_state(hass):
"""Ensure that entity is create without initial and restore feature."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_STEP: 5}}})
state = hass.states.get("counter.test1")
assert state
assert int(state.state) == 0
async def test_counter_context(hass, hass_admin_user):
"""Test that counter context works."""
assert await async_setup_component(hass, "counter", {"counter": {"test": {}}})
state = hass.states.get("counter.test")
assert state is not None
await hass.services.async_call(
"counter",
"increment",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("counter.test")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
async def test_counter_min(hass, hass_admin_user):
"""Test that min works."""
assert await async_setup_component(
hass, "counter", {"counter": {"test": {"minimum": "0", "initial": "0"}}}
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "0"
await hass.services.async_call(
"counter",
"decrement",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("counter.test")
assert state2 is not None
assert state2.state == "0"
await hass.services.async_call(
"counter",
"increment",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("counter.test")
assert state2 is not None
assert state2.state == "1"
async def test_counter_max(hass, hass_admin_user):
"""Test that max works."""
assert await async_setup_component(
hass, "counter", {"counter": {"test": {"maximum": "0", "initial": "0"}}}
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "0"
await hass.services.async_call(
"counter",
"increment",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("counter.test")
assert state2 is not None
assert state2.state == "0"
await hass.services.async_call(
"counter",
"decrement",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("counter.test")
assert state2 is not None
assert state2.state == "-1"
async def test_configure(hass, hass_admin_user):
"""Test that setting values through configure works."""
assert await async_setup_component(
hass, "counter", {"counter": {"test": {"maximum": "10", "initial": "10"}}}
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "10"
assert 10 == state.attributes.get("maximum")
# update max
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "maximum": 0},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "0"
assert 0 == state.attributes.get("maximum")
# disable max
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "maximum": None},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "0"
assert state.attributes.get("maximum") is None
# update min
assert state.attributes.get("minimum") is None
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "minimum": 5},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "5"
assert 5 == state.attributes.get("minimum")
# disable min
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "minimum": None},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "5"
assert state.attributes.get("minimum") is None
# update step
assert 1 == state.attributes.get("step")
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "step": 3},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "5"
assert 3 == state.attributes.get("step")
# update value
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "value": 6},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "6"
# update initial
await hass.services.async_call(
"counter",
"configure",
{"entity_id": state.entity_id, "initial": 5},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "6"
assert 5 == state.attributes.get("initial")
# update all
await hass.services.async_call(
"counter",
"configure",
{
"entity_id": state.entity_id,
"step": 5,
"minimum": 0,
"maximum": 9,
"value": 5,
"initial": 6,
},
True,
Context(user_id=hass_admin_user.id),
)
state = hass.states.get("counter.test")
assert state is not None
assert state.state == "5"
assert 5 == state.attributes.get("step")
assert 0 == state.attributes.get("minimum")
assert 9 == state.attributes.get("maximum")
assert 6 == state.attributes.get("initial")
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.from_storage")
assert int(state.state) == 10
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"minimum": 1,
"maximum": 10,
"initial": 5,
"step": 1,
"restore": False,
}
}
}
)
state = hass.states.get(f"{DOMAIN}.from_storage")
assert int(state.state) == 10
assert state.attributes[ATTR_FRIENDLY_NAME] == "from storage"
assert state.attributes[ATTR_EDITABLE] is True
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert int(state.state) == 5
assert state.attributes[ATTR_EDITABLE] is False
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"minimum": 1,
"maximum": 10,
"initial": 5,
"step": 1,
"restore": False,
}
}
}
)
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_update_min_max(hass, hass_ws_client, storage_setup):
"""Test updating min/max updates the state."""
items = [
{
"id": "from_storage",
"initial": 15,
"name": "from storage",
"maximum": 100,
"minimum": 10,
"step": 3,
"restore": True,
}
]
assert await storage_setup(items)
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert int(state.state) == 15
assert state.attributes[ATTR_MAXIMUM] == 100
assert state.attributes[ATTR_MINIMUM] == 10
assert state.attributes[ATTR_STEP] == 3
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
"minimum": 19,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert int(state.state) == 19
assert state.attributes[ATTR_MINIMUM] == 19
assert state.attributes[ATTR_MAXIMUM] == 100
assert state.attributes[ATTR_STEP] == 3
await client.send_json(
{
"id": 7,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
"maximum": 5,
"minimum": 2,
"step": 5,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert int(state.state) == 5
assert state.attributes[ATTR_MINIMUM] == 2
assert state.attributes[ATTR_MAXIMUM] == 5
assert state.attributes[ATTR_STEP] == 5
await client.send_json(
{
"id": 8,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
"maximum": None,
"minimum": None,
"step": 6,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert int(state.state) == 5
assert ATTR_MINIMUM not in state.attributes
assert ATTR_MAXIMUM not in state.attributes
assert state.attributes[ATTR_STEP] == 6
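# Note on the clamping semantics exercised above (a summary, not new
# assertions): raising the minimum above the current value (min 19 vs.
# state 15) pulls the state up to 19, and lowering the maximum below the
# current value (max 5 vs. state 19) pushes the state down to 5.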
async def test_create(hass, hass_ws_client, storage_setup):
"""Test creating counter using WS."""
items = []
assert await storage_setup(items)
counter_id = "new_counter"
input_entity_id = f"{DOMAIN}.{counter_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, counter_id) is None
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/create", "name": "new counter"})
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert int(state.state) == 0
assert ATTR_MINIMUM not in state.attributes
assert ATTR_MAXIMUM not in state.attributes
assert state.attributes[ATTR_STEP] == 1
|
import attr
from PyQt5.QtCore import QUrl, QByteArray
from PyQt5.QtWebEngineCore import (QWebEngineUrlRequestInterceptor,
QWebEngineUrlRequestInfo)
from qutebrowser.config import websettings
from qutebrowser.browser import shared
from qutebrowser.utils import utils, log, debug, qtutils
from qutebrowser.extensions import interceptors
from qutebrowser.misc import objects
@attr.s
class WebEngineRequest(interceptors.Request):
"""QtWebEngine-specific request interceptor functionality."""
_WHITELISTED_REQUEST_METHODS = {QByteArray(b'GET'), QByteArray(b'HEAD')}
_webengine_info: QWebEngineUrlRequestInfo = attr.ib(default=None)
#: If this request has been redirected already
_redirected: bool = attr.ib(init=False, default=False)
def redirect(self, url: QUrl) -> None:
if self._redirected:
raise interceptors.RedirectFailedException(
"Request already redirected.")
if self._webengine_info is None:
raise interceptors.RedirectFailedException(
"Request improperly initialized.")
# Redirecting a request that contains payload data is not allowed.
# To be safe, abort on any request not in a whitelist.
if (self._webengine_info.requestMethod()
not in self._WHITELISTED_REQUEST_METHODS):
raise interceptors.RedirectFailedException(
"Request method does not support redirection.")
self._webengine_info.redirect(url)
self._redirected = True
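# A minimal sketch (not part of this module) of how an extension callback
# could use the redirect() API above. force_https() is hypothetical;
# interceptors.register() and RedirectFailedException are assumed to be the
# extension API used elsewhere in qutebrowser:
#
#     def force_https(request: interceptors.Request) -> None:
#         url = QUrl(request.request_url)  # copy before mutating
#         if url.scheme() == 'http':
#             url.setScheme('https')
#             try:
#                 request.redirect(url)
#             except interceptors.RedirectFailedException:
#                 pass  # e.g. non-GET/HEAD requests can't be redirected
#
#     interceptors.register(force_https)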
class RequestInterceptor(QWebEngineUrlRequestInterceptor):
"""Handle ad blocking and custom headers."""
def __init__(self, parent=None):
super().__init__(parent)
# This dict maps QWebEngine resource types to qutebrowser extension
# ResourceTypes. If a resource type is added to Qt, this table should
# be updated too.
self._resource_types = {
QWebEngineUrlRequestInfo.ResourceTypeMainFrame:
interceptors.ResourceType.main_frame,
QWebEngineUrlRequestInfo.ResourceTypeSubFrame:
interceptors.ResourceType.sub_frame,
QWebEngineUrlRequestInfo.ResourceTypeStylesheet:
interceptors.ResourceType.stylesheet,
QWebEngineUrlRequestInfo.ResourceTypeScript:
interceptors.ResourceType.script,
QWebEngineUrlRequestInfo.ResourceTypeImage:
interceptors.ResourceType.image,
QWebEngineUrlRequestInfo.ResourceTypeFontResource:
interceptors.ResourceType.font_resource,
QWebEngineUrlRequestInfo.ResourceTypeSubResource:
interceptors.ResourceType.sub_resource,
QWebEngineUrlRequestInfo.ResourceTypeObject:
interceptors.ResourceType.object,
QWebEngineUrlRequestInfo.ResourceTypeMedia:
interceptors.ResourceType.media,
QWebEngineUrlRequestInfo.ResourceTypeWorker:
interceptors.ResourceType.worker,
QWebEngineUrlRequestInfo.ResourceTypeSharedWorker:
interceptors.ResourceType.shared_worker,
QWebEngineUrlRequestInfo.ResourceTypePrefetch:
interceptors.ResourceType.prefetch,
QWebEngineUrlRequestInfo.ResourceTypeFavicon:
interceptors.ResourceType.favicon,
QWebEngineUrlRequestInfo.ResourceTypeXhr:
interceptors.ResourceType.xhr,
QWebEngineUrlRequestInfo.ResourceTypePing:
interceptors.ResourceType.ping,
QWebEngineUrlRequestInfo.ResourceTypeServiceWorker:
interceptors.ResourceType.service_worker,
QWebEngineUrlRequestInfo.ResourceTypeCspReport:
interceptors.ResourceType.csp_report,
QWebEngineUrlRequestInfo.ResourceTypePluginResource:
interceptors.ResourceType.plugin_resource,
QWebEngineUrlRequestInfo.ResourceTypeUnknown:
interceptors.ResourceType.unknown,
}
try:
preload_main_frame = (QWebEngineUrlRequestInfo.
ResourceTypeNavigationPreloadMainFrame)
preload_sub_frame = (QWebEngineUrlRequestInfo.
ResourceTypeNavigationPreloadSubFrame)
except AttributeError:
# Added in Qt 5.14
pass
else:
self._resource_types[preload_main_frame] = (
interceptors.ResourceType.preload_main_frame)
self._resource_types[preload_sub_frame] = (
interceptors.ResourceType.preload_sub_frame)
def install(self, profile):
"""Install the interceptor on the given QWebEngineProfile."""
try:
# Qt >= 5.13, GUI thread
profile.setUrlRequestInterceptor(self)
except AttributeError:
# Qt 5.12, IO thread
profile.setRequestInterceptor(self)
# Gets called in the IO thread -> showing crash window will fail
@utils.prevent_exceptions(None, not qtutils.version_check('5.13'))
def interceptRequest(self, info):
"""Handle the given request.
Reimplementing this virtual function and setting the interceptor on a
profile makes it possible to intercept URL requests.
On Qt < 5.13, this function is executed on the IO thread, and therefore
running long tasks here will block networking.
info contains the information about the URL request and will track
internally whether its members have been altered.
Args:
info: QWebEngineUrlRequestInfo &info
"""
if 'log-requests' in objects.debug_flags:
resource_type_str = debug.qenum_key(QWebEngineUrlRequestInfo,
info.resourceType())
navigation_type_str = debug.qenum_key(QWebEngineUrlRequestInfo,
info.navigationType())
log.network.debug("{} {}, first-party {}, resource {}, "
"navigation {}".format(
bytes(info.requestMethod()).decode('ascii'),
info.requestUrl().toDisplayString(),
info.firstPartyUrl().toDisplayString(),
resource_type_str, navigation_type_str))
url = info.requestUrl()
first_party = info.firstPartyUrl()
if not url.isValid():
log.network.debug("Ignoring invalid intercepted URL: {}".format(
url.errorString()))
return
# Per QWebEngineUrlRequestInfo::ResourceType documentation, if we fail
# our lookup, we should fall back to ResourceTypeUnknown
try:
resource_type = self._resource_types[info.resourceType()]
except KeyError:
log.network.warning(
"Resource type {} not found in RequestInterceptor dict."
.format(debug.qenum_key(QWebEngineUrlRequestInfo,
info.resourceType())))
resource_type = interceptors.ResourceType.unknown
if ((url.scheme(), url.host(), url.path()) ==
('qute', 'settings', '/set')):
if (first_party != QUrl('qute://settings/') or
info.resourceType() !=
QWebEngineUrlRequestInfo.ResourceTypeXhr):
log.network.warning("Blocking malicious request from {} to {}"
.format(first_party.toDisplayString(),
url.toDisplayString()))
info.block(True)
return
# FIXME:qtwebengine only block ads for NavigationTypeOther?
request = WebEngineRequest(
first_party_url=first_party,
request_url=url,
resource_type=resource_type,
webengine_info=info)
interceptors.run(request)
if request.is_blocked:
info.block(True)
for header, value in shared.custom_headers(url=url):
info.setHttpHeader(header, value)
user_agent = websettings.user_agent(url)
info.setHttpHeader(b'User-Agent', user_agent.encode('ascii'))
|
import urllib2
import diamond.collector
import datetime
class HttpCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(HttpCollector, self).get_default_config_help()
config_help.update({
'req_port': 'Port',
'req_url':
'array of full URL to get (ex : https://www.ici.net/mypage.html)',
'req_vhost':
'Host header variable if needed. Will be added to every request',
})
return config_help
def get_default_config(self):
default_config = super(HttpCollector, self).get_default_config()
default_config['path'] = 'http'
default_config['req_vhost'] = ''
default_config['req_url'] = ['http://localhost/']
default_config['headers'] = {'User-Agent': 'Diamond HTTP collector', }
return default_config
def collect(self):
# create urllib2 vars
if self.config['req_vhost'] != "":
self.config['headers']['Host'] = self.config['req_vhost']
# time the request
for url in self.config['req_url']:
self.log.debug("collecting %s", str(url))
req_start = datetime.datetime.now()
req = urllib2.Request(url, headers=self.config['headers'])
try:
handle = urllib2.urlopen(req)
the_page = handle.read()
req_end = datetime.datetime.now()
req_time = req_end - req_start
# Build a compatible metric name: no '.' and no '/' in the name.
metric_name = url.replace(
'/', '_').replace(
'.', '_').replace(
'\\', '').replace(
':', '')
# metric_name = url.split("/")[-1].replace(".", "_")
if metric_name == '':
metric_name = "root"
self.publish_gauge(
metric_name + '.time',
req_time.seconds * 1000000 + req_time.microseconds)
self.publish_gauge(
metric_name + '.size',
len(the_page))
except IOError as e:
self.log.error("Unable to open %s: %s", url, e)
except Exception as e:
self.log.error("Unknown error opening url: %s", e)
|
import asyncio
import logging
from smart_meter_texas import Account, Client
from smart_meter_texas.exceptions import (
SmartMeterTexasAPIError,
SmartMeterTexasAuthError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import (
DataUpdateCoordinator,
Debouncer,
UpdateFailed,
)
from .const import (
DATA_COORDINATOR,
DATA_SMART_METER,
DEBOUNCE_COOLDOWN,
DOMAIN,
SCAN_INTERVAL,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Smart Meter Texas component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Smart Meter Texas from a config entry."""
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
account = Account(username, password)
smart_meter_texas_data = SmartMeterTexasData(hass, entry, account)
try:
await smart_meter_texas_data.client.authenticate()
except SmartMeterTexasAuthError:
_LOGGER.error("Username or password was not accepted")
return False
except asyncio.TimeoutError as error:
raise ConfigEntryNotReady from error
await smart_meter_texas_data.setup()
async def async_update_data():
_LOGGER.debug("Fetching latest data")
await smart_meter_texas_data.read_meters()
return smart_meter_texas_data
# Use a DataUpdateCoordinator to manage the updates, since the
# Smart Meter Texas API takes around 30 seconds to read a meter.
# This keeps Home Assistant from complaining about the component
# taking too long to update.
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="Smart Meter Texas",
update_method=async_update_data,
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=DEBOUNCE_COOLDOWN, immediate=True
),
)
hass.data[DOMAIN][entry.entry_id] = {
DATA_COORDINATOR: coordinator,
DATA_SMART_METER: smart_meter_texas_data,
}
asyncio.create_task(coordinator.async_refresh())
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
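# A sketch of how a platform could consume the coordinator stored above,
# assuming the standard CoordinatorEntity helper; the `reading` attribute on
# the meter object is hypothetical:
#
#     from homeassistant.helpers.update_coordinator import CoordinatorEntity
#
#     class MeterSensor(CoordinatorEntity):
#         def __init__(self, coordinator, meter):
#             super().__init__(coordinator)
#             self._meter = meter
#
#         @property
#         def state(self):
#             # Data is refreshed by the coordinator; entities just read it.
#             return self._meter.reading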
class SmartMeterTexasData:
"""Manages coordinatation of API data updates."""
def __init__(self, hass: HomeAssistant, entry: ConfigEntry, account: Account):
"""Initialize the data coordintator."""
self._entry = entry
self.account = account
websession = aiohttp_client.async_get_clientsession(hass)
self.client = Client(websession, account)
self.meters = []
async def setup(self):
"""Fetch all of the user's meters."""
self.meters = await self.account.fetch_meters(self.client)
_LOGGER.debug("Discovered %s meter(s)", len(self.meters))
async def read_meters(self):
"""Read each meter."""
for meter in self.meters:
try:
await meter.read_meter(self.client)
except (SmartMeterTexasAPIError, SmartMeterTexasAuthError) as error:
raise UpdateFailed(error) from error
return self.meters
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import sys
import os
import pytest
from qutebrowser import qutebrowser
from qutebrowser.config import qtargs
from qutebrowser.utils import usertypes
from helpers import utils
class TestQtArgs:
@pytest.fixture
def parser(self, mocker):
"""Fixture to provide an argparser.
Monkey-patches .exit() of the argparser so it doesn't exit on errors.
"""
parser = qutebrowser.get_argparser()
mocker.patch.object(parser, 'exit', side_effect=Exception)
return parser
@pytest.fixture(autouse=True)
def reduce_args(self, monkeypatch, config_stub):
"""Make sure no --disable-shared-workers/referer argument get added."""
monkeypatch.setattr(qtargs.qtutils, 'qVersion', lambda: '5.15.0')
config_stub.val.content.headers.referer = 'always'
@pytest.mark.parametrize('args, expected', [
# No Qt arguments
(['--debug'], [sys.argv[0]]),
# Qt flag
(['--debug', '--qt-flag', 'reverse'], [sys.argv[0], '--reverse']),
# Qt argument with value
(['--qt-arg', 'stylesheet', 'foo'],
[sys.argv[0], '--stylesheet', 'foo']),
# --qt-arg given twice
(['--qt-arg', 'stylesheet', 'foo', '--qt-arg', 'geometry', 'bar'],
[sys.argv[0], '--stylesheet', 'foo', '--geometry', 'bar']),
# --qt-flag given twice
(['--qt-flag', 'foo', '--qt-flag', 'bar'],
[sys.argv[0], '--foo', '--bar']),
])
def test_qt_args(self, monkeypatch, config_stub, args, expected, parser):
"""Test commandline with no Qt arguments given."""
# Avoid scrollbar overlay argument
config_stub.val.scrolling.bar = 'never'
# Avoid WebRTC pipewire feature
monkeypatch.setattr(qtargs.utils, 'is_linux', False)
parsed = parser.parse_args(args)
assert qtargs.qt_args(parsed) == expected
def test_qt_both(self, config_stub, parser):
"""Test commandline with a Qt argument and flag."""
args = parser.parse_args(['--qt-arg', 'stylesheet', 'foobar',
'--qt-flag', 'reverse'])
qt_args = qtargs.qt_args(args)
assert qt_args[0] == sys.argv[0]
assert '--reverse' in qt_args
assert '--stylesheet' in qt_args
assert 'foobar' in qt_args
def test_with_settings(self, config_stub, parser):
parsed = parser.parse_args(['--qt-flag', 'foo'])
config_stub.val.qt.args = ['bar']
args = qtargs.qt_args(parsed)
assert args[0] == sys.argv[0]
for arg in ['--foo', '--bar']:
assert arg in args
@pytest.mark.parametrize('backend, expected', [
(usertypes.Backend.QtWebEngine, True),
(usertypes.Backend.QtWebKit, False),
])
def test_shared_workers(self, config_stub, monkeypatch, parser,
backend, expected):
monkeypatch.setattr(qtargs.qtutils, 'qVersion', lambda: '5.14.0')
monkeypatch.setattr(qtargs.objects, 'backend', backend)
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
assert ('--disable-shared-workers' in args) == expected
@pytest.mark.parametrize('backend, version_check, debug_flag, expected', [
# Qt >= 5.12.3: Enable with -D stack, do nothing without it.
(usertypes.Backend.QtWebEngine, True, True, True),
(usertypes.Backend.QtWebEngine, True, False, None),
# Qt < 5.12.3: Do nothing with -D stack, disable without it.
(usertypes.Backend.QtWebEngine, False, True, None),
(usertypes.Backend.QtWebEngine, False, False, False),
# QtWebKit: Do nothing
(usertypes.Backend.QtWebKit, True, True, None),
(usertypes.Backend.QtWebKit, True, False, None),
(usertypes.Backend.QtWebKit, False, True, None),
(usertypes.Backend.QtWebKit, False, False, None),
])
def test_in_process_stack_traces(self, monkeypatch, parser, backend,
version_check, debug_flag, expected):
monkeypatch.setattr(qtargs.qtutils, 'version_check',
lambda version, compiled=False: version_check)
monkeypatch.setattr(qtargs.objects, 'backend', backend)
parsed = parser.parse_args(['--debug-flag', 'stack'] if debug_flag
else [])
args = qtargs.qt_args(parsed)
if expected is None:
assert '--disable-in-process-stack-traces' not in args
assert '--enable-in-process-stack-traces' not in args
elif expected:
assert '--disable-in-process-stack-traces' not in args
assert '--enable-in-process-stack-traces' in args
else:
assert '--disable-in-process-stack-traces' in args
assert '--enable-in-process-stack-traces' not in args
@pytest.mark.parametrize('flags, args', [
([], []),
(['--debug-flag', 'chromium'], ['--enable-logging', '--v=1']),
(['--debug-flag', 'wait-renderer-process'], ['--renderer-startup-dialog']),
])
def test_chromium_flags(self, monkeypatch, parser, flags, args):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
parsed = parser.parse_args(flags)
# Don't shadow the expected 'args' parameter with the generated
# arguments; compare the expected flags against what qt_args() produced.
generated_args = qtargs.qt_args(parsed)
if args:
for arg in args:
assert arg in generated_args
else:
assert '--enable-logging' not in generated_args
assert '--v=1' not in generated_args
assert '--renderer-startup-dialog' not in generated_args
@pytest.mark.parametrize('config, added', [
('none', False),
('qt-quick', False),
('software-opengl', False),
('chromium', True),
])
def test_disable_gpu(self, config, added,
config_stub, monkeypatch, parser):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.qt.force_software_rendering = config
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
assert ('--disable-gpu' in args) == added
@pytest.mark.parametrize('policy, arg', [
('all-interfaces', None),
('default-public-and-private-interfaces',
'--force-webrtc-ip-handling-policy='
'default_public_and_private_interfaces'),
('default-public-interface-only',
'--force-webrtc-ip-handling-policy='
'default_public_interface_only'),
('disable-non-proxied-udp',
'--force-webrtc-ip-handling-policy='
'disable_non_proxied_udp'),
])
def test_webrtc(self, config_stub, monkeypatch, parser,
policy, arg):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.content.webrtc_ip_handling_policy = policy
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
if arg is None:
assert not any(a.startswith('--force-webrtc-ip-handling-policy=')
for a in args)
else:
assert arg in args
@pytest.mark.parametrize('canvas_reading, added', [
(True, False), # canvas reading enabled
(False, True),
])
def test_canvas_reading(self, config_stub, monkeypatch, parser,
canvas_reading, added):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.content.canvas_reading = canvas_reading
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
assert ('--disable-reading-from-canvas' in args) == added
@pytest.mark.parametrize('process_model, added', [
('process-per-site-instance', False),
('process-per-site', True),
('single-process', True),
])
def test_process_model(self, config_stub, monkeypatch, parser,
process_model, added):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.qt.process_model = process_model
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
if added:
assert '--' + process_model in args
else:
assert '--process-per-site' not in args
assert '--single-process' not in args
assert '--process-per-site-instance' not in args
assert '--process-per-tab' not in args
@pytest.mark.parametrize('low_end_device_mode, arg', [
('auto', None),
('always', '--enable-low-end-device-mode'),
('never', '--disable-low-end-device-mode'),
])
def test_low_end_device_mode(self, config_stub, monkeypatch, parser,
low_end_device_mode, arg):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.qt.low_end_device_mode = low_end_device_mode
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
if arg is None:
assert '--enable-low-end-device-mode' not in args
assert '--disable-low-end-device-mode' not in args
else:
assert arg in args
@pytest.mark.parametrize('referer, arg', [
('always', None),
('never', '--no-referrers'),
('same-domain', '--reduced-referrer-granularity'),
])
def test_referer(self, config_stub, monkeypatch, parser, referer, arg):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
config_stub.val.content.headers.referer = referer
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
if arg is None:
assert '--no-referrers' not in args
assert '--reduced-referrer-granularity' not in args
else:
assert arg in args
@pytest.mark.parametrize('dark, new_qt, added', [
(True, True, True),
(True, False, False),
(False, True, False),
(False, False, False),
])
@utils.qt514
def test_prefers_color_scheme_dark(self, config_stub, monkeypatch, parser,
dark, new_qt, added):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setattr(qtargs.qtutils, 'version_check',
lambda version, exact=False, compiled=True:
new_qt)
config_stub.val.colors.webpage.prefers_color_scheme_dark = dark
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
assert ('--force-dark-mode' in args) == added
@pytest.mark.parametrize('bar, is_mac, added', [
# Overlay bar enabled
('overlay', False, True),
# No overlay on mac
('overlay', True, False),
# Overlay disabled
('when-searching', False, False),
('always', False, False),
('never', False, False),
])
def test_overlay_scrollbar(self, config_stub, monkeypatch, parser,
bar, is_mac, added):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setattr(qtargs.utils, 'is_mac', is_mac)
# Avoid WebRTC pipewire feature
monkeypatch.setattr(qtargs.utils, 'is_linux', False)
config_stub.val.scrolling.bar = bar
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
assert ('--enable-features=OverlayScrollbar' in args) == added
@pytest.mark.parametrize('via_commandline', [True, False])
@pytest.mark.parametrize('overlay, passed_features, expected_features', [
(True,
'CustomFeature',
'CustomFeature,OverlayScrollbar'),
(True,
'CustomFeature1,CustomFeature2',
'CustomFeature1,CustomFeature2,OverlayScrollbar'),
(False,
'CustomFeature',
'CustomFeature'),
])
def test_overlay_features_flag(self, config_stub, monkeypatch, parser,
via_commandline, overlay, passed_features,
expected_features):
"""If enable-features is already specified, we should combine both."""
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setattr(qtargs.qtutils, 'version_check',
lambda version, exact=False, compiled=True:
True)
monkeypatch.setattr(qtargs.utils, 'is_mac', False)
# Avoid WebRTC pipewire feature
monkeypatch.setattr(qtargs.utils, 'is_linux', False)
stripped_prefix = 'enable-features='
config_flag = stripped_prefix + passed_features
config_stub.val.scrolling.bar = 'overlay' if overlay else 'never'
config_stub.val.qt.args = ([] if via_commandline else [config_flag])
parsed = parser.parse_args(['--qt-flag', config_flag]
if via_commandline else [])
args = qtargs.qt_args(parsed)
prefix = '--' + stripped_prefix
overlay_flag = prefix + 'OverlayScrollbar'
combined_flag = prefix + expected_features
assert len([arg for arg in args if arg.startswith(prefix)]) == 1
assert combined_flag in args
assert overlay_flag not in args
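# Worked example for the cases above (a sketch): with scrolling.bar set to
# 'overlay' and an existing 'enable-features=CustomFeature' flag, qt_args()
# should emit the single combined flag
# '--enable-features=CustomFeature,OverlayScrollbar' rather than two
# separate --enable-features arguments.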
def test_blink_settings(self, config_stub, monkeypatch, parser):
from qutebrowser.browser.webengine import darkmode
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setattr(darkmode, '_variant',
lambda: darkmode.Variant.qt_515_2)
config_stub.val.colors.webpage.darkmode.enabled = True
parsed = parser.parse_args([])
args = qtargs.qt_args(parsed)
expected = ('--blink-settings=forceDarkModeEnabled=true,'
'forceDarkModeImagePolicy=2')
assert expected in args
class TestEnvVars:
@pytest.mark.parametrize('config_opt, config_val, envvar, expected', [
('qt.force_software_rendering', 'software-opengl',
'QT_XCB_FORCE_SOFTWARE_OPENGL', '1'),
('qt.force_software_rendering', 'qt-quick',
'QT_QUICK_BACKEND', 'software'),
('qt.force_software_rendering', 'chromium',
'QT_WEBENGINE_DISABLE_NOUVEAU_WORKAROUND', '1'),
('qt.force_platform', 'toaster', 'QT_QPA_PLATFORM', 'toaster'),
('qt.force_platformtheme', 'lxde', 'QT_QPA_PLATFORMTHEME', 'lxde'),
('window.hide_decoration', True,
'QT_WAYLAND_DISABLE_WINDOWDECORATION', '1')
])
def test_env_vars(self, monkeypatch, config_stub,
config_opt, config_val, envvar, expected):
"""Check settings which set an environment variable."""
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setenv(envvar, '') # to make sure it gets restored
monkeypatch.delenv(envvar)
config_stub.set_obj(config_opt, config_val)
qtargs.init_envvars()
assert os.environ[envvar] == expected
@pytest.mark.parametrize('new_qt', [True, False])
def test_highdpi(self, monkeypatch, config_stub, new_qt):
"""Test HighDPI environment variables.
Depending on the Qt version, there's a different variable which should
be set...
"""
new_var = 'QT_ENABLE_HIGHDPI_SCALING'
old_var = 'QT_AUTO_SCREEN_SCALE_FACTOR'
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebEngine)
monkeypatch.setattr(qtargs.qtutils, 'version_check',
lambda version, exact=False, compiled=True:
new_qt)
for envvar in [new_var, old_var]:
monkeypatch.setenv(envvar, '') # to make sure it gets restored
monkeypatch.delenv(envvar)
config_stub.set_obj('qt.highdpi', True)
qtargs.init_envvars()
envvar = new_var if new_qt else old_var
assert os.environ[envvar] == '1'
def test_env_vars_webkit(self, monkeypatch, config_stub):
monkeypatch.setattr(qtargs.objects, 'backend',
usertypes.Backend.QtWebKit)
qtargs.init_envvars()
|
from typing import Optional
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt, QSize, QTimer
from PyQt5.QtWidgets import (QLineEdit, QWidget, QHBoxLayout, QLabel,
QStyleOption, QStyle, QLayout, QApplication,
QSplitter)
from PyQt5.QtGui import QValidator, QPainter, QResizeEvent
from qutebrowser.config import config, configfiles
from qutebrowser.utils import utils, log, usertypes
from qutebrowser.misc import cmdhistory
from qutebrowser.browser import inspector
from qutebrowser.keyinput import keyutils, modeman
class MinimalLineEditMixin:
"""A mixin to give a QLineEdit a minimal look and nicer repr()."""
def __init__(self):
self.setStyleSheet( # type: ignore[attr-defined]
"""
QLineEdit {
border: 0px;
padding-left: 1px;
background-color: transparent;
}
"""
)
self.setAttribute( # type: ignore[attr-defined]
Qt.WA_MacShowFocusRect, False)
def keyPressEvent(self, e):
"""Override keyPressEvent to paste primary selection on Shift + Ins."""
if e.key() == Qt.Key_Insert and e.modifiers() == Qt.ShiftModifier:
try:
text = utils.get_clipboard(selection=True, fallback=True)
except utils.ClipboardError:
e.ignore()
else:
e.accept()
self.insert(text) # type: ignore[attr-defined]
return
super().keyPressEvent(e) # type: ignore[misc]
def __repr__(self):
return utils.get_repr(self)
class CommandLineEdit(QLineEdit):
"""A QLineEdit with a history and prompt chars.
Attributes:
history: The command history object.
_validator: The current command validator.
_promptlen: The length of the current prompt.
"""
def __init__(self, *, parent=None):
super().__init__(parent)
self.history = cmdhistory.History(parent=self)
self._validator = _CommandValidator(self)
self.setValidator(self._validator)
self.textEdited.connect(self.on_text_edited)
self.cursorPositionChanged.connect(self.__on_cursor_position_changed)
self._promptlen = 0
def __repr__(self):
return utils.get_repr(self, text=self.text())
@pyqtSlot(str)
def on_text_edited(self, _text):
"""Slot for textEdited. Stop history browsing."""
self.history.stop()
@pyqtSlot(int, int)
def __on_cursor_position_changed(self, _old, new):
"""Prevent the cursor moving to the prompt.
We use __ here to avoid accidentally overriding it in subclasses.
"""
if new < self._promptlen:
self.cursorForward(self.hasSelectedText(), self._promptlen - new)
def set_prompt(self, text):
"""Set the current prompt to text.
This updates the validator, and makes sure the user can't move the
cursor behind the prompt.
"""
self._validator.prompt = text
self._promptlen = len(text)
class _CommandValidator(QValidator):
"""Validator to prevent the : from getting deleted.
Attributes:
prompt: The current prompt.
"""
def __init__(self, parent=None):
super().__init__(parent)
self.prompt = None
def validate(self, string, pos):
"""Override QValidator::validate.
Args:
string: The string to validate.
pos: The current cursor position.
Return:
A tuple (status, string, pos) as a QValidator should.
"""
if self.prompt is None or string.startswith(self.prompt):
return (QValidator.Acceptable, string, pos)
else:
return (QValidator.Invalid, string, pos)
class DetailFold(QWidget):
"""A "fold" widget with an arrow to show/hide details.
Attributes:
_folded: Whether the widget is currently folded or not.
_hbox: The HBoxLayout the arrow/label are in.
_arrow: The FoldArrow widget.
Signals:
toggled: Emitted when the widget was folded/unfolded.
arg 0: bool, if the contents are currently visible.
"""
toggled = pyqtSignal(bool)
def __init__(self, text, parent=None):
super().__init__(parent)
self._folded = True
self._hbox = QHBoxLayout(self)
self._hbox.setContentsMargins(0, 0, 0, 0)
self._arrow = _FoldArrow()
self._hbox.addWidget(self._arrow)
label = QLabel(text)
self._hbox.addWidget(label)
self._hbox.addStretch()
def toggle(self):
"""Toggle the fold of the widget."""
self._folded = not self._folded
self._arrow.fold(self._folded)
self.toggled.emit(not self._folded)
def mousePressEvent(self, e):
"""Toggle the fold if the widget was pressed.
Args:
e: The QMouseEvent.
"""
if e.button() == Qt.LeftButton:
e.accept()
self.toggle()
else:
super().mousePressEvent(e)
class _FoldArrow(QWidget):
"""The arrow shown for the DetailFold widget.
Attributes:
_folded: Whether the widget is currently folded or not.
"""
def __init__(self, parent=None):
super().__init__(parent)
self._folded = True
def fold(self, folded):
"""Fold/unfold the widget.
Args:
folded: The new desired state.
"""
self._folded = folded
self.update()
def paintEvent(self, _event):
"""Paint the arrow.
Args:
_event: The QPaintEvent (unused).
"""
opt = QStyleOption()
opt.initFrom(self)
painter = QPainter(self)
if self._folded:
elem = QStyle.PE_IndicatorArrowRight
else:
elem = QStyle.PE_IndicatorArrowDown
self.style().drawPrimitive(elem, opt, painter, self)
def minimumSizeHint(self):
"""Return a sensible size."""
return QSize(8, 8)
class WrapperLayout(QLayout):
"""A Qt layout which simply wraps a single widget.
This is used so the widget is hidden behind a defined API and can't
easily be accidentally accessed.
"""
def __init__(self, parent=None):
super().__init__(parent)
self._widget: Optional[QWidget] = None
self._container: Optional[QWidget] = None
def addItem(self, _widget):
raise utils.Unreachable
def sizeHint(self):
"""Get the size of the underlying widget."""
if self._widget is None:
return QSize()
return self._widget.sizeHint()
def itemAt(self, _index):
return None
def takeAt(self, _index):
raise utils.Unreachable
def setGeometry(self, rect):
"""Pass through setGeometry calls to the underlying widget."""
if self._widget is None:
return
self._widget.setGeometry(rect)
def wrap(self, container, widget):
"""Wrap the given widget in the given container."""
self._container = container
self._widget = widget
container.setFocusProxy(widget)
widget.setParent(container)
def unwrap(self):
"""Remove the widget from this layout.
Does nothing if nothing was wrapped before.
"""
if self._widget is None:
return
assert self._container is not None
self._widget.setParent(None) # type: ignore[call-overload]
self._widget.deleteLater()
self._widget = None
self._container.setFocusProxy(None) # type: ignore[arg-type]
class FullscreenNotification(QLabel):
"""A label telling the user this page is now fullscreen."""
def __init__(self, parent=None):
super().__init__(parent)
self.setStyleSheet("""
background-color: rgba(50, 50, 50, 80%);
color: white;
border-radius: 20px;
padding: 30px;
""")
all_bindings = config.key_instance.get_reverse_bindings_for('normal')
bindings = all_bindings.get('fullscreen --leave')
if bindings:
key = bindings[0]
self.setText("Press {} to exit fullscreen.".format(key))
else:
self.setText("Page is now fullscreen.")
self.resize(self.sizeHint())
if config.val.content.fullscreen.window:
geom = self.parentWidget().geometry()
else:
geom = QApplication.desktop().screenGeometry(self)
self.move((geom.width() - self.sizeHint().width()) // 2, 30)
def set_timeout(self, timeout):
"""Hide the widget after the given timeout."""
QTimer.singleShot(timeout, self._on_timeout)
@pyqtSlot()
def _on_timeout(self):
"""Hide and delete the widget."""
self.hide()
self.deleteLater()
class InspectorSplitter(QSplitter):
"""Allows putting an inspector inside the tab.
Attributes:
_main_idx: index of the main webview widget
_position: position of the inspector (right/left/top/bottom)
_preferred_size: the preferred size of the inspector widget in pixels
Class attributes:
_PROTECTED_MAIN_SIZE: How much space should be reserved for the main
content (website).
_SMALL_SIZE_THRESHOLD: If the window size is under this threshold, we
consider this a temporary "emergency" situation.
"""
_PROTECTED_MAIN_SIZE = 150
_SMALL_SIZE_THRESHOLD = 300
def __init__(self, win_id: int, main_webview: QWidget,
parent: QWidget = None) -> None:
super().__init__(parent)
self._win_id = win_id
self.addWidget(main_webview)
self.setFocusProxy(main_webview)
self.splitterMoved.connect(self._on_splitter_moved)
self._main_idx: Optional[int] = None
self._inspector_idx: Optional[int] = None
self._position: Optional[inspector.Position] = None
self._preferred_size: Optional[int] = None
def cycle_focus(self):
"""Cycle keyboard focus between the main/inspector widget."""
if self.count() == 1:
raise inspector.Error("No inspector inside main window")
assert self._main_idx is not None
assert self._inspector_idx is not None
main_widget = self.widget(self._main_idx)
inspector_widget = self.widget(self._inspector_idx)
if not inspector_widget.isVisible():
raise inspector.Error("No inspector inside main window")
if main_widget.hasFocus():
inspector_widget.setFocus()
modeman.enter(self._win_id, usertypes.KeyMode.insert,
reason='Inspector focused', only_if_normal=True)
elif inspector_widget.hasFocus():
main_widget.setFocus()
def set_inspector(self, inspector_widget: inspector.AbstractWebInspector,
position: inspector.Position) -> None:
"""Set the position of the inspector."""
assert position != inspector.Position.window
if position in [inspector.Position.right, inspector.Position.bottom]:
self._main_idx = 0
self._inspector_idx = 1
else:
self._inspector_idx = 0
self._main_idx = 1
self.setOrientation(Qt.Horizontal
if position in [inspector.Position.left,
inspector.Position.right]
else Qt.Vertical)
self.insertWidget(self._inspector_idx, inspector_widget)
self._position = position
self._load_preferred_size()
self._adjust_size()
def _save_preferred_size(self) -> None:
"""Save the preferred size of the inspector widget."""
assert self._position is not None
size = str(self._preferred_size)
configfiles.state['inspector'][self._position.name] = size
def _load_preferred_size(self) -> None:
"""Load the preferred size of the inspector widget."""
assert self._position is not None
full = (self.width() if self.orientation() == Qt.Horizontal
else self.height())
# If we first open the inspector with a window size of < 300px
# (self._SMALL_SIZE_THRESHOLD), we don't want to default to half of the
# window size as the small window is likely a temporary situation and
# the inspector isn't very usable in that state.
self._preferred_size = max(self._SMALL_SIZE_THRESHOLD, full // 2)
try:
size = int(configfiles.state['inspector'][self._position.name])
except KeyError:
# First start
pass
except ValueError as e:
log.misc.error("Could not read inspector size: {}".format(e))
else:
self._preferred_size = int(size)
def _adjust_size(self) -> None:
"""Adjust the size of the inspector similarly to Chromium.
In general, we want to keep the absolute size of the inspector (rather
than the ratio) the same, as it's confusing when the layout of its
contents changes.
We're essentially handling three different cases:
1) We have plenty of space -> Keep inspector at the preferred absolute
size.
2) We're slowly running out of space. Make sure the page still has
150px (self._PROTECTED_MAIN_SIZE) left, give the rest to the
inspector.
3) The window is very small (< 300px, self._SMALL_SIZE_THRESHOLD).
Keep Qt's behavior of keeping the aspect ratio, as all hope is lost
at this point.
"""
sizes = self.sizes()
total = sizes[0] + sizes[1]
assert self._main_idx is not None
assert self._inspector_idx is not None
assert self._preferred_size is not None
if total >= self._preferred_size + self._PROTECTED_MAIN_SIZE:
# Case 1 above
sizes[self._inspector_idx] = self._preferred_size
sizes[self._main_idx] = total - self._preferred_size
self.setSizes(sizes)
elif (sizes[self._main_idx] < self._PROTECTED_MAIN_SIZE and
total >= self._SMALL_SIZE_THRESHOLD):
# Case 2 above
handle_size = self.handleWidth()
sizes[self._main_idx] = (
self._PROTECTED_MAIN_SIZE - handle_size // 2)
sizes[self._inspector_idx] = (
total - self._PROTECTED_MAIN_SIZE + handle_size // 2)
self.setSizes(sizes)
else:
# Case 3 above
pass
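# Worked example (a sketch) with _preferred_size=400: at total=1000 the
# inspector gets 400 and the page 600 (case 1); if the window shrinks so
# the page would fall below 150px while the total is still >= 300, the
# page is pinned at ~150px and the inspector gets the rest (case 2); below
# 300px total, Qt's default ratio-keeping behavior is left alone (case 3).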
@pyqtSlot()
def _on_splitter_moved(self) -> None:
assert self._inspector_idx is not None
sizes = self.sizes()
self._preferred_size = sizes[self._inspector_idx]
self._save_preferred_size()
def resizeEvent(self, e: QResizeEvent) -> None:
"""Window resize event."""
super().resizeEvent(e)
if self.count() == 2:
self._adjust_size()
class KeyTesterWidget(QWidget):
"""Widget displaying key presses."""
def __init__(self, parent=None):
super().__init__(parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self._layout = QHBoxLayout(self)
self._label = QLabel(text="Waiting for keypress...")
self._layout.addWidget(self._label)
def keyPressEvent(self, e):
"""Show pressed keys."""
lines = [
str(keyutils.KeyInfo.from_event(e)),
'',
'key: 0x{:x}'.format(int(e.key())),
'modifiers: 0x{:x}'.format(int(e.modifiers())),
'text: {!r}'.format(e.text()),
]
self._label.setText('\n'.join(lines))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
import re
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import units
import six
from six.moves import range
import yaml
FLAGS = flags.FLAGS
INTEGER_GROUP_REGEXP = re.compile(r'(\d+)(-(\d+))?(-(\d+))?$')
INTEGER_GROUP_REGEXP_COLONS = re.compile(r'(-?\d+)(:(-?\d+))?(:(-?\d+))?$')
class IntegerList(object):
"""An immutable list of nonnegative integers.
The list contains either single integers (ex: 5) or ranges (ex:
8-12). Additionally, the user can provide a step to the range like so:
8-24-2. The list can include as many elements as will fit in
memory. Furthermore, the memory required to hold a range will not
grow with the size of the range.
Make a list with
lst = IntegerList(groups)
where groups is a list whose elements are either single integers,
2-tuples holding the low and high bounds of a range
(inclusive), or 3-tuples holding the low and high bounds, followed
by the step size. (Ex: [5, (8,12)] represents the integer list
5,8,9,10,11,12, and [(8, 14, 2)] represents the list 8,10,12,14.)
For negative number ranges use a colon separator (ex: "-2:1" is the integer
list -2, -1, 0, 1).
"""
def __init__(self, groups):
self.groups = groups
length = 0
for elt in groups:
if isinstance(elt, six.integer_types):
length += 1
if isinstance(elt, tuple):
length += len(self._CreateXrangeFromTuple(elt))
self.length = length
def __len__(self):
return self.length
def __getitem__(self, idx):
if not isinstance(idx, int):
raise TypeError()
if idx < 0 or idx >= self.length:
raise IndexError()
group_idx = 0
while idx > 0:
group = self.groups[group_idx]
if not isinstance(group, tuple):
group_idx += 1
idx -= 1
else:
group_len = len(self._CreateXrangeFromTuple(group))
if idx >= group_len:
group_idx += 1
idx -= group_len
else:
step = 1 if len(group) == 2 else group[2]
return group[0] + idx * step
if isinstance(self.groups[group_idx], tuple):
return self.groups[group_idx][0]
else:
return self.groups[group_idx]
def __eq__(self, other):
if other is None:
return False
return tuple(self) == tuple(other)
def __ne__(self, other):
if other is None:
return True
return tuple(self) != tuple(other)
def __iter__(self):
for group in self.groups:
if isinstance(group, six.integer_types):
yield group
else:
for val in self._CreateXrangeFromTuple(group):
yield val
def __str__(self):
return IntegerListSerializer().serialize(self)
def __repr__(self):
return 'IntegerList([%s])' % self
def _CreateXrangeFromTuple(self, input_tuple):
start = input_tuple[0]
step = 1 if len(input_tuple) == 2 else input_tuple[2]
stop_inclusive = input_tuple[1] + (1 if step > 0 else -1)
return range(start, stop_inclusive, step)
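# Usage sketch (values follow from the class docstring above):
#
#     lst = IntegerList([5, (8, 12)])
#     len(lst)    # -> 6
#     lst[1]      # -> 8
#     list(lst)   # -> [5, 8, 9, 10, 11, 12]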
def _IsNonIncreasing(result, val):
"""Determines if result would be non-increasing if val is appended.
Args:
result: list integers and/or range tuples.
val: integer or range tuple to append.
Returns:
bool indicating if the appended list is non-increasing.
"""
if result:
if isinstance(result[-1], tuple):
# extract high from previous tuple
prev = result[-1][1]
else:
# previous is int
prev = result[-1]
if val <= prev:
return True
return False
class IntegerListParser(flags.ArgumentParser):
"""Parse a string containing a comma-separated list of nonnegative integers.
The list may contain single integers and dash-separated ranges. For
example, "1,3,5-7" parses to [1,3,5,6,7] and "1-7-3" parses to
[1,4,7].
Can pass the flag on_nonincreasing to the constructor to tell it
what to do if the list is nonincreasing. Options are
- None: do nothing.
- IntegerListParser.WARN: log a warning.
- IntegerListParser.EXCEPTION: raise a ValueError.
As a special case, instead of a string, can pass a list of integers
or an IntegerList. In these cases, the return value iterates over
the same integers as were in the argument.
For negative number ranges use a colon separator, for example "-3:4:2" parses
to [-3, -1, 1, 3].
"""
syntactic_help = ('A comma-separated list of integers or integer '
'ranges. Ex: -1,3,5:7 is read as -1,3,5,6,7.')
WARN = 'warn'
EXCEPTION = 'exception'
def __init__(self, on_nonincreasing=None):
super(IntegerListParser, self).__init__()
self.on_nonincreasing = on_nonincreasing
def parse(self, inp):
"""Parse an integer list.
Args:
inp: a string, a list, or an IntegerList.
Returns:
An iterable of integers.
Raises:
ValueError: if inp doesn't follow a format it recognizes.
"""
if isinstance(inp, IntegerList):
return inp
elif isinstance(inp, list):
return IntegerList(inp)
elif isinstance(inp, int):
return IntegerList([inp])
def HandleNonIncreasing():
if self.on_nonincreasing == IntegerListParser.WARN:
logging.warning('Integer list %s is not increasing', inp)
elif self.on_nonincreasing == IntegerListParser.EXCEPTION:
raise ValueError('Integer list %s is not increasing' % inp)
groups = inp.split(',')
result = []
for group in groups:
match = INTEGER_GROUP_REGEXP.match(
group) or INTEGER_GROUP_REGEXP_COLONS.match(group)
if match is None:
raise ValueError('Invalid integer list %s' % inp)
elif match.group(2) is None:
val = int(match.group(1))
if _IsNonIncreasing(result, val):
HandleNonIncreasing()
result.append(val)
else:
low = int(match.group(1))
high = int(match.group(3))
step = int(match.group(5)) if match.group(5) is not None else 1
step = -step if step > 0 and low > high else step
if high <= low or (_IsNonIncreasing(result, low)):
HandleNonIncreasing()
result.append((low, high, step))
return IntegerList(result)
def flag_type(self):
return 'integer list'
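# Usage sketch for the parser above:
#
#     parser = IntegerListParser(on_nonincreasing=IntegerListParser.WARN)
#     list(parser.parse('1,3,5-7'))  # -> [1, 3, 5, 6, 7]
#     list(parser.parse('-3:4:2'))   # -> [-3, -1, 1, 3]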
class IntegerListSerializer(flags.ArgumentSerializer):
def _SerializeRange(self, val):
separator = ':' if any(item < 0 for item in val) else '-'
return separator.join(str(item) for item in val)
def serialize(self, il):
return ','.join([str(val) if isinstance(val, six.integer_types)
else self._SerializeRange(val)
for val in il.groups])
def DEFINE_integerlist(name, default, help, on_nonincreasing=None,
flag_values=FLAGS, **kwargs):
"""Register a flag whose value must be an integer list."""
parser = IntegerListParser(on_nonincreasing=on_nonincreasing)
serializer = IntegerListSerializer()
flags.DEFINE(parser, name, default, help, flag_values, serializer, **kwargs)
class OverrideFlags(object):
"""Context manager that applies any config_dict overrides to flag_values."""
def __init__(self, flag_values, config_dict):
"""Initializes an OverrideFlags context manager.
Args:
flag_values: FlagValues that is temporarily modified so that any options
in override_dict that are not 'present' in flag_values are applied to
flag_values.
Upon exit, flag_values will be restored to its original state.
config_dict: Merged config flags from the benchmark config and benchmark
configuration yaml file.
"""
self._flag_values = flag_values
self._config_dict = config_dict
self._flags_to_reapply = {}
def __enter__(self):
"""Overrides flag_values with options in override_dict."""
if not self._config_dict:
return
for key, value in six.iteritems(self._config_dict):
if key not in self._flag_values:
raise errors.Config.UnrecognizedOption(
'Unrecognized option {0}.{1}. Each option within {0} must '
'correspond to a valid command-line flag.'.format('flags', key))
if not self._flag_values[key].present:
self._flags_to_reapply[key] = self._flag_values[key].value
try:
self._flag_values[key].parse(value) # Set 'present' to True.
except flags.IllegalFlagValueError as e:
raise errors.Config.InvalidValue(
'Invalid {0}.{1} value: "{2}" (of type "{3}").{4}{5}'.format(
'flags', key, value,
value.__class__.__name__, os.linesep, e))
def __exit__(self, *unused_args, **unused_kwargs):
"""Restores flag_values to its original state."""
if not self._flags_to_reapply:
return
for key, value in six.iteritems(self._flags_to_reapply):
self._flag_values[key].value = value
self._flag_values[key].present = 0
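# Usage sketch; 'num_vms' is a hypothetical flag name:
#
#     with OverrideFlags(FLAGS, {'num_vms': 3}):
#         ...  # FLAGS.num_vms is 3 here unless set on the command line
#     # On exit, FLAGS.num_vms is restored to its previous value.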
class UnitsParser(flags.ArgumentParser):
"""Parse a flag containing a unit expression.
Attributes:
convertible_to: list of units.Unit instances. A parsed expression must be
convertible to at least one of the Units in this list. For example,
if the parser requires that its inputs are convertible to bits, then
values expressed in KiB and GB are valid, but values expressed in meters
are not.
"""
syntactic_help = ('A quantity with a unit. Ex: 12.3MB.')
def __init__(self, convertible_to):
"""Initialize the UnitsParser.
Args:
convertible_to: Either an individual unit specification or a series of
unit specifications, where each unit specification is either a string
(e.g. 'byte') or a units.Unit. The parser input must be convertible to
at least one of the specified Units, or the parse() method will raise
a ValueError.
"""
if isinstance(convertible_to, (six.string_types, units.Unit)):
self.convertible_to = [units.Unit(convertible_to)]
else:
self.convertible_to = [units.Unit(u) for u in convertible_to]
def parse(self, inp):
"""Parse the input.
Args:
inp: a string or a units.Quantity. If a string, it has the format
"<number><units>", as in "12KB", or "2.5GB".
Returns:
A units.Quantity.
Raises:
ValueError: If the input cannot be parsed, or if it parses to a value with
improper units.
"""
if isinstance(inp, units.Quantity):
quantity = inp
else:
try:
quantity = units.ParseExpression(inp)
except Exception as e:
raise ValueError("Couldn't parse unit expression %r: %s" %
(inp, str(e)))
if not isinstance(quantity, units.Quantity):
raise ValueError('Expression %r evaluates to a unitless value.' % inp)
for unit in self.convertible_to:
try:
quantity.to(unit)
break
except units.DimensionalityError:
pass
else:
raise ValueError(
'Expression {0!r} is not convertible to an acceptable unit '
'({1}).'.format(inp, ', '.join(str(u) for u in self.convertible_to)))
return quantity
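# Usage sketch, echoing the docstring examples above:
#
#     parser = UnitsParser(convertible_to=units.byte)
#     parser.parse('2.5GB')      # -> a units.Quantity of 2.5 gigabytes
#     parser.parse('12 meters')  # raises ValueError (not convertible)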
class UnitsSerializer(flags.ArgumentSerializer):
def serialize(self, units):
return str(units)
def DEFINE_units(name, default, help, convertible_to,
flag_values=flags.FLAGS, **kwargs):
"""Register a flag whose value is a units expression.
Args:
name: string. The name of the flag.
default: units.Quantity. The default value.
help: string. A help message for the user.
convertible_to: Either an individual unit specification or a series of unit
specifications, where each unit specification is either a string (e.g.
'byte') or a units.Unit. The flag value must be convertible to at least
one of the specified Units to be considered valid.
flag_values: the absl.flags.FlagValues object to define the flag in.
"""
parser = UnitsParser(convertible_to=convertible_to)
serializer = UnitsSerializer()
flags.DEFINE(parser, name, default, help, flag_values, serializer, **kwargs)
def StringToBytes(string):
"""Convert an object size, represented as a string, to bytes.
Args:
string: the object size, as a string with a quantity and a unit.
Returns:
an integer. The number of bytes in the size.
Raises:
ValueError, if either the string does not represent an object size
or if the size does not contain an integer number of bytes.
"""
try:
quantity = units.ParseExpression(string)
except Exception:
# Catching all exceptions is ugly, but we don't know what sort of
# exception pint might throw, and we want to turn any of them into
# ValueError.
raise ValueError("Couldn't parse size %s" % string)
try:
bytes = quantity.m_as(units.byte)
except units.DimensionalityError:
raise ValueError("Quantity %s is not a size" % string)
if bytes != int(bytes):
raise ValueError("Size %s has a non-integer number (%s) of bytes!" %
(string, bytes))
if bytes < 0:
raise ValueError("Size %s has a negative number of bytes!" % string)
return int(bytes)
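# Worked examples for StringToBytes (a sketch; '2KiB' assumes the units
# module defines the usual binary prefixes):
#
#     StringToBytes('2KiB')   # -> 2048
#     StringToBytes('2.5B')   # raises ValueError (non-integer byte count)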
def StringToRawPercent(string):
"""Convert a string to a raw percentage value.
Args:
string: the percentage, with '%' on the end.
Returns:
A floating-point number, holding the percentage value.
Raises:
ValueError, if the string can't be read as a percentage.
"""
if len(string) <= 1:
raise ValueError("String '%s' too short to be percentage." % string)
if string[-1] != '%':
raise ValueError("Percentage '%s' must end with '%%'" % string)
# This will raise a ValueError if it can't convert the string to a float.
val = float(string[:-1])
if val < 0.0 or val > 100.0:
raise ValueError('Quantity %s is not a valid percentage' % val)
return val
# The YAML flag type is necessary because flags can be read either via
# the command line or from a config file. If they come from a config
# file, they will already be parsed as YAML, but if they come from the
# command line, they will be raw strings. The point of this flag is to
# guarantee a consistent representation to the rest of the program.
class YAMLParser(flags.ArgumentParser):
"""Parse a flag containing YAML."""
syntactic_help = 'A YAML expression.'
def parse(self, inp):
"""Parse the input.
Args:
inp: A string or the result of yaml.safe_load. If a string, should be
a valid YAML document.
"""
if isinstance(inp, six.string_types):
# This will work unless the user writes a config with a quoted
# string that, if unquoted, would be parsed as a non-string
# Python type (example: '123'). In that case, the first
# yaml.safe_load() in the config system will strip away the quotation
# marks, and this second yaml.safe_load() will parse it as the
# non-string type. However, I think this is the best we can do
# without significant changes to the config system, and the
# problem is unlikely to occur in PKB.
try:
return yaml.safe_load(inp)
except yaml.YAMLError as e:
raise ValueError("Couldn't parse YAML string '%s': %s" %
(inp, str(e)))
else:
return inp
class YAMLSerializer(flags.ArgumentSerializer):
def serialize(self, val):
return yaml.dump(val)
def DEFINE_yaml(name, default, help, flag_values=flags.FLAGS, **kwargs):
"""Register a flag whose value is a YAML expression.
Args:
name: string. The name of the flag.
default: object. The default value of the flag.
help: string. A help message for the user.
flag_values: the absl.flags.FlagValues object to define the flag in.
kwargs: extra arguments to pass to absl.flags.DEFINE().
"""
parser = YAMLParser()
serializer = YAMLSerializer()
flags.DEFINE(parser, name, default, help, flag_values, serializer, **kwargs)
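# Usage sketch for the YAML parser above:
#
#     YAMLParser().parse('[1, 2, 3]')  # -> [1, 2, 3]
#     YAMLParser().parse({'a': 1})     # already parsed, returned unchanged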
def ParseKeyValuePairs(strings):
"""Parses colon separated key value pairs from a list of strings.
Pairs should be separated by a comma and key and value by a colon, e.g.,
['k1:v1', 'k2:v2,k3:v3'].
Args:
strings: A list of strings.
Returns:
A dict populated with keys and values from the flag.
"""
pairs = {}
for pair in [kv for s in strings for kv in s.split(',')]:
try:
key, value = pair.split(':', 1)
pairs[key] = value
except ValueError:
logging.error('Bad key value pair format. Skipping "%s".', pair)
continue
return pairs
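# Worked example, matching the docstring above:
#
#     ParseKeyValuePairs(['k1:v1', 'k2:v2,k3:v3'])
#     # -> {'k1': 'v1', 'k2': 'v2', 'k3': 'v3'}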
def GetProvidedCommandLineFlags():
"""Return flag names and values that were specified on the command line.
Returns:
A dictionary of provided flags in the form: {flag_name: flag_value}.
"""
return {k: FLAGS[k].value for k in FLAGS if FLAGS[k].present}
|
import mock
from scrapy.http import Request
from scrapy.settings import Settings
from scrapy_redis.dupefilter import RFPDupeFilter
def get_redis_mock():
server = mock.Mock()
def sadd(key, fp, added=0, db={}):
# Mimic Redis SADD: return 1 when the fingerprint is newly added
# to the set stored under key, 0 when it was already present.
fingerprints = db.setdefault(key, set())
if fp not in fingerprints:
fingerprints.add(fp)
added += 1
return added
server.sadd = sadd
return server
class TestRFPDupeFilter(object):
def setup(self):
self.server = get_redis_mock()
self.key = 'dupefilter:1'
self.df = RFPDupeFilter(self.server, self.key)
def test_request_seen(self):
req = Request('http://example.com')
assert not self.df.request_seen(req)
assert self.df.request_seen(req)
def test_overridable_request_fingerprinter(self):
req = Request('http://example.com')
self.df.request_fingerprint = mock.Mock(wraps=self.df.request_fingerprint)
assert not self.df.request_seen(req)
self.df.request_fingerprint.assert_called_with(req)
def test_clear_deletes(self):
self.df.clear()
self.server.delete.assert_called_with(self.key)
def test_close_calls_clear(self):
self.df.clear = mock.Mock(wraps=self.df.clear)
self.df.close()
self.df.close(reason='foo')
assert self.df.clear.call_count == 2
def test_log_dupes():
def _test(df, dupes, logcount):
df.logger.debug = mock.Mock(wraps=df.logger.debug)
for i in range(dupes):
req = Request('http://example')
df.log(req, spider=mock.Mock())
assert df.logger.debug.call_count == logcount
server = get_redis_mock()
df_quiet = RFPDupeFilter(server, 'foo') # debug=False
_test(df_quiet, 5, 1)
df_debug = RFPDupeFilter(server, 'foo', debug=True)
_test(df_debug, 5, 5)
@mock.patch('scrapy_redis.dupefilter.get_redis_from_settings')
class TestFromMethods(object):
def setup(self):
self.settings = Settings({
'DUPEFILTER_DEBUG': True,
})
def test_from_settings(self, get_redis_from_settings):
df = RFPDupeFilter.from_settings(self.settings)
self.assert_dupefilter(df, get_redis_from_settings)
def test_from_crawler(self, get_redis_from_settings):
crawler = mock.Mock(settings=self.settings)
df = RFPDupeFilter.from_crawler(crawler)
self.assert_dupefilter(df, get_redis_from_settings)
def assert_dupefilter(self, df, get_redis_from_settings):
assert df.server is get_redis_from_settings.return_value
assert df.key.startswith('dupefilter:')
assert df.debug # true
|
from xknx.devices import Weather as XknxWeather
from homeassistant.components.weather import WeatherEntity
from homeassistant.const import TEMP_CELSIUS
from .const import DOMAIN
from .knx_entity import KnxEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the scenes for KNX platform."""
entities = []
for device in hass.data[DOMAIN].xknx.devices:
if isinstance(device, XknxWeather):
entities.append(KNXWeather(device))
async_add_entities(entities)
class KNXWeather(KnxEntity, WeatherEntity):
"""Representation of a KNX weather device."""
def __init__(self, device: XknxWeather):
"""Initialize of a KNX sensor."""
super().__init__(device)
@property
def temperature(self):
"""Return current temperature."""
return self._device.temperature
@property
def temperature_unit(self):
"""Return temperature unit."""
return TEMP_CELSIUS
@property
def pressure(self):
"""Return current air pressure."""
# KNX returns Pa - HA requires hPa
return (
self._device.air_pressure / 100
if self._device.air_pressure is not None
else None
)
@property
def condition(self):
"""Return current weather condition."""
return self._device.ha_current_state().value
@property
def humidity(self):
"""Return current humidity."""
return self._device.humidity
@property
def wind_speed(self):
"""Return current wind speed in km/h."""
# KNX only supports wind speed in m/s
return (
self._device.wind_speed * 3.6
if self._device.wind_speed is not None
else None
)
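# Worked unit conversions for the properties above (a sketch):
# air_pressure 101325 Pa -> 101325 / 100 = 1013.25 hPa reported to HA;
# wind_speed 5 m/s -> 5 * 3.6 = 18.0 km/h.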
|
from datetime import timedelta
from homeassistant import data_entry_flow
from homeassistant.components.luftdaten import DOMAIN, config_flow
from homeassistant.components.luftdaten.const import CONF_SENSOR_ID
from homeassistant.const import CONF_SCAN_INTERVAL, CONF_SHOW_ON_MAP
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {CONF_SENSOR_ID: "12345abcde"}
MockConfigEntry(domain=DOMAIN, data=conf).add_to_hass(hass)
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {CONF_SENSOR_ID: "already_configured"}
async def test_communication_error(hass):
"""Test that no sensor is added while unable to communicate with API."""
conf = {CONF_SENSOR_ID: "12345abcde"}
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
with patch("luftdaten.Luftdaten.get_data", return_value=None):
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {CONF_SENSOR_ID: "invalid_sensor"}
async def test_invalid_sensor(hass):
"""Test that an invalid sensor throws an error."""
conf = {CONF_SENSOR_ID: "12345abcde"}
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
with patch("luftdaten.Luftdaten.get_data", return_value=False), patch(
"luftdaten.Luftdaten.validate_sensor", return_value=False
):
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {CONF_SENSOR_ID: "invalid_sensor"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import(hass):
"""Test that the import step works."""
conf = {CONF_SENSOR_ID: "12345abcde", CONF_SHOW_ON_MAP: False}
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
with patch("luftdaten.Luftdaten.get_data", return_value=True), patch(
"luftdaten.Luftdaten.validate_sensor", return_value=True
):
result = await flow.async_step_import(import_config=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "12345abcde"
assert result["data"] == {
CONF_SENSOR_ID: "12345abcde",
CONF_SHOW_ON_MAP: False,
CONF_SCAN_INTERVAL: 600,
}
async def test_step_user(hass):
"""Test that the user step works."""
conf = {
CONF_SENSOR_ID: "12345abcde",
CONF_SHOW_ON_MAP: False,
CONF_SCAN_INTERVAL: timedelta(minutes=5),
}
flow = config_flow.LuftDatenFlowHandler()
flow.hass = hass
with patch("luftdaten.Luftdaten.get_data", return_value=True), patch(
"luftdaten.Luftdaten.validate_sensor", return_value=True
):
result = await flow.async_step_user(user_input=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "12345abcde"
assert result["data"] == {
CONF_SENSOR_ID: "12345abcde",
CONF_SHOW_ON_MAP: False,
CONF_SCAN_INTERVAL: 300,
}
|
from dataclasses import dataclass
import pytest
import homeassistant.components.azure_event_hub as azure_event_hub
from homeassistant.const import STATE_ON
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
AZURE_EVENT_HUB_PATH = "homeassistant.components.azure_event_hub"
PRODUCER_PATH = f"{AZURE_EVENT_HUB_PATH}.EventHubProducerClient"
MIN_CONFIG = {
"event_hub_namespace": "namespace",
"event_hub_instance_name": "name",
"event_hub_sas_policy": "policy",
"event_hub_sas_key": "key",
}
@dataclass
class FilterTest:
"""Class for capturing a filter test."""
id: str
should_pass: bool
@pytest.fixture(autouse=True, name="mock_client", scope="module")
def mock_client_fixture():
"""Mock the azure event hub producer client."""
with patch(f"{PRODUCER_PATH}.send_batch") as mock_send_batch, patch(
f"{PRODUCER_PATH}.close"
) as mock_close, patch(f"{PRODUCER_PATH}.__init__", return_value=None) as mock_init:
yield (
mock_init,
mock_send_batch,
mock_close,
)
@pytest.fixture(autouse=True, name="mock_batch")
def mock_batch_fixture():
"""Mock batch creator and return mocked batch object."""
mock_batch = MagicMock()
with patch(f"{PRODUCER_PATH}.create_batch", return_value=mock_batch):
yield mock_batch
@pytest.fixture(autouse=True, name="mock_policy")
def mock_policy_fixture():
"""Mock azure shared key credential."""
with patch(f"{AZURE_EVENT_HUB_PATH}.EventHubSharedKeyCredential") as policy:
yield policy
@pytest.fixture(autouse=True, name="mock_event_data")
def mock_event_data_fixture():
"""Mock the azure event data component."""
with patch(f"{AZURE_EVENT_HUB_PATH}.EventData") as event_data:
yield event_data
@pytest.fixture(autouse=True, name="mock_call_later")
def mock_call_later_fixture():
"""Mock async_call_later to allow queue processing on demand."""
with patch(f"{AZURE_EVENT_HUB_PATH}.async_call_later") as mock_call_later:
yield mock_call_later
async def test_minimal_config(hass):
"""Test the minimal config and defaults of component."""
config = {azure_event_hub.DOMAIN: MIN_CONFIG}
assert await async_setup_component(hass, azure_event_hub.DOMAIN, config)
async def test_full_config(hass):
"""Test the full config of component."""
config = {
azure_event_hub.DOMAIN: {
"send_interval": 10,
"max_delay": 10,
"filter": {
"include_domains": ["light"],
"include_entity_globs": ["sensor.included_*"],
"include_entities": ["binary_sensor.included"],
"exclude_domains": ["light"],
"exclude_entity_globs": ["sensor.excluded_*"],
"exclude_entities": ["binary_sensor.excluded"],
},
}
}
config[azure_event_hub.DOMAIN].update(MIN_CONFIG)
assert await async_setup_component(hass, azure_event_hub.DOMAIN, config)
async def _setup(hass, mock_call_later, filter_config):
"""Shared set up for filtering tests."""
config = {azure_event_hub.DOMAIN: {"filter": filter_config}}
config[azure_event_hub.DOMAIN].update(MIN_CONFIG)
assert await async_setup_component(hass, azure_event_hub.DOMAIN, config)
await hass.async_block_till_done()
mock_call_later.assert_called_once()
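# async_call_later was called as (hass, interval, action); return the
# captured action so tests can process the queue on demand.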
return mock_call_later.call_args[0][2]
async def _run_filter_tests(hass, tests, process_queue, mock_batch):
"""Run a series of filter tests on azure event hub."""
for test in tests:
hass.states.async_set(test.id, STATE_ON)
await hass.async_block_till_done()
await process_queue(None)
if test.should_pass:
mock_batch.add.assert_called_once()
mock_batch.add.reset_mock()
else:
mock_batch.add.assert_not_called()
async def test_allowlist(hass, mock_batch, mock_call_later):
"""Test an allowlist only config."""
process_queue = await _setup(
hass,
mock_call_later,
{
"include_domains": ["light"],
"include_entity_globs": ["sensor.included_*"],
"include_entities": ["binary_sensor.included"],
},
)
tests = [
FilterTest("climate.excluded", False),
FilterTest("light.included", True),
FilterTest("sensor.excluded_test", False),
FilterTest("sensor.included_test", True),
FilterTest("binary_sensor.included", True),
FilterTest("binary_sensor.excluded", False),
]
await _run_filter_tests(hass, tests, process_queue, mock_batch)
async def test_denylist(hass, mock_batch, mock_call_later):
"""Test a denylist only config."""
process_queue = await _setup(
hass,
mock_call_later,
{
"exclude_domains": ["climate"],
"exclude_entity_globs": ["sensor.excluded_*"],
"exclude_entities": ["binary_sensor.excluded"],
},
)
tests = [
FilterTest("climate.excluded", False),
FilterTest("light.included", True),
FilterTest("sensor.excluded_test", False),
FilterTest("sensor.included_test", True),
FilterTest("binary_sensor.included", True),
FilterTest("binary_sensor.excluded", False),
]
await _run_filter_tests(hass, tests, process_queue, mock_batch)
async def test_filtered_allowlist(hass, mock_batch, mock_call_later):
"""Test an allowlist config with a filtering denylist."""
process_queue = await _setup(
hass,
mock_call_later,
{
"include_domains": ["light"],
"include_entity_globs": ["*.included_*"],
"exclude_domains": ["climate"],
"exclude_entity_globs": ["*.excluded_*"],
"exclude_entities": ["light.excluded"],
},
)
tests = [
FilterTest("light.included", True),
FilterTest("light.excluded_test", False),
FilterTest("light.excluded", False),
FilterTest("sensor.included_test", True),
FilterTest("climate.included_test", False),
]
await _run_filter_tests(hass, tests, process_queue, mock_batch)
async def test_filtered_denylist(hass, mock_batch, mock_call_later):
"""Test a denylist config with a filtering allowlist."""
process_queue = await _setup(
hass,
mock_call_later,
{
"include_entities": ["climate.included", "sensor.excluded_test"],
"exclude_domains": ["climate"],
"exclude_entity_globs": ["*.excluded_*"],
"exclude_entities": ["light.excluded"],
},
)
tests = [
FilterTest("climate.excluded", False),
FilterTest("climate.included", True),
FilterTest("switch.excluded_test", False),
FilterTest("sensor.excluded_test", True),
FilterTest("light.excluded", False),
FilterTest("light.included", True),
]
await _run_filter_tests(hass, tests, process_queue, mock_batch)
|
import unittest
from plumbum import local, SshMachine
from parallel import Cluster
TEST_HOST = "127.0.0.1"
class TestParallel(unittest.TestCase):
def setUp(self):
self.remotes = []
def connect(self):
m = SshMachine(TEST_HOST)
self.remotes.append(m)
return m
def tearDown(self):
for m in self.remotes:
m.close()
def test_parallel(self):
m = Cluster(local, local)
import time
t = time.time()
ret = m["sleep"]("2")
assert len(ret) == 2
assert 2 <= time.time() - t < 4
def test_locals(self):
m = Cluster(local, local, local)
# we should get 3 different proc ids
ret = m["bash"]["-c"]["echo $$"]()
ret = list(map(int, ret))
assert len(set(ret)) == 3
def test_sessions(self):
m = Cluster(local, self.connect(), local, self.connect())
# we should get 4 different proc ids
ret, stdout, stderr = m.session().run("echo $$")
ret = [int(pid) for pid in stdout]
assert len(set(ret)) == 4
def test_commands(self):
cmds = local["echo"]["1"] & local["echo"]["2"]
ret = cmds()
a, b = map(int, ret)
assert (a, b) == (1, 2)
|
from unittest import TestCase
from django.http import HttpRequest
from weblate.utils.views import get_page_limit
def fake_request(page, limit):
request = HttpRequest()
request.GET["page"] = page
request.GET["limit"] = limit
return request
class PageLimitTest(TestCase):
def test_defaults(self):
self.assertEqual((1, 42), get_page_limit(fake_request("x", "x"), 42))
def test_negative(self):
self.assertEqual((1, 10), get_page_limit(fake_request("-1", "-1"), 42))
def test_valid(self):
self.assertEqual((33, 66), get_page_limit(fake_request("33", "66"), 42))
|
from flexx.util.testing import run_tests_if_main, skipif, skip, raises
from flexx.event.both_tester import run_in_both, this_is_js
from flexx.event._js import create_js_component_class
from flexx.event import mutate_array, Dict
from flexx import event
loop = event.loop
Component = event.Component
class Foo(event.Component):
an_attr = event.Attribute()
spam = 3
eggs = [1, 2, 3]
a_prop = event.AnyProp(settable=True)
def init(self):
super().init()
self._an_attr = 54
class FooSubclass(Foo):
pass
class Bar(event.Component):
a_prop = event.AnyProp()
@event.action
def a_action(self):
pass
@event.reaction
def a_reaction(self):
pass
@event.emitter
def a_emitter(self, v):
return {}
@event.emitter # deliberately define it twice
def a_emitter(self, v):
return {'x':1}
class Bar2(Bar):
pass
@run_in_both(FooSubclass)
def test_component_id1():
"""
? Component
? Foo
? FooSubclass
"""
f = Component()
print(f.id)
f = Foo()
print(f.id)
f = FooSubclass()
print(f.id)
@run_in_both(FooSubclass, js=False)
def test_component_id2():
"""
true
true
true
"""
f = Component()
print(f.id in str(f))
f = Foo()
print(f.id in str(f))
f = FooSubclass()
print(f.id in str(f))
@run_in_both(Foo)
def test_component_pending_events():
"""
2
None
"""
f = Foo()
print(len(f._Component__pending_events)) # The event for foo, plus None-mark
loop.iter()
# It's important that we don't keep collecting events, for obvious reasons
print(f._Component__pending_events)
@run_in_both(Foo, Bar)
def test_component_class_attributes1():
"""
Component
[]
[]
[]
[]
Foo
['set_a_prop']
[]
[]
['a_prop']
Bar
['a_action']
['a_reaction']
['a_emitter']
['a_prop']
"""
print('Component')
c = Component()
print(c.__actions__)
print(c.__reactions__)
print(c.__emitters__)
print(c.__properties__)
print('Foo')
c = Foo()
print(c.__actions__)
print(c.__reactions__)
print(c.__emitters__)
print(c.__properties__)
print('Bar')
c = Bar()
print(c.__actions__)
print(c.__reactions__)
print(c.__emitters__)
print(c.__properties__)
@run_in_both(Foo)
def test_component_class_attributes2():
"""
3
[1, 2, 3]
"""
f = Foo()
print(f.spam)
print(f.eggs)
@run_in_both(FooSubclass)
def test_component_class_attributes3():
"""
3
[1, 2, 3]
"""
f = FooSubclass()
print(f.spam)
print(f.eggs)
class CompWithInit1(event.Component):
def init(self, a, b=3):
print('i', a, b)
@run_in_both(CompWithInit1)
def test_component_init1():
"""
i 1 2
i 1 3
"""
CompWithInit1(1, 2)
CompWithInit1(1)
class CompWithInit2(event.Component):
foo1 = event.IntProp(1)
foo2 = event.IntProp(2, settable=True)
foo3 = event.IntProp(3)
def init(self, set_foos):
if set_foos:
self._mutate_foo1(11)
self.set_foo2(12)
self.set_foo3(13)
@event.action
def set_foo3(self, v):
self._mutate_foo3(v+100)
@run_in_both(CompWithInit2)
def test_component_init2():
"""
1 2 103
11 12 113
6 7 108
11 12 113
12
99
"""
m = CompWithInit2(False)
print(m.foo1, m.foo2, m.foo3)
m = CompWithInit2(True)
print(m.foo1, m.foo2, m.foo3)
m = CompWithInit2(False, foo1=6, foo2=7, foo3=8)
print(m.foo1, m.foo2, m.foo3)
m = CompWithInit2(True, foo1=6, foo2=7, foo3=8)
print(m.foo1, m.foo2, m.foo3)
# This works because, when a component is "active", it allows mutations
m.set_foo2(99)
print(m.foo2)
with m:
m.set_foo2(99)
print(m.foo2)
class CompWithInit3(event.Component):
sub = event.ComponentProp(settable=True)
@event.reaction('sub.a_prop')
def _on_sub(self, *events):
for ev in events:
print('sub prop changed', ev.new_value)
@run_in_both(CompWithInit3, Foo)
def test_component_init3():
"""
sub prop changed 7
sub prop changed 9
"""
# Verify that reconnect events are handled ok when applying events in init
f1 = Foo(a_prop=7)
f2 = Foo(a_prop=8)
c = CompWithInit3(sub=f1)
# Simulate that we're in a component's init
with c:
c.set_sub(f2)
f2.set_a_prop(9)
# In the iter, the pending events will be flushed. One of these events
# is the changed sub. We don't want to reconnect for properties that
# did not change (because that's a waste of CPU cycles), but we must not miss
# any changes.
loop.iter()
class CompWithInit4(event.Component):
a_prop = event.IntProp(settable=True)
def init(self, other, value):
self.set_a_prop(value)
other.set_a_prop(value)
@event.action
def create(self, other, value):
self.set_a_prop(value)
CompWithInit4(other, value)
@run_in_both(CompWithInit4, Foo)
def test_component_init4():
"""
0 8
8 8
0 9
9 9
"""
# Verify that the behavior of an init() (can mutate self, but not other
# components) is consistent, also when instantiated from an action.
c1 = Foo(a_prop=0)
c2 = Foo(a_prop=0)
c3 = CompWithInit4(c1, 8)
print(c1.a_prop, c3.a_prop)
loop.iter()
print(c1.a_prop, c3.a_prop)
c3.create(c2, 9)
loop.iter()
print(c2.a_prop, c3.a_prop)
loop.iter()
print(c2.a_prop, c3.a_prop)
@run_in_both(Foo)
def test_component_instance_attributes1():
"""
? Component
54
? cannot set
? attribute, not a property
"""
c = Component()
print(c.id)
c = Foo()
print(c.an_attr)
try:
c.an_attr = 0
except Exception as err:
print(err)
try:
Foo(an_attr=3)
except AttributeError as err:
print(err)
def test_component_instance_attributes2(): # Py only
with raises(TypeError):
class X(Component):
a = event.Attribute(doc=3)
@run_in_both(Foo, Bar)
def test_component_event_types():
"""
[]
['a_prop']
['a_emitter', 'a_prop']
"""
c = Component()
print(c.get_event_types())
c = Foo()
print(c.get_event_types())
c = Bar()
print(c.get_event_types())
class Foo2(event.Component):
@event.reaction('!x')
def spam(self, *events):
pass
@event.reaction('!x')
def eggs(self, *events):
pass
@run_in_both(Foo2)
def test_get_event_handlers():
"""
['bar', 'eggs', 'spam']
['zz2', 'bar', 'eggs', 'spam', 'zz1']
[]
fail ValueError
"""
foo = Foo2()
def bar(*events):
pass
bar = foo.reaction('!x', bar)
# sorted by label name
print([r.get_name() for r in foo.get_event_handlers('x')])
def zz1(*events):
pass
def zz2(*events):
pass
zz1 = foo.reaction('!x', zz1)
zz2 = foo.reaction('!x:a', zz2)
# sorted by label name
print([r.get_name() for r in foo.get_event_handlers('x')])
# Non-existing event type is ok
print([r.get_name() for r in foo.get_event_handlers('y')])
# No labels allowed
try:
foo.get_event_handlers('x:a')
except ValueError:
print('fail ValueError')
def test_that_methods_starting_with_on_are_not_autoconverted():
# Because we did that at some point
# There is also a warning, but it seems a bit of a fuss to test
class Foo3(event.Component):
def on_foo(self, *events):
pass
@event.reaction('bar')
def on_bar(self, *events):
pass
foo = Foo3()
assert isinstance(foo.on_bar, event.Reaction)
assert not isinstance(foo.on_foo, event.Reaction)
@run_in_both(Foo)
def test_component_fails():
"""
fail TypeError
fail AttributeError
fail RuntimeError
fail ValueError
"""
f = Foo()
loop._processing_action = True
try:
f._mutate(3, 3) # prop name must be str
except TypeError:
print('fail TypeError')
try:
f._mutate('invalidpropname', 3) # prop name invalid
except AttributeError:
print('fail AttributeError')
f.reaction('!foo', lambda: None) # Ok
try:
f.reaction(lambda: None) # Component.reaction cannot be implicit
except RuntimeError:
print('fail RuntimeError')
try:
f.reaction(42, lambda: None) # 42 is not a string
except ValueError:
print('fail ValueError')
@run_in_both(Component)
def test_registering_handlers():
"""
ok
"""
c = Component()
def handler1(*evts):
events.extend(evts)
def handler2(*evts):
events.extend(evts)
def handler3(*evts):
events.extend(evts)
handler1 = c.reaction('!foo', handler1)
handler2 = c.reaction('!foo', handler2)
handler3 = c.reaction('!foo', handler3)
handler1.dispose()
handler2.dispose()
handler3.dispose()
# Checks before we start
assert c.get_event_types() == ['foo']
assert c.get_event_handlers('foo') == []
# Test adding handlers
c._register_reaction('foo', handler1)
c._register_reaction('foo:a', handler2)
c._register_reaction('foo:z', handler3)
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Won't add twice
c._register_reaction('foo', handler1)
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Unregistering one handler
c.disconnect('foo', handler2)
assert c.get_event_handlers('foo') == [handler1, handler3]
# Reset
c._register_reaction('foo:a', handler2)
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Unregistering one handler + invalid label -> no unregister
c.disconnect('foo:xx', handler2)
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Reset
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Unregistering one handler by label
c.disconnect('foo:a')
assert c.get_event_handlers('foo') == [handler1, handler3]
# Reset
c._register_reaction('foo:a', handler2)
assert c.get_event_handlers('foo') == [handler2, handler1, handler3]
# Unregistering by type
c.disconnect('foo')
assert c.get_event_handlers('foo') == []
print('ok')
class CompCheckActive(event.Component):
def init(self, do_iter=False):
if do_iter:
loop.iter()
else:
ac = loop.get_active_components()
print('active', len(ac), ac[-2].a_prop)
@run_in_both(Foo, CompCheckActive)
def test_component_active1():
"""
0
active 2 7
active 2 42
0
? RuntimeError
? RuntimeError
0
"""
print(len(loop.get_active_components()))
f = Foo(a_prop=7)
with f:
CompCheckActive()
f.set_a_prop(42)
loop.iter()
with f:
CompCheckActive()
print(len(loop.get_active_components()))
loop.iter()
# Invoke error (once for newly created component, once for f)
with f:
CompCheckActive(True)
print(len(loop.get_active_components()))
@run_in_both(Foo, Bar)
def test_component_active2():
"""
None
? Foo
? Bar
? Foo
None
"""
f = Foo()
b = Bar()
print(loop.get_active_component())
with f:
print(loop.get_active_component().id)
with b:
print(loop.get_active_component().id)
print(loop.get_active_component().id)
print(loop.get_active_component())
@run_in_both()
def test_mutate_array1():
"""
[1, 2, 5, 6]
[1, 2, 3, 3, 4, 4, 5, 6]
[1, 2, 3, 4, 5, 6]
[1, 2, 3, 4, 50, 60]
[]
"""
a = []
mutate_array(a, dict(mutation='set', index=0, objects=[1,2,5,6]))
print(a)
mutate_array(a, dict(mutation='insert', index=2, objects=[3, 3, 4, 4]))
print(a)
mutate_array(a, dict(mutation='remove', index=3, objects=2))
print(a)
mutate_array(a, dict(mutation='replace', index=4, objects=[50, 60]))
print(a)
mutate_array(a, dict(mutation='set', index=0, objects=[]))
print(a)
@run_in_both(js=False)
def test_mutate_array2():
"""
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
[0, 1, 2, 0, 0, 0, 0, 7, 8, 9, 10, 11]
[0, 1, 2, 3, 4, 5, 0, 0, 8, 9, 0, 0]
"""
try:
import numpy as np
except ImportError:
skip('No numpy')
a = np.arange(12)
print(list(a.flat))
mutate_array(a, dict(mutation='replace', index=3, objects=np.zeros((4,))))
print(list(a.flat))
a = np.arange(12)
a.shape = 3, 4
mutate_array(a, dict(mutation='replace', index=(1, 2), objects=np.zeros((2,2))))
print(list(a.flat))
def test_produced_js():
js1 = create_js_component_class(Bar, 'Bar')
js2 = create_js_component_class(Bar2, 'Bar2')
assert '__properties__ = ["a_prop"]' in js1
assert '__properties__ = ["a_prop"]' in js2
assert js1.count('a_prop') >= 3
assert js2.count('a_prop') == 1
assert '__actions__ = ["a_action"]' in js1
assert '__actions__ = ["a_action"]' in js2
assert js1.count('a_action') >= 2
assert js2.count('a_action') == 1
run_tests_if_main()
|
from datetime import timedelta
import logging
from amcrest import AmcrestError
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_SOUND,
BinarySensorEntity,
)
from homeassistant.const import CONF_BINARY_SENSORS, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import Throttle
from .const import (
BINARY_SENSOR_SCAN_INTERVAL_SECS,
DATA_AMCREST,
DEVICES,
SENSOR_DEVICE_CLASS,
SENSOR_EVENT_CODE,
SENSOR_NAME,
SERVICE_EVENT,
SERVICE_UPDATE,
)
from .helpers import log_update_error, service_signal
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=BINARY_SENSOR_SCAN_INTERVAL_SECS)
_ONLINE_SCAN_INTERVAL = timedelta(seconds=60 - BINARY_SENSOR_SCAN_INTERVAL_SECS)
BINARY_SENSOR_AUDIO_DETECTED = "audio_detected"
BINARY_SENSOR_AUDIO_DETECTED_POLLED = "audio_detected_polled"
BINARY_SENSOR_MOTION_DETECTED = "motion_detected"
BINARY_SENSOR_MOTION_DETECTED_POLLED = "motion_detected_polled"
BINARY_SENSOR_ONLINE = "online"
BINARY_POLLED_SENSORS = [
BINARY_SENSOR_AUDIO_DETECTED_POLLED,
BINARY_SENSOR_MOTION_DETECTED_POLLED,
BINARY_SENSOR_ONLINE,
]
_AUDIO_DETECTED_PARAMS = ("Audio Detected", DEVICE_CLASS_SOUND, "AudioMutation")
_MOTION_DETECTED_PARAMS = ("Motion Detected", DEVICE_CLASS_MOTION, "VideoMotion")
BINARY_SENSORS = {
BINARY_SENSOR_AUDIO_DETECTED: _AUDIO_DETECTED_PARAMS,
BINARY_SENSOR_AUDIO_DETECTED_POLLED: _AUDIO_DETECTED_PARAMS,
BINARY_SENSOR_MOTION_DETECTED: _MOTION_DETECTED_PARAMS,
BINARY_SENSOR_MOTION_DETECTED_POLLED: _MOTION_DETECTED_PARAMS,
BINARY_SENSOR_ONLINE: ("Online", DEVICE_CLASS_CONNECTIVITY, None),
}
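# Normalize the parameter tuples above into dicts keyed by the shared
# SENSOR_* attribute names.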
BINARY_SENSORS = {
k: dict(zip((SENSOR_NAME, SENSOR_DEVICE_CLASS, SENSOR_EVENT_CODE), v))
for k, v in BINARY_SENSORS.items()
}
_EXCLUSIVE_OPTIONS = [
{BINARY_SENSOR_MOTION_DETECTED, BINARY_SENSOR_MOTION_DETECTED_POLLED},
]
_UPDATE_MSG = "Updating %s binary sensor"
def check_binary_sensors(value):
"""Validate binary sensor configurations."""
for exclusive_options in _EXCLUSIVE_OPTIONS:
if len(set(value) & exclusive_options) > 1:
raise vol.Invalid(
f"must contain at most one of {', '.join(exclusive_options)}."
)
return value
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a binary sensor for an Amcrest IP Camera."""
if discovery_info is None:
return
name = discovery_info[CONF_NAME]
device = hass.data[DATA_AMCREST][DEVICES][name]
async_add_entities(
[
AmcrestBinarySensor(name, device, sensor_type)
for sensor_type in discovery_info[CONF_BINARY_SENSORS]
],
True,
)
class AmcrestBinarySensor(BinarySensorEntity):
"""Binary sensor for Amcrest camera."""
def __init__(self, name, device, sensor_type):
"""Initialize entity."""
self._name = f"{name} {BINARY_SENSORS[sensor_type][SENSOR_NAME]}"
self._signal_name = name
self._api = device.api
self._sensor_type = sensor_type
self._state = None
self._device_class = BINARY_SENSORS[sensor_type][SENSOR_DEVICE_CLASS]
self._event_code = BINARY_SENSORS[sensor_type][SENSOR_EVENT_CODE]
self._unsub_dispatcher = []
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return self._sensor_type in BINARY_POLLED_SENSORS
@property
def name(self):
"""Return entity name."""
return self._name
@property
def is_on(self):
"""Return if entity is on."""
return self._state
@property
def device_class(self):
"""Return device class."""
return self._device_class
@property
def available(self):
"""Return True if entity is available."""
return self._sensor_type == BINARY_SENSOR_ONLINE or self._api.available
def update(self):
"""Update entity."""
if self._sensor_type == BINARY_SENSOR_ONLINE:
self._update_online()
else:
self._update_others()
@Throttle(_ONLINE_SCAN_INTERVAL)
def _update_online(self):
if not (self._api.available or self.is_on):
return
_LOGGER.debug(_UPDATE_MSG, self._name)
if self._api.available:
# Send a command to the camera to test if we can still communicate with it.
# Override of Http.command() in __init__.py will set self._api.available
# accordingly.
try:
self._api.current_time
except AmcrestError:
pass
self._state = self._api.available
def _update_others(self):
if not self.available:
return
_LOGGER.debug(_UPDATE_MSG, self._name)
try:
self._state = "channels" in self._api.event_channels_happened(
self._event_code
)
except AmcrestError as error:
log_update_error(_LOGGER, "update", self.name, "binary sensor", error)
async def async_on_demand_update(self):
"""Update state."""
if self._sensor_type == BINARY_SENSOR_ONLINE:
_LOGGER.debug(_UPDATE_MSG, self._name)
self._state = self._api.available
self.async_write_ha_state()
return
self.async_schedule_update_ha_state(True)
@callback
def async_event_received(self, start):
"""Update state from received event."""
_LOGGER.debug(_UPDATE_MSG, self._name)
self._state = start
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to signals."""
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(SERVICE_UPDATE, self._signal_name),
self.async_on_demand_update,
)
)
if self._event_code and self._sensor_type not in BINARY_POLLED_SENSORS:
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(SERVICE_EVENT, self._signal_name, self._event_code),
self.async_event_received,
)
)
async def async_will_remove_from_hass(self):
"""Disconnect from update signal."""
for unsub_dispatcher in self._unsub_dispatcher:
unsub_dispatcher()
|
import io
import logging
from pathlib import Path
from PIL import Image, ImageDraw, UnidentifiedImageError
import simplehound.core as hound
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingEntity,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY
from homeassistant.core import split_entity_id
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
from homeassistant.util.pil import draw_box
_LOGGER = logging.getLogger(__name__)
EVENT_PERSON_DETECTED = "sighthound.person_detected"
ATTR_BOUNDING_BOX = "bounding_box"
ATTR_PEOPLE = "people"
CONF_ACCOUNT_TYPE = "account_type"
CONF_SAVE_FILE_FOLDER = "save_file_folder"
CONF_SAVE_TIMESTAMPED_FILE = "save_timestamped_file"
DATETIME_FORMAT = "%Y-%m-%d_%H:%M:%S"
DEV = "dev"
PROD = "prod"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_ACCOUNT_TYPE, default=DEV): vol.In([DEV, PROD]),
vol.Optional(CONF_SAVE_FILE_FOLDER): cv.isdir,
vol.Optional(CONF_SAVE_TIMESTAMPED_FILE, default=False): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform."""
# Validate credentials by processing image.
api_key = config[CONF_API_KEY]
account_type = config[CONF_ACCOUNT_TYPE]
api = hound.cloud(api_key, account_type)
try:
api.detect(b"Test")
except hound.SimplehoundException as exc:
_LOGGER.error("Sighthound error %s setup aborted", exc)
return
save_file_folder = config.get(CONF_SAVE_FILE_FOLDER)
if save_file_folder:
save_file_folder = Path(save_file_folder)
entities = []
for camera in config[CONF_SOURCE]:
sighthound = SighthoundEntity(
api,
camera[CONF_ENTITY_ID],
camera.get(CONF_NAME),
save_file_folder,
config[CONF_SAVE_TIMESTAMPED_FILE],
)
entities.append(sighthound)
add_entities(entities)
class SighthoundEntity(ImageProcessingEntity):
"""Create a sighthound entity."""
def __init__(
self, api, camera_entity, name, save_file_folder, save_timestamped_file
):
"""Init."""
self._api = api
self._camera = camera_entity
if name:
self._name = name
else:
camera_name = split_entity_id(camera_entity)[1]
self._name = f"sighthound_{camera_name}"
self._state = None
self._last_detection = None
self._image_width = None
self._image_height = None
self._save_file_folder = save_file_folder
self._save_timestamped_file = save_timestamped_file
def process_image(self, image):
"""Process an image."""
detections = self._api.detect(image)
people = hound.get_people(detections)
self._state = len(people)
if self._state > 0:
self._last_detection = dt_util.now().strftime(DATETIME_FORMAT)
metadata = hound.get_metadata(detections)
self._image_width = metadata["image_width"]
self._image_height = metadata["image_height"]
for person in people:
self.fire_person_detected_event(person)
if self._save_file_folder and self._state > 0:
self.save_image(image, people, self._save_file_folder)
def fire_person_detected_event(self, person):
"""Send event with detected total_persons."""
self.hass.bus.fire(
EVENT_PERSON_DETECTED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_BOUNDING_BOX: hound.bbox_to_tf_style(
person["boundingBox"], self._image_width, self._image_height
),
},
)
def save_image(self, image, people, directory):
"""Save a timestamped image with bounding boxes around targets."""
try:
img = Image.open(io.BytesIO(bytearray(image))).convert("RGB")
except UnidentifiedImageError:
_LOGGER.warning("Sighthound unable to process image, bad data")
return
draw = ImageDraw.Draw(img)
for person in people:
box = hound.bbox_to_tf_style(
person["boundingBox"], self._image_width, self._image_height
)
draw_box(draw, box, self._image_width, self._image_height)
latest_save_path = directory / f"{self._name}_latest.jpg"
img.save(latest_save_path)
if self._save_timestamped_file:
timestamp_save_path = directory / f"{self._name}_{self._last_detection}.jpg"
img.save(timestamp_save_path)
_LOGGER.info("Sighthound saved file %s", timestamp_save_path)
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return ATTR_PEOPLE
@property
def device_state_attributes(self):
"""Return the attributes."""
if not self._last_detection:
return {}
return {"last_person": self._last_detection}
|
import pytest
import copy
from pathlib import Path
import shutil
import matchzoo as mz
from keras.backend import clear_session
@pytest.fixture(scope='module', params=[
mz.tasks.Ranking(loss=mz.losses.RankCrossEntropyLoss(num_neg=2)),
mz.tasks.Classification(num_classes=2),
])
def task(request):
return request.param
@pytest.fixture(scope='module')
def train_raw(task):
return mz.datasets.toy.load_data('train', task)[:5]
@pytest.fixture(scope='module', params=mz.models.list_available())
def model_class(request):
return request.param
@pytest.fixture(scope='module')
def embedding():
return mz.datasets.toy.load_embedding()
@pytest.fixture(scope='module')
def setup(task, model_class, train_raw, embedding):
clear_session() # prevent OOM during CI tests
return mz.auto.prepare(
task=task,
model_class=model_class,
data_pack=train_raw,
embedding=embedding
)
@pytest.fixture(scope='module')
def model(setup):
return setup[0]
@pytest.fixture(scope='module')
def preprocessor(setup):
return setup[1]
@pytest.fixture(scope='module')
def gen_builder(setup):
return setup[2]
@pytest.fixture(scope='module')
def embedding_matrix(setup):
return setup[3]
@pytest.fixture(scope='module')
def data(train_raw, preprocessor, gen_builder):
return gen_builder.build(preprocessor.transform(train_raw))[0]
@pytest.mark.slow
def test_model_fit_eval_predict(model, data):
x, y = data
batch_size = len(x['id_left'])
assert model.fit(x, y, batch_size=batch_size, verbose=0)
assert model.evaluate(x, y, batch_size=batch_size)
assert model.predict(x, batch_size=batch_size) is not None
@pytest.mark.cron
def test_save_load_model(model):
tmpdir = '.matchzoo_test_save_load_tmpdir'
if Path(tmpdir).exists():
shutil.rmtree(tmpdir)
try:
model.save(tmpdir)
assert mz.load_model(tmpdir)
with pytest.raises(FileExistsError):
model.save(tmpdir)
finally:
if Path(tmpdir).exists():
shutil.rmtree(tmpdir)
@pytest.mark.cron
def test_hyper_space(model):
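# Sample the model's hyper space twice and rebuild from the sampled
# params to verify every sampled configuration yields a buildable model.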
for _ in range(2):
new_params = copy.deepcopy(model.params)
sample = mz.hyper_spaces.sample(new_params.hyper_space)
for key, value in sample.items():
new_params[key] = value
new_model = new_params['model_class'](params=new_params)
new_model.build()
new_model.compile()
|
from .coherencemodel import CoherenceModel # noqa:F401
from .hdpmodel import HdpModel # noqa:F401
from .ldamodel import LdaModel # noqa:F401
from .lsimodel import LsiModel # noqa:F401
from .tfidfmodel import TfidfModel # noqa:F401
from .rpmodel import RpModel # noqa:F401
from .logentropy_model import LogEntropyModel # noqa:F401
from .word2vec import Word2Vec, FAST_VERSION # noqa:F401
from .doc2vec import Doc2Vec # noqa:F401
from .keyedvectors import KeyedVectors # noqa:F401
from .ldamulticore import LdaMulticore # noqa:F401
from .phrases import Phrases # noqa:F401
from .normmodel import NormModel # noqa:F401
from .atmodel import AuthorTopicModel # noqa:F401
from .ldaseqmodel import LdaSeqModel # noqa:F401
from .fasttext import FastText # noqa:F401
from .translation_matrix import TranslationMatrix, BackMappingTranslationMatrix # noqa:F401
from . import wrappers # noqa:F401
from gensim import interfaces, utils
class VocabTransform(interfaces.TransformationABC):
"""
Remap feature ids to new values.
Given a mapping between old ids and new ids (old ids missing from the mapping
are discarded), this will wrap a corpus so that iterating over
`VocabTransform[corpus]` returns the same vectors but with the new ids.
Old features that have no counterpart in the new ids are discarded. This
can be used to filter vocabulary of a corpus "online":
.. sourcecode:: pycon
>>> old2new = {oldid: newid for newid, oldid in enumerate(ids_you_want_to_keep)}
>>> vt = VocabTransform(old2new)
>>> for vec_with_new_ids in vt[corpus_with_old_ids]:
...     pass
"""
def __init__(self, old2new, id2token=None):
self.old2new = old2new
self.id2token = id2token
def __getitem__(self, bow):
"""
Return representation with the ids transformed.
"""
# if the input vector is in fact a corpus, return a transformed corpus as a result
is_corpus, bow = utils.is_corpus(bow)
if is_corpus:
return self._apply(bow)
return sorted((self.old2new[oldid], weight) for oldid, weight in bow if oldid in self.old2new)
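# A minimal usage sketch (illustrative only -- `old2new` and the toy corpus
# below are made up for demonstration, not part of gensim's API):
if __name__ == "__main__":
    old2new = {0: 0, 2: 1}  # keep old ids 0 and 2, discard old id 1
    vt = VocabTransform(old2new)
    toy_corpus = [[(0, 1.0), (1, 2.0), (2, 3.0)]]
    for vec in vt[toy_corpus]:
        print(vec)  # -> [(0, 1.0), (1, 3.0)]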
|
import logging
import homeassistant.core as ha
from .const import API_DIRECTIVE, API_HEADER, EVENT_ALEXA_SMART_HOME
from .errors import AlexaBridgeUnreachableError, AlexaError
from .handlers import HANDLERS
from .messages import AlexaDirective
_LOGGER = logging.getLogger(__name__)
async def async_handle_message(hass, config, request, context=None, enabled=True):
"""Handle incoming API messages.
If enabled is False, the response to all messages will be a
BRIDGE_UNREACHABLE error. This can be used if the API has been disabled in
configuration.
"""
assert request[API_DIRECTIVE][API_HEADER]["payloadVersion"] == "3"
if context is None:
context = ha.Context()
directive = AlexaDirective(request)
try:
if not enabled:
raise AlexaBridgeUnreachableError(
"Alexa API not enabled in Home Assistant configuration"
)
if directive.has_endpoint:
directive.load_entity(hass, config)
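# HANDLERS maps (namespace, name) pairs to coroutine handlers; unknown
# directives fall through to a generic error response below.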
funct_ref = HANDLERS.get((directive.namespace, directive.name))
if funct_ref:
response = await funct_ref(hass, config, directive, context)
if directive.has_endpoint:
response.merge_context_properties(directive.endpoint)
else:
_LOGGER.warning(
"Unsupported API request %s/%s", directive.namespace, directive.name
)
response = directive.error()
except AlexaError as err:
response = directive.error(
error_type=err.error_type, error_message=err.error_message
)
request_info = {"namespace": directive.namespace, "name": directive.name}
if directive.has_endpoint:
request_info["entity_id"] = directive.entity_id
hass.bus.async_fire(
EVENT_ALEXA_SMART_HOME,
{
"request": request_info,
"response": {"namespace": response.namespace, "name": response.name},
},
context=context,
)
return response.serialize()
|
from pykeyboard import PyKeyboard # pylint: disable=import-error
import voluptuous as vol
from homeassistant.const import (
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_VOLUME_DOWN,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_UP,
)
DOMAIN = "keyboard"
TAP_KEY_SCHEMA = vol.Schema({})
def setup(hass, config):
"""Listen for keyboard events."""
keyboard = PyKeyboard()
keyboard.special_key_assignment()
hass.services.register(
DOMAIN,
SERVICE_VOLUME_UP,
lambda service: keyboard.tap_key(keyboard.volume_up_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_VOLUME_DOWN,
lambda service: keyboard.tap_key(keyboard.volume_down_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_VOLUME_MUTE,
lambda service: keyboard.tap_key(keyboard.volume_mute_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_PLAY_PAUSE,
lambda service: keyboard.tap_key(keyboard.media_play_pause_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_NEXT_TRACK,
lambda service: keyboard.tap_key(keyboard.media_next_track_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_PREVIOUS_TRACK,
lambda service: keyboard.tap_key(keyboard.media_prev_track_key),
schema=TAP_KEY_SCHEMA,
)
return True
|
from datetime import timedelta
import logging
from gitlab import Gitlab, GitlabAuthenticationError, GitlabGetError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_NAME,
CONF_SCAN_INTERVAL,
CONF_TOKEN,
CONF_URL,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_BUILD_BRANCH = "build branch"
ATTR_BUILD_COMMIT_DATE = "commit date"
ATTR_BUILD_COMMIT_ID = "commit id"
ATTR_BUILD_DURATION = "build_duration"
ATTR_BUILD_FINISHED = "build_finished"
ATTR_BUILD_ID = "build id"
ATTR_BUILD_STARTED = "build_started"
ATTR_BUILD_STATUS = "build_status"
ATTRIBUTION = "Information provided by https://gitlab.com/"
CONF_GITLAB_ID = "gitlab_id"
DEFAULT_NAME = "GitLab CI Status"
DEFAULT_URL = "https://gitlab.com"
ICON_HAPPY = "mdi:emoticon-happy"
ICON_OTHER = "mdi:git"
ICON_SAD = "mdi:emoticon-sad"
SCAN_INTERVAL = timedelta(seconds=300)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_GITLAB_ID): cv.string,
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_URL, default=DEFAULT_URL): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the GitLab sensor platform."""
_name = config.get(CONF_NAME)
_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
_url = config.get(CONF_URL)
_gitlab_data = GitLabData(
priv_token=config[CONF_TOKEN],
gitlab_id=config[CONF_GITLAB_ID],
interval=_interval,
url=_url,
)
add_entities([GitLabSensor(_gitlab_data, _name)], True)
class GitLabSensor(Entity):
"""Representation of a GitLab sensor."""
def __init__(self, gitlab_data, name):
"""Initialize the GitLab sensor."""
self._available = False
self._state = None
self._started_at = None
self._finished_at = None
self._duration = None
self._commit_id = None
self._commit_date = None
self._build_id = None
self._branch = None
self._gitlab_data = gitlab_data
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_BUILD_STATUS: self._state,
ATTR_BUILD_STARTED: self._started_at,
ATTR_BUILD_FINISHED: self._finished_at,
ATTR_BUILD_DURATION: self._duration,
ATTR_BUILD_COMMIT_ID: self._commit_id,
ATTR_BUILD_COMMIT_DATE: self._commit_date,
ATTR_BUILD_ID: self._build_id,
ATTR_BUILD_BRANCH: self._branch,
}
@property
def icon(self):
"""Return the icon to use in the frontend."""
if self._state == "success":
return ICON_HAPPY
if self._state == "failed":
return ICON_SAD
return ICON_OTHER
def update(self):
"""Collect updated data from GitLab API."""
self._gitlab_data.update()
self._state = self._gitlab_data.status
self._started_at = self._gitlab_data.started_at
self._finished_at = self._gitlab_data.finished_at
self._duration = self._gitlab_data.duration
self._commit_id = self._gitlab_data.commit_id
self._commit_date = self._gitlab_data.commit_date
self._build_id = self._gitlab_data.build_id
self._branch = self._gitlab_data.branch
self._available = self._gitlab_data.available
class GitLabData:
"""GitLab Data object."""
def __init__(self, gitlab_id, priv_token, interval, url):
"""Fetch data from GitLab API for most recent CI job."""
self._gitlab_id = gitlab_id
self._gitlab = Gitlab(url, private_token=priv_token, per_page=1)
self._gitlab.auth()
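# Throttle wraps _update so repeated update() calls within `interval`
# become no-ops, keeping API polling rate-limited.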
self.update = Throttle(interval)(self._update)
self.available = False
self.status = None
self.started_at = None
self.finished_at = None
self.duration = None
self.commit_id = None
self.commit_date = None
self.build_id = None
self.branch = None
def _update(self):
try:
_projects = self._gitlab.projects.get(self._gitlab_id)
_last_pipeline = _projects.pipelines.list(page=1)[0]
_last_job = _last_pipeline.jobs.list(page=1)[0]
self.status = _last_pipeline.attributes.get("status")
self.started_at = _last_job.attributes.get("started_at")
self.finished_at = _last_job.attributes.get("finished_at")
self.duration = _last_job.attributes.get("duration")
_commit = _last_job.attributes.get("commit")
self.commit_id = _commit.get("id")
self.commit_date = _commit.get("committed_date")
self.build_id = _last_job.attributes.get("id")
self.branch = _last_job.attributes.get("ref")
self.available = True
except GitlabAuthenticationError as erra:
_LOGGER.error("Authentication Error: %s", erra)
self.available = False
except GitlabGetError as errg:
_LOGGER.error("Project Not Found: %s", errg)
self.available = False
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import MinecraftServer, MinecraftServerEntity
from .const import DOMAIN, ICON_STATUS, NAME_STATUS
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Minecraft Server binary sensor platform."""
server = hass.data[DOMAIN][config_entry.unique_id]
# Create entities list.
entities = [MinecraftServerStatusBinarySensor(server)]
# Add binary sensor entities.
async_add_entities(entities, True)
class MinecraftServerStatusBinarySensor(MinecraftServerEntity, BinarySensorEntity):
"""Representation of a Minecraft Server status binary sensor."""
def __init__(self, server: MinecraftServer) -> None:
"""Initialize status binary sensor."""
super().__init__(
server=server,
type_name=NAME_STATUS,
icon=ICON_STATUS,
device_class=DEVICE_CLASS_CONNECTIVITY,
)
self._is_on = False
@property
def is_on(self) -> bool:
"""Return binary state."""
return self._is_on
async def async_update(self) -> None:
"""Update status."""
self._is_on = self._server.online
|
import os
import sys
import glob
import shutil
import errno
import logging
from contextlib import contextmanager
from plumbum.lib import _setdoc, IS_WIN32, six
from plumbum.path.base import Path, FSUser
from plumbum.path.remote import RemotePath
try:
from pwd import getpwuid, getpwnam
from grp import getgrgid, getgrnam
except ImportError:
def getpwuid(x): # type: ignore
return (None, )
def getgrgid(x): # type: ignore
return (None, )
def getpwnam(x): # type: ignore
raise OSError("`getpwnam` not supported")
def getgrnam(x): # type: ignore
raise OSError("`getgrnam` not supported")
try: # Py3
import urllib.parse as urlparse
import urllib.request as urllib
except ImportError:
import urlparse # type: ignore
import urllib # type: ignore
logger = logging.getLogger("plumbum.local")
#===================================================================================================
# Local Paths
#===================================================================================================
class LocalPath(Path):
"""The class implementing local-machine paths"""
CASE_SENSITIVE = not IS_WIN32
def __new__(cls, *parts):
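# Construction is idempotent: a plain LocalPath passes through unchanged,
# a LocalWorkdir is rebuilt as a regular LocalPath, and RemotePath parts
# are rejected below.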
if len(parts) == 1 and \
isinstance(parts[0], cls) and \
not isinstance(parts[0], LocalWorkdir):
return parts[0]
if not parts:
raise TypeError("At least one path part is required (none given)")
if any(isinstance(path, RemotePath) for path in parts):
raise TypeError(
"LocalPath cannot be constructed from %r" % (parts, ))
self = super(LocalPath, cls).__new__(
cls, os.path.normpath(os.path.join(*(str(p) for p in parts))))
return self
@property
def _path(self):
return str(self)
def _get_info(self):
return self._path
def _form(self, *parts):
return LocalPath(*parts)
@property # type: ignore
@_setdoc(Path)
def name(self):
return os.path.basename(str(self))
@property # type: ignore
@_setdoc(Path)
def dirname(self):
return LocalPath(os.path.dirname(str(self)))
@property # type: ignore
@_setdoc(Path)
def suffix(self):
return os.path.splitext(str(self))[1]
@property
def suffixes(self):
exts = []
base = str(self)
while True:
base, ext = os.path.splitext(base)
if ext:
exts.append(ext)
else:
return list(reversed(exts))
@property # type: ignore
@_setdoc(Path)
def uid(self):
uid = self.stat().st_uid
name = getpwuid(uid)[0]
return FSUser(uid, name)
@property # type: ignore
@_setdoc(Path)
def gid(self):
gid = self.stat().st_gid
name = getgrgid(gid)[0]
return FSUser(gid, name)
@_setdoc(Path)
def join(self, *others):
return LocalPath(self, *others)
@_setdoc(Path)
def list(self):
return [self / fn for fn in os.listdir(str(self))]
@_setdoc(Path)
def iterdir(self):
try:
return (self / fn.name for fn in os.scandir(str(self)))
except AttributeError:
return (self / fn for fn in os.listdir(str(self)))
@_setdoc(Path)
def is_dir(self):
return os.path.isdir(str(self))
@_setdoc(Path)
def is_file(self):
return os.path.isfile(str(self))
@_setdoc(Path)
def is_symlink(self):
return os.path.islink(str(self))
@_setdoc(Path)
def exists(self):
return os.path.exists(str(self))
@_setdoc(Path)
def stat(self):
return os.stat(str(self))
@_setdoc(Path)
def with_name(self, name):
return LocalPath(self.dirname) / name
@property # type: ignore
@_setdoc(Path)
def stem(self):
return self.name.rsplit(os.path.extsep, 1)[0]  # maxsplit=1: strip only the last suffix
@_setdoc(Path)
def with_suffix(self, suffix, depth=1):
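# depth=1 replaces only the last suffix, depth=None replaces all of them:
# e.g. "a.tar.gz" -> with_suffix(".zip") gives "a.tar.zip",
# while with_suffix(".zip", None) gives "a.zip".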
if (suffix and not suffix.startswith(os.path.extsep)
or suffix == os.path.extsep):
raise ValueError("Invalid suffix %r" % (suffix))
name = self.name
depth = len(self.suffixes) if depth is None else min(
depth, len(self.suffixes))
for i in range(depth):
name, ext = os.path.splitext(name)
return LocalPath(self.dirname) / (name + suffix)
@_setdoc(Path)
def glob(self, pattern):
fn = lambda pat: [LocalPath(m) for m in glob.glob(str(self / pat))]
return self._glob(pattern, fn)
@_setdoc(Path)
def delete(self):
if not self.exists():
return
if self.is_dir():
shutil.rmtree(str(self))
else:
try:
os.remove(str(self))
except OSError: # pragma: no cover
# file might already have been removed (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.ENOENT:
raise
@_setdoc(Path)
def move(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot move local path %s to %r" % (self, dst))
shutil.move(str(self), str(dst))
return LocalPath(dst)
@_setdoc(Path)
def copy(self, dst, override=None):
if isinstance(dst, RemotePath):
raise TypeError("Cannot copy local path %s to %r" % (self, dst))
dst = LocalPath(dst)
if override is False and dst.exists():
raise TypeError("File exists and override was not specified")
if override:
dst.delete()
if self.is_dir():
shutil.copytree(str(self), str(dst))
else:
dst_dir = LocalPath(dst).dirname
if not dst_dir.exists():
dst_dir.mkdir()
shutil.copy2(str(self), str(dst))
return dst
@_setdoc(Path)
def mkdir(self, mode=0o777, parents=True, exist_ok=True):
if not self.exists() or not exist_ok:
try:
if parents:
os.makedirs(str(self), mode)
else:
os.mkdir(str(self), mode)
except OSError: # pragma: no cover
# directory might already exist (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.EEXIST or not exist_ok:
raise
@_setdoc(Path)
def open(self, mode="r"):
return open(str(self), mode)
@_setdoc(Path)
def read(self, encoding=None, mode='r'):
if encoding and 'b' not in mode:
mode = mode + 'b'
with self.open(mode) as f:
data = f.read()
if encoding:
data = data.decode(encoding)
return data
@_setdoc(Path)
def write(self, data, encoding=None, mode=None):
if encoding:
data = data.encode(encoding)
if mode is None:
if isinstance(data, six.unicode_type):
mode = 'w'
else:
mode = 'wb'
with self.open(mode) as f:
f.write(data)
@_setdoc(Path)
def touch(self):
with open(str(self), 'a'):
os.utime(str(self), None)
@_setdoc(Path)
def chown(self, owner=None, group=None, recursive=None):
if not hasattr(os, "chown"):
raise OSError("os.chown() not supported")
uid = self.uid if owner is None else (owner if isinstance(owner, int)
else getpwnam(owner)[2])
gid = self.gid if group is None else (group if isinstance(group, int)
else getgrnam(group)[2])
os.chown(str(self), uid, gid)
if recursive or (recursive is None and self.is_dir()):
for subpath in self.walk():
os.chown(str(subpath), uid, gid)
@_setdoc(Path)
def chmod(self, mode):
if not hasattr(os, "chmod"):
raise OSError("os.chmod() not supported")
os.chmod(str(self), mode)
@_setdoc(Path)
def access(self, mode=0):
return os.access(str(self), self._access_mode_to_flags(mode))
@_setdoc(Path)
def link(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot create a hardlink from local path %s to %r"
% (self, dst))
if hasattr(os, "link"):
os.link(str(self), str(dst))
else:
from plumbum.machines.local import local
# windows: use mklink
if self.is_dir():
local["cmd"]("/C", "mklink", "/D", "/H", str(dst), str(self))
else:
local["cmd"]("/C", "mklink", "/H", str(dst), str(self))
@_setdoc(Path)
def symlink(self, dst):
if isinstance(dst, RemotePath):
raise TypeError("Cannot create a symlink from local path %s to %r"
% (self, dst))
if hasattr(os, "symlink"):
os.symlink(str(self), str(dst))
else:
from plumbum.machines.local import local
# windows: use mklink
if self.is_dir():
local["cmd"]("/C", "mklink", "/D", str(dst), str(self))
else:
local["cmd"]("/C", "mklink", str(dst), str(self))
@_setdoc(Path)
def unlink(self):
try:
if hasattr(os, "symlink") or not self.is_dir():
os.unlink(str(self))
else:
# windows: use rmdir for directories and directory symlinks
os.rmdir(str(self))
except OSError: # pragma: no cover
# file might already have been removed (a race with other threads/processes)
_, ex, _ = sys.exc_info()
if ex.errno != errno.ENOENT:
raise
@_setdoc(Path)
def as_uri(self, scheme='file'):
return urlparse.urljoin(
str(scheme) + ':', urllib.pathname2url(str(self)))
@property # type: ignore
@_setdoc(Path)
def drive(self):
return os.path.splitdrive(str(self))[0]
@property # type: ignore
@_setdoc(Path)
def root(self):
return os.path.sep
class LocalWorkdir(LocalPath):
"""Working directory manipulator"""
def __hash__(self):
raise TypeError("unhashable type")
def __new__(cls):
return super(LocalWorkdir, cls).__new__(cls, os.getcwd())
def chdir(self, newdir):
"""Changes the current working directory to the given one
:param newdir: The destination directory (a string or a ``LocalPath``)
"""
if isinstance(newdir, RemotePath):
raise TypeError("newdir cannot be %r" % (newdir, ))
logger.debug("Chdir to %s", newdir)
os.chdir(str(newdir))
return self.__class__()
def getpath(self):
"""Returns the current working directory as a ``LocalPath`` object"""
return LocalPath(self._path)
@contextmanager
def __call__(self, newdir):
"""A context manager used to ``chdir`` into a directory and then ``chdir`` back to
the previous location; much like ``pushd``/``popd``.
:param newdir: The destination directory (a string or a ``LocalPath``)
"""
prev = self._path
newdir = self.chdir(newdir)
try:
yield newdir
finally:
self.chdir(prev)
|
import asyncio
import pytest
from jinja2 import DictLoader
from jinja2 import Environment
from jinja2 import Template
from jinja2.asyncsupport import auto_aiter
from jinja2.exceptions import TemplateNotFound
from jinja2.exceptions import TemplatesNotFound
from jinja2.exceptions import UndefinedError
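# Drive a coroutine to completion on the default event loop so the
# synchronous tests below can exercise async rendering.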
def run(coro):
loop = asyncio.get_event_loop()
return loop.run_until_complete(coro)
def test_basic_async():
t = Template(
"{% for item in [1, 2, 3] %}[{{ item }}]{% endfor %}", enable_async=True
)
async def func():
return await t.render_async()
rv = run(func())
assert rv == "[1][2][3]"
def test_await_on_calls():
t = Template("{{ async_func() + normal_func() }}", enable_async=True)
async def async_func():
return 42
def normal_func():
return 23
async def func():
return await t.render_async(async_func=async_func, normal_func=normal_func)
rv = run(func())
assert rv == "65"
def test_await_on_calls_normal_render():
t = Template("{{ async_func() + normal_func() }}", enable_async=True)
async def async_func():
return 42
def normal_func():
return 23
rv = t.render(async_func=async_func, normal_func=normal_func)
assert rv == "65"
def test_await_and_macros():
t = Template(
"{% macro foo(x) %}[{{ x }}][{{ async_func() }}]{% endmacro %}{{ foo(42) }}",
enable_async=True,
)
async def async_func():
return 42
async def func():
return await t.render_async(async_func=async_func)
rv = run(func())
assert rv == "[42][42]"
def test_async_blocks():
t = Template(
"{% block foo %}<Test>{% endblock %}{{ self.foo() }}",
enable_async=True,
autoescape=True,
)
async def func():
return await t.render_async()
rv = run(func())
assert rv == "<Test><Test>"
def test_async_generate():
t = Template("{% for x in [1, 2, 3] %}{{ x }}{% endfor %}", enable_async=True)
rv = list(t.generate())
assert rv == ["1", "2", "3"]
def test_async_iteration_in_templates():
t = Template("{% for x in rng %}{{ x }}{% endfor %}", enable_async=True)
async def async_iterator():
for item in [1, 2, 3]:
yield item
rv = list(t.generate(rng=async_iterator()))
assert rv == ["1", "2", "3"]
def test_async_iteration_in_templates_extended():
t = Template(
"{% for x in rng %}{{ loop.index0 }}/{{ x }}{% endfor %}", enable_async=True
)
stream = t.generate(rng=auto_aiter(range(1, 4)))
assert next(stream) == "0"
assert "".join(stream) == "/11/22/3"
@pytest.fixture
def test_env_async():
env = Environment(
loader=DictLoader(
dict(
module="{% macro test() %}[{{ foo }}|{{ bar }}]{% endmacro %}",
header="[{{ foo }}|{{ 23 }}]",
o_printer="({{ o }})",
)
),
enable_async=True,
)
env.globals["bar"] = 23
return env
class TestAsyncImports:
def test_context_imports(self, test_env_async):
t = test_env_async.from_string('{% import "module" as m %}{{ m.test() }}')
assert t.render(foo=42) == "[|23]"
t = test_env_async.from_string(
'{% import "module" as m without context %}{{ m.test() }}'
)
assert t.render(foo=42) == "[|23]"
t = test_env_async.from_string(
'{% import "module" as m with context %}{{ m.test() }}'
)
assert t.render(foo=42) == "[42|23]"
t = test_env_async.from_string('{% from "module" import test %}{{ test() }}')
assert t.render(foo=42) == "[|23]"
t = test_env_async.from_string(
'{% from "module" import test without context %}{{ test() }}'
)
assert t.render(foo=42) == "[|23]"
t = test_env_async.from_string(
'{% from "module" import test with context %}{{ test() }}'
)
assert t.render(foo=42) == "[42|23]"
def test_trailing_comma(self, test_env_async):
test_env_async.from_string('{% from "foo" import bar, baz with context %}')
test_env_async.from_string('{% from "foo" import bar, baz, with context %}')
test_env_async.from_string('{% from "foo" import bar, with context %}')
test_env_async.from_string('{% from "foo" import bar, with, context %}')
test_env_async.from_string('{% from "foo" import bar, with with context %}')
def test_exports(self, test_env_async):
m = run(
test_env_async.from_string(
"""
{% macro toplevel() %}...{% endmacro %}
{% macro __private() %}...{% endmacro %}
{% set variable = 42 %}
{% for item in [1] %}
{% macro notthere() %}{% endmacro %}
{% endfor %}
"""
)._get_default_module_async()
)
assert run(m.toplevel()) == "..."
assert not hasattr(m, "__missing")
assert m.variable == 42
assert not hasattr(m, "notthere")
class TestAsyncIncludes:
def test_context_include(self, test_env_async):
t = test_env_async.from_string('{% include "header" %}')
assert t.render(foo=42) == "[42|23]"
t = test_env_async.from_string('{% include "header" with context %}')
assert t.render(foo=42) == "[42|23]"
t = test_env_async.from_string('{% include "header" without context %}')
assert t.render(foo=42) == "[|23]"
def test_choice_includes(self, test_env_async):
t = test_env_async.from_string('{% include ["missing", "header"] %}')
assert t.render(foo=42) == "[42|23]"
t = test_env_async.from_string(
'{% include ["missing", "missing2"] ignore missing %}'
)
assert t.render(foo=42) == ""
t = test_env_async.from_string('{% include ["missing", "missing2"] %}')
pytest.raises(TemplateNotFound, t.render)
with pytest.raises(TemplatesNotFound) as e:
t.render()
assert e.value.templates == ["missing", "missing2"]
assert e.value.name == "missing2"
def test_includes(t, **ctx):
ctx["foo"] = 42
assert t.render(ctx) == "[42|23]"
t = test_env_async.from_string('{% include ["missing", "header"] %}')
test_includes(t)
t = test_env_async.from_string("{% include x %}")
test_includes(t, x=["missing", "header"])
t = test_env_async.from_string('{% include [x, "header"] %}')
test_includes(t, x="missing")
t = test_env_async.from_string("{% include x %}")
test_includes(t, x="header")
t = test_env_async.from_string("{% include x %}")
test_includes(t, x="header")
t = test_env_async.from_string("{% include [x] %}")
test_includes(t, x="header")
def test_include_ignoring_missing(self, test_env_async):
t = test_env_async.from_string('{% include "missing" %}')
pytest.raises(TemplateNotFound, t.render)
for extra in "", "with context", "without context":
t = test_env_async.from_string(
'{% include "missing" ignore missing ' + extra + " %}"
)
assert t.render() == ""
def test_context_include_with_overrides(self, test_env_async):
env = Environment(
loader=DictLoader(
dict(
main="{% for item in [1, 2, 3] %}{% include 'item' %}{% endfor %}",
item="{{ item }}",
)
)
)
assert env.get_template("main").render() == "123"
def test_unoptimized_scopes(self, test_env_async):
t = test_env_async.from_string(
"""
{% macro outer(o) %}
{% macro inner() %}
{% include "o_printer" %}
{% endmacro %}
{{ inner() }}
{% endmacro %}
{{ outer("FOO") }}
"""
)
assert t.render().strip() == "(FOO)"
def test_unoptimized_scopes_autoescape(self):
env = Environment(
loader=DictLoader(dict(o_printer="({{ o }})",)),
autoescape=True,
enable_async=True,
)
t = env.from_string(
"""
{% macro outer(o) %}
{% macro inner() %}
{% include "o_printer" %}
{% endmacro %}
{{ inner() }}
{% endmacro %}
{{ outer("FOO") }}
"""
)
assert t.render().strip() == "(FOO)"
class TestAsyncForLoop:
def test_simple(self, test_env_async):
tmpl = test_env_async.from_string("{% for item in seq %}{{ item }}{% endfor %}")
assert tmpl.render(seq=list(range(10))) == "0123456789"
def test_else(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for item in seq %}XXX{% else %}...{% endfor %}"
)
assert tmpl.render() == "..."
def test_empty_blocks(self, test_env_async):
tmpl = test_env_async.from_string(
"<{% for item in seq %}{% else %}{% endfor %}>"
)
assert tmpl.render() == "<>"
@pytest.mark.parametrize(
"transform", [lambda x: x, iter, reversed, lambda x: (i for i in x), auto_aiter]
)
def test_context_vars(self, test_env_async, transform):
t = test_env_async.from_string(
"{% for item in seq %}{{ loop.index }}|{{ loop.index0 }}"
"|{{ loop.revindex }}|{{ loop.revindex0 }}|{{ loop.first }}"
"|{{ loop.last }}|{{ loop.length }}\n{% endfor %}"
)
out = t.render(seq=transform([42, 24]))
assert out == "1|0|2|1|True|False|2\n2|1|1|0|False|True|2\n"
def test_cycling(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in seq %}{{
loop.cycle('<1>', '<2>') }}{% endfor %}{%
for item in seq %}{{ loop.cycle(*through) }}{% endfor %}"""
)
output = tmpl.render(seq=list(range(4)), through=("<1>", "<2>"))
assert output == "<1><2>" * 4
def test_lookaround(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in seq -%}
{{ loop.previtem|default('x') }}-{{ item }}-{{
loop.nextitem|default('x') }}|
{%- endfor %}"""
)
output = tmpl.render(seq=list(range(4)))
assert output == "x-0-1|0-1-2|1-2-3|2-3-x|"
def test_changed(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in seq -%}
{{ loop.changed(item) }},
{%- endfor %}"""
)
output = tmpl.render(seq=[None, None, 1, 2, 2, 3, 4, 4, 4])
assert output == "True,False,True,True,False,True,True,False,False,"
def test_scope(self, test_env_async):
tmpl = test_env_async.from_string("{% for item in seq %}{% endfor %}{{ item }}")
output = tmpl.render(seq=list(range(10)))
assert not output
def test_varlen(self, test_env_async):
def inner():
yield from range(5)
tmpl = test_env_async.from_string(
"{% for item in iter %}{{ item }}{% endfor %}"
)
output = tmpl.render(iter=inner())
assert output == "01234"
def test_noniter(self, test_env_async):
tmpl = test_env_async.from_string("{% for item in none %}...{% endfor %}")
pytest.raises(TypeError, tmpl.render)
def test_recursive(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in seq recursive -%}
[{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
{%- endfor %}"""
)
assert (
tmpl.render(
seq=[
dict(a=1, b=[dict(a=1), dict(a=2)]),
dict(a=2, b=[dict(a=1), dict(a=2)]),
dict(a=3, b=[dict(a="a")]),
]
)
== "[1<[1][2]>][2<[1][2]>][3<[a]>]"
)
def test_recursive_lookaround(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in seq recursive -%}
[{{ loop.previtem.a if loop.previtem is defined else 'x' }}.{{
item.a }}.{{ loop.nextitem.a if loop.nextitem is defined else 'x'
}}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
{%- endfor %}"""
)
assert (
tmpl.render(
seq=[
dict(a=1, b=[dict(a=1), dict(a=2)]),
dict(a=2, b=[dict(a=1), dict(a=2)]),
dict(a=3, b=[dict(a="a")]),
]
)
== "[x.1.2<[x.1.2][1.2.x]>][1.2.3<[x.1.2][1.2.x]>][2.3.x<[x.a.x]>]"
)
def test_recursive_depth0(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for item in seq recursive %}[{{ loop.depth0 }}:{{ item.a }}"
"{% if item.b %}<{{ loop(item.b) }}>{% endif %}]{% endfor %}"
)
assert (
tmpl.render(
seq=[
dict(a=1, b=[dict(a=1), dict(a=2)]),
dict(a=2, b=[dict(a=1), dict(a=2)]),
dict(a=3, b=[dict(a="a")]),
]
)
== "[0:1<[1:1][1:2]>][0:2<[1:1][1:2]>][0:3<[1:a]>]"
)
def test_recursive_depth(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for item in seq recursive %}[{{ loop.depth }}:{{ item.a }}"
"{% if item.b %}<{{ loop(item.b) }}>{% endif %}]{% endfor %}"
)
assert (
tmpl.render(
seq=[
dict(a=1, b=[dict(a=1), dict(a=2)]),
dict(a=2, b=[dict(a=1), dict(a=2)]),
dict(a=3, b=[dict(a="a")]),
]
)
== "[1:1<[2:1][2:2]>][1:2<[2:1][2:2]>][1:3<[2:a]>]"
)
def test_looploop(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for row in table %}
{%- set rowloop = loop -%}
{% for cell in row -%}
[{{ rowloop.index }}|{{ loop.index }}]
{%- endfor %}
{%- endfor %}"""
)
assert tmpl.render(table=["ab", "cd"]) == "[1|1][1|2][2|1][2|2]"
def test_reversed_bug(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for i in items %}{{ i }}"
"{% if not loop.last %}"
",{% endif %}{% endfor %}"
)
assert tmpl.render(items=reversed([3, 2, 1])) == "1,2,3"
def test_loop_errors(self, test_env_async):
tmpl = test_env_async.from_string(
"""{% for item in [1] if loop.index
== 0 %}...{% endfor %}"""
)
pytest.raises(UndefinedError, tmpl.render)
tmpl = test_env_async.from_string(
"""{% for item in [] %}...{% else
%}{{ loop }}{% endfor %}"""
)
assert tmpl.render() == ""
def test_loop_filter(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for item in range(10) if item is even %}[{{ item }}]{% endfor %}"
)
assert tmpl.render() == "[0][2][4][6][8]"
tmpl = test_env_async.from_string(
"""
{%- for item in range(10) if item is even %}[{{
loop.index }}:{{ item }}]{% endfor %}"""
)
assert tmpl.render() == "[1:0][2:2][3:4][4:6][5:8]"
def test_scoped_special_var(self, test_env_async):
t = test_env_async.from_string(
"{% for s in seq %}[{{ loop.first }}{% for c in s %}"
"|{{ loop.first }}{% endfor %}]{% endfor %}"
)
assert t.render(seq=("ab", "cd")) == "[True|True|False][False|True|False]"
def test_scoped_loop_var(self, test_env_async):
t = test_env_async.from_string(
"{% for x in seq %}{{ loop.first }}"
"{% for y in seq %}{% endfor %}{% endfor %}"
)
assert t.render(seq="ab") == "TrueFalse"
t = test_env_async.from_string(
"{% for x in seq %}{% for y in seq %}"
"{{ loop.first }}{% endfor %}{% endfor %}"
)
assert t.render(seq="ab") == "TrueFalseTrueFalse"
def test_recursive_empty_loop_iter(self, test_env_async):
t = test_env_async.from_string(
"""
{%- for item in foo recursive -%}{%- endfor -%}
"""
)
assert t.render(dict(foo=[])) == ""
def test_call_in_loop(self, test_env_async):
t = test_env_async.from_string(
"""
{%- macro do_something() -%}
[{{ caller() }}]
{%- endmacro %}
{%- for i in [1, 2, 3] %}
{%- call do_something() -%}
{{ i }}
{%- endcall %}
{%- endfor -%}
"""
)
assert t.render() == "[1][2][3]"
def test_scoping_bug(self, test_env_async):
t = test_env_async.from_string(
"""
{%- for item in foo %}...{{ item }}...{% endfor %}
{%- macro item(a) %}...{{ a }}...{% endmacro %}
{{- item(2) -}}
"""
)
assert t.render(foo=(1,)) == "...1......2..."
def test_unpacking(self, test_env_async):
tmpl = test_env_async.from_string(
"{% for a, b, c in [[1, 2, 3]] %}{{ a }}|{{ b }}|{{ c }}{% endfor %}"
)
assert tmpl.render() == "1|2|3"
def test_recursive_loop_filter(self, test_env_async):
t = test_env_async.from_string(
"""
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{%- for page in [site.root] if page.url != this recursive %}
<url><loc>{{ page.url }}</loc></url>
{{- loop(page.children) }}
{%- endfor %}
</urlset>
"""
)
sm = t.render(
this="/foo",
site={"root": {"url": "/", "children": [{"url": "/foo"}, {"url": "/bar"}]}},
)
lines = [x.strip() for x in sm.splitlines() if x.strip()]
assert lines == [
'<?xml version="1.0" encoding="UTF-8"?>',
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">',
"<url><loc>/</loc></url>",
"<url><loc>/bar</loc></url>",
"</urlset>",
]
def test_nonrecursive_loop_filter(self, test_env_async):
t = test_env_async.from_string(
"""
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{%- for page in items if page.url != this %}
<url><loc>{{ page.url }}</loc></url>
{%- endfor %}
</urlset>
"""
)
sm = t.render(
this="/foo", items=[{"url": "/"}, {"url": "/foo"}, {"url": "/bar"}]
)
lines = [x.strip() for x in sm.splitlines() if x.strip()]
assert lines == [
'<?xml version="1.0" encoding="UTF-8"?>',
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">',
"<url><loc>/</loc></url>",
"<url><loc>/bar</loc></url>",
"</urlset>",
]
def test_bare_async(self, test_env_async):
t = test_env_async.from_string('{% extends "header" %}')
assert t.render(foo=42) == "[42|23]"
def test_awaitable_property_slicing(self, test_env_async):
t = test_env_async.from_string("{% for x in a.b[:1] %}{{ x }}{% endfor %}")
assert t.render(a=dict(b=[1, 2, 3])) == "1"
def test_namespace_awaitable(test_env_async):
async def _test():
t = test_env_async.from_string(
'{% set ns = namespace(foo="Bar") %}{{ ns.foo }}'
)
actual = await t.render_async()
assert actual == "Bar"
run(_test())
|
from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON
from .conftest import setup_platform
async def test_entity_and_device_attributes(hass, scene):
"""Test the attributes of the entity are correct."""
# Arrange
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Act
await setup_platform(hass, SCENE_DOMAIN, scenes=[scene])
# Assert
entry = entity_registry.async_get("scene.test_scene")
assert entry
assert entry.unique_id == scene.scene_id
async def test_scene_activate(hass, scene):
"""Test the scene is activated."""
await setup_platform(hass, SCENE_DOMAIN, scenes=[scene])
await hass.services.async_call(
SCENE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "scene.test_scene"},
blocking=True,
)
state = hass.states.get("scene.test_scene")
assert state.attributes["icon"] == scene.icon
assert state.attributes["color"] == scene.color
assert state.attributes["location_id"] == scene.location_id
# pylint: disable=protected-access
assert scene.execute.call_count == 1 # type: ignore
async def test_unload_config_entry(hass, scene):
"""Test the scene is removed when the config entry is unloaded."""
# Arrange
config_entry = await setup_platform(hass, SCENE_DOMAIN, scenes=[scene])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, SCENE_DOMAIN)
# Assert
assert not hass.states.get("scene.test_scene")
|
import socket
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_ALLOW_BANDWIDTH_SENSORS,
CONF_ALLOW_UPTIME_SENSORS,
CONF_BLOCK_CLIENT,
CONF_CONTROLLER,
CONF_DETECTION_TIME,
CONF_IGNORE_WIRED_BUG,
CONF_POE_CLIENTS,
CONF_SITE_ID,
CONF_SSID_FILTER,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
CONF_TRACK_WIRED_CLIENTS,
CONTROLLER_ID,
DEFAULT_POE_CLIENTS,
DOMAIN as UNIFI_DOMAIN,
LOGGER,
)
from .controller import get_controller
from .errors import AlreadyConfigured, AuthenticationRequired, CannotConnect
DEFAULT_PORT = 8443
DEFAULT_SITE_ID = "default"
DEFAULT_VERIFY_SSL = False
@callback
def get_controller_id_from_config_entry(config_entry):
"""Return controller with a matching bridge id."""
return CONTROLLER_ID.format(
host=config_entry.data[CONF_CONTROLLER][CONF_HOST],
site=config_entry.data[CONF_CONTROLLER][CONF_SITE_ID],
)
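# Hedged sketch: assuming CONTROLLER_ID is a format string along the lines
# of "{host}-{site}", a config entry for host 192.168.1.1 and site
# "default" would yield the id "192.168.1.1-default".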
class UnifiFlowHandler(config_entries.ConfigFlow, domain=UNIFI_DOMAIN):
"""Handle a UniFi config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return UnifiOptionsFlowHandler(config_entry)
def __init__(self):
"""Initialize the UniFi flow."""
self.config = None
self.desc = None
self.sites = None
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
try:
self.config = {
CONF_HOST: user_input[CONF_HOST],
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
CONF_PORT: user_input.get(CONF_PORT),
CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL),
CONF_SITE_ID: DEFAULT_SITE_ID,
}
controller = await get_controller(self.hass, **self.config)
self.sites = await controller.sites()
return await self.async_step_site()
except AuthenticationRequired:
errors["base"] = "faulty_credentials"
except CannotConnect:
errors["base"] = "service_unavailable"
except Exception: # pylint: disable=broad-except
LOGGER.error(
"Unknown error connecting with UniFi Controller at %s",
user_input[CONF_HOST],
)
return self.async_abort(reason="unknown")
host = ""
if await async_discover_unifi(self.hass):
host = "unifi"
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
}
),
errors=errors,
)
async def async_step_site(self, user_input=None):
"""Select site to control."""
errors = {}
if user_input is not None:
try:
desc = user_input.get(CONF_SITE_ID, self.desc)
for site in self.sites.values():
if desc == site["desc"]:
self.config[CONF_SITE_ID] = site["name"]
break
for entry in self._async_current_entries():
controller = entry.data[CONF_CONTROLLER]
if (
controller[CONF_HOST] == self.config[CONF_HOST]
and controller[CONF_SITE_ID] == self.config[CONF_SITE_ID]
):
raise AlreadyConfigured
data = {CONF_CONTROLLER: self.config}
return self.async_create_entry(title=desc, data=data)
except AlreadyConfigured:
return self.async_abort(reason="already_configured")
if len(self.sites) == 1:
self.desc = next(iter(self.sites.values()))["desc"]
return await self.async_step_site(user_input={})
sites = []
for site in self.sites.values():
sites.append(site["desc"])
return self.async_show_form(
step_id="site",
data_schema=vol.Schema({vol.Required(CONF_SITE_ID): vol.In(sites)}),
errors=errors,
)
class UnifiOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle Unifi options."""
def __init__(self, config_entry):
"""Initialize UniFi options flow."""
self.config_entry = config_entry
self.options = dict(config_entry.options)
self.controller = None
async def async_step_init(self, user_input=None):
"""Manage the UniFi options."""
self.controller = self.hass.data[UNIFI_DOMAIN][self.config_entry.entry_id]
self.options[CONF_BLOCK_CLIENT] = self.controller.option_block_clients
if self.show_advanced_options:
return await self.async_step_device_tracker()
return await self.async_step_simple_options()
async def async_step_simple_options(self, user_input=None):
"""For simple Jack."""
if user_input is not None:
self.options.update(user_input)
return await self._update_options()
clients_to_block = {}
for client in self.controller.api.clients.values():
clients_to_block[
client.mac
] = f"{client.name or client.hostname} ({client.mac})"
return self.async_show_form(
step_id="simple_options",
data_schema=vol.Schema(
{
vol.Optional(
CONF_TRACK_CLIENTS,
default=self.controller.option_track_clients,
): bool,
vol.Optional(
CONF_TRACK_DEVICES,
default=self.controller.option_track_devices,
): bool,
vol.Optional(
CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT]
): cv.multi_select(clients_to_block),
}
),
)
async def async_step_device_tracker(self, user_input=None):
"""Manage the device tracker options."""
if user_input is not None:
self.options.update(user_input)
return await self.async_step_client_control()
ssids = (
set(self.controller.api.wlans)
| {
f"{wlan.name}{wlan.name_combine_suffix}"
for wlan in self.controller.api.wlans.values()
if not wlan.name_combine_enabled
}
| {
wlan["name"]
for ap in self.controller.api.devices.values()
for wlan in ap.wlan_overrides
if "name" in wlan
}
)
        ssid_filter = {ssid: ssid for ssid in sorted(ssids)}
return self.async_show_form(
step_id="device_tracker",
data_schema=vol.Schema(
{
vol.Optional(
CONF_TRACK_CLIENTS,
default=self.controller.option_track_clients,
): bool,
vol.Optional(
CONF_TRACK_WIRED_CLIENTS,
default=self.controller.option_track_wired_clients,
): bool,
vol.Optional(
CONF_TRACK_DEVICES,
default=self.controller.option_track_devices,
): bool,
vol.Optional(
CONF_SSID_FILTER, default=self.controller.option_ssid_filter
): cv.multi_select(ssid_filter),
vol.Optional(
CONF_DETECTION_TIME,
default=int(
self.controller.option_detection_time.total_seconds()
),
): int,
vol.Optional(
CONF_IGNORE_WIRED_BUG,
default=self.controller.option_ignore_wired_bug,
): bool,
}
),
)
async def async_step_client_control(self, user_input=None):
"""Manage configuration of network access controlled clients."""
errors = {}
if user_input is not None:
self.options.update(user_input)
return await self.async_step_statistics_sensors()
clients_to_block = {}
for client in self.controller.api.clients.values():
clients_to_block[
client.mac
] = f"{client.name or client.hostname} ({client.mac})"
return self.async_show_form(
step_id="client_control",
data_schema=vol.Schema(
{
vol.Optional(
CONF_BLOCK_CLIENT, default=self.options[CONF_BLOCK_CLIENT]
): cv.multi_select(clients_to_block),
vol.Optional(
CONF_POE_CLIENTS,
default=self.options.get(CONF_POE_CLIENTS, DEFAULT_POE_CLIENTS),
): bool,
}
),
errors=errors,
)
async def async_step_statistics_sensors(self, user_input=None):
"""Manage the statistics sensors options."""
if user_input is not None:
self.options.update(user_input)
return await self._update_options()
return self.async_show_form(
step_id="statistics_sensors",
data_schema=vol.Schema(
{
vol.Optional(
CONF_ALLOW_BANDWIDTH_SENSORS,
default=self.controller.option_allow_bandwidth_sensors,
): bool,
vol.Optional(
CONF_ALLOW_UPTIME_SENSORS,
default=self.controller.option_allow_uptime_sensors,
): bool,
}
),
)
async def _update_options(self):
"""Update config entry options."""
return self.async_create_entry(title="", data=self.options)
async def async_discover_unifi(hass):
"""Discover UniFi address."""
try:
return await hass.async_add_executor_job(socket.gethostbyname, "unifi")
except socket.gaierror:
return None
|
import os
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import ycsb
from perfkitbenchmarker.providers.aws import aws_dynamodb
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'aws_dynamodb_ycsb'
BENCHMARK_CONFIG = """
aws_dynamodb_ycsb:
description: >
Run YCSB against AWS DynamoDB.
Configure the number of VMs via --ycsb_client_vms.
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 1"""
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['default']['vm_count'] = FLAGS.ycsb_client_vms
return config
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Args:
benchmark_config: Unused.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
del benchmark_config
ycsb.CheckPrerequisites()
def Prepare(benchmark_spec):
"""Install YCSB on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
benchmark_spec.always_call_cleanup = True
benchmark_spec.dynamodb_instance = aws_dynamodb.AwsDynamoDBInstance(
table_name='pkb-{0}'.format(FLAGS.run_uri))
benchmark_spec.dynamodb_instance.Create()
vms = benchmark_spec.vms
# Install required packages.
vm_util.RunThreaded(_Install, vms)
benchmark_spec.executor = ycsb.YCSBExecutor('dynamodb')
def Run(benchmark_spec):
"""Run YCSB on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
run_kwargs = {
'dynamodb.awsCredentialsFile': GetRemoteVMCredentialsFullPath(vms[0]),
'dynamodb.primaryKey': FLAGS.aws_dynamodb_primarykey,
'dynamodb.endpoint': benchmark_spec.dynamodb_instance.GetEndPoint(),
'table': 'pkb-{0}'.format(FLAGS.run_uri),
}
if FLAGS.aws_dynamodb_use_sort:
run_kwargs.update({'dynamodb.primaryKeyType': 'HASH_AND_RANGE',
'aws_dynamodb_connectMax': FLAGS.aws_dynamodb_connectMax,
'dynamodb.hashKeyName': FLAGS.aws_dynamodb_primarykey,
'dynamodb.primaryKey': FLAGS.aws_dynamodb_sortkey})
if FLAGS.aws_dynamodb_ycsb_consistentReads:
run_kwargs.update({'dynamodb.consistentReads': 'true'})
load_kwargs = run_kwargs.copy()
if FLAGS['ycsb_preload_threads'].present:
load_kwargs['threads'] = FLAGS.ycsb_preload_threads
samples = list(benchmark_spec.executor.LoadAndRun(
vms, load_kwargs=load_kwargs, run_kwargs=run_kwargs))
benchmark_metadata = {
'ycsb_client_vms': len(vms),
}
for sample in samples:
sample.metadata.update(
benchmark_spec.dynamodb_instance.GetResourceMetadata())
sample.metadata.update(benchmark_metadata)
return samples
def Cleanup(benchmark_spec):
"""Cleanup YCSB on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
benchmark_spec.dynamodb_instance.Delete()
def GetRemoteVMCredentialsFullPath(vm):
"""Returns the full path for first AWS credentials file found."""
home_dir, _ = vm.RemoteCommand('echo ~')
search_path = os.path.join(home_dir.rstrip('\n'),
FLAGS.aws_credentials_remote_path)
result, _ = vm.RemoteCommand('grep -irl "key" {0}'.format(search_path))
return result.strip('\n').split('\n')[0]
def _Install(vm):
"""Install YCSB on client 'vm'."""
vm.Install('ycsb')
# copy AWS creds
vm.Install('aws_credentials')
  # convert the AWS credentials file format to the YCSB-recognized format
vm.RemoteCommand('sed -i "s/aws_access_key_id/accessKey/g" {0}'.format(
GetRemoteVMCredentialsFullPath(vm)))
vm.RemoteCommand('sed -i "s/aws_secret_access_key/secretKey/g" {0}'.format(
GetRemoteVMCredentialsFullPath(vm)))
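# Illustrative effect of the two sed calls above (key values are
# placeholders, not real credentials): a boto-style file such as
#     aws_access_key_id = AKIAEXAMPLE
#     aws_secret_access_key = abcd1234
# is rewritten in place to the YCSB-recognized form
#     accessKey = AKIAEXAMPLE
#     secretKey = abcd1234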
|
from typing import Optional
def icon_for_battery_level(
battery_level: Optional[int] = None, charging: bool = False
) -> str:
"""Return a battery icon valid identifier."""
icon = "mdi:battery"
if battery_level is None:
return f"{icon}-unknown"
if charging and battery_level > 10:
icon += "-charging-{}".format(int(round(battery_level / 20 - 0.01)) * 20)
elif charging:
icon += "-outline"
elif battery_level <= 5:
icon += "-alert"
elif 5 < battery_level < 95:
icon += "-{}".format(int(round(battery_level / 10 - 0.01)) * 10)
return icon
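# Worked examples of the bucketing above (derived from the rounding
# arithmetic in this function, shown as an illustrative sketch):
#   icon_for_battery_level(None)               -> "mdi:battery-unknown"
#   icon_for_battery_level(42)                 -> "mdi:battery-40"
#   icon_for_battery_level(42, charging=True)  -> "mdi:battery-charging-40"
#   icon_for_battery_level(5, charging=True)   -> "mdi:battery-outline"
#   icon_for_battery_level(3)                  -> "mdi:battery-alert"
#   icon_for_battery_level(100)                -> "mdi:battery" (full)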
def icon_for_signal_level(signal_level: Optional[int] = None) -> str:
"""Return a signal icon valid identifier."""
if signal_level is None or signal_level == 0:
return "mdi:signal-cellular-outline"
if signal_level > 70:
return "mdi:signal-cellular-3"
if signal_level > 30:
return "mdi:signal-cellular-2"
return "mdi:signal-cellular-1"
|
import sys
import cProfile
import os.path
import os
import tempfile
import subprocess
import shutil
import argparse
import shlex
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
import qutebrowser.qutebrowser
def parse_args():
"""Parse commandline arguments.
Return:
A (namespace, remaining_args) tuple from argparse.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--profile-tool', metavar='TOOL',
action='store', choices=['kcachegrind', 'snakeviz',
'gprof2dot', 'tuna', 'none'],
default='snakeviz',
help="The tool to use to view the profiling data")
parser.add_argument('--profile-file', metavar='FILE', action='store',
default="profile_data",
help="The filename to use with --profile-tool=none")
parser.add_argument('--profile-test', action='store_true',
help="Run pytest instead of qutebrowser")
return parser.parse_known_args()
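# Illustrative invocations (the script path is hypothetical; anything
# argparse does not recognize is forwarded to qutebrowser or pytest):
#   python profile_run.py --profile-tool none --profile-file out.prof
#   python profile_run.py --profile-test -- -k test_foo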
def main():
args, remaining = parse_args()
tempdir = tempfile.mkdtemp()
if args.profile_tool == 'none':
profilefile = os.path.join(os.getcwd(), args.profile_file)
else:
profilefile = os.path.join(tempdir, 'profile')
sys.argv = [sys.argv[0]] + remaining
profiler = cProfile.Profile()
if args.profile_test:
import pytest
profiler.runcall(pytest.main)
else:
profiler.runcall(qutebrowser.qutebrowser.main)
# If we have an exception after here, we don't want the qutebrowser
# exception hook to take over.
sys.excepthook = sys.__excepthook__
profiler.dump_stats(profilefile)
if args.profile_tool == 'none':
print("Profile data written to {}".format(profilefile))
elif args.profile_tool == 'gprof2dot':
# yep, shell=True. I know what I'm doing.
subprocess.run(
'gprof2dot -f pstats {} | dot -Tpng | feh -F -'.format(
shlex.quote(profilefile)), shell=True, check=True)
elif args.profile_tool == 'kcachegrind':
callgraphfile = os.path.join(tempdir, 'callgraph')
subprocess.run(['pyprof2calltree', '-k', '-i', profilefile,
'-o', callgraphfile], check=True)
elif args.profile_tool == 'snakeviz':
subprocess.run(['snakeviz', profilefile], check=True)
elif args.profile_tool == 'tuna':
subprocess.run(['tuna', profilefile], check=True)
shutil.rmtree(tempdir)
if __name__ == '__main__':
main()
|
import pytest
from kombu.utils.time import maybe_s_to_ms
@pytest.mark.parametrize('input,expected', [
(3, 3000),
(3.0, 3000),
(303, 303000),
(303.33, 303330),
(303.333, 303333),
(303.3334, 303333),
(None, None),
(0, 0),
])
def test_maybe_s_to_ms(input, expected):
ret = maybe_s_to_ms(input)
if expected is None:
assert ret is None
else:
assert ret == expected
|
from lark import Lark
parser = Lark(r"""
start: _NL? section+
section: "[" NAME "]" _NL item+
item: NAME "=" VALUE? _NL
VALUE: /./+
%import common.CNAME -> NAME
%import common.NEWLINE -> _NL
%import common.WS_INLINE
%ignore WS_INLINE
""", parser="earley")
def test():
sample_conf = """
[bla]
a=Hello
this="that",4
empty=
"""
r = parser.parse(sample_conf)
    print(r.pretty())
if __name__ == '__main__':
test()
|
from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext.TermDocMatrixFromFrequencies import TermDocMatrixFromFrequencies
class TestTermDocMatrixFromFrequencies(TestCase):
def test_build(self):
term_freq_df = pd.DataFrame({
'term': ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'],
'A': [6, 3, 3, 3, 5, 0, 0],
'B': [6, 3, 3, 3, 5, 1, 1],
}).set_index('term')[['A', 'B']]
term_doc_mat = TermDocMatrixFromFrequencies(term_freq_df).build()
self.assertEqual(list(term_doc_mat.get_categories()), ['A', 'B'])
self.assertEqual(list(term_doc_mat.get_terms()),
['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'])
np.testing.assert_array_equal(term_freq_df.values,
term_doc_mat.get_term_freq_df().values)
|
import os.path as op
import mne
import numpy as np
from mne import find_events, fit_dipole
from mne.datasets.brainstorm import bst_phantom_elekta
from mne.io import read_raw_fif
print(__doc__)
###############################################################################
# Plot the phantom data, lowpassed to get rid of high-frequency artifacts.
# We also crop to a single 10-second segment for speed.
# Notice that there are two large flux jumps on channel 1522 that could
# spread to other channels when performing subsequent spatial operations
# (e.g., Maxwell filtering, SSP, or ICA).
dipole_number = 1
data_path = bst_phantom_elekta.data_path()
raw = read_raw_fif(
op.join(data_path, 'kojak_all_200nAm_pp_no_chpi_no_ms_raw.fif'))
raw.crop(40., 50.).load_data()
order = list(range(160, 170))
raw.copy().filter(0., 40.).plot(order=order, n_channels=10)
###############################################################################
# Now we can clean the data with OTP, lowpass, and plot. The flux jumps have
# been suppressed alongside the random sensor noise.
raw_clean = mne.preprocessing.oversampled_temporal_projection(raw)
raw_clean.filter(0., 40.)
raw_clean.plot(order=order, n_channels=10)
###############################################################################
# We can also look at the effect on single-trial phantom localization.
# See the :ref:`tut-brainstorm-elekta-phantom`
# for more information. Here we use a version that does single-trial
# localization across the 17 trials that are in our 10-second window:
def compute_bias(raw):
events = find_events(raw, 'STI201', verbose=False)
events = events[1:] # first one has an artifact
tmin, tmax = -0.2, 0.1
epochs = mne.Epochs(raw, events, dipole_number, tmin, tmax,
baseline=(None, -0.01), preload=True, verbose=False)
sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=None,
verbose=False)
cov = mne.compute_covariance(epochs, tmax=0, method='oas',
rank=None, verbose=False)
idx = epochs.time_as_index(0.036)[0]
data = epochs.get_data()[:, :, idx].T
evoked = mne.EvokedArray(data, epochs.info, tmin=0.)
dip = fit_dipole(evoked, cov, sphere, n_jobs=1, verbose=False)[0]
actual_pos = mne.dipole.get_phantom_dipoles()[0][dipole_number - 1]
misses = 1000 * np.linalg.norm(dip.pos - actual_pos, axis=-1)
return misses
bias = compute_bias(raw)
print('Raw bias: %0.1fmm (worst: %0.1fmm)'
% (np.mean(bias), np.max(bias)))
bias_clean = compute_bias(raw_clean)
print('OTP bias: %0.1fmm (worst: %0.1fmm)'
% (np.mean(bias_clean), np.max(bias_clean),))
###############################################################################
# References
# ----------
# .. [1] Larson E, Taulu S (2017). Reducing Sensor Noise in MEG and EEG
# Recordings Using Oversampled Temporal Projection.
# IEEE Transactions on Biomedical Engineering.
|
from datetime import timedelta
import logging
import socket
from telnetlib import Telnet
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DISKS,
CONF_HOST,
CONF_NAME,
CONF_PORT,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_DEVICE = "device"
ATTR_MODEL = "model"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 7634
DEFAULT_NAME = "HD Temperature"
DEFAULT_TIMEOUT = 5
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DISKS, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
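# Example configuration.yaml entry for this platform (an illustrative
# sketch; the host, port and disk values are assumptions):
#
#   sensor:
#     - platform: hddtemp
#       host: 192.168.1.2
#       port: 7634
#       disks:
#         - /dev/sda1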
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HDDTemp sensor."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
disks = config.get(CONF_DISKS)
hddtemp = HddTempData(host, port)
hddtemp.update()
if not disks:
disks = [next(iter(hddtemp.data)).split("|")[0]]
dev = []
for disk in disks:
dev.append(HddTempSensor(name, disk, hddtemp))
add_entities(dev, True)
class HddTempSensor(Entity):
"""Representation of a HDDTemp sensor."""
def __init__(self, name, disk, hddtemp):
"""Initialize a HDDTemp sensor."""
self.hddtemp = hddtemp
self.disk = disk
self._name = f"{name} {disk}"
self._state = None
self._details = None
self._unit = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._details is not None:
return {ATTR_DEVICE: self._details[0], ATTR_MODEL: self._details[1]}
def update(self):
"""Get the latest data from HDDTemp daemon and updates the state."""
self.hddtemp.update()
if self.hddtemp.data and self.disk in self.hddtemp.data:
self._details = self.hddtemp.data[self.disk].split("|")
self._state = self._details[2]
if self._details is not None and self._details[3] == "F":
self._unit = TEMP_FAHRENHEIT
else:
self._unit = TEMP_CELSIUS
else:
self._state = None
class HddTempData:
"""Get the latest data from HDDTemp and update the states."""
def __init__(self, host, port):
"""Initialize the data object."""
self.host = host
self.port = port
self.data = None
def update(self):
"""Get the latest data from HDDTemp running as daemon."""
try:
connection = Telnet(host=self.host, port=self.port, timeout=DEFAULT_TIMEOUT)
data = (
connection.read_all()
.decode("ascii")
.lstrip("|")
.rstrip("|")
.split("||")
)
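            # hddtemp's telnet feed is assumed to look like
            # "|/dev/sda|ST3000DM001|36|C||/dev/sdb|WD30EZRX|38|C|": after
            # stripping the outer pipes, each "||"-separated record is
            # "device|model|temperature|unit", keyed below by device path.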
            self.data = {entry.split("|")[0]: entry for entry in data}
except ConnectionRefusedError:
_LOGGER.error("HDDTemp is not available at %s:%s", self.host, self.port)
self.data = None
except socket.gaierror:
_LOGGER.error("HDDTemp host not found %s:%s", self.host, self.port)
self.data = None
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from aurora import AuroraCollector
###############################################################################
class TestAuroraCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('AuroraCollector', {})
self.collector = AuroraCollector(config, None)
def test_import(self):
self.assertTrue(AuroraCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
def se(url):
if url == 'http://localhost:8081/vars':
return self.getFixture('metrics')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = self.get_metrics()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('metrics_blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
def get_metrics(self):
return {
'async.tasks.completed': 11117.0,
'attribute.store.fetch.all.events': 24.0,
'attribute.store.fetch.all.events.per.sec': 0.0,
'attribute.store.fetch.all.nanos.per.event': 0.0,
'attribute.store.fetch.all.nanos.total': 90208119.0,
'attribute.store.fetch.all.nanos.total.per.sec': 0.0,
'attribute.store.fetch.one.events': 33024.0,
'tasks.FAILED.computers.prod.computer-traffic-analysis': 517.0,
'tasks.FAILED.reporting.prod.report-processing': 2.0
}
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from __future__ import division
from math import cos, log10, pi, sin
class Margin(object):
"""Class reprensenting a margin (top, right, left, bottom)"""
def __init__(self, top, right, bottom, left):
"""Create the margin object from the top, right, left, bottom margin"""
self.top = top
self.right = right
self.bottom = bottom
self.left = left
@property
def x(self):
"""Helper for total x margin"""
return self.left + self.right
@property
def y(self):
"""Helper for total y margin"""
return self.top + self.bottom
class Box(object):
"""Chart boundings"""
margin = .02
def __init__(self, xmin=0, ymin=0, xmax=1, ymax=1):
"""
        Create the chart bounds from the min/max horizontal
        and vertical values
"""
self._xmin = xmin
self._ymin = ymin
self._xmax = xmax
self._ymax = ymax
def set_polar_box(self, rmin=0, rmax=1, tmin=0, tmax=2 * pi):
"""Helper for polar charts"""
self._rmin = rmin
self._rmax = rmax
self._tmin = tmin
self._tmax = tmax
self.xmin = self.ymin = rmin - rmax
self.xmax = self.ymax = rmax - rmin
@property
def xmin(self):
"""X minimum getter"""
return self._xmin
@xmin.setter
def xmin(self, value):
"""X minimum setter"""
if value is not None:
self._xmin = value
@property
def ymin(self):
"""Y minimum getter"""
return self._ymin
@ymin.setter
def ymin(self, value):
"""Y minimum setter"""
if value is not None:
self._ymin = value
@property
def xmax(self):
"""X maximum getter"""
return self._xmax
@xmax.setter
def xmax(self, value):
"""X maximum setter"""
if value is not None:
self._xmax = value
@property
def ymax(self):
"""Y maximum getter"""
return self._ymax
@ymax.setter
def ymax(self, value):
"""Y maximum setter"""
if value or self.ymin:
self._ymax = value
@property
def width(self):
"""Helper for box width"""
return self.xmax - self.xmin
@property
def height(self):
"""Helper for box height"""
return self.ymax - self.ymin
def swap(self):
"""Return the box (for horizontal graphs)"""
self.xmin, self.ymin = self.ymin, self.xmin
self.xmax, self.ymax = self.ymax, self.xmax
def fix(self, with_margin=True):
"""Correct box when no values and take margin in account"""
if not self.width:
self.xmax = self.xmin + 1
if not self.height:
self.ymin /= 2
self.ymax += self.ymin
xmargin = self.margin * self.width
self.xmin -= xmargin
self.xmax += xmargin
if with_margin:
ymargin = self.margin * self.height
self.ymin -= ymargin
self.ymax += ymargin
class View(object):
"""Projection base class"""
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self.width = width
self.height = height
self.box = box
self.box.fix()
def x(self, x):
"""Project x"""
if x is None:
return None
return self.width * (x - self.box.xmin) / self.box.width
def y(self, y):
"""Project y"""
if y is None:
return None
return (
self.height - self.height * (y - self.box.ymin) / self.box.height
)
def __call__(self, xy):
"""Project x and y"""
x, y = xy
return (self.x(x), self.y(y))
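# Worked example (a sketch, not part of the original module): with the
# default 2% margin, Box(0, 0, 1, 1).fix() widens the bounds to
# (-0.02, -0.02, 1.02, 1.02), so View(100, 50, Box())((0.5, 0.5))
# projects the box centre to (50.0, 25.0); y is flipped because the
# SVG origin sits at the top-left corner.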
class ReverseView(View):
"""Same as view but reversed vertically"""
def y(self, y):
"""Project reversed y"""
if y is None:
return None
return (self.height * (y - self.box.ymin) / self.box.height)
class HorizontalView(View):
"""Same as view but transposed"""
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self._force_vertical = None
self.width = width
self.height = height
self.box = box
self.box.fix()
self.box.swap()
def x(self, x):
"""Project x as y"""
if x is None:
return None
if self._force_vertical:
return super(HorizontalView, self).x(x)
return super(HorizontalView, self).y(x)
def y(self, y):
"""Project y as x"""
if y is None:
return None
if self._force_vertical:
return super(HorizontalView, self).y(y)
return super(HorizontalView, self).x(y)
class PolarView(View):
"""Polar projection for pie like graphs"""
def __call__(self, rhotheta):
"""Project rho and theta"""
if None in rhotheta:
return None, None
rho, theta = rhotheta
return super(PolarView,
self).__call__((rho * cos(theta), rho * sin(theta)))
class PolarLogView(View):
"""Logarithmic polar projection"""
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
super(PolarLogView, self).__init__(width, height, box)
if not hasattr(box, '_rmin') or not hasattr(box, '_rmax'):
raise Exception(
'Box must be set with set_polar_box for polar charts'
)
self.log10_rmax = log10(self.box._rmax)
self.log10_rmin = log10(self.box._rmin)
if self.log10_rmin == self.log10_rmax:
self.log10_rmax = self.log10_rmin + 1
def __call__(self, rhotheta):
"""Project rho and theta"""
if None in rhotheta:
return None, None
rho, theta = rhotheta
# Center case
if rho == 0:
return super(PolarLogView, self).__call__((0, 0))
rho = (self.box._rmax - self.box._rmin) * (
log10(rho) - self.log10_rmin
) / (self.log10_rmax - self.log10_rmin)
return super(PolarLogView,
self).__call__((rho * cos(theta), rho * sin(theta)))
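# Worked example for the radial log mapping above (illustrative numbers):
# with rmin=1 and rmax=100, a point at rho=10 maps to
# (100 - 1) * (log10(10) - 0) / (log10(100) - 0) = 49.5, i.e. halfway
# along the radial range on a log scale.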
class PolarThetaView(View):
"""Logarithmic polar projection"""
def __init__(self, width, height, box, aperture=pi / 3):
"""Create the view with a width an height and a box bounds"""
super(PolarThetaView, self).__init__(width, height, box)
self.aperture = aperture
if not hasattr(box, '_tmin') or not hasattr(box, '_tmax'):
raise Exception(
'Box must be set with set_polar_box for polar charts'
)
def __call__(self, rhotheta):
"""Project rho and theta"""
if None in rhotheta:
return None, None
rho, theta = rhotheta
start = 3 * pi / 2 + self.aperture / 2
theta = start + (2 * pi - self.aperture) * (theta - self.box._tmin) / (
self.box._tmax - self.box._tmin
)
return super(PolarThetaView,
self).__call__((rho * cos(theta), rho * sin(theta)))
class PolarThetaLogView(View):
"""Logarithmic polar projection"""
def __init__(self, width, height, box, aperture=pi / 3):
"""Create the view with a width an height and a box bounds"""
super(PolarThetaLogView, self).__init__(width, height, box)
self.aperture = aperture
if not hasattr(box, '_tmin') or not hasattr(box, '_tmax'):
raise Exception(
'Box must be set with set_polar_box for polar charts'
)
self.log10_tmax = log10(self.box._tmax) if self.box._tmax > 0 else 0
self.log10_tmin = log10(self.box._tmin) if self.box._tmin > 0 else 0
if self.log10_tmin == self.log10_tmax:
self.log10_tmax = self.log10_tmin + 1
def __call__(self, rhotheta):
"""Project rho and theta"""
if None in rhotheta:
return None, None
rho, theta = rhotheta
# Center case
if theta == 0:
return super(PolarThetaLogView, self).__call__((0, 0))
theta = self.box._tmin + (self.box._tmax - self.box._tmin) * (
log10(theta) - self.log10_tmin
) / (self.log10_tmax - self.log10_tmin)
start = 3 * pi / 2 + self.aperture / 2
theta = start + (2 * pi - self.aperture) * (theta - self.box._tmin) / (
self.box._tmax - self.box._tmin
)
return super(PolarThetaLogView,
self).__call__((rho * cos(theta), rho * sin(theta)))
class LogView(View):
"""Y Logarithmic projection"""
# Do not want to call the parent here
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self.width = width
self.height = height
self.box = box
self.log10_ymax = log10(self.box.ymax) if self.box.ymax > 0 else 0
self.log10_ymin = log10(self.box.ymin) if self.box.ymin > 0 else 0
if self.log10_ymin == self.log10_ymax:
self.log10_ymax = self.log10_ymin + 1
self.box.fix(False)
def y(self, y):
"""Project y"""
if y is None or y <= 0 or self.log10_ymax - self.log10_ymin == 0:
return 0
return (
self.height - self.height * (log10(y) - self.log10_ymin) /
(self.log10_ymax - self.log10_ymin)
)
class XLogView(View):
"""X logarithmic projection"""
# Do not want to call the parent here
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self.width = width
self.height = height
self.box = box
self.log10_xmax = log10(self.box.xmax) if self.box.xmax > 0 else 0
self.log10_xmin = log10(self.box.xmin) if self.box.xmin > 0 else 0
self.box.fix(False)
def x(self, x):
"""Project x"""
if x is None or x <= 0 or self.log10_xmax - self.log10_xmin == 0:
return None
return (
self.width * (log10(x) - self.log10_xmin) /
(self.log10_xmax - self.log10_xmin)
)
class XYLogView(XLogView, LogView):
"""X and Y logarithmic projection"""
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self.width = width
self.height = height
self.box = box
self.log10_ymax = log10(self.box.ymax) if self.box.ymax > 0 else 0
self.log10_ymin = log10(self.box.ymin) if self.box.ymin > 0 else 0
self.log10_xmax = log10(self.box.xmax) if self.box.xmax > 0 else 0
self.log10_xmin = log10(self.box.xmin) if self.box.xmin > 0 else 0
self.box.fix(False)
class HorizontalLogView(XLogView):
"""Transposed Logarithmic projection"""
# Do not want to call the parent here
def __init__(self, width, height, box):
"""Create the view with a width an height and a box bounds"""
self._force_vertical = None
self.width = width
self.height = height
self.box = box
self.log10_xmax = log10(self.box.ymax) if self.box.ymax > 0 else 0
self.log10_xmin = log10(self.box.ymin) if self.box.ymin > 0 else 0
if self.log10_xmin == self.log10_xmax:
self.log10_xmax = self.log10_xmin + 1
self.box.fix(False)
self.box.swap()
def x(self, x):
"""Project x as y"""
if x is None:
return None
if self._force_vertical:
return super(HorizontalLogView, self).x(x)
return super(XLogView, self).y(x)
def y(self, y):
"""Project y as x"""
if y is None:
return None
if self._force_vertical:
return super(XLogView, self).y(y)
return super(HorizontalLogView, self).x(y)
|
import os
import fnmatch
import tempfile
from molecule import logger
from molecule import scenarios
from molecule import util
LOG = logger.get_logger(__name__)
class Scenario(object):
"""
    A scenario allows Molecule to test a role in a particular way; this is a
    fundamental change from Molecule v1.
A scenario is a self-contained directory containing everything necessary
for testing the role in a particular way. The default scenario is named
``default``, and every role should contain a default scenario.
    Unless set explicitly, the scenario name is the name of the directory
    hosting the files.
Any option set in this section will override the defaults.
.. code-block:: yaml
scenario:
name: default # optional
create_sequence:
- dependency
- create
- prepare
check_sequence:
- dependency
- cleanup
- destroy
- create
- prepare
- converge
- check
- destroy
converge_sequence:
- dependency
- create
- prepare
- converge
destroy_sequence:
- dependency
- cleanup
- destroy
test_sequence:
- lint
- dependency
- cleanup
- destroy
- syntax
- create
- prepare
- converge
- idempotence
- side_effect
- verify
- cleanup
- destroy
""" # noqa
def __init__(self, config):
"""
        Initialize a new scenario class and return None.
:param config: An instance of a Molecule config.
:return: None
"""
self.config = config
self._setup()
def prune(self):
"""
        Prune the scenario ephemeral directory files and return None.
"safe files" will not be pruned, including the ansible configuration
and inventory used by this scenario, the scenario state file, and
files declared as "safe_files" in the ``driver`` configuration
declared in ``molecule.yml``.
:return: None
"""
LOG.info('Pruning extra files from scenario ephemeral directory')
safe_files = [
self.config.provisioner.config_file,
self.config.provisioner.inventory_file,
self.config.state.state_file,
] + self.config.driver.safe_files
files = util.os_walk(self.ephemeral_directory, '*')
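        # fnmatch applies shell-style globbing when comparing files against
        # the safe patterns, so (illustrative) a driver safe_files entry
        # such as "*.pem" would keep generated key material across prunes.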
for f in files:
if not any(sf for sf in safe_files if fnmatch.fnmatch(f, sf)):
os.remove(f)
# Remove empty directories.
for dirpath, dirs, files in os.walk(
self.ephemeral_directory, topdown=False):
if not dirs and not files:
os.removedirs(dirpath)
@property
def name(self):
return self.config.config['scenario']['name']
@property
def directory(self):
return os.path.dirname(self.config.molecule_file)
@property
def ephemeral_directory(self):
project_directory = os.path.basename(self.config.project_directory)
scenario_name = self.name
project_scenario_directory = os.path.join(
'molecule', project_directory, scenario_name)
path = ephemeral_directory(project_scenario_directory)
return ephemeral_directory(path)
@property
def inventory_directory(self):
return os.path.join(self.ephemeral_directory, "inventory")
@property
def check_sequence(self):
return self.config.config['scenario']['check_sequence']
@property
def cleanup_sequence(self):
return self.config.config['scenario']['cleanup_sequence']
@property
def converge_sequence(self):
return self.config.config['scenario']['converge_sequence']
@property
def create_sequence(self):
return self.config.config['scenario']['create_sequence']
@property
def dependency_sequence(self):
return ['dependency']
@property
def destroy_sequence(self):
return self.config.config['scenario']['destroy_sequence']
@property
def idempotence_sequence(self):
return ['idempotence']
@property
def lint_sequence(self):
return ['lint']
@property
def prepare_sequence(self):
return ['prepare']
@property
def side_effect_sequence(self):
return ['side_effect']
@property
def syntax_sequence(self):
return ['syntax']
@property
def test_sequence(self):
return self.config.config['scenario']['test_sequence']
@property
def verify_sequence(self):
return ['verify']
@property
def sequence(self):
"""
        Select the sequence matching this scenario's name and the config's
        subcommand, and return it as a list.
        :return: list
"""
s = scenarios.Scenarios([self.config])
matrix = s._get_matrix()
try:
return matrix[self.name][self.config.subcommand]
except KeyError:
# TODO(retr0h): May change this handling in the future.
return []
def _setup(self):
"""
        Prepare the scenario for Molecule and return None.
:return: None
"""
if not os.path.isdir(self.inventory_directory):
os.makedirs(self.inventory_directory)
def ephemeral_directory(path):
d = os.getenv('MOLECULE_EPHEMERAL_DIRECTORY')
if d:
return os.path.join(tempfile.gettempdir(), d)
return os.path.join(tempfile.gettempdir(), path)
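# Worked example (paths illustrative): with MOLECULE_EPHEMERAL_DIRECTORY
# unset, a project "myrole" with scenario "default" resolves to something
# like /tmp/molecule/myrole/default on Linux.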
|
import pytest
import sys
from plumbum import cli, local
from plumbum.cli.terminal import get_terminal_size
class SimpleApp(cli.Application):
@cli.switch(["a"])
def spam(self):
print("!!a")
@cli.switch(["b", "bacon"], argtype=int, mandatory = True, envname="PLUMBUM_TEST_BACON")
def bacon(self, param):
"""give me some bacon"""
print ("!!b", param)
eggs = cli.SwitchAttr(["e"], str, help = "sets the eggs attribute", envname="PLUMBUM_TEST_EGGS")
cheese = cli.Flag(["--cheese"], help = "cheese, please")
chives = cli.Flag(["--chives"], help = "chives, instead")
verbose = cli.CountOf(["v"], help = "increases the verbosity level")
benedict = cli.CountOf(["--benedict"], help = """a very long help message with lots of
useless information that nobody would ever want to read, but heck, we need to test
text wrapping in help messages as well""")
csv = cli.SwitchAttr(["--csv"], cli.Set("MIN", "MAX", int, csv=True))
num = cli.SwitchAttr(["--num"], cli.Set("MIN", "MAX", int))
def main(self, *args):
old = self.eggs
self.eggs = "lalala"
self.eggs = old
self.tailargs = args
class PositionalApp(cli.Application):
def main(self, one):
print("Got", one)
class Geet(cli.Application):
debug = cli.Flag("--debug")
cleanups = []
def main(self):
del self.cleanups[:]
print ("hi this is geet main")
def cleanup(self, retcode):
self.cleanups.append(1)
print("geet cleaning up with rc = %s" % (retcode,))
@Geet.subcommand("add")
class GeetAdd(cli.Application):
def main(self, *files):
return "adding", files
@Geet.subcommand("commit")
class GeetCommit(cli.Application):
message = cli.Flag("-m", str)
def main(self):
if self.parent.debug:
return "committing in debug"
else:
return "committing"
def cleanup(self, retcode):
self.parent.cleanups.append(2)
print("geet commit cleaning up with rc = %s" % (retcode,))
class Sample(cli.Application):
DESCRIPTION = "A sample cli application"
DESCRIPTION_MORE = '''
ABC This is just a sample help text typed with a Dvorak keyboard.
Although this paragraph is not left or right justified
in source, we expect it to appear
formatted nicely on the output, maintaining the indentation of the first line.
DEF this one has a different indentation.
Let's test that list items are not combined as paragraphs.
- Item 1
GHI more text for item 1, which may be very very very very very very long and even more long and long and long to
prove that we can actually wrap list items as well.
- Item 2 and this is
some text for item 2
- Item 3
List items with invisible bullets should be printed without the bullet.
/XYZ Invisible 1
/Invisible 2
* Star 1
* Star 2
Last paragraph can fill more than one line on the output as well. So many features is bound to cause lots of bugs.
Oh well...
'''
foo = cli.SwitchAttr("--foo")
Sample.unbind_switches("--version")
class Mumble(cli.Application):
pass
Sample.subcommand("mumble", Mumble)
class LazyLoaded(cli.Application):
def main(self):
print("hello world")
class AppA(cli.Application):
@cli.switch(['--one'])
def one(self):
pass
two = cli.SwitchAttr(['--two'])
class AppB(AppA):
@cli.switch(['--three'])
def three(self):
pass
four = cli.SwitchAttr(['--four'])
def main(self):
pass
# Testing #363
class TestInheritedApp:
def test_help(self, capsys):
_, rc = AppB.run(["AppB", "-h"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "--one" in stdout
assert "--two" in stdout
assert "--three" in stdout
assert "--four" in stdout
class TestCLI:
def test_meta_switches(self):
_, rc = SimpleApp.run(["foo", "-h"], exit = False)
assert rc == 0
_, rc = SimpleApp.run(["foo", "--version"], exit = False)
assert rc == 0
def test_okay(self):
_, rc = SimpleApp.run(["foo", "--bacon=81"], exit = False)
assert rc == 0
inst, rc = SimpleApp.run(["foo", "--bacon=81", "-a", "-v", "-e", "7", "-vv",
"--", "lala", "-e", "7"], exit = False)
assert rc == 0
assert inst.eggs == "7"
_, rc = SimpleApp.run(["foo", "--bacon=81", "--csv=100"], exit = False)
assert rc == 0
_, rc = SimpleApp.run(["foo", "--bacon=81", "--csv=MAX,MIN,100"], exit = False)
assert rc == 0
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=100"], exit = False)
assert rc == 0
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=MAX"], exit = False)
assert rc == 0
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=MIN"], exit = False)
assert rc == 0
def test_failures(self):
_, rc = SimpleApp.run(["foo"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=81", "--csv=xx"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=81", "--csv=xx"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=MOO"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=MIN,MAX"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=81", "--num=10.5"], exit = False)
assert rc == 2
_, rc = SimpleApp.run(["foo", "--bacon=hello"], exit = False)
assert rc == 2
# Testing #371
def test_extra_args(self, capsys):
_, rc = PositionalApp.run(["positionalapp"], exit = False)
assert rc != 0
stdout, stderr = capsys.readouterr()
assert "Expected at least" in stdout
_, rc = PositionalApp.run(["positionalapp", "one"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
_, rc = PositionalApp.run(["positionalapp", "one", "two"], exit = False)
assert rc != 0
stdout, stderr = capsys.readouterr()
assert "Expected at most" in stdout
def test_subcommands(self):
_, rc = Geet.run(["geet", "--debug"], exit = False)
assert rc == 0
assert Geet.cleanups == [1]
_, rc = Geet.run(["geet", "--debug", "add", "foo.txt", "bar.txt"], exit = False)
assert rc == ("adding", ("foo.txt", "bar.txt"))
assert Geet.cleanups == [1]
_, rc = Geet.run(["geet", "--debug", "commit"], exit = False)
assert rc == "committing in debug"
assert Geet.cleanups == [2, 1]
_, rc = Geet.run(["geet", "--help"], exit = False)
assert rc == 0
_, rc = Geet.run(["geet", "commit", "--help"], exit = False)
assert rc == 0
assert Geet.cleanups == [1]
def test_help_all(self, capsys):
_, rc = Geet.run(["geet", "--help-all"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "--help-all" in stdout
assert "geet add" in stdout
assert "geet commit" in stdout
def test_unbind(self, capsys):
_, rc = Sample.run(["sample", "--help"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "--foo" in stdout
assert "--version" not in stdout
def test_description(self, capsys):
_, rc = Sample.run(["sample", "--help"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
cols, _ = get_terminal_size()
if cols < 9:
# Terminal is too narrow to test
pass
else:
# Paragraph indentation should be preserved
assert " ABC" in stdout
assert " DEF" in stdout
assert " - Item" in stdout
# List items should not be combined into paragraphs
assert " * Star 2"
# Lines of the same list item should be combined. (The right-hand expression of the 'or' operator
# below is for when the terminal is too narrow, causing "GHI" to be wrapped to the next line.)
assert " GHI" not in stdout or " GHI" in stdout
# List item with invisible bullet should be indented without the bullet
assert " XYZ" in stdout
def test_default_main(self, capsys):
_, rc = Sample.run(["sample"], exit = False)
assert rc == 1
stdout, stderr = capsys.readouterr()
assert "No sub-command given" in stdout
_, rc = Sample.run(["sample", "pimple"], exit = False)
assert rc == 1
stdout, stderr = capsys.readouterr()
assert "Unknown sub-command 'pimple'" in stdout
_, rc = Sample.run(["sample", "mumble"], exit = False)
assert rc == 1
stdout, stderr = capsys.readouterr()
assert "main() not implemented" in stdout
def test_lazy_subcommand(self, capsys):
class Foo(cli.Application):
pass
Foo.subcommand("lazy", "test_cli.LazyLoaded")
_, rc = Foo.run(["foo", "lazy"], exit = False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "hello world" in stdout
def test_reset_switchattr(self):
inst, rc = SimpleApp.run(["foo", "--bacon=81", "-e", "bar"], exit=False)
assert rc == 0
assert inst.eggs == "bar"
inst, rc = SimpleApp.run(["foo", "--bacon=81"], exit=False)
assert rc == 0
assert inst.eggs is None
def test_invoke(self):
inst, rc = SimpleApp.invoke("arg1", "arg2", eggs="sunny", bacon=10, verbose=2)
assert (inst.eggs, inst.verbose, inst.tailargs) == ("sunny", 2, ("arg1", "arg2"))
def test_env_var(self, capsys):
_, rc = SimpleApp.run(["arg", "--bacon=10"], exit=False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "10" in stdout
with local.env(
PLUMBUM_TEST_BACON='20',
PLUMBUM_TEST_EGGS='raw',
):
inst, rc = SimpleApp.run(["arg"], exit=False)
assert rc == 0
stdout, stderr = capsys.readouterr()
assert "20" in stdout
assert inst.eggs == 'raw'
def test_mandatory_env_var(self, capsys):
_, rc = SimpleApp.run(["arg"], exit = False)
assert rc == 2
stdout, stderr = capsys.readouterr()
assert "bacon is mandatory" in stdout
def test_partial_switches(self, capsys):
app = SimpleApp
app.ALLOW_ABBREV = True
inst, rc = app.run(["foo", "--bacon=2", "--ch"], exit=False)
stdout, stderr = capsys.readouterr()
assert 'Ambiguous partial switch' in stdout
assert rc == 2
inst, rc = app.run(["foo", "--bacon=2", "--chee"], exit=False)
assert rc == 0
assert inst.cheese is True
assert inst.chives is False
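# Note on test_partial_switches above: with ALLOW_ABBREV enabled, "--chee"
# uniquely prefixes what is presumably a "--cheese" switch on SimpleApp and is
# accepted, while "--ch" matches more than one switch (the cheese and chives
# asserts imply both exist) and is rejected as ambiguous.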
|
from Handler import Handler
import socket
class StatsiteHandler(Handler):
"""
Implements the abstract Handler class, sending data to statsite
"""
RETRY = 3
def __init__(self, config=None):
"""
Create a new instance of the StatsiteHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
# Initialize Data
self.socket = None
# Initialize Options
self.host = self.config['host']
self.tcpport = int(self.config['tcpport'])
self.udpport = int(self.config['udpport'])
self.timeout = int(self.config['timeout'])
# Connect
self._connect()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(StatsiteHandler, self).get_default_config_help()
config.update({
'host': '',
'tcpport': '',
'udpport': '',
'timeout': '',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(StatsiteHandler, self).get_default_config()
config.update({
'host': '',
'tcpport': 1234,
'udpport': 1234,
'timeout': 5,
})
return config
def __del__(self):
"""
Destroy instance of the StatsiteHandler class
"""
self._close()
def process(self, metric):
"""
Process a metric by sending it to statsite
"""
# Just send the data as a string
self._send(str(metric))
def _send(self, data):
"""
        Send data to statsite, retrying on failure (up to RETRY attempts).
"""
retry = self.RETRY
# Attempt to send any data in the queue
while retry > 0:
# Check socket
if not self.socket:
# Log Error
self.log.error("StatsiteHandler: Socket unavailable.")
                # Attempt to re-establish connection
self._connect()
# Decrement retry
retry -= 1
# Try again
continue
try:
# Send data to socket
data = data.split()
data = data[0] + ":" + data[1] + "|kv\n"
self.socket.sendall(data)
# Done
break
except socket.error as e:
# Log Error
self.log.error("StatsiteHandler: Failed sending data. %s.", e)
                # Attempt to re-establish connection
self._close()
# Decrement retry
retry -= 1
# try again
continue
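    # Illustration of the "kv" line format produced by _send() above (the
    # sample metric string is hypothetical): a metric rendered as
    # "servers.host1.cpu.idle 99.5 1434055562" is split on whitespace, only
    # the first two fields are kept, and the line written to the socket is:
    #     servers.host1.cpu.idle:99.5|kv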
def _connect(self):
"""
Connect to the statsite server
"""
# Create socket
if self.udpport > 0:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.port = self.udpport
elif self.tcpport > 0:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = self.tcpport
        if self.socket is None:
# Log Error
self.log.error("StatsiteHandler: Unable to create socket.")
# Close Socket
self._close()
return
# Set socket timeout
self.socket.settimeout(self.timeout)
# Connect to statsite server
try:
self.socket.connect((self.host, self.port))
# Log
self.log.debug("Established connection to statsite server %s:%d",
self.host, self.port)
except Exception as ex:
# Log Error
self.log.error("StatsiteHandler: Failed to connect to %s:%i. %s",
self.host, self.port, ex)
# Close Socket
self._close()
return
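    # Note on the port selection above: a non-zero udpport takes precedence
    # over tcpport, so with both left at the default of 1234 the handler
    # sends over UDP.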
def _close(self):
"""
Close the socket
"""
if self.socket is not None:
self.socket.close()
self.socket = None
|
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_MESSAGE,
ATTR_TITLE,
DOMAIN as DOMAIN_NOTIFY,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_ENTITY_ID,
CONF_NAME,
CONF_STATE,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_IDLE,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers import event, service
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.util.dt import now
_LOGGER = logging.getLogger(__name__)
DOMAIN = "alert"
CONF_CAN_ACK = "can_acknowledge"
CONF_NOTIFIERS = "notifiers"
CONF_REPEAT = "repeat"
CONF_SKIP_FIRST = "skip_first"
CONF_ALERT_MESSAGE = "message"
CONF_DONE_MESSAGE = "done_message"
CONF_TITLE = "title"
CONF_DATA = "data"
DEFAULT_CAN_ACK = True
DEFAULT_SKIP_FIRST = False
ALERT_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_STATE, default=STATE_ON): cv.string,
vol.Required(CONF_REPEAT): vol.All(cv.ensure_list, [vol.Coerce(float)]),
vol.Required(CONF_CAN_ACK, default=DEFAULT_CAN_ACK): cv.boolean,
vol.Required(CONF_SKIP_FIRST, default=DEFAULT_SKIP_FIRST): cv.boolean,
vol.Optional(CONF_ALERT_MESSAGE): cv.template,
vol.Optional(CONF_DONE_MESSAGE): cv.template,
vol.Optional(CONF_TITLE): cv.template,
vol.Optional(CONF_DATA): dict,
vol.Required(CONF_NOTIFIERS): cv.ensure_list,
}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: cv.schema_with_slug_keys(ALERT_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
ALERT_SERVICE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_ids})
def is_on(hass, entity_id):
"""Return if the alert is firing and not acknowledged."""
return hass.states.is_state(entity_id, STATE_ON)
async def async_setup(hass, config):
"""Set up the Alert component."""
entities = []
for object_id, cfg in config[DOMAIN].items():
if not cfg:
cfg = {}
name = cfg[CONF_NAME]
watched_entity_id = cfg[CONF_ENTITY_ID]
alert_state = cfg[CONF_STATE]
repeat = cfg[CONF_REPEAT]
skip_first = cfg[CONF_SKIP_FIRST]
message_template = cfg.get(CONF_ALERT_MESSAGE)
done_message_template = cfg.get(CONF_DONE_MESSAGE)
notifiers = cfg[CONF_NOTIFIERS]
can_ack = cfg[CONF_CAN_ACK]
title_template = cfg.get(CONF_TITLE)
data = cfg.get(CONF_DATA)
entities.append(
Alert(
hass,
object_id,
name,
watched_entity_id,
alert_state,
repeat,
skip_first,
message_template,
done_message_template,
notifiers,
can_ack,
title_template,
data,
)
)
if not entities:
return False
async def async_handle_alert_service(service_call):
"""Handle calls to alert services."""
alert_ids = await service.async_extract_entity_ids(hass, service_call)
for alert_id in alert_ids:
for alert in entities:
if alert.entity_id != alert_id:
continue
alert.async_set_context(service_call.context)
if service_call.service == SERVICE_TURN_ON:
await alert.async_turn_on()
elif service_call.service == SERVICE_TOGGLE:
await alert.async_toggle()
else:
await alert.async_turn_off()
# Setup service calls
hass.services.async_register(
DOMAIN,
SERVICE_TURN_OFF,
async_handle_alert_service,
schema=ALERT_SERVICE_SCHEMA,
)
hass.services.async_register(
DOMAIN, SERVICE_TURN_ON, async_handle_alert_service, schema=ALERT_SERVICE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_TOGGLE, async_handle_alert_service, schema=ALERT_SERVICE_SCHEMA
)
for alert in entities:
alert.async_write_ha_state()
return True
class Alert(ToggleEntity):
"""Representation of an alert."""
def __init__(
self,
hass,
entity_id,
name,
watched_entity_id,
state,
repeat,
skip_first,
message_template,
done_message_template,
notifiers,
can_ack,
title_template,
data,
):
"""Initialize the alert."""
self.hass = hass
self._name = name
self._alert_state = state
self._skip_first = skip_first
self._data = data
self._message_template = message_template
if self._message_template is not None:
self._message_template.hass = hass
self._done_message_template = done_message_template
if self._done_message_template is not None:
self._done_message_template.hass = hass
self._title_template = title_template
if self._title_template is not None:
self._title_template.hass = hass
self._notifiers = notifiers
self._can_ack = can_ack
self._delay = [timedelta(minutes=val) for val in repeat]
self._next_delay = 0
self._firing = False
self._ack = False
self._cancel = None
self._send_done_message = False
self.entity_id = f"{DOMAIN}.{entity_id}"
event.async_track_state_change_event(
hass, [watched_entity_id], self.watched_entity_change
)
@property
def name(self):
"""Return the name of the alert."""
return self._name
@property
def should_poll(self):
"""Home Assistant need not poll these entities."""
return False
@property
def state(self):
"""Return the alert status."""
if self._firing:
if self._ack:
return STATE_OFF
return STATE_ON
return STATE_IDLE
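    # State mapping implemented above: STATE_IDLE when not firing, STATE_ON
    # while firing and unacknowledged, STATE_OFF while firing but acknowledged.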
async def watched_entity_change(self, ev):
"""Determine if the alert should start or stop."""
to_state = ev.data.get("new_state")
if to_state is None:
return
_LOGGER.debug("Watched entity (%s) has changed", ev.data.get("entity_id"))
if to_state.state == self._alert_state and not self._firing:
await self.begin_alerting()
if to_state.state != self._alert_state and self._firing:
await self.end_alerting()
async def begin_alerting(self):
"""Begin the alert procedures."""
_LOGGER.debug("Beginning Alert: %s", self._name)
self._ack = False
self._firing = True
self._next_delay = 0
if not self._skip_first:
await self._notify()
else:
await self._schedule_notify()
self.async_write_ha_state()
async def end_alerting(self):
"""End the alert procedures."""
_LOGGER.debug("Ending Alert: %s", self._name)
self._cancel()
self._ack = False
self._firing = False
if self._send_done_message:
await self._notify_done_message()
self.async_write_ha_state()
async def _schedule_notify(self):
"""Schedule a notification."""
delay = self._delay[self._next_delay]
next_msg = now() + delay
self._cancel = event.async_track_point_in_time(
self.hass, self._notify, next_msg
)
self._next_delay = min(self._next_delay + 1, len(self._delay) - 1)
async def _notify(self, *args):
"""Send the alert notification."""
if not self._firing:
return
if not self._ack:
_LOGGER.info("Alerting: %s", self._name)
self._send_done_message = True
if self._message_template is not None:
message = self._message_template.async_render(parse_result=False)
else:
message = self._name
await self._send_notification_message(message)
await self._schedule_notify()
async def _notify_done_message(self, *args):
"""Send notification of complete alert."""
_LOGGER.info("Alerting: %s", self._done_message_template)
self._send_done_message = False
if self._done_message_template is None:
return
message = self._done_message_template.async_render(parse_result=False)
await self._send_notification_message(message)
async def _send_notification_message(self, message):
msg_payload = {ATTR_MESSAGE: message}
if self._title_template is not None:
title = self._title_template.async_render(parse_result=False)
msg_payload.update({ATTR_TITLE: title})
if self._data:
msg_payload.update({ATTR_DATA: self._data})
_LOGGER.debug(msg_payload)
for target in self._notifiers:
await self.hass.services.async_call(
DOMAIN_NOTIFY, target, msg_payload, context=self._context
)
async def async_turn_on(self, **kwargs):
"""Async Unacknowledge alert."""
_LOGGER.debug("Reset Alert: %s", self._name)
self._ack = False
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Async Acknowledge alert."""
_LOGGER.debug("Acknowledged Alert: %s", self._name)
self._ack = True
self.async_write_ha_state()
async def async_toggle(self, **kwargs):
"""Async toggle alert."""
if self._ack:
return await self.async_turn_on()
return await self.async_turn_off()
|
from homeassistant.components.fan import (
DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import FANS, NEW_LIGHT
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
SPEEDS = {SPEED_OFF: 0, SPEED_LOW: 1, SPEED_MEDIUM: 2, SPEED_HIGH: 4}
SUPPORTED_ON_SPEEDS = {1: SPEED_LOW, 2: SPEED_MEDIUM, 4: SPEED_HIGH}
def convert_speed(speed: int) -> str:
"""Convert speed from deCONZ to HASS.
    Fall back to medium speed if the value is unsupported by the HASS fan
    platform.
"""
if speed in SPEEDS.values():
for hass_speed, deconz_speed in SPEEDS.items():
if speed == deconz_speed:
return hass_speed
return SPEED_MEDIUM
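# Illustration (hypothetical inputs): with the SPEEDS table above,
# convert_speed(1) returns SPEED_LOW and convert_speed(4) returns SPEED_HIGH,
# while a value such as 3 that deCONZ may report but HASS does not model falls
# back to SPEED_MEDIUM.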
async def async_setup_entry(hass, config_entry, async_add_entities) -> None:
"""Set up fans for deCONZ component.
Fans are based on the same device class as lights in deCONZ.
"""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_fan(lights) -> None:
"""Add fan from deCONZ."""
entities = []
for light in lights:
if light.type in FANS and light.uniqueid not in gateway.entities[DOMAIN]:
entities.append(DeconzFan(light, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_fan
)
)
async_add_fan(gateway.api.lights.values())
class DeconzFan(DeconzDevice, FanEntity):
"""Representation of a deCONZ fan."""
TYPE = DOMAIN
def __init__(self, device, gateway) -> None:
"""Set up fan."""
super().__init__(device, gateway)
self._default_on_speed = SPEEDS[SPEED_MEDIUM]
if self.speed != SPEED_OFF:
self._default_on_speed = self._device.speed
self._features = SUPPORT_SET_SPEED
@property
def is_on(self) -> bool:
"""Return true if fan is on."""
return self.speed != SPEED_OFF
@property
    def speed(self) -> str:
"""Return the current speed."""
return convert_speed(self._device.speed)
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return list(SPEEDS)
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._features
@callback
def async_update_callback(self, force_update=False) -> None:
"""Store latest configured speed from the device."""
if self.speed != SPEED_OFF and self._device.speed != self._default_on_speed:
self._default_on_speed = self._device.speed
super().async_update_callback(force_update)
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if speed not in SPEEDS:
raise ValueError(f"Unsupported speed {speed}")
data = {"speed": SPEEDS[speed]}
await self._device.async_set_state(data)
async def async_turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on fan."""
if not speed:
speed = convert_speed(self._default_on_speed)
await self.async_set_speed(speed)
async def async_turn_off(self, **kwargs) -> None:
"""Turn off fan."""
await self.async_set_speed(SPEED_OFF)
|
from django.conf import settings
from django.urls import reverse
from weblate.trans.models import Suggestion
from weblate.trans.tests.test_views import ViewTestCase
class SuggestionsTest(ViewTestCase):
def add_suggestion_1(self):
return self.edit_unit("Hello, world!\n", "Nazdar svete!\n", suggest="yes")
def add_suggestion_2(self):
return self.edit_unit("Hello, world!\n", "Ahoj svete!\n", suggest="yes")
def test_add(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
# Try empty suggestion (should not be added)
response = self.edit_unit("Hello, world!\n", "", suggest="yes")
# We should stay on same message
self.assert_redirects_offset(response, translate_url, 1)
# Add first suggestion
response = self.add_suggestion_1()
# We should get to second message
self.assert_redirects_offset(response, translate_url, 2)
# Add second suggestion
response = self.add_suggestion_2()
# We should get to second message
self.assert_redirects_offset(response, translate_url, 2)
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of suggestions
self.assertEqual(translation.stats.suggestions, 1)
self.assert_backend(0)
# Unit should not be translated
self.assertEqual(len(unit.all_checks), 0)
self.assertFalse(unit.translated)
self.assertFalse(unit.fuzzy)
self.assertEqual(len(self.get_unit().suggestions), 2)
def test_add_same(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
# Add first suggestion
response = self.add_suggestion_1()
# We should get to second message
self.assert_redirects_offset(response, translate_url, 2)
# Add first suggestion
response = self.add_suggestion_1()
# We should stay on same message
self.assert_redirects_offset(response, translate_url, 1)
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of suggestions
self.assertEqual(translation.stats.suggestions, 1)
self.assert_backend(0)
# Unit should not be translated
self.assertEqual(len(unit.all_checks), 0)
self.assertFalse(unit.translated)
self.assertFalse(unit.fuzzy)
self.assertEqual(len(self.get_unit().suggestions), 1)
def test_delete(self, **kwargs):
translate_url = reverse("translate", kwargs=self.kw_translation)
# Create two suggestions
self.add_suggestion_1()
self.add_suggestion_2()
# Get ids of created suggestions
suggestions = self.get_unit().suggestions.values_list("pk", flat=True)
self.assertEqual(len(suggestions), 2)
# Delete one of suggestions
response = self.edit_unit(
"Hello, world!\n", "", delete=suggestions[0], **kwargs
)
self.assert_redirects_offset(response, translate_url, 1)
# Ensure we have just one
suggestions = self.get_unit().suggestions.values_list("pk", flat=True)
self.assertEqual(len(suggestions), 1)
def test_delete_spam(self):
self.test_delete(spam="1")
def test_accept_edit(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
# Create suggestion
self.add_suggestion_1()
# Get ids of created suggestions
suggestion = self.get_unit().suggestions[0].pk
# Accept one of suggestions
response = self.edit_unit("Hello, world!\n", "", accept_edit=suggestion)
self.assert_redirects_offset(response, translate_url, 1)
def test_accept(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
# Create two suggestions
self.add_suggestion_1()
self.add_suggestion_2()
# Get ids of created suggestions
suggestions = self.get_unit().suggestions
self.assertEqual(suggestions.count(), 2)
# Accept one of suggestions
response = self.edit_unit(
"Hello, world!\n", "", accept=suggestions.get(target="Ahoj svete!\n").pk
)
self.assert_redirects_offset(response, translate_url, 2)
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of suggestions
self.assertEqual(translation.stats.suggestions, 1)
# Unit should be translated
self.assertEqual(len(unit.all_checks), 0)
self.assertTrue(unit.translated)
self.assertFalse(unit.fuzzy)
self.assertEqual(unit.target, "Ahoj svete!\n")
self.assert_backend(1)
self.assertEqual(len(self.get_unit().suggestions), 1)
def test_accept_anonymous(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
self.client.logout()
# Create suggestions
self.add_suggestion_1()
self.client.login(username="testuser", password="testpassword")
# Get ids of created suggestion
suggestions = list(self.get_unit().suggestions)
self.assertEqual(len(suggestions), 1)
self.assertEqual(suggestions[0].user.username, settings.ANONYMOUS_USER_NAME)
# Accept one of suggestions
response = self.edit_unit("Hello, world!\n", "", accept=suggestions[0].pk)
self.assert_redirects_offset(response, translate_url, 2)
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of suggestions
self.assertEqual(translation.stats.suggestions, 0)
# Unit should be translated
self.assertEqual(unit.target, "Nazdar svete!\n")
def test_vote(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
self.component.suggestion_voting = True
self.component.suggestion_autoaccept = 0
self.component.save()
self.add_suggestion_1()
suggestion_id = self.get_unit().suggestions[0].pk
response = self.edit_unit("Hello, world!\n", "", upvote=suggestion_id)
self.assert_redirects_offset(response, translate_url, 2)
suggestion = Suggestion.objects.get(pk=suggestion_id)
self.assertEqual(suggestion.get_num_votes(), 1)
response = self.edit_unit("Hello, world!\n", "", downvote=suggestion_id)
self.assert_redirects_offset(response, translate_url, 1)
suggestion = Suggestion.objects.get(pk=suggestion_id)
self.assertEqual(suggestion.get_num_votes(), -1)
def test_vote_autoaccept(self):
self.add_suggestion_1()
translate_url = reverse("translate", kwargs=self.kw_translation)
self.component.suggestion_voting = True
self.component.suggestion_autoaccept = 1
self.component.save()
suggestion_id = self.get_unit().suggestions[0].pk
response = self.edit_unit("Hello, world!\n", "", upvote=suggestion_id)
self.assert_redirects_offset(response, translate_url, 2)
# Reload from database
unit = self.get_unit()
translation = self.component.translation_set.get(language_code="cs")
# Check number of suggestions
self.assertEqual(translation.stats.suggestions, 0)
# Unit should be translated
self.assertEqual(len(unit.all_checks), 0)
self.assertTrue(unit.translated)
self.assertFalse(unit.fuzzy)
self.assertEqual(unit.target, "Nazdar svete!\n")
self.assert_backend(1)
def test_vote_when_same_suggestion(self):
translate_url = reverse("translate", kwargs=self.kw_translation)
self.component.suggestion_voting = True
self.component.suggestion_autoaccept = 0
self.component.save()
        # Add the first suggestion as the default test user
response = self.add_suggestion_1()
suggestion_id = self.get_unit().suggestions[0].pk
suggestion = Suggestion.objects.get(pk=suggestion_id)
        # The suggestion gets a vote from the user who made it
self.assertEqual(suggestion.get_num_votes(), 1)
# Add suggestion as second user
self.log_as_jane()
response = self.add_suggestion_1()
# When adding the same suggestion, we stay on the same page
self.assert_redirects_offset(response, translate_url, 1)
suggestion = Suggestion.objects.get(pk=suggestion_id)
# and the suggestion gets an upvote
self.assertEqual(suggestion.get_num_votes(), 2)
|
from marshmallow import fields
from lemur.common.schema import LemurOutputSchema
class RotationPolicyOutputSchema(LemurOutputSchema):
id = fields.Integer()
days = fields.Integer()
class RotationPolicyNestedOutputSchema(RotationPolicyOutputSchema):
pass
|
import mock
import pytest
from paasta_tools.tron.client import TronClient
from paasta_tools.tron.client import TronRequestError
@pytest.fixture
def mock_requests():
with mock.patch(
"paasta_tools.tron.client.requests", autospec=True
) as mock_requests:
yield mock_requests
class TestTronClient:
tron_url = "http://tron.test:9000"
client = TronClient(tron_url)
def test_get(self, mock_requests):
response = self.client._get("/some/thing", {"check": 1})
assert response == mock_requests.get.return_value.json.return_value
mock_requests.get.assert_called_once_with(
headers=mock.ANY, url=self.tron_url + "/some/thing", params={"check": 1}
)
def test_post(self, mock_requests):
response = self.client._post("/some/thing", {"check": 1})
assert response == mock_requests.post.return_value.json.return_value
mock_requests.post.assert_called_once_with(
headers=mock.ANY, url=self.tron_url + "/some/thing", data={"check": 1}
)
@pytest.mark.parametrize("okay_status", [True, False])
def test_returned_error_message(self, mock_requests, okay_status):
mock_requests.post.return_value.ok = okay_status
mock_requests.post.return_value.json.return_value = {
"error": "config was invalid"
}
with pytest.raises(TronRequestError, match="config was invalid"):
self.client._post("/api/test")
def test_unexpected_error(self, mock_requests):
mock_requests.get.return_value.ok = False
mock_requests.get.return_value.text = "Server error"
mock_requests.get.return_value.json.side_effect = ValueError
with pytest.raises(TronRequestError):
self.client._get("/some/thing")
def test_okay_not_json(self, mock_requests):
mock_requests.get.return_value.ok = True
mock_requests.get.return_value.text = "Hi, you have reached Tron."
mock_requests.get.return_value.json.side_effect = ValueError
assert self.client._get("/some/thing") == "Hi, you have reached Tron."
def test_update_namespace(self, mock_requests):
new_config = "yaml: stuff"
mock_requests.get.return_value.json.return_value = {
"config": "old: things",
"hash": "01abcd",
}
self.client.update_namespace("some_service", new_config)
assert mock_requests.get.call_count == 1
_, kwargs = mock_requests.get.call_args
assert kwargs["url"] == self.tron_url + "/api/config"
assert kwargs["params"] == {"name": "some_service", "no_header": 1}
assert mock_requests.post.call_count == 1
_, kwargs = mock_requests.post.call_args
assert kwargs["url"] == self.tron_url + "/api/config"
assert kwargs["data"] == {
"name": "some_service",
"config": new_config,
"hash": "01abcd",
"check": 0,
}
@pytest.mark.parametrize("skip_if_unchanged", [True, False])
def test_update_namespace_unchanged(self, mock_requests, skip_if_unchanged):
new_config = "yaml: stuff"
mock_requests.get.return_value.json.return_value = {
"config": new_config,
"hash": "01abcd",
}
self.client.update_namespace("some_service", new_config, skip_if_unchanged)
assert mock_requests.post.call_count == int(not skip_if_unchanged)
def test_list_namespaces(self, mock_requests):
mock_requests.get.return_value.json.return_value = {
"jobs": {},
"namespaces": ["a", "b"],
}
assert self.client.list_namespaces() == ["a", "b"]
assert mock_requests.get.call_count == 1
_, kwargs = mock_requests.get.call_args
assert kwargs["url"] == self.tron_url + "/api"
assert kwargs["params"] is None
|
import os
from os import path
import tempfile
from unittest import mock
import pytest
from homeassistant import config as hass_config
import homeassistant.components.command_line.cover as cmd_rs
from homeassistant.components.cover import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_RELOAD,
SERVICE_STOP_COVER,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
@pytest.fixture
def rs(hass):
"""Return CommandCover instance."""
return cmd_rs.CommandCover(
hass,
"foo",
"command_open",
"command_close",
"command_stop",
"command_state",
None,
15,
)
def test_should_poll_new(rs):
"""Test the setting of polling."""
assert rs.should_poll is True
rs._command_state = None
assert rs.should_poll is False
def test_query_state_value(rs):
"""Test with state value."""
with mock.patch("subprocess.check_output") as mock_run:
mock_run.return_value = b" foo bar "
result = rs._query_state_value("runme")
assert "foo bar" == result
assert mock_run.call_count == 1
assert mock_run.call_args == mock.call(
"runme", shell=True, timeout=15 # nosec # shell by design
)
async def test_state_value(hass):
"""Test with state value."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "cover_status")
test_cover = {
"command_state": f"cat {path}",
"command_open": f"echo 1 > {path}",
"command_close": f"echo 1 > {path}",
"command_stop": f"echo 0 > {path}",
"value_template": "{{ value }}",
}
assert (
await async_setup_component(
hass,
DOMAIN,
{"cover": {"platform": "command_line", "covers": {"test": test_cover}}},
)
is True
)
await hass.async_block_till_done()
assert "unknown" == hass.states.get("cover.test").state
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
assert "open" == hass.states.get("cover.test").state
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
assert "open" == hass.states.get("cover.test").state
await hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True
)
assert "closed" == hass.states.get("cover.test").state
async def test_reload(hass):
"""Verify we can reload command_line covers."""
test_cover = {
"command_state": "echo open",
"value_template": "{{ value }}",
}
await async_setup_component(
hass,
DOMAIN,
{"cover": {"platform": "command_line", "covers": {"test": test_cover}}},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("cover.test").state
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"command_line/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
"command_line",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("cover.test") is None
assert hass.states.get("cover.from_yaml")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
import argparse
import signal
import sys
from contextlib import contextmanager
from paasta_tools.mesos_tools import get_container_id_for_mesos_id
from paasta_tools.utils import get_docker_client
def parse_args():
parser = argparse.ArgumentParser(
description="Executes given command in Docker container for given Mesos task ID"
)
parser.add_argument("-i", "--mesos-id", required=True, help="Mesos task ID")
parser.add_argument(
"-c", "--cmd", required=True, help="command to execute in container"
)
parser.add_argument(
"-t", "--timeout", default=45, type=int, help="timeout for command"
)
args = parser.parse_args()
return args
class TimeoutException(Exception):
pass
@contextmanager
def time_limit(seconds): # From http://stackoverflow.com/a/601168/1576438
def signal_handler(signum, frame):
raise TimeoutException("Timed out!")
signal.signal(signal.SIGALRM, signal_handler)
signal.alarm(seconds)
try:
yield
finally:
signal.alarm(0)
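# A minimal usage sketch for time_limit() (do_something_slow is a hypothetical
# placeholder, not part of this script):
#
#     try:
#         with time_limit(5):
#             do_something_slow()
#     except TimeoutException:
#         print("took too long")
#
# Note that signal.alarm() only works in the main thread on POSIX systems, and
# a pending alarm is cleared in the finally block even when the body raises.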
def execute_in_container(docker_client, container_id, cmd, timeout):
    container_info = docker_client.inspect_container(container_id)
    exec_id = None
    if container_info["ExecIDs"]:
        for possible_exec_id in container_info["ExecIDs"]:
            exec_info = docker_client.exec_inspect(possible_exec_id)["ProcessConfig"]
            if exec_info["entrypoint"] == "/bin/sh" and exec_info["arguments"] == [
                "-c",
                cmd,
            ]:
                exec_id = possible_exec_id
                break
    if exec_id is None:
        exec_id = docker_client.exec_create(container_id, ["/bin/sh", "-c", cmd])["Id"]
output = docker_client.exec_start(exec_id, stream=False)
return_code = docker_client.exec_inspect(exec_id)["ExitCode"]
return (output, return_code)
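# Reusing a matching exec instance avoids accumulating exec instances when the
# same command is run repeatedly against a long-lived container; a new instance
# is created only when none matches "/bin/sh -c <cmd>".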
def main():
args = parse_args()
if not args.mesos_id:
print(
"The Mesos task id you supplied seems to be an empty string! Please provide a valid task id."
)
sys.exit(2)
docker_client = get_docker_client()
container_id = get_container_id_for_mesos_id(docker_client, args.mesos_id)
if container_id:
try:
with time_limit(args.timeout):
output, return_code = execute_in_container(
docker_client, container_id, args.cmd, args.timeout
)
print(output)
except TimeoutException:
print("Command timed out!")
return_code = 1
finally:
sys.exit(return_code)
else:
print("Could not find container with MESOS_TASK_ID '%s'." % args.mesos_id)
sys.exit(1)
if __name__ == "__main__":
main()
|
import zigpy.zcl.clusters.smartenergy as smartenergy
from homeassistant.const import (
POWER_WATT,
TIME_HOURS,
TIME_SECONDS,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import callback
from .. import registries, typing as zha_typing
from ..const import REPORT_CONFIG_DEFAULT
from .base import ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Calendar.cluster_id)
class Calendar(ZigbeeChannel):
"""Calendar channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.DeviceManagement.cluster_id)
class DeviceManagement(ZigbeeChannel):
"""Device Management channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Drlc.cluster_id)
class Drlc(ZigbeeChannel):
"""Demand Response and Load Control channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.EnergyManagement.cluster_id)
class EnergyManagement(ZigbeeChannel):
"""Energy Management channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Events.cluster_id)
class Events(ZigbeeChannel):
"""Event channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.KeyEstablishment.cluster_id)
class KeyEstablishment(ZigbeeChannel):
"""Key Establishment channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.MduPairing.cluster_id)
class MduPairing(ZigbeeChannel):
"""Pairing channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Messaging.cluster_id)
class Messaging(ZigbeeChannel):
"""Messaging channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Metering.cluster_id)
class Metering(ZigbeeChannel):
"""Metering channel."""
REPORT_CONFIG = [{"attr": "instantaneous_demand", "config": REPORT_CONFIG_DEFAULT}]
unit_of_measure_map = {
0x00: POWER_WATT,
0x01: f"{VOLUME_CUBIC_METERS}/{TIME_HOURS}",
0x02: f"{VOLUME_CUBIC_FEET}/{TIME_HOURS}",
0x03: f"ccf/{TIME_HOURS}",
0x04: f"US gal/{TIME_HOURS}",
0x05: f"IMP gal/{TIME_HOURS}",
0x06: f"BTU/{TIME_HOURS}",
0x07: f"l/{TIME_HOURS}",
0x08: "kPa",
0x09: "kPa",
0x0A: f"mcf/{TIME_HOURS}",
0x0B: "unitless",
0x0C: f"MJ/{TIME_SECONDS}",
}
def __init__(
self, cluster: zha_typing.ZigpyClusterType, ch_pool: zha_typing.ChannelPoolType
) -> None:
"""Initialize Metering."""
super().__init__(cluster, ch_pool)
self._divisor = 1
self._multiplier = 1
self._unit_enum = None
self._format_spec = None
async def async_configure(self):
"""Configure channel."""
await self.fetch_config(False)
await super().async_configure()
async def async_initialize(self, from_cache):
"""Initialize channel."""
await self.fetch_config(True)
await super().async_initialize(from_cache)
@callback
def attribute_updated(self, attrid, value):
"""Handle attribute update from Metering cluster."""
if None in (self._multiplier, self._divisor, self._format_spec):
return
super().attribute_updated(attrid, value * self._multiplier / self._divisor)
@property
def unit_of_measurement(self):
"""Return unit of measurement."""
return self.unit_of_measure_map.get(self._unit_enum & 0x7F, "unknown")
async def fetch_config(self, from_cache):
"""Fetch config from device and updates format specifier."""
results = await self.get_attributes(
["divisor", "multiplier", "unit_of_measure", "demand_formatting"],
from_cache=from_cache,
)
self._divisor = results.get("divisor", self._divisor)
self._multiplier = results.get("multiplier", self._multiplier)
self._unit_enum = results.get("unit_of_measure", 0x7F) # default to unknown
fmting = results.get(
"demand_formatting", 0xF9
) # 1 digit to the right, 15 digits to the left
r_digits = int(fmting & 0x07) # digits to the right of decimal point
l_digits = (fmting >> 3) & 0x0F # digits to the left of decimal point
if l_digits == 0:
l_digits = 15
width = r_digits + l_digits + (1 if r_digits > 0 else 0)
if fmting & 0x80:
self._format_spec = "{:" + str(width) + "." + str(r_digits) + "f}"
else:
self._format_spec = "{:0" + str(width) + "." + str(r_digits) + "f}"
def formatter_function(self, value):
"""Return formatted value for display."""
if self.unit_of_measurement == POWER_WATT:
# Zigbee spec power unit is kW, but we show the value in W
value_watt = value * 1000
if value_watt < 100:
return round(value_watt, 1)
return round(value_watt)
return self._format_spec.format(value).lstrip()
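    # Worked example (illustrative): a metered demand of 0.0345 kW is shown as
    # 34.5 W (values under 100 W keep one decimal place), while 0.25 kW is
    # shown as the integer 250 W.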
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Prepayment.cluster_id)
class Prepayment(ZigbeeChannel):
"""Prepayment channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Price.cluster_id)
class Price(ZigbeeChannel):
"""Price channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(smartenergy.Tunneling.cluster_id)
class Tunneling(ZigbeeChannel):
"""Tunneling channel."""
|
from asyncio import run_coroutine_threadsafe
import logging
import homeconnect
from homeconnect.api import HomeConnectError
from homeassistant import config_entries, core
from homeassistant.const import DEVICE_CLASS_TIMESTAMP, PERCENTAGE, TIME_SECONDS
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.dispatcher import dispatcher_send
from .const import (
BSH_ACTIVE_PROGRAM,
BSH_POWER_OFF,
BSH_POWER_STANDBY,
SIGNAL_UPDATE_ENTITIES,
)
_LOGGER = logging.getLogger(__name__)
class ConfigEntryAuth(homeconnect.HomeConnectAPI):
"""Provide Home Connect authentication tied to an OAuth2 based config entry."""
def __init__(
self,
hass: core.HomeAssistant,
config_entry: config_entries.ConfigEntry,
implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
):
"""Initialize Home Connect Auth."""
self.hass = hass
self.config_entry = config_entry
self.session = config_entry_oauth2_flow.OAuth2Session(
hass, config_entry, implementation
)
super().__init__(self.session.token)
self.devices = []
def refresh_tokens(self) -> dict:
"""Refresh and return new Home Connect tokens using Home Assistant OAuth2 session."""
run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self.hass.loop
).result()
return self.session.token
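    # refresh_tokens() is called from synchronous library code outside the
    # event loop, so the token refresh coroutine is submitted to Home
    # Assistant's loop via run_coroutine_threadsafe(); .result() blocks the
    # calling thread until the refresh has completed.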
def get_devices(self):
"""Get a dictionary of devices."""
appl = self.get_appliances()
devices = []
for app in appl:
if app.type == "Dryer":
device = Dryer(self.hass, app)
elif app.type == "Washer":
device = Washer(self.hass, app)
elif app.type == "Dishwasher":
device = Dishwasher(self.hass, app)
elif app.type == "FridgeFreezer":
device = FridgeFreezer(self.hass, app)
elif app.type == "Oven":
device = Oven(self.hass, app)
elif app.type == "CoffeeMaker":
device = CoffeeMaker(self.hass, app)
elif app.type == "Hood":
device = Hood(self.hass, app)
elif app.type == "Hob":
device = Hob(self.hass, app)
else:
_LOGGER.warning("Appliance type %s not implemented", app.type)
continue
devices.append({"device": device, "entities": device.get_entity_info()})
self.devices = devices
return devices
class HomeConnectDevice:
"""Generic Home Connect device."""
# for some devices, this is instead BSH_POWER_STANDBY
# see https://developer.home-connect.com/docs/settings/power_state
power_off_state = BSH_POWER_OFF
def __init__(self, hass, appliance):
"""Initialize the device class."""
self.hass = hass
self.appliance = appliance
def initialize(self):
"""Fetch the info needed to initialize the device."""
try:
self.appliance.get_status()
except (HomeConnectError, ValueError):
_LOGGER.debug("Unable to fetch appliance status. Probably offline")
try:
self.appliance.get_settings()
except (HomeConnectError, ValueError):
_LOGGER.debug("Unable to fetch settings. Probably offline")
try:
program_active = self.appliance.get_programs_active()
except (HomeConnectError, ValueError):
_LOGGER.debug("Unable to fetch active programs. Probably offline")
program_active = None
if program_active and "key" in program_active:
self.appliance.status[BSH_ACTIVE_PROGRAM] = {"value": program_active["key"]}
self.appliance.listen_events(callback=self.event_callback)
def event_callback(self, appliance):
"""Handle event."""
_LOGGER.debug("Update triggered on %s", appliance.name)
_LOGGER.debug(self.appliance.status)
dispatcher_send(self.hass, SIGNAL_UPDATE_ENTITIES, appliance.haId)
class DeviceWithPrograms(HomeConnectDevice):
"""Device with programs."""
PROGRAMS = []
def get_programs_available(self):
"""Get the available programs."""
return self.PROGRAMS
def get_program_switches(self):
"""Get a dictionary with info about program switches.
There will be one switch for each program.
"""
programs = self.get_programs_available()
return [{"device": self, "program_name": p["name"]} for p in programs]
def get_program_sensors(self):
"""Get a dictionary with info about program sensors.
        There will be one of these three types of sensors for each
        device.
"""
sensors = {
"Remaining Program Time": (None, None, DEVICE_CLASS_TIMESTAMP, 1),
"Duration": (TIME_SECONDS, "mdi:update", None, 1),
"Program Progress": (PERCENTAGE, "mdi:progress-clock", None, 1),
}
return [
{
"device": self,
"desc": k,
"unit": unit,
"key": "BSH.Common.Option.{}".format(k.replace(" ", "")),
"icon": icon,
"device_class": device_class,
"sign": sign,
}
for k, (unit, icon, device_class, sign) in sensors.items()
]
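    # Illustration (hypothetical rendering of the mapping above): the
    # "Program Progress" entry becomes a sensor dict with key
    # "BSH.Common.Option.ProgramProgress", unit PERCENTAGE, icon
    # "mdi:progress-clock", no device class, and sign 1.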
class DeviceWithDoor(HomeConnectDevice):
"""Device that has a door sensor."""
def get_door_entity(self):
"""Get a dictionary with info about the door binary sensor."""
return {
"device": self,
"desc": "Door",
"device_class": "door",
}
class Dryer(DeviceWithDoor, DeviceWithPrograms):
"""Dryer class."""
PROGRAMS = [
{"name": "LaundryCare.Dryer.Program.Cotton"},
{"name": "LaundryCare.Dryer.Program.Synthetic"},
{"name": "LaundryCare.Dryer.Program.Mix"},
{"name": "LaundryCare.Dryer.Program.Blankets"},
{"name": "LaundryCare.Dryer.Program.BusinessShirts"},
{"name": "LaundryCare.Dryer.Program.DownFeathers"},
{"name": "LaundryCare.Dryer.Program.Hygiene"},
{"name": "LaundryCare.Dryer.Program.Jeans"},
{"name": "LaundryCare.Dryer.Program.Outdoor"},
{"name": "LaundryCare.Dryer.Program.SyntheticRefresh"},
{"name": "LaundryCare.Dryer.Program.Towels"},
{"name": "LaundryCare.Dryer.Program.Delicates"},
{"name": "LaundryCare.Dryer.Program.Super40"},
{"name": "LaundryCare.Dryer.Program.Shirts15"},
{"name": "LaundryCare.Dryer.Program.Pillow"},
{"name": "LaundryCare.Dryer.Program.AntiShrink"},
]
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
door_entity = self.get_door_entity()
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {
"binary_sensor": [door_entity],
"switch": program_switches,
"sensor": program_sensors,
}
class Dishwasher(DeviceWithDoor, DeviceWithPrograms):
"""Dishwasher class."""
PROGRAMS = [
{"name": "Dishcare.Dishwasher.Program.Auto1"},
{"name": "Dishcare.Dishwasher.Program.Auto2"},
{"name": "Dishcare.Dishwasher.Program.Auto3"},
{"name": "Dishcare.Dishwasher.Program.Eco50"},
{"name": "Dishcare.Dishwasher.Program.Quick45"},
{"name": "Dishcare.Dishwasher.Program.Intensiv70"},
{"name": "Dishcare.Dishwasher.Program.Normal65"},
{"name": "Dishcare.Dishwasher.Program.Glas40"},
{"name": "Dishcare.Dishwasher.Program.GlassCare"},
{"name": "Dishcare.Dishwasher.Program.NightWash"},
{"name": "Dishcare.Dishwasher.Program.Quick65"},
{"name": "Dishcare.Dishwasher.Program.Normal45"},
{"name": "Dishcare.Dishwasher.Program.Intensiv45"},
{"name": "Dishcare.Dishwasher.Program.AutoHalfLoad"},
{"name": "Dishcare.Dishwasher.Program.IntensivPower"},
{"name": "Dishcare.Dishwasher.Program.MagicDaily"},
{"name": "Dishcare.Dishwasher.Program.Super60"},
{"name": "Dishcare.Dishwasher.Program.Kurz60"},
{"name": "Dishcare.Dishwasher.Program.ExpressSparkle65"},
{"name": "Dishcare.Dishwasher.Program.MachineCare"},
{"name": "Dishcare.Dishwasher.Program.SteamFresh"},
{"name": "Dishcare.Dishwasher.Program.MaximumCleaning"},
]
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
door_entity = self.get_door_entity()
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {
"binary_sensor": [door_entity],
"switch": program_switches,
"sensor": program_sensors,
}
class Oven(DeviceWithDoor, DeviceWithPrograms):
"""Oven class."""
PROGRAMS = [
{"name": "Cooking.Oven.Program.HeatingMode.PreHeating"},
{"name": "Cooking.Oven.Program.HeatingMode.HotAir"},
{"name": "Cooking.Oven.Program.HeatingMode.TopBottomHeating"},
{"name": "Cooking.Oven.Program.HeatingMode.PizzaSetting"},
{"name": "Cooking.Oven.Program.Microwave.600Watt"},
]
power_off_state = BSH_POWER_STANDBY
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
door_entity = self.get_door_entity()
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {
"binary_sensor": [door_entity],
"switch": program_switches,
"sensor": program_sensors,
}
class Washer(DeviceWithDoor, DeviceWithPrograms):
"""Washer class."""
PROGRAMS = [
{"name": "LaundryCare.Washer.Program.Cotton"},
{"name": "LaundryCare.Washer.Program.Cotton.CottonEco"},
{"name": "LaundryCare.Washer.Program.EasyCare"},
{"name": "LaundryCare.Washer.Program.Mix"},
{"name": "LaundryCare.Washer.Program.DelicatesSilk"},
{"name": "LaundryCare.Washer.Program.Wool"},
{"name": "LaundryCare.Washer.Program.Sensitive"},
{"name": "LaundryCare.Washer.Program.Auto30"},
{"name": "LaundryCare.Washer.Program.Auto40"},
{"name": "LaundryCare.Washer.Program.Auto60"},
{"name": "LaundryCare.Washer.Program.Chiffon"},
{"name": "LaundryCare.Washer.Program.Curtains"},
{"name": "LaundryCare.Washer.Program.DarkWash"},
{"name": "LaundryCare.Washer.Program.Dessous"},
{"name": "LaundryCare.Washer.Program.Monsoon"},
{"name": "LaundryCare.Washer.Program.Outdoor"},
{"name": "LaundryCare.Washer.Program.PlushToy"},
{"name": "LaundryCare.Washer.Program.ShirtsBlouses"},
{"name": "LaundryCare.Washer.Program.SportFitness"},
{"name": "LaundryCare.Washer.Program.Towels"},
{"name": "LaundryCare.Washer.Program.WaterProof"},
]
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
door_entity = self.get_door_entity()
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {
"binary_sensor": [door_entity],
"switch": program_switches,
"sensor": program_sensors,
}
class CoffeeMaker(DeviceWithPrograms):
"""Coffee maker class."""
PROGRAMS = [
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.Coffee"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.Cappuccino"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.LatteMacchiato"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeLatte"},
{"name": "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Americano"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoDoppio"},
{"name": "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.FlatWhite"},
{"name": "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Galao"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.MilkFroth"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.WarmMilk"},
{"name": "ConsumerProducts.CoffeeMaker.Program.Beverage.Ristretto"},
{"name": "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Cortado"},
]
power_off_state = BSH_POWER_STANDBY
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {"switch": program_switches, "sensor": program_sensors}
class Hood(DeviceWithPrograms):
"""Hood class."""
PROGRAMS = [
{"name": "Cooking.Common.Program.Hood.Automatic"},
{"name": "Cooking.Common.Program.Hood.Venting"},
{"name": "Cooking.Common.Program.Hood.DelayedShutOff"},
]
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {"switch": program_switches, "sensor": program_sensors}
class FridgeFreezer(DeviceWithDoor):
"""Fridge/Freezer class."""
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
door_entity = self.get_door_entity()
return {"binary_sensor": [door_entity]}
class Hob(DeviceWithPrograms):
"""Hob class."""
PROGRAMS = [{"name": "Cooking.Hob.Program.PowerLevelMode"}]
def get_entity_info(self):
"""Get a dictionary with infos about the associated entities."""
program_sensors = self.get_program_sensors()
program_switches = self.get_program_switches()
return {"switch": program_switches, "sensor": program_sensors}
|
import logging
import shlex
import subprocess
import sys
from urllib.parse import urlparse
from paasta_tools.api import client
from paasta_tools.cli.utils import calculate_remote_masters
from paasta_tools.cli.utils import find_connectable_master
from paasta_tools.cli.utils import get_subparser
from paasta_tools.cli.utils import pick_slave_from_status
from paasta_tools.marathon_tools import get_marathon_clients
from paasta_tools.marathon_tools import get_marathon_servers
from paasta_tools.marathon_tools import load_marathon_service_config
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import _run
from paasta_tools.utils import load_system_paasta_config
log = logging.getLogger(__name__)
def add_subparser(subparsers):
new_parser = get_subparser(
description="'paasta sysdig' works by SSH'ing to remote PaaSTA masters and "
"running sysdig with the necessary filters",
help_text="Run sysdig on a remote host and filter to a service and instance",
command="sysdig",
function=paasta_sysdig,
subparsers=subparsers,
)
new_parser.add_argument(
"-l",
"--local",
help="Run the script here rather than SSHing to a PaaSTA master",
default=False,
action="store_true",
)
def get_status_for_instance(cluster, service, instance):
api = client.get_paasta_oapi_client(cluster=cluster)
if not api:
sys.exit(1)
status = api.service.status_instance(service=service, instance=instance)
if not status.marathon:
log.error("Not a marathon service, exiting")
sys.exit(1)
return status
def get_any_mesos_master(cluster, system_paasta_config):
masters, output = calculate_remote_masters(cluster, system_paasta_config)
if not masters:
print("ERROR: %s" % output)
sys.exit(1)
mesos_master, output = find_connectable_master(masters)
if not mesos_master:
print(
f"ERROR: could not find connectable master in cluster {cluster}\nOutput: {output}"
)
sys.exit(1)
return mesos_master
def paasta_sysdig(args):
system_paasta_config = load_system_paasta_config()
if not args.local:
mesos_master = get_any_mesos_master(
cluster=args.cluster, system_paasta_config=system_paasta_config
)
ssh_cmd = (
"ssh -At -o StrictHostKeyChecking=no -o LogLevel=QUIET {0} "
'"sudo paasta {1} --local"'
).format(mesos_master, " ".join(sys.argv[1:]))
return_code, output = _run(ssh_cmd)
if return_code != 0:
print(output)
sys.exit(return_code)
slave, command = output.split(":", 1)
subprocess.call(shlex.split("ssh -tA {} '{}'".format(slave, command.strip())))
return
status = get_status_for_instance(
cluster=args.cluster, service=args.service, instance=args.instance
)
slave = pick_slave_from_status(status=status, host=args.host)
job_config = load_marathon_service_config(
service=args.service, instance=args.instance, cluster=args.cluster
)
marathon_servers = get_marathon_servers(system_paasta_config)
marathon_clients = get_marathon_clients(marathon_servers)
# Unfortunately, sysdig seems to only be able to take one marathon URL, so hopefully the service in question is not
# currently moving between shards.
    marathon_client = marathon_clients.get_current_client_for_service(
        job_config=job_config
    )
    marathon_url = marathon_client.servers[0]
    marathon_user, marathon_pass = marathon_client.auth
mesos_url = get_mesos_master().host
marathon_parsed_url = urlparse(marathon_url)
marathon_creds_url = marathon_parsed_url._replace(
netloc="{}:{}@{}".format(
marathon_user, marathon_pass, marathon_parsed_url.netloc
)
)
print(
format_mesos_command(
slave, status.marathon.app_id, mesos_url, marathon_creds_url.geturl()
)
)
def format_mesos_command(slave, app_id, mesos_url, marathon_url):
sysdig_mesos = f"{mesos_url},{marathon_url}"
command = (
f'sudo csysdig -m {sysdig_mesos} marathon.app.id="/{app_id}" -v mesos_tasks'
)
return slave + ":" + command
|
from itertools import chain
import logging
from py_nextbus import NextBusClient
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, DEVICE_CLASS_TIMESTAMP
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util.dt import utc_from_timestamp
_LOGGER = logging.getLogger(__name__)
DOMAIN = "nextbus"
CONF_AGENCY = "agency"
CONF_ROUTE = "route"
CONF_STOP = "stop"
ICON = "mdi:bus"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_AGENCY): cv.string,
vol.Required(CONF_ROUTE): cv.string,
vol.Required(CONF_STOP): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
def listify(maybe_list):
"""Return list version of whatever value is passed in.
This is used to provide a consistent way of interacting with the JSON
results from the API. There are several attributes that will either missing
if there are no values, a single dictionary if there is only one value, and
a list if there are multiple.
"""
if maybe_list is None:
return []
if isinstance(maybe_list, list):
return maybe_list
return [maybe_list]
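# Illustrative examples (hypothetical values, not from the API):
#     listify(None)       -> []
#     listify({"a": 1})   -> [{"a": 1}]
#     listify([1, 2, 3])  -> [1, 2, 3]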
def maybe_first(maybe_list):
"""Return the first item out of a list or returns back the input."""
if isinstance(maybe_list, list) and maybe_list:
return maybe_list[0]
return maybe_list
def validate_value(value_name, value, value_list):
"""Validate tag value is in the list of items and logs error if not."""
valid_values = {v["tag"]: v["title"] for v in value_list}
if value not in valid_values:
_LOGGER.error(
"Invalid %s tag `%s`. Please use one of the following: %s",
value_name,
value,
", ".join(f"{title}: {tag}" for tag, title in valid_values.items()),
)
return False
return True
def validate_tags(client, agency, route, stop):
"""Validate provided tags."""
# Validate agencies
if not validate_value("agency", agency, client.get_agency_list()["agency"]):
return False
# Validate the route
if not validate_value("route", route, client.get_route_list(agency)["route"]):
return False
# Validate the stop
route_config = client.get_route_config(route, agency)["route"]
if not validate_value("stop", stop, route_config["stop"]):
return False
return True
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Load values from configuration and initialize the platform."""
agency = config[CONF_AGENCY]
route = config[CONF_ROUTE]
stop = config[CONF_STOP]
name = config.get(CONF_NAME)
client = NextBusClient(output_format="json")
    # Ensure the provided tags are valid; invalid values are logged along with the valid options
if not validate_tags(client, agency, route, stop):
_LOGGER.error("Invalid config value(s)")
return
add_entities([NextBusDepartureSensor(client, agency, route, stop, name)], True)
class NextBusDepartureSensor(Entity):
"""Sensor class that displays upcoming NextBus times.
To function, this requires knowing the agency tag as well as the tags for
both the route and the stop.
    This is possibly a little convoluted to set up, as it requires making a
    request to the service to get these values. Perhaps it can be simplified in
    the future using fuzzy logic and matching.
"""
def __init__(self, client, agency, route, stop, name=None):
"""Initialize sensor with all required config."""
self.agency = agency
self.route = route
self.stop = stop
self._custom_name = name
# Maybe pull a more user friendly name from the API here
self._name = f"{agency} {route}"
self._client = client
# set up default state attributes
self._state = None
self._attributes = {}
def _log_debug(self, message, *args):
"""Log debug message with prefix."""
_LOGGER.debug(":".join((self.agency, self.route, self.stop, message)), *args)
@property
def name(self):
"""Return sensor name.
    Uses an auto-generated name based on the data from the API unless a
custom name is provided in the configuration.
"""
if self._custom_name:
return self._custom_name
return self._name
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
@property
def state(self):
"""Return current state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return additional state attributes."""
return self._attributes
@property
def icon(self):
"""Return icon to be used for this sensor."""
# Would be nice if we could determine if the line is a train or bus
# however that doesn't seem to be available to us. Using bus for now.
return ICON
def update(self):
"""Update sensor with new departures times."""
# Note: using Multi because there is a bug with the single stop impl
results = self._client.get_predictions_for_multi_stops(
[{"stop_tag": self.stop, "route_tag": self.route}], self.agency
)
self._log_debug("Predictions results: %s", results)
if "Error" in results:
self._log_debug("Could not get predictions: %s", results)
if not results.get("predictions"):
self._log_debug("No predictions available")
self._state = None
# Remove attributes that may now be outdated
self._attributes.pop("upcoming", None)
return
results = results["predictions"]
# Set detailed attributes
self._attributes.update(
{
"agency": results.get("agencyTitle"),
"route": results.get("routeTitle"),
"stop": results.get("stopTitle"),
}
)
# List all messages in the attributes
messages = listify(results.get("message", []))
self._log_debug("Messages: %s", messages)
self._attributes["message"] = " -- ".join(
message.get("text", "") for message in messages
)
# List out all directions in the attributes
directions = listify(results.get("direction", []))
self._attributes["direction"] = ", ".join(
direction.get("title", "") for direction in directions
)
# Chain all predictions together
predictions = list(
chain(
*(listify(direction.get("prediction", [])) for direction in directions)
)
)
# Short circuit if we don't have any actual bus predictions
if not predictions:
self._log_debug("No upcoming predictions available")
self._state = None
self._attributes["upcoming"] = "No upcoming predictions"
return
# Generate list of upcoming times
self._attributes["upcoming"] = ", ".join(
sorted(p["minutes"] for p in predictions)
)
latest_prediction = maybe_first(predictions)
self._state = utc_from_timestamp(
int(latest_prediction["epochTime"]) / 1000
).isoformat()
|
import copy
import uuid
from homeassistant.components.lovelace import dashboard, resources
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
RESOURCE_EXAMPLES = [
{"type": "js", "url": "/local/bla.js"},
{"type": "css", "url": "/local/bla.css"},
]
async def test_yaml_resources(hass, hass_ws_client):
"""Test defining resources in configuration.yaml."""
assert await async_setup_component(
hass, "lovelace", {"lovelace": {"mode": "yaml", "resources": RESOURCE_EXAMPLES}}
)
client = await hass_ws_client(hass)
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == RESOURCE_EXAMPLES
async def test_yaml_resources_backwards(hass, hass_ws_client):
"""Test defining resources in YAML ll config (legacy)."""
with patch(
"homeassistant.components.lovelace.dashboard.load_yaml",
return_value={"resources": RESOURCE_EXAMPLES},
):
assert await async_setup_component(
hass, "lovelace", {"lovelace": {"mode": "yaml"}}
)
client = await hass_ws_client(hass)
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == RESOURCE_EXAMPLES
async def test_storage_resources(hass, hass_ws_client, hass_storage):
"""Test defining resources in storage config."""
resource_config = [{**item, "id": uuid.uuid4().hex} for item in RESOURCE_EXAMPLES]
hass_storage[resources.RESOURCE_STORAGE_KEY] = {
"key": resources.RESOURCE_STORAGE_KEY,
"version": 1,
"data": {"items": resource_config},
}
assert await async_setup_component(hass, "lovelace", {})
client = await hass_ws_client(hass)
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == resource_config
async def test_storage_resources_import(hass, hass_ws_client, hass_storage):
"""Test importing resources from storage config."""
assert await async_setup_component(hass, "lovelace", {})
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"key": "lovelace",
"version": 1,
"data": {"config": {"resources": copy.deepcopy(RESOURCE_EXAMPLES)}},
}
client = await hass_ws_client(hass)
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert (
response["result"]
== hass_storage[resources.RESOURCE_STORAGE_KEY]["data"]["items"]
)
assert (
"resources"
not in hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT]["data"]["config"]
)
# Add a resource
await client.send_json(
{
"id": 6,
"type": "lovelace/resources/create",
"res_type": "module",
"url": "/local/yo.js",
}
)
response = await client.receive_json()
assert response["success"]
await client.send_json({"id": 7, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
last_item = response["result"][-1]
assert last_item["type"] == "module"
assert last_item["url"] == "/local/yo.js"
# Update a resource
first_item = response["result"][0]
await client.send_json(
{
"id": 8,
"type": "lovelace/resources/update",
"resource_id": first_item["id"],
"res_type": "css",
"url": "/local/updated.css",
}
)
response = await client.receive_json()
assert response["success"]
await client.send_json({"id": 9, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
first_item = response["result"][0]
assert first_item["type"] == "css"
assert first_item["url"] == "/local/updated.css"
# Delete resources
await client.send_json(
{
"id": 10,
"type": "lovelace/resources/delete",
"resource_id": first_item["id"],
}
)
response = await client.receive_json()
assert response["success"]
await client.send_json({"id": 11, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert len(response["result"]) == 2
assert first_item["id"] not in (item["id"] for item in response["result"])
async def test_storage_resources_import_invalid(hass, hass_ws_client, hass_storage):
"""Test importing resources from storage config."""
assert await async_setup_component(hass, "lovelace", {})
hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT] = {
"key": "lovelace",
"version": 1,
"data": {"config": {"resources": [{"invalid": "resource"}]}},
}
client = await hass_ws_client(hass)
# Fetch data
await client.send_json({"id": 5, "type": "lovelace/resources"})
response = await client.receive_json()
assert response["success"]
assert response["result"] == []
assert (
"resources"
in hass_storage[dashboard.CONFIG_STORAGE_KEY_DEFAULT]["data"]["config"]
)
|
from functools import partial
from itertools import (
dropwhile,
takewhile,
islice,
count,
product,
chain,
starmap,
filterfalse,
)
import collections
import types
from functional.execution import ExecutionStrategies
#: Defines a Transformation from a name, function, and execution_strategies
Transformation = collections.namedtuple(
"Transformation", ["name", "function", "execution_strategies"]
)
#: Cache transformation
CACHE_T = Transformation("cache", None, None)
def name(function):
"""
Retrieve a pretty name for the function
:param function: function to get name from
:return: pretty name
"""
if isinstance(function, types.FunctionType):
return function.__name__
else:
return str(function)
def map_t(func):
"""
Transformation for Sequence.map
:param func: map function
:return: transformation
"""
return Transformation(
"map({0})".format(name(func)),
partial(map, func),
{ExecutionStrategies.PARALLEL},
)
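# Illustrative sketch: a Transformation's `function` is applied lazily to the
# underlying iterable when the sequence is evaluated, e.g.
#   t = map_t(lambda x: x * 2)
#   list(t.function([1, 2, 3]))  # -> [2, 4, 6]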
def select_t(func):
"""
Transformation for Sequence.select
:param func: select function
:return: transformation
"""
return Transformation(
"select({0})".format(name(func)),
partial(map, func),
{ExecutionStrategies.PARALLEL},
)
def starmap_t(func):
"""
Transformation for Sequence.starmap and Sequence.smap
:param func: starmap function
:return: transformation
"""
return Transformation(
"starmap({})".format(name(func)),
partial(starmap, func),
{ExecutionStrategies.PARALLEL},
)
def filter_t(func):
"""
Transformation for Sequence.filter
:param func: filter function
:return: transformation
"""
return Transformation(
"filter({0})".format(name(func)),
partial(filter, func),
{ExecutionStrategies.PARALLEL},
)
def where_t(func):
"""
Transformation for Sequence.where
:param func: where function
:return: transformation
"""
return Transformation(
"where({0})".format(name(func)),
partial(filter, func),
{ExecutionStrategies.PARALLEL},
)
def filter_not_t(func):
"""
Transformation for Sequence.filter_not
:param func: filter_not function
:return: transformation
"""
return Transformation(
"filter_not({0})".format(name(func)),
partial(filterfalse, func),
{ExecutionStrategies.PARALLEL},
)
def reversed_t():
"""
Transformation for Sequence.reverse
:return: transformation
"""
    return Transformation("reversed", reversed, {ExecutionStrategies.PRE_COMPUTE})
def slice_t(start, until):
"""
Transformation for Sequence.slice
:param start: start index
:param until: until index (does not include element at until)
:return: transformation
"""
return Transformation(
"slice({0}, {1})".format(start, until),
lambda sequence: islice(sequence, start, until),
None,
)
def distinct_t():
"""
Transformation for Sequence.distinct
:return: transformation
"""
def distinct(sequence):
seen = set()
for element in sequence:
if element in seen:
continue
seen.add(element)
yield element
return Transformation("distinct", distinct, None)
def distinct_by_t(func):
"""
Transformation for Sequence.distinct_by
:param func: distinct_by function
:return: transformation
"""
def distinct_by(sequence):
distinct_lookup = {}
for element in sequence:
key = func(element)
if key not in distinct_lookup:
distinct_lookup[key] = element
return distinct_lookup.values()
return Transformation("distinct_by({0})".format(name(func)), distinct_by, None)
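# e.g. (illustrative) list(distinct_by_t(len).function(["a", "bb", "cc"]))
# keeps the first element seen per key and yields ["a", "bb"].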
def sorted_t(key=None, reverse=False):
"""
Transformation for Sequence.sorted
:param key: key to sort by
:param reverse: reverse or not
:return: transformation
"""
return Transformation(
"sorted", lambda sequence: sorted(sequence, key=key, reverse=reverse), None
)
def order_by_t(func):
"""
Transformation for Sequence.order_by
:param func: order_by function
:return: transformation
"""
return Transformation(
"order_by({0})".format(name(func)),
lambda sequence: sorted(sequence, key=func),
None,
)
def drop_right_t(n):
"""
Transformation for Sequence.drop_right
:param n: number to drop from right
:return: transformation
"""
if n <= 0:
end_index = None
else:
end_index = -n
return Transformation(
"drop_right({0})".format(n), lambda sequence: sequence[:end_index], None
)
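# e.g. (illustrative) drop_right_t(2).function([1, 2, 3, 4]) -> [1, 2]; with
# n <= 0 the end index is None, so the sequence is returned intact.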
def drop_t(n):
"""
Transformation for Sequence.drop
:param n: number to drop from left
:return: transformation
"""
return Transformation(
"drop({0})".format(n), lambda sequence: islice(sequence, n, None), None
)
def drop_while_t(func):
"""
Transformation for Sequence.drop_while
:param func: drops while func is true
:return: transformation
"""
return Transformation(
"drop_while({0})".format(name(func)), partial(dropwhile, func), None
)
def take_t(n):
"""
Transformation for Sequence.take
:param n: number to take
:return: transformation
"""
return Transformation(
"take({0})".format(n), lambda sequence: islice(sequence, 0, n), None
)
def take_while_t(func):
"""
Transformation for Sequence.take_while
:param func: takes while func is True
:return: transformation
"""
return Transformation(
"take_while({0})".format(name(func)), partial(takewhile, func), None
)
def flat_map_impl(func, sequence):
"""
Implementation for flat_map_t
:param func: function to map
:param sequence: sequence to flat_map over
:return: flat_map generator
"""
for element in sequence:
for value in func(element):
yield value
def flat_map_t(func):
"""
Transformation for Sequence.flat_map
:param func: function to flat_map
:return: transformation
"""
return Transformation(
"flat_map({0})".format(name(func)),
partial(flat_map_impl, func),
{ExecutionStrategies.PARALLEL},
)
def flatten_t():
"""
Transformation for Sequence.flatten
:return: transformation
"""
return Transformation(
"flatten", partial(flat_map_impl, lambda x: x), {ExecutionStrategies.PARALLEL}
)
def zip_t(zip_sequence):
"""
Transformation for Sequence.zip
:param zip_sequence: sequence to zip with
:return: transformation
"""
return Transformation(
"zip(<sequence>)", lambda sequence: zip(sequence, zip_sequence), None
)
def zip_with_index_t(start):
"""
Transformation for Sequence.zip_with_index
:return: transformation
"""
return Transformation(
"zip_with_index", lambda sequence: zip(sequence, count(start=start)), None
)
def enumerate_t(start):
"""
Transformation for Sequence.enumerate
:param start: start index for enumerate
:return: transformation
"""
return Transformation(
"enumerate", lambda sequence: enumerate(sequence, start=start), None
)
def cartesian_t(iterables, repeat):
"""
Transformation for Sequence.cartesian
:param iterables: elements for cartesian product
:param repeat: how many times to repeat iterables
:return: transformation
"""
return Transformation(
"cartesian", lambda sequence: product(sequence, *iterables, repeat=repeat), None
)
def init_t():
"""
Transformation for Sequence.init
:return: transformation
"""
return Transformation(
"init", lambda sequence: sequence[:-1], {ExecutionStrategies.PRE_COMPUTE}
)
def tail_t():
"""
Transformation for Sequence.tail
:return: transformation
"""
return Transformation("tail", lambda sequence: islice(sequence, 1, None), None)
def inits_t(wrap):
"""
Transformation for Sequence.inits
:param wrap: wrap children values with this
:return: transformation
"""
return Transformation(
"inits",
lambda sequence: [
wrap(sequence[:i]) for i in reversed(range(len(sequence) + 1))
],
{ExecutionStrategies.PRE_COMPUTE},
)
def tails_t(wrap):
"""
Transformation for Sequence.tails
:param wrap: wrap children values with this
:return: transformation
"""
return Transformation(
"tails",
lambda sequence: [wrap(sequence[i:]) for i in range(len(sequence) + 1)],
{ExecutionStrategies.PRE_COMPUTE},
)
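# e.g. (illustrative) with wrap=list:
#   inits_t(list).function([1, 2, 3])  # -> [[1, 2, 3], [1, 2], [1], []]
#   tails_t(list).function([1, 2, 3])  # -> [[1, 2, 3], [2, 3], [3], []]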
def union_t(other):
"""
Transformation for Sequence.union
:param other: sequence to union with
:return: transformation
"""
return Transformation("union", lambda sequence: set(sequence).union(other), None)
def intersection_t(other):
"""
Transformation for Sequence.intersection
:param other: sequence to intersect with
:return: transformation
"""
return Transformation(
"intersection", lambda sequence: set(sequence).intersection(other), None
)
def difference_t(other):
"""
Transformation for Sequence.difference
    :param other: sequence to difference with
:return: transformation
"""
return Transformation(
"difference", lambda sequence: set(sequence).difference(other), None
)
def symmetric_difference_t(other):
"""
Transformation for Sequence.symmetric_difference
:param other: sequence to symmetric_difference with
:return: transformation
"""
return Transformation(
"symmetric_difference",
lambda sequence: set(sequence).symmetric_difference(other),
None,
)
def group_by_key_impl(sequence):
"""
Implementation for group_by_key_t
:param sequence: sequence to group
:return: grouped sequence
"""
    result = {}
    for element in sequence:
        result.setdefault(element[0], []).append(element[1])
    return result.items()
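# e.g. (illustrative) group_by_key_impl([("a", 1), ("a", 2), ("b", 3)])
# -> dict_items([('a', [1, 2]), ('b', [3])])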
def group_by_key_t():
"""
Transformation for Sequence.group_by_key
:return: transformation
"""
return Transformation("group_by_key", group_by_key_impl, None)
def reduce_by_key_impl(func, sequence):
"""
Implementation for reduce_by_key_t
:param func: reduce function
:param sequence: sequence to reduce
:return: reduced sequence
"""
result = {}
for key, value in sequence:
if key in result:
result[key] = func(result[key], value)
else:
result[key] = value
return result.items()
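# e.g. (illustrative, with operator.add as the reducer):
#   reduce_by_key_impl(operator.add, [("a", 1), ("a", 2), ("b", 5)])
#   -> dict_items([('a', 3), ('b', 5)])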
def reduce_by_key_t(func):
"""
Transformation for Sequence.reduce_by_key
:param func: reduce function
:return: transformation
"""
return Transformation(
"reduce_by_key({0})".format(name(func)), partial(reduce_by_key_impl, func), None
)
def accumulate_impl(func, sequence):
# pylint: disable=no-name-in-module
"""
Implementation for accumulate
:param sequence: sequence to accumulate
:param func: accumulate function
"""
from itertools import accumulate
return accumulate(sequence, func)
def accumulate_t(func):
"""
Transformation for Sequence.accumulate
"""
return Transformation(
"accumulate({0})".format(name(func)), partial(accumulate_impl, func), None
)
def count_by_key_impl(sequence):
"""
Implementation for count_by_key_t
:param sequence: sequence of (key, value) pairs
:return: counts by key
"""
counter = collections.Counter()
for key, _ in sequence:
counter[key] += 1
return counter.items()
def count_by_key_t():
"""
Transformation for Sequence.count_by_key
:return: transformation
"""
return Transformation("count_by_key", count_by_key_impl, None)
def count_by_value_impl(sequence):
"""
Implementation for count_by_value_t
:param sequence: sequence of values
:return: counts by value
"""
counter = collections.Counter()
for e in sequence:
counter[e] += 1
return counter.items()
def count_by_value_t():
"""
Transformation for Sequence.count_by_value
:return: transformation
"""
return Transformation("count_by_value", count_by_value_impl, None)
def group_by_impl(func, sequence):
"""
Implementation for group_by_t
:param func: grouping function
:param sequence: sequence to group
:return: grouped sequence
"""
    result = {}
    for element in sequence:
        result.setdefault(func(element), []).append(element)
    return result.items()
def group_by_t(func):
"""
Transformation for Sequence.group_by
:param func: grouping function
:return: transformation
"""
return Transformation(
"group_by({0})".format(name(func)), partial(group_by_impl, func), None
)
def grouped_impl(wrap, size, sequence):
"""
Implementation for grouped_t
:param wrap: wrap children values with this
:param size: size of groups
:param sequence: sequence to group
:return: grouped sequence
"""
iterator = iter(sequence)
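    # islice never raises StopIteration itself; pulling the first element of
    # each batch with next() does, which the except clause below catches to end
    # the loop once the iterator is exhausted.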
try:
while True:
batch = islice(iterator, size)
yield list(chain((wrap(next(batch)),), batch))
except StopIteration:
return
def grouped_t(wrap, size):
"""
Transformation for Sequence.grouped
:param wrap: wrap children values with this
:param size: size of groups
:return: transformation
"""
return Transformation(
"grouped({0})".format(size), partial(grouped_impl, wrap, size), None
)
def sliding_impl(wrap, size, step, sequence):
"""
Implementation for sliding_t
:param wrap: wrap children values with this
:param size: size of window
:param step: step size
:param sequence: sequence to create sliding windows from
:return: sequence of sliding windows
"""
i = 0
n = len(sequence)
while i + size <= n or (step != 1 and i < n):
yield wrap(sequence[i : i + size])
i += step
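# e.g. (illustrative) list(sliding_impl(list, 2, 1, [1, 2, 3]))
# -> [[1, 2], [2, 3]]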
def sliding_t(wrap, size, step):
"""
Transformation for Sequence.sliding
:param wrap: wrap children values with this
:param size: size of window
:param step: step size
:return: transformation
"""
return Transformation(
"sliding({0}, {1})".format(size, step),
partial(sliding_impl, wrap, size, step),
{ExecutionStrategies.PRE_COMPUTE},
)
def partition_impl(wrap, predicate, sequence):
truthy_partition = []
falsy_partition = []
for e in sequence:
if predicate(e):
truthy_partition.append(e)
else:
falsy_partition.append(e)
return wrap((wrap(truthy_partition), wrap(falsy_partition)))
def partition_t(wrap, func):
"""
Transformation for Sequence.partition
:param wrap: wrap children values with this
:param func: partition function
:return: transformation
"""
return Transformation(
"partition({0})".format(name(func)), partial(partition_impl, wrap, func), None
)
def inner_join_impl(other, sequence):
"""
Implementation for part of join_impl
:param other: other sequence to join with
:param sequence: first sequence to join with
:return: joined sequence
"""
    seq_kv = {element[0]: element[1] for element in sequence}
other_kv = dict(other)
keys = seq_kv.keys() if len(seq_kv) < len(other_kv) else other_kv.keys()
result = {}
for k in keys:
if k in seq_kv and k in other_kv:
result[k] = (seq_kv[k], other_kv[k])
return result.items()
def join_impl(other, join_type, sequence):
"""
Implementation for join_t
:param other: other sequence to join with
:param join_type: join type (inner, outer, left, right)
:param sequence: first sequence to join with
:return: joined sequence
"""
if join_type == "inner":
return inner_join_impl(other, sequence)
    seq_kv = {element[0]: element[1] for element in sequence}
other_kv = dict(other)
if join_type == "left":
keys = seq_kv.keys()
elif join_type == "right":
keys = other_kv.keys()
elif join_type == "outer":
keys = set(list(seq_kv.keys()) + list(other_kv.keys()))
else:
raise TypeError("Wrong type of join specified")
result = {}
for k in keys:
result[k] = (seq_kv.get(k), other_kv.get(k))
return result.items()
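# e.g. (illustrative) an outer join pairs values by key and fills misses with
# None (item order may vary, since the keys come from a set):
#   join_impl([("b", 2)], "outer", [("a", 1)])
#   -> dict_items([('a', (1, None)), ('b', (None, 2))])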
def join_t(other, join_type):
"""
Transformation for Sequence.join, Sequence.inner_join, Sequence.outer_join, Sequence.right_join,
and Sequence.left_join
:param other: other sequence to join with
:param join_type: join type from left, right, inner, and outer
:return: transformation
"""
return Transformation(
"{0}_join".format(join_type), partial(join_impl, other, join_type), None
)
|
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
)
from homeassistant.components.plex.const import CONF_SERVER_IDENTIFIER
from homeassistant.components.plex.media_browser import SPECIAL_METHODS
from homeassistant.components.websocket_api.const import ERR_UNKNOWN_ERROR, TYPE_RESULT
from .const import DEFAULT_DATA
from .helpers import trigger_plex_update
async def test_browse_media(hass, hass_ws_client, mock_plex_server, mock_websocket):
"""Test getting Plex clients from plex.tv."""
websocket_client = await hass_ws_client(hass)
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
media_players = hass.states.async_entity_ids("media_player")
msg_id = 1
# Browse base of non-existent Plex server
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: "server",
ATTR_MEDIA_CONTENT_ID: "this server does not exist",
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert not msg["success"]
assert msg["error"]["code"] == ERR_UNKNOWN_ERROR
# Browse base of Plex server
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "server"
assert result[ATTR_MEDIA_CONTENT_ID] == DEFAULT_DATA[CONF_SERVER_IDENTIFIER]
assert len(result["children"]) == len(mock_plex_server.library.sections()) + len(
SPECIAL_METHODS
)
tvshows = next(iter(x for x in result["children"] if x["title"] == "TV Shows"))
playlists = next(iter(x for x in result["children"] if x["title"] == "Playlists"))
special_keys = list(SPECIAL_METHODS.keys())
# Browse into a special folder (server)
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: "server",
ATTR_MEDIA_CONTENT_ID: f"{DEFAULT_DATA[CONF_SERVER_IDENTIFIER]}:{special_keys[0]}",
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "server"
assert (
result[ATTR_MEDIA_CONTENT_ID]
== f"{DEFAULT_DATA[CONF_SERVER_IDENTIFIER]}:{special_keys[0]}"
)
assert len(result["children"]) == len(mock_plex_server.library.onDeck())
# Browse into a special folder (library)
msg_id += 1
library_section_id = next(iter(mock_plex_server.library.sections())).key
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: "library",
ATTR_MEDIA_CONTENT_ID: f"{library_section_id}:{special_keys[1]}",
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "library"
assert result[ATTR_MEDIA_CONTENT_ID] == f"{library_section_id}:{special_keys[1]}"
assert len(result["children"]) == len(
mock_plex_server.library.sectionByID(library_section_id).recentlyAdded()
)
# Browse into a Plex TV show library
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: tvshows[ATTR_MEDIA_CONTENT_TYPE],
ATTR_MEDIA_CONTENT_ID: str(tvshows[ATTR_MEDIA_CONTENT_ID]),
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "library"
result_id = result[ATTR_MEDIA_CONTENT_ID]
assert len(result["children"]) == len(
mock_plex_server.library.sectionByID(result_id).all()
) + len(SPECIAL_METHODS)
# Browse into a Plex TV show
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: result["children"][-1][ATTR_MEDIA_CONTENT_TYPE],
ATTR_MEDIA_CONTENT_ID: str(result["children"][-1][ATTR_MEDIA_CONTENT_ID]),
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "show"
result_id = int(result[ATTR_MEDIA_CONTENT_ID])
assert result["title"] == mock_plex_server.fetchItem(result_id).title
# Browse into a non-existent TV season
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: result["children"][0][ATTR_MEDIA_CONTENT_TYPE],
ATTR_MEDIA_CONTENT_ID: str(99999999999999),
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert not msg["success"]
assert msg["error"]["code"] == ERR_UNKNOWN_ERROR
# Browse Plex playlists
msg_id += 1
await websocket_client.send_json(
{
"id": msg_id,
"type": "media_player/browse_media",
"entity_id": media_players[0],
ATTR_MEDIA_CONTENT_TYPE: playlists[ATTR_MEDIA_CONTENT_TYPE],
ATTR_MEDIA_CONTENT_ID: str(playlists[ATTR_MEDIA_CONTENT_ID]),
}
)
msg = await websocket_client.receive_json()
assert msg["id"] == msg_id
assert msg["type"] == TYPE_RESULT
assert msg["success"]
result = msg["result"]
assert result[ATTR_MEDIA_CONTENT_TYPE] == "playlists"
result_id = result[ATTR_MEDIA_CONTENT_ID]
|
from datetime import timedelta
import logging
from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
_LOGGER = logging.getLogger(__name__)
ATTR_AQI = "air_quality_index"
ATTR_ATTRIBUTION = "attribution"
ATTR_CO2 = "carbon_dioxide"
ATTR_CO = "carbon_monoxide"
ATTR_N2O = "nitrogen_oxide"
ATTR_NO = "nitrogen_monoxide"
ATTR_NO2 = "nitrogen_dioxide"
ATTR_OZONE = "ozone"
ATTR_PM_0_1 = "particulate_matter_0_1"
ATTR_PM_10 = "particulate_matter_10"
ATTR_PM_2_5 = "particulate_matter_2_5"
ATTR_SO2 = "sulphur_dioxide"
DOMAIN = "air_quality"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=30)
PROP_TO_ATTR = {
"air_quality_index": ATTR_AQI,
"attribution": ATTR_ATTRIBUTION,
"carbon_dioxide": ATTR_CO2,
"carbon_monoxide": ATTR_CO,
"nitrogen_oxide": ATTR_N2O,
"nitrogen_monoxide": ATTR_NO,
"nitrogen_dioxide": ATTR_NO2,
"ozone": ATTR_OZONE,
"particulate_matter_0_1": ATTR_PM_0_1,
"particulate_matter_10": ATTR_PM_10,
"particulate_matter_2_5": ATTR_PM_2_5,
"sulphur_dioxide": ATTR_SO2,
}
async def async_setup(hass, config):
"""Set up the air quality component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class AirQualityEntity(Entity):
"""ABC for air quality data."""
@property
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
raise NotImplementedError()
@property
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return None
@property
def particulate_matter_0_1(self):
"""Return the particulate matter 0.1 level."""
return None
@property
def air_quality_index(self):
"""Return the Air Quality Index (AQI)."""
return None
@property
def ozone(self):
"""Return the O3 (ozone) level."""
return None
@property
def carbon_monoxide(self):
"""Return the CO (carbon monoxide) level."""
return None
@property
def carbon_dioxide(self):
"""Return the CO2 (carbon dioxide) level."""
return None
@property
def attribution(self):
"""Return the attribution."""
return None
@property
def sulphur_dioxide(self):
"""Return the SO2 (sulphur dioxide) level."""
return None
@property
def nitrogen_oxide(self):
"""Return the N2O (nitrogen oxide) level."""
return None
@property
def nitrogen_monoxide(self):
"""Return the NO (nitrogen monoxide) level."""
return None
@property
def nitrogen_dioxide(self):
"""Return the NO2 (nitrogen dioxide) level."""
return None
@property
def state_attributes(self):
"""Return the state attributes."""
data = {}
for prop, attr in PROP_TO_ATTR.items():
value = getattr(self, prop)
if value is not None:
data[attr] = value
return data
@property
def state(self):
"""Return the current state."""
return self.particulate_matter_2_5
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
|
import gevent
from gevent.monkey import patch_all
patch_all()
from gevent.pywsgi import WSGIServer
import locale
import argparse
import logging
import socket
import urllib
import urllib2
from logging import getLogger
from flask import Flask
import zerorpc
from psdash import __version__
from psdash.node import LocalNode, RemoteNode
from psdash.web import fromtimestamp
logger = getLogger('psdash.run')
class PsDashRunner(object):
DEFAULT_LOG_INTERVAL = 60
DEFAULT_NET_IO_COUNTER_INTERVAL = 3
DEFAULT_REGISTER_INTERVAL = 60
DEFAULT_BIND_HOST = '0.0.0.0'
DEFAULT_PORT = 5000
LOCAL_NODE = 'localhost'
@classmethod
def create_from_cli_args(cls):
return cls(args=None)
def __init__(self, config_overrides=None, args=tuple()):
self._nodes = {}
config = self._load_args_config(args)
if config_overrides:
config.update(config_overrides)
self.app = self._create_app(config)
self._setup_nodes()
self._setup_logging()
self._setup_context()
    @classmethod
    def _get_args(cls, args):
parser = argparse.ArgumentParser(
description='psdash %s - system information web dashboard' % __version__
)
parser.add_argument(
'-l', '--log',
action='append',
dest='logs',
default=None,
metavar='path',
help='log files to make available for psdash. Patterns (e.g. /var/log/**/*.log) are supported. '
'This option can be used multiple times.'
)
parser.add_argument(
'-b', '--bind',
action='store',
dest='bind_host',
default=None,
metavar='host',
help='host to bind to. Defaults to 0.0.0.0 (all interfaces).'
)
parser.add_argument(
'-p', '--port',
action='store',
type=int,
dest='port',
default=None,
metavar='port',
help='port to listen on. Defaults to 5000.'
)
parser.add_argument(
'-d', '--debug',
action='store_true',
dest='debug',
help='enables debug mode.'
)
parser.add_argument(
'-a', '--agent',
action='store_true',
dest='agent',
        help='Enables agent mode. This launches an RPC server, using zerorpc, on the given bind host and port.'
)
parser.add_argument(
'--register-to',
action='store',
dest='register_to',
default=None,
metavar='host:port',
        help='The psdash node running in web mode to register this agent to on start-up, e.g. 10.0.1.22:5000'
)
parser.add_argument(
'--register-as',
action='store',
dest='register_as',
default=None,
metavar='name',
help='The name to register as. (This will default to the node\'s hostname)'
)
return parser.parse_args(args)
def _load_args_config(self, args):
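        # Map parsed CLI options onto Flask config keys, e.g. (illustrative)
        # ['-p', '5000', '-d'] becomes {'PSDASH_PORT': 5000, 'DEBUG': True}.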
config = {}
for k, v in vars(self._get_args(args)).iteritems():
if v:
key = 'PSDASH_%s' % k.upper() if k != 'debug' else 'DEBUG'
config[key] = v
return config
def _setup_nodes(self):
self.add_node(LocalNode())
nodes = self.app.config.get('PSDASH_NODES', [])
logger.info("Registering %d nodes", len(nodes))
for n in nodes:
self.register_node(n['name'], n['host'], int(n['port']))
def add_node(self, node):
self._nodes[node.get_id()] = node
def get_local_node(self):
return self._nodes.get(self.LOCAL_NODE)
def get_node(self, name):
return self._nodes.get(name)
def get_nodes(self):
return self._nodes
def register_node(self, name, host, port):
n = RemoteNode(name, host, port)
node = self.get_node(n.get_id())
if node:
n = node
logger.debug("Updating registered node %s", n.get_id())
else:
logger.info("Registering %s", n.get_id())
n.update_last_registered()
self.add_node(n)
return n
def _create_app(self, config=None):
app = Flask(__name__)
app.psdash = self
app.config.from_envvar('PSDASH_CONFIG', silent=True)
if config and isinstance(config, dict):
app.config.update(config)
self._load_allowed_remote_addresses(app)
# If the secret key is not read from the config just set it to something.
if not app.secret_key:
app.secret_key = 'whatisthissourcery'
app.add_template_filter(fromtimestamp)
from psdash.web import webapp
prefix = app.config.get('PSDASH_URL_PREFIX')
if prefix:
prefix = '/' + prefix.strip('/')
webapp.url_prefix = prefix
app.register_blueprint(webapp)
return app
def _load_allowed_remote_addresses(self, app):
key = 'PSDASH_ALLOWED_REMOTE_ADDRESSES'
addrs = app.config.get(key)
if not addrs:
return
if isinstance(addrs, (str, unicode)):
app.config[key] = [a.strip() for a in addrs.split(',')]
def _setup_logging(self):
level = self.app.config.get('PSDASH_LOG_LEVEL', logging.INFO) if not self.app.debug else logging.DEBUG
format = self.app.config.get('PSDASH_LOG_FORMAT', '%(levelname)s | %(name)s | %(message)s')
logging.basicConfig(
level=level,
format=format
)
logging.getLogger('werkzeug').setLevel(logging.WARNING if not self.app.debug else logging.DEBUG)
def _setup_workers(self):
net_io_interval = self.app.config.get('PSDASH_NET_IO_COUNTER_INTERVAL', self.DEFAULT_NET_IO_COUNTER_INTERVAL)
gevent.spawn_later(net_io_interval, self._net_io_counters_worker, net_io_interval)
if 'PSDASH_LOGS' in self.app.config:
logs_interval = self.app.config.get('PSDASH_LOGS_INTERVAL', self.DEFAULT_LOG_INTERVAL)
gevent.spawn_later(logs_interval, self._logs_worker, logs_interval)
if self.app.config.get('PSDASH_AGENT'):
register_interval = self.app.config.get('PSDASH_REGISTER_INTERVAL', self.DEFAULT_REGISTER_INTERVAL)
gevent.spawn_later(register_interval, self._register_agent_worker, register_interval)
def _setup_locale(self):
        # This sets the locale to the user default (usually controlled by the LANG env var)
locale.setlocale(locale.LC_ALL, '')
def _setup_context(self):
self.get_local_node().net_io_counters.update()
if 'PSDASH_LOGS' in self.app.config:
self.get_local_node().logs.add_patterns(self.app.config['PSDASH_LOGS'])
def _logs_worker(self, sleep_interval):
while True:
logger.debug("Reloading logs...")
self.get_local_node().logs.add_patterns(self.app.config['PSDASH_LOGS'])
gevent.sleep(sleep_interval)
def _register_agent_worker(self, sleep_interval):
while True:
logger.debug("Registering agent...")
self._register_agent()
gevent.sleep(sleep_interval)
def _net_io_counters_worker(self, sleep_interval):
while True:
logger.debug("Updating net io counters...")
self.get_local_node().net_io_counters.update()
gevent.sleep(sleep_interval)
def _register_agent(self):
register_name = self.app.config.get('PSDASH_REGISTER_AS')
if not register_name:
register_name = socket.gethostname()
url_args = {
'name': register_name,
'port': self.app.config.get('PSDASH_PORT', self.DEFAULT_PORT),
}
register_url = '%s/register?%s' % (self.app.config['PSDASH_REGISTER_TO'], urllib.urlencode(url_args))
if 'PSDASH_AUTH_USERNAME' in self.app.config and 'PSDASH_AUTH_PASSWORD' in self.app.config:
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='psDash login required',
uri=register_url,
user=self.app.config['PSDASH_AUTH_USERNAME'],
passwd=self.app.config['PSDASH_AUTH_PASSWORD']
)
opener = urllib2.build_opener(auth_handler)
urllib2.install_opener(opener)
try:
urllib2.urlopen(register_url)
except urllib2.HTTPError as e:
logger.error('Failed to register agent to "%s": %s', register_url, e)
def _run_rpc(self):
logger.info("Starting RPC server (agent mode)")
if 'PSDASH_REGISTER_TO' in self.app.config:
self._register_agent()
service = self.get_local_node().get_service()
self.server = zerorpc.Server(service)
self.server.bind('tcp://%s:%s' % (self.app.config.get('PSDASH_BIND_HOST', self.DEFAULT_BIND_HOST),
self.app.config.get('PSDASH_PORT', self.DEFAULT_PORT)))
self.server.run()
def _run_web(self):
logger.info("Starting web server")
log = 'default' if self.app.debug else None
ssl_args = {}
if self.app.config.get('PSDASH_HTTPS_KEYFILE') and self.app.config.get('PSDASH_HTTPS_CERTFILE'):
ssl_args = {
'keyfile': self.app.config.get('PSDASH_HTTPS_KEYFILE'),
'certfile': self.app.config.get('PSDASH_HTTPS_CERTFILE')
}
listen_to = (
self.app.config.get('PSDASH_BIND_HOST', self.DEFAULT_BIND_HOST),
self.app.config.get('PSDASH_PORT', self.DEFAULT_PORT)
)
self.server = WSGIServer(
listen_to,
application=self.app,
log=log,
**ssl_args
)
self.server.serve_forever()
def run(self):
logger.info('Starting psdash v%s' % __version__)
self._setup_locale()
self._setup_workers()
logger.info('Listening on %s:%s',
self.app.config.get('PSDASH_BIND_HOST', self.DEFAULT_BIND_HOST),
self.app.config.get('PSDASH_PORT', self.DEFAULT_PORT))
if self.app.config.get('PSDASH_AGENT'):
return self._run_rpc()
else:
return self._run_web()
def main():
r = PsDashRunner.create_from_cli_args()
r.run()
if __name__ == '__main__':
main()
|
from datetime import datetime, timedelta
import pytest
from six import text_type as str
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
from subliminal.utils import sanitize, timestamp
from subliminal.video import Episode, Movie, Video
def test_video_exists_age(movies, tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
video = movies['man_of_steel']
tmpdir.ensure(video.name).setmtime(timestamp(datetime.utcnow() - timedelta(days=3)))
assert video.exists
assert timedelta(days=3) < video.age < timedelta(days=3, seconds=1)
def test_video_age(movies):
assert movies['man_of_steel'].age == timedelta()
def test_video_fromguess_episode(episodes, monkeypatch):
guess = {'type': 'episode'}
monkeypatch.setattr(Episode, 'fromguess', Mock())
Video.fromguess(episodes['bbt_s07e05'].name, guess)
assert Episode.fromguess.called
def test_video_fromguess_movie(movies, monkeypatch):
guess = {'type': 'movie'}
monkeypatch.setattr(Movie, 'fromguess', Mock())
Video.fromguess(movies['man_of_steel'].name, guess)
assert Movie.fromguess.called
def test_video_fromguess_wrong_type(episodes):
guess = {'type': 'subtitle'}
with pytest.raises(ValueError) as excinfo:
Video.fromguess(episodes['bbt_s07e05'].name, guess)
assert str(excinfo.value) == 'The guess must be an episode or a movie guess'
def test_video_fromname_movie(movies):
video = Video.fromname(movies['man_of_steel'].name)
assert type(video) is Movie
assert video.name == movies['man_of_steel'].name
assert video.source == movies['man_of_steel'].source
assert video.release_group == movies['man_of_steel'].release_group
assert video.resolution == movies['man_of_steel'].resolution
assert video.video_codec == movies['man_of_steel'].video_codec
assert video.audio_codec is None
assert video.imdb_id is None
assert video.hashes == {}
assert video.size is None
assert video.subtitle_languages == set()
assert video.title == movies['man_of_steel'].title
assert video.year == movies['man_of_steel'].year
def test_video_fromname_episode(episodes):
video = Video.fromname(episodes['bbt_s07e05'].name)
assert type(video) is Episode
assert video.name == episodes['bbt_s07e05'].name
assert video.source == episodes['bbt_s07e05'].source
assert video.release_group == episodes['bbt_s07e05'].release_group
assert video.resolution == episodes['bbt_s07e05'].resolution
assert video.video_codec == episodes['bbt_s07e05'].video_codec
assert video.audio_codec is None
assert video.imdb_id is None
assert video.hashes == {}
assert video.size is None
assert video.subtitle_languages == set()
assert video.series == episodes['bbt_s07e05'].series
assert video.season == episodes['bbt_s07e05'].season
assert video.episode == episodes['bbt_s07e05'].episode
assert video.title is None
assert video.year is None
assert video.tvdb_id is None
def test_video_fromname_episode_no_season(episodes):
video = Video.fromname(episodes['the_jinx_e05'].name)
assert type(video) is Episode
assert video.name == episodes['the_jinx_e05'].name
assert video.source == episodes['the_jinx_e05'].source
assert video.release_group == episodes['the_jinx_e05'].release_group
assert video.resolution == episodes['the_jinx_e05'].resolution
assert video.video_codec == episodes['the_jinx_e05'].video_codec
assert video.audio_codec is None
assert video.imdb_id is None
assert video.hashes == {}
assert video.size is None
assert video.subtitle_languages == set()
assert sanitize(video.series) == sanitize(episodes['the_jinx_e05'].series)
assert video.season == episodes['the_jinx_e05'].season
assert video.episode == episodes['the_jinx_e05'].episode
assert video.title is None
assert video.year is None
assert video.tvdb_id is None
def test_video_hash(episodes):
video = episodes['bbt_s07e05']
assert hash(video) == hash(video.name)
def test_episode_from_guess_multi_episode(episodes):
video = Video.fromname(episodes['Marvels.Agents.of.S.H.I.E.L.D.S05E01-E02'].name)
# Multi-ep is converted to single-ep by taking the lowest episode number
assert video.episode == episodes['Marvels.Agents.of.S.H.I.E.L.D.S05E01-E02'].episode
def test_episode_fromguess_wrong_type(episodes):
guess = {'type': 'subtitle'}
with pytest.raises(ValueError) as excinfo:
Episode.fromguess(episodes['bbt_s07e05'].name, guess)
assert str(excinfo.value) == 'The guess must be an episode guess'
def test_episode_fromguess_insufficient_data(episodes):
guess = {'type': 'episode'}
with pytest.raises(ValueError) as excinfo:
Episode.fromguess(episodes['bbt_s07e05'].name, guess)
assert str(excinfo.value) == 'Insufficient data to process the guess'
def test_movie_fromguess_wrong_type(movies):
guess = {'type': 'subtitle'}
with pytest.raises(ValueError) as excinfo:
Movie.fromguess(movies['man_of_steel'].name, guess)
assert str(excinfo.value) == 'The guess must be a movie guess'
def test_movie_fromguess_insufficient_data(movies):
guess = {'type': 'movie'}
with pytest.raises(ValueError) as excinfo:
Movie.fromguess(movies['man_of_steel'].name, guess)
assert str(excinfo.value) == 'Insufficient data to process the guess'
def test_movie_fromname(movies):
video = Movie.fromname(movies['man_of_steel'].name)
assert video.name == movies['man_of_steel'].name
assert video.source == movies['man_of_steel'].source
assert video.release_group == movies['man_of_steel'].release_group
assert video.resolution == movies['man_of_steel'].resolution
assert video.video_codec == movies['man_of_steel'].video_codec
assert video.audio_codec is None
assert video.imdb_id is None
assert video.hashes == {}
assert video.size is None
assert video.subtitle_languages == set()
assert video.title == movies['man_of_steel'].title
assert video.year == movies['man_of_steel'].year
def test_episode_fromname(episodes):
video = Episode.fromname(episodes['bbt_s07e05'].name)
assert video.name == episodes['bbt_s07e05'].name
assert video.source == episodes['bbt_s07e05'].source
assert video.release_group == episodes['bbt_s07e05'].release_group
assert video.resolution == episodes['bbt_s07e05'].resolution
assert video.video_codec == episodes['bbt_s07e05'].video_codec
assert video.audio_codec is None
assert video.imdb_id is None
assert video.hashes == {}
assert video.size is None
assert video.subtitle_languages == set()
assert video.series == episodes['bbt_s07e05'].series
assert video.season == episodes['bbt_s07e05'].season
assert video.episode == episodes['bbt_s07e05'].episode
assert video.title is None
assert video.year is None
assert video.tvdb_id is None
|
import asyncio
import logging
from typing import Any, Dict, Iterable, List, Optional
from homeassistant import config as conf_util
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import Event, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.entity_platform import EntityPlatform, async_get_platforms
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
_LOGGER = logging.getLogger(__name__)
async def async_reload_integration_platforms(
hass: HomeAssistantType, integration_name: str, integration_platforms: Iterable
) -> None:
"""Reload an integration's platforms.
The platform must support being re-setup.
This functionality is only intended to be used for integrations that process
Home Assistant data and make this available to other integrations.
Examples are template, stats, derivative, utility meter.
"""
try:
unprocessed_conf = await conf_util.async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
tasks = [
_resetup_platform(
hass, integration_name, integration_platform, unprocessed_conf
)
for integration_platform in integration_platforms
]
await asyncio.gather(*tasks)
async def _resetup_platform(
hass: HomeAssistantType,
integration_name: str,
integration_platform: str,
unprocessed_conf: Dict,
) -> None:
"""Resetup a platform."""
integration = await async_get_integration(hass, integration_platform)
conf = await conf_util.async_process_component_config(
hass, unprocessed_conf, integration
)
if not conf:
return
root_config: Dict = {integration_platform: []}
    # Extract only the config for this integration, ignore the rest.
for p_type, p_config in config_per_platform(conf, integration_platform):
if p_type != integration_name:
continue
root_config[integration_platform].append(p_config)
component = integration.get_component()
if hasattr(component, "async_reset_platform"):
# If the integration has its own way to reset
# use this method.
await component.async_reset_platform(hass, integration_name) # type: ignore
await component.async_setup(hass, root_config) # type: ignore
return
    # If it's an entity platform, we use the entity platform's
    # async_reset method.
platform = async_get_platform_without_config_entry(
hass, integration_name, integration_platform
)
if platform:
await _async_reconfig_platform(platform, root_config[integration_platform])
return
if not root_config[integration_platform]:
# No config for this platform
# and its not loaded. Nothing to do
return
await _async_setup_platform(
hass, integration_name, integration_platform, root_config[integration_platform]
)
async def _async_setup_platform(
hass: HomeAssistantType,
integration_name: str,
integration_platform: str,
platform_configs: List[Dict],
) -> None:
"""Platform for the first time when new configuration is added."""
if integration_platform not in hass.data:
await async_setup_component(
hass, integration_platform, {integration_platform: platform_configs}
)
return
entity_component = hass.data[integration_platform]
tasks = [
entity_component.async_setup_platform(integration_name, p_config)
for p_config in platform_configs
]
await asyncio.gather(*tasks)
async def _async_reconfig_platform(
platform: EntityPlatform, platform_configs: List[Dict]
) -> None:
"""Reconfigure an already loaded platform."""
await platform.async_reset()
tasks = [platform.async_setup(p_config) for p_config in platform_configs] # type: ignore
await asyncio.gather(*tasks)
async def async_integration_yaml_config(
hass: HomeAssistantType, integration_name: str
) -> Optional[Dict[Any, Any]]:
"""Fetch the latest yaml configuration for an integration."""
integration = await async_get_integration(hass, integration_name)
return await conf_util.async_process_component_config(
hass, await conf_util.async_hass_config_yaml(hass), integration
)
@callback
def async_get_platform_without_config_entry(
hass: HomeAssistantType, integration_name: str, integration_platform_name: str
) -> Optional[EntityPlatform]:
"""Find an existing platform that is not a config entry."""
for integration_platform in async_get_platforms(hass, integration_name):
if integration_platform.config_entry is not None:
continue
if integration_platform.domain == integration_platform_name:
platform: EntityPlatform = integration_platform
return platform
return None
async def async_setup_reload_service(
hass: HomeAssistantType, domain: str, platforms: Iterable
) -> None:
"""Create the reload service for the domain."""
if hass.services.has_service(domain, SERVICE_RELOAD):
return
async def _reload_config(call: Event) -> None:
"""Reload the platforms."""
await async_reload_integration_platforms(hass, domain, platforms)
hass.bus.async_fire(f"event_{domain}_reloaded", context=call.context)
hass.helpers.service.async_register_admin_service(
domain, SERVICE_RELOAD, _reload_config
)
def setup_reload_service(
hass: HomeAssistantType, domain: str, platforms: Iterable
) -> None:
"""Sync version of async_setup_reload_service."""
asyncio.run_coroutine_threadsafe(
async_setup_reload_service(hass, domain, platforms),
hass.loop,
).result()
|
revision = "7ead443ba911"
down_revision = "6006c79b6011"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column("certificates", sa.Column("csr", sa.TEXT(), nullable=True))
def downgrade():
op.drop_column("certificates", "csr")
|
from logilab.common.registry import yes, RegistrableObject, RegistrableInstance
class Proxy(object):
"""annoying object should that not be registered, nor cause error"""
def __getattr__(self, attr):
return 1
trap = Proxy()
class AppObjectClass(RegistrableObject):
__registry__ = 'zereg'
__regid__ = 'appobject1'
__select__ = yes()
class AppObjectInstance(RegistrableInstance):
__registry__ = 'zereg'
__select__ = yes()
def __init__(self, regid):
self.__regid__ = regid
appobject2 = AppObjectInstance('appobject2')
|
import os.path as op
import os
import shutil
import numpy as np
from numpy.testing import assert_array_equal, assert_allclose, assert_equal
import pytest
from scipy import io as sio
from mne import find_events, pick_types
from mne.io import read_raw_egi, read_evokeds_mff
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.egi.egi import _combine_triggers
from mne.utils import run_tests_if_main, requires_version, object_diff
from mne.datasets.testing import data_path, requires_testing_data
base_dir = op.join(op.dirname(op.abspath(__file__)), 'data')
egi_fname = op.join(base_dir, 'test_egi.raw')
egi_txt_fname = op.join(base_dir, 'test_egi.txt')
egi_path = op.join(data_path(download=False), 'EGI')
egi_mff_fname = op.join(egi_path, 'test_egi.mff')
egi_mff_pns_fname = op.join(egi_path, 'test_egi_pns.mff')
egi_pause_fname = op.join(egi_path, 'test_egi_multiepoch_paused.mff')
egi_eprime_pause_fname = op.join(egi_path, 'test_egi_multiepoch_eprime.mff')
egi_pause_w1337_fname = op.join(egi_path, 'w1337_20191014_105416.mff')
egi_mff_evoked_fname = op.join(egi_path, 'test_egi_evoked.mff')
egi_txt_evoked_cat1_fname = op.join(egi_path, 'test_egi_evoked_cat1.txt')
egi_txt_evoked_cat2_fname = op.join(egi_path, 'test_egi_evoked_cat2.txt')
# absolute event times from NetStation
egi_pause_events = {'AM40': [7.224, 11.928, 14.413, 16.848],
'bgin': [6.121, 8.434, 13.369, 15.815, 18.094],
'FIX+': [6.225, 10.929, 13.414, 15.849],
'ITI+': [8.293, 12.997, 15.482, 17.918]}
# absolute epoch times
egi_pause_skips = [(1304000.0, 1772000.0), (8660000.0, 12296000.0)]
egi_eprime_pause_events = {'AM40': [6.049, 8.434, 10.936, 13.321],
'bgin': [4.902, 7.381, 9.901, 12.268, 14.619],
'FIX+': [5.050, 7.435, 9.937, 12.322],
'ITI+': [7.185, 9.503, 12.005, 14.391]}
egi_eprime_pause_skips = [(1344000.0, 1804000.0)]
egi_pause_w1337_events = None
egi_pause_w1337_skips = [(21956000.0, 40444000.0), (60936000.0, 89332000.0)]
@requires_testing_data
@pytest.mark.parametrize('fname, skip_times, event_times', [
(egi_pause_fname, egi_pause_skips, egi_pause_events),
(egi_eprime_pause_fname, egi_eprime_pause_skips, egi_eprime_pause_events),
(egi_pause_w1337_fname, egi_pause_w1337_skips, egi_pause_w1337_events),
])
def test_egi_mff_pause(fname, skip_times, event_times):
"""Test EGI MFF with pauses."""
if fname == egi_pause_w1337_fname:
# too slow to _test_raw_reader
raw = read_raw_egi(fname).load_data()
else:
with pytest.warns(RuntimeWarning, match='Acquisition skips detected'):
raw = _test_raw_reader(read_raw_egi, input_fname=fname,
test_scaling=False, # XXX probably some bug
test_rank='less',
)
assert raw.info['sfreq'] == 250. # true for all of these files
assert len(raw.annotations) == len(skip_times)
# assert event onsets match expected times
if event_times is None:
with pytest.raises(ValueError, match='Consider using .*events_from'):
find_events(raw)
else:
events = find_events(raw)
for event_type in event_times.keys():
ns_samples = np.floor(np.array(event_times[event_type]) *
raw.info['sfreq'])
assert_array_equal(
events[events[:, 2] == raw.event_id[event_type], 0],
ns_samples)
# read some data from the middle of the skip, assert it's all zeros
stim_picks = pick_types(raw.info, meg=False, stim=True, exclude=())
other_picks = np.setdiff1d(np.arange(len(raw.ch_names)), stim_picks)
for ii, annot in enumerate(raw.annotations):
assert annot['description'] == 'BAD_ACQ_SKIP'
start, stop = raw.time_as_index(
[annot['onset'], annot['onset'] + annot['duration']])
data, _ = raw[:, start:stop]
assert_array_equal(data[other_picks], 0.)
if event_times is not None:
assert raw.ch_names[-1] == 'STI 014'
assert not np.array_equal(data[stim_picks], 0.)
# assert skips match expected onset and duration
skip = ((start + 1) / raw.info['sfreq'] * 1e6,
(stop + 1) / raw.info['sfreq'] * 1e6)
assert skip == skip_times[ii]
@requires_testing_data
def test_io_egi_mff():
"""Test importing EGI MFF simple binary files."""
raw = read_raw_egi(egi_mff_fname, include=None)
assert ('RawMff' in repr(raw))
include = ['DIN1', 'DIN2', 'DIN3', 'DIN4', 'DIN5', 'DIN7']
raw = _test_raw_reader(read_raw_egi, input_fname=egi_mff_fname,
include=include, channel_naming='EEG %03d',
test_scaling=False, # XXX probably some bug
)
assert raw.info['sfreq'] == 1000.
    assert 'eeg' in raw
eeg_chan = [c for c in raw.ch_names if 'EEG' in c]
assert_equal(len(eeg_chan), 129)
picks = pick_types(raw.info, eeg=True)
assert_equal(len(picks), 129)
    assert 'STI 014' in raw.ch_names
events = find_events(raw, stim_channel='STI 014')
assert_equal(len(events), 8)
assert_equal(np.unique(events[:, 1])[0], 0)
assert (np.unique(events[:, 0])[0] != 0)
assert (np.unique(events[:, 2])[0] != 0)
pytest.raises(ValueError, read_raw_egi, egi_mff_fname, include=['Foo'],
preload=False)
pytest.raises(ValueError, read_raw_egi, egi_mff_fname, exclude=['Bar'],
preload=False)
for ii, k in enumerate(include, 1):
assert (k in raw.event_id)
assert (raw.event_id[k] == ii)
def test_io_egi():
"""Test importing EGI simple binary files."""
# test default
with open(egi_txt_fname) as fid:
data = np.loadtxt(fid)
t = data[0]
data = data[1:]
data *= 1e-6 # µV
with pytest.warns(RuntimeWarning, match='Did not find any event code'):
raw = read_raw_egi(egi_fname, include=None)
assert 'RawEGI' in repr(raw)
data_read, t_read = raw[:256]
assert_allclose(t_read, t)
assert_allclose(data_read, data, atol=1e-10)
include = ['TRSP', 'XXX1']
raw = _test_raw_reader(read_raw_egi, input_fname=egi_fname,
include=include, test_rank='less',
test_scaling=False, # XXX probably some bug
)
    assert 'eeg' in raw
eeg_chan = [c for c in raw.ch_names if c.startswith('E')]
assert_equal(len(eeg_chan), 256)
picks = pick_types(raw.info, eeg=True)
assert_equal(len(picks), 256)
    assert 'STI 014' in raw.ch_names
events = find_events(raw, stim_channel='STI 014')
assert_equal(len(events), 2) # ground truth
assert_equal(np.unique(events[:, 1])[0], 0)
assert (np.unique(events[:, 0])[0] != 0)
assert (np.unique(events[:, 2])[0] != 0)
    # test trigger functionality
    triggers = np.array([[0, 1, 0, 0], [0, 0, 1, 0]])
events_ids = [12, 24]
new_trigger = _combine_triggers(triggers, events_ids)
assert_array_equal(np.unique(new_trigger), np.unique([0, 12, 24]))
pytest.raises(ValueError, read_raw_egi, egi_fname, include=['Foo'],
preload=False)
pytest.raises(ValueError, read_raw_egi, egi_fname, exclude=['Bar'],
preload=False)
for ii, k in enumerate(include, 1):
assert (k in raw.event_id)
assert (raw.event_id[k] == ii)
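# A reading of the trigger-combination check above (illustrative, not a spec):
# _combine_triggers collapses the stacked binary trigger rows into one event
# channel, so with events_ids [12, 24] a sample where only row 0 is high
# becomes 12, one where only row 1 is high becomes 24, and the rest stay 0,
# which is why np.unique(new_trigger) equals np.unique([0, 12, 24]).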
@requires_testing_data
def test_io_egi_pns_mff(tmpdir):
"""Test importing EGI MFF with PNS data."""
raw = read_raw_egi(egi_mff_pns_fname, include=None, preload=True,
verbose='error')
assert ('RawMff' in repr(raw))
pns_chans = pick_types(raw.info, ecg=True, bio=True, emg=True)
assert_equal(len(pns_chans), 7)
names = [raw.ch_names[x] for x in pns_chans]
pns_names = ['Resp. Temperature'[:15],
'Resp. Pressure',
'ECG',
'Body Position',
'Resp. Effort Chest'[:15],
'Resp. Effort Abdomen'[:15],
'EMG-Leg']
_test_raw_reader(read_raw_egi, input_fname=egi_mff_pns_fname,
channel_naming='EEG %03d', verbose='error',
test_rank='less',
test_scaling=False, # XXX probably some bug
)
assert_equal(names, pns_names)
mat_names = [
'Resp_Temperature'[:15],
'Resp_Pressure',
'ECG',
'Body_Position',
'Resp_Effort_Chest'[:15],
'Resp_Effort_Abdomen'[:15],
'EMGLeg'
]
egi_fname_mat = op.join(data_path(), 'EGI', 'test_egi_pns.mat')
mc = sio.loadmat(egi_fname_mat)
for ch_name, ch_idx, mat_name in zip(pns_names, pns_chans, mat_names):
print('Testing {}'.format(ch_name))
mc_key = [x for x in mc.keys() if mat_name in x][0]
cal = raw.info['chs'][ch_idx]['cal']
mat_data = mc[mc_key] * cal
raw_data = raw[ch_idx][0]
assert_array_equal(mat_data, raw_data)
# EEG missing
new_mff = str(tmpdir.join('temp.mff'))
shutil.copytree(egi_mff_pns_fname, new_mff)
read_raw_egi(new_mff, verbose='error')
os.remove(op.join(new_mff, 'info1.xml'))
os.remove(op.join(new_mff, 'signal1.bin'))
with pytest.raises(FileNotFoundError, match='Could not find any EEG'):
read_raw_egi(new_mff, verbose='error')
@requires_testing_data
@pytest.mark.parametrize('preload', (True, False))
def test_io_egi_pns_mff_bug(preload):
"""Test importing EGI MFF with PNS data (BUG)."""
egi_fname_mff = op.join(data_path(), 'EGI', 'test_egi_pns_bug.mff')
with pytest.warns(RuntimeWarning, match='EGI PSG sample bug'):
raw = read_raw_egi(egi_fname_mff, include=None, preload=preload,
verbose='warning')
assert len(raw.annotations) == 1
assert_allclose(raw.annotations.duration, [0.004])
assert_allclose(raw.annotations.onset, [13.948])
egi_fname_mat = op.join(data_path(), 'EGI', 'test_egi_pns.mat')
mc = sio.loadmat(egi_fname_mat)
pns_chans = pick_types(raw.info, ecg=True, bio=True, emg=True)
pns_names = ['Resp. Temperature'[:15],
'Resp. Pressure',
'ECG',
'Body Position',
'Resp. Effort Chest'[:15],
'Resp. Effort Abdomen'[:15],
'EMG-Leg']
mat_names = [
'Resp_Temperature'[:15],
'Resp_Pressure',
'ECG',
'Body_Position',
'Resp_Effort_Chest'[:15],
'Resp_Effort_Abdomen'[:15],
'EMGLeg'
]
for ch_name, ch_idx, mat_name in zip(pns_names, pns_chans, mat_names):
print('Testing {}'.format(ch_name))
mc_key = [x for x in mc.keys() if mat_name in x][0]
cal = raw.info['chs'][ch_idx]['cal']
mat_data = mc[mc_key] * cal
        mat_data[:, -1] = 0  # the MFF has one fewer sample; zero out the last one
raw_data = raw[ch_idx][0]
assert_array_equal(mat_data, raw_data)
@requires_testing_data
def test_io_egi_crop_no_preload():
"""Test crop non-preloaded EGI MFF data (BUG)."""
egi_fname_mff = op.join(data_path(), 'EGI', 'test_egi.mff')
raw = read_raw_egi(egi_fname_mff, preload=False)
raw.crop(17.5, 20.5)
raw.load_data()
raw_preload = read_raw_egi(egi_fname_mff, preload=True)
raw_preload.crop(17.5, 20.5)
raw_preload.load_data()
assert_allclose(raw._data, raw_preload._data)
@requires_version('mffpy', '0.5.7')
@requires_testing_data
@pytest.mark.parametrize('idx, cond, tmax, signals, bads', [
(0, 'Category 1', 0.016, egi_txt_evoked_cat1_fname,
['E8', 'E11', 'E17', 'E28', 'ECG']),
(1, 'Category 2', 0.0, egi_txt_evoked_cat2_fname,
['E257', 'EMG'])
])
def test_io_egi_evokeds_mff(idx, cond, tmax, signals, bads):
"""Test reading evoked MFF file."""
# Test reading all conditions from evokeds
evokeds = read_evokeds_mff(egi_mff_evoked_fname)
assert len(evokeds) == 2
# Test reading list of conditions from evokeds
evokeds = read_evokeds_mff(egi_mff_evoked_fname, condition=[0, 1])
assert len(evokeds) == 2
# Test invalid condition
with pytest.raises(ValueError) as exc_info:
read_evokeds_mff(egi_mff_evoked_fname, condition='Invalid Condition')
message = "Invalid value for the 'condition' parameter provided as " \
"category name. Allowed values are 'Category 1', and " \
"'Category 2', but got 'Invalid Condition' instead."
assert str(exc_info.value) == message
with pytest.raises(ValueError) as exc_info:
read_evokeds_mff(egi_mff_evoked_fname, condition=2)
message = '"condition" parameter (2), provided as epoch index, ' \
'is out of range for available epochs (2).'
assert str(exc_info.value) == message
with pytest.raises(TypeError) as exc_info:
read_evokeds_mff(egi_mff_evoked_fname, condition=1.2)
message = '"condition" parameter must be either int or str.'
assert str(exc_info.value) == message
# Test reading evoked data from single condition
evoked_cond = read_evokeds_mff(egi_mff_evoked_fname, condition=cond)
evoked_idx = read_evokeds_mff(egi_mff_evoked_fname, condition=idx)
for evoked in [evoked_cond, evoked_idx]:
assert evoked.comment == cond
assert evoked.nave == 3
assert evoked.tmin == 0.0
assert evoked.tmax == tmax
# Check signal data
data = np.loadtxt(signals, ndmin=2).T * 1e-6 # convert to volts
assert_allclose(evoked_cond.data, data, atol=1e-12)
assert_allclose(evoked_idx.data, data, atol=1e-12)
# Check info
assert object_diff(evoked_cond.info, evoked_idx.info) == ''
assert evoked_cond.info['description'] == cond
assert evoked_cond.info['bads'] == bads
assert len(evoked_cond.info['ch_names']) == 259
assert 'ECG' in evoked_cond.info['ch_names']
assert 'EMG' in evoked_cond.info['ch_names']
assert 'ecg' in evoked_cond
assert 'emg' in evoked_cond
pick_eeg = pick_types(evoked_cond.info, eeg=True, exclude=[])
assert len(pick_eeg) == 257
assert evoked_cond.info['nchan'] == 259
assert evoked_cond.info['sfreq'] == 250.0
assert not evoked_cond.info['custom_ref_applied']
@requires_version('mffpy', '0.5.7')
@requires_testing_data
def test_read_evokeds_mff_bad_input():
"""Test errors are thrown when reading invalid input file."""
# Test file that is not an MFF
with pytest.raises(ValueError) as exc_info:
read_evokeds_mff(egi_fname)
message = 'fname must be an MFF file with extension ".mff".'
assert str(exc_info.value) == message
# Test continuous MFF
with pytest.raises(ValueError) as exc_info:
read_evokeds_mff(egi_mff_fname)
message = f'{egi_mff_fname} is a continuous MFF file. ' \
'fname must be the path to an averaged MFF file.'
assert str(exc_info.value) == message
run_tests_if_main()
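# Minimal usage sketch mirroring the readers exercised above (the path is a
# hypothetical placeholder):
#     raw = read_raw_egi('recording.mff', preload=True)
#     events = find_events(raw, stim_channel='STI 014')
#     print(raw.info['sfreq'], len(events))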
|
import logging
from fritzconnection.core import exceptions as fritzexceptions
from fritzconnection.lib.fritzhosts import FritzHosts
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DEFAULT_IP = "169.254.1.1" # This IP is valid for all FRITZ!Box routers.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=CONF_DEFAULT_IP): cv.string,
vol.Optional(CONF_PASSWORD, default="admin"): cv.string,
vol.Optional(CONF_USERNAME, default=""): cv.string,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return FritzBoxScanner."""
scanner = FritzBoxScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class FritzBoxScanner(DeviceScanner):
"""This class queries a FRITZ!Box router."""
def __init__(self, config):
"""Initialize the scanner."""
self.last_results = []
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.success_init = True
# Establish a connection to the FRITZ!Box.
try:
self.fritz_box = FritzHosts(
address=self.host, user=self.username, password=self.password
)
except (ValueError, TypeError):
self.fritz_box = None
# At this point it is difficult to tell if a connection is established.
# So just check for null objects.
if self.fritz_box is None or not self.fritz_box.modelname:
self.success_init = False
if self.success_init:
_LOGGER.info("Successfully connected to %s", self.fritz_box.modelname)
self._update_info()
else:
_LOGGER.error(
"Failed to establish connection to FRITZ!Box with IP: %s", self.host
)
def scan_devices(self):
"""Scan for new devices and return a list of found device ids."""
self._update_info()
active_hosts = []
for known_host in self.last_results:
if known_host["status"] and known_host.get("mac"):
active_hosts.append(known_host["mac"])
return active_hosts
def get_device_name(self, device):
"""Return the name of the given device or None if is not known."""
ret = self.fritz_box.get_specific_host_entry(device).get("NewHostName")
if ret == {}:
return None
return ret
def get_extra_attributes(self, device):
"""Return the attributes (ip, mac) of the given device or None if is not known."""
ip_device = None
try:
ip_device = self.fritz_box.get_specific_host_entry(device).get(
"NewIPAddress"
)
except fritzexceptions.FritzLookUpError as fritz_lookup_error:
_LOGGER.warning(
"Host entry for %s not found: %s", device, fritz_lookup_error
)
if not ip_device:
return {}
return {"ip": ip_device, "mac": device}
def _update_info(self):
"""Retrieve latest information from the FRITZ!Box."""
if not self.success_init:
return False
_LOGGER.debug("Scanning")
self.last_results = self.fritz_box.get_hosts_info()
return True
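# Rough standalone sketch of the fritzconnection call this scanner wraps
# (address and credentials are placeholders; the key names follow the host
# dicts returned by get_hosts_info above):
#     from fritzconnection.lib.fritzhosts import FritzHosts
#     fritz = FritzHosts(address="169.254.1.1", user="", password="admin")
#     for host in fritz.get_hosts_info():
#         print(host["mac"], host["ip"], host["status"])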
|
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.test import TestCase
from zinnia.managers import PUBLISHED
from zinnia.models.author import Author
from zinnia.models.entry import Entry
from zinnia.signals import disconnect_entry_signals
from zinnia.tests.utils import skip_if_custom_user
@skip_if_custom_user
class AuthorTestCase(TestCase):
def setUp(self):
disconnect_entry_signals()
self.site = Site.objects.get_current()
self.author = Author.objects.create_user(
'webmaster', '[email protected]')
params = {'title': 'My entry',
'content': 'My content',
'tags': 'zinnia, test',
'slug': 'my-entry'}
self.entry = Entry.objects.create(**params)
self.entry.authors.add(self.author)
self.entry.sites.add(self.site)
def test_entries_published(self):
self.assertEqual(self.author.entries_published().count(), 0)
self.entry.status = PUBLISHED
self.entry.save()
self.assertEqual(self.author.entries_published().count(), 1)
def test_str(self):
self.assertEqual(self.author.__str__(),
'webmaster')
self.author.last_name = 'Doe'
self.assertEqual(self.author.__str__(),
'Doe')
self.author.first_name = 'John'
        self.assertEqual(self.author.__str__(),
                         'John Doe')
def test_manager_pollution(self):
"""
https://github.com/Fantomas42/django-blog-zinnia/pull/307
"""
self.assertNotEqual(get_user_model().objects.model,
Author)
|
from homeassistant.components.cover import SUPPORT_CLOSE, SUPPORT_OPEN
from homeassistant.components.zwave import (
CONF_INVERT_OPENCLOSE_BUTTONS,
CONF_INVERT_PERCENT,
const,
cover,
)
from tests.async_mock import MagicMock
from tests.mock.zwave import MockEntityValues, MockNode, MockValue, value_changed
def test_get_device_detects_none(hass, mock_openzwave):
"""Test device returns none."""
node = MockNode()
value = MockValue(data=0, node=node)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert device is None
def test_get_device_detects_rollershutter(hass, mock_openzwave):
"""Test device returns rollershutter."""
hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode()
value = MockValue(
data=0, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
values = MockEntityValues(primary=value, open=None, close=None, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert isinstance(device, cover.ZwaveRollershutter)
def test_get_device_detects_garagedoor_switch(hass, mock_openzwave):
"""Test device returns garage door."""
node = MockNode()
value = MockValue(
data=False, node=node, command_class=const.COMMAND_CLASS_SWITCH_BINARY
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert isinstance(device, cover.ZwaveGarageDoorSwitch)
assert device.device_class == "garage"
assert device.supported_features == SUPPORT_OPEN | SUPPORT_CLOSE
def test_get_device_detects_garagedoor_barrier(hass, mock_openzwave):
"""Test device returns garage door."""
node = MockNode()
value = MockValue(
data="Closed", node=node, command_class=const.COMMAND_CLASS_BARRIER_OPERATOR
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert isinstance(device, cover.ZwaveGarageDoorBarrier)
assert device.device_class == "garage"
assert device.supported_features == SUPPORT_OPEN | SUPPORT_CLOSE
def test_roller_no_position_workaround(hass, mock_openzwave):
"""Test position changed."""
hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode(manufacturer_id="0047", product_type="5a52")
value = MockValue(
data=45, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
values = MockEntityValues(primary=value, open=None, close=None, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert device.current_cover_position is None
def test_roller_value_changed(hass, mock_openzwave):
"""Test position changed."""
hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode()
value = MockValue(
data=None, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
values = MockEntityValues(primary=value, open=None, close=None, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert device.current_cover_position is None
assert device.is_closed is None
value.data = 2
value_changed(value)
assert device.current_cover_position == 0
assert device.is_closed
value.data = 35
value_changed(value)
assert device.current_cover_position == 35
assert not device.is_closed
value.data = 97
value_changed(value)
assert device.current_cover_position == 100
assert not device.is_closed
def test_roller_commands(hass, mock_openzwave):
"""Test position changed."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode()
value = MockValue(
data=50, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
open_value = MockValue(data=False, node=node)
close_value = MockValue(data=False, node=node)
values = MockEntityValues(
primary=value, open=open_value, close=close_value, node=node
)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
device.set_cover_position(position=25)
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 25
device.open_cover()
assert mock_network.manager.pressButton.called
(value_id,) = mock_network.manager.pressButton.mock_calls.pop(0)[1]
assert value_id == open_value.value_id
device.close_cover()
assert mock_network.manager.pressButton.called
(value_id,) = mock_network.manager.pressButton.mock_calls.pop(0)[1]
assert value_id == close_value.value_id
device.stop_cover()
assert mock_network.manager.releaseButton.called
(value_id,) = mock_network.manager.releaseButton.mock_calls.pop(0)[1]
assert value_id == open_value.value_id
def test_roller_invert_percent(hass, mock_openzwave):
"""Test position changed."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode()
value = MockValue(
data=50, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
open_value = MockValue(data=False, node=node)
close_value = MockValue(data=False, node=node)
values = MockEntityValues(
primary=value, open=open_value, close=close_value, node=node
)
device = cover.get_device(
hass=hass, node=node, values=values, node_config={CONF_INVERT_PERCENT: True}
)
device.set_cover_position(position=25)
assert node.set_dimmer.called
value_id, brightness = node.set_dimmer.mock_calls[0][1]
assert value_id == value.value_id
assert brightness == 75
device.open_cover()
assert mock_network.manager.pressButton.called
(value_id,) = mock_network.manager.pressButton.mock_calls.pop(0)[1]
assert value_id == open_value.value_id
def test_roller_reverse_open_close(hass, mock_openzwave):
"""Test position changed."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode()
value = MockValue(
data=50, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
open_value = MockValue(data=False, node=node)
close_value = MockValue(data=False, node=node)
values = MockEntityValues(
primary=value, open=open_value, close=close_value, node=node
)
device = cover.get_device(
hass=hass,
node=node,
values=values,
node_config={CONF_INVERT_OPENCLOSE_BUTTONS: True},
)
device.open_cover()
assert mock_network.manager.pressButton.called
(value_id,) = mock_network.manager.pressButton.mock_calls.pop(0)[1]
assert value_id == close_value.value_id
device.close_cover()
assert mock_network.manager.pressButton.called
(value_id,) = mock_network.manager.pressButton.mock_calls.pop(0)[1]
assert value_id == open_value.value_id
device.stop_cover()
assert mock_network.manager.releaseButton.called
(value_id,) = mock_network.manager.releaseButton.mock_calls.pop(0)[1]
assert value_id == close_value.value_id
def test_switch_garage_value_changed(hass, mock_openzwave):
"""Test position changed."""
node = MockNode()
value = MockValue(
data=False, node=node, command_class=const.COMMAND_CLASS_SWITCH_BINARY
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert device.is_closed
value.data = True
value_changed(value)
assert not device.is_closed
def test_switch_garage_commands(hass, mock_openzwave):
"""Test position changed."""
node = MockNode()
value = MockValue(
data=False, node=node, command_class=const.COMMAND_CLASS_SWITCH_BINARY
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert value.data is False
device.open_cover()
assert value.data is True
device.close_cover()
assert value.data is False
def test_barrier_garage_value_changed(hass, mock_openzwave):
"""Test position changed."""
node = MockNode()
value = MockValue(
data="Closed", node=node, command_class=const.COMMAND_CLASS_BARRIER_OPERATOR
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert device.is_closed
assert not device.is_opening
assert not device.is_closing
value.data = "Opening"
value_changed(value)
assert not device.is_closed
assert device.is_opening
assert not device.is_closing
value.data = "Opened"
value_changed(value)
assert not device.is_closed
assert not device.is_opening
assert not device.is_closing
value.data = "Closing"
value_changed(value)
assert not device.is_closed
assert not device.is_opening
assert device.is_closing
def test_barrier_garage_commands(hass, mock_openzwave):
"""Test position changed."""
node = MockNode()
value = MockValue(
data="Closed", node=node, command_class=const.COMMAND_CLASS_BARRIER_OPERATOR
)
values = MockEntityValues(primary=value, node=node)
device = cover.get_device(hass=hass, node=node, values=values, node_config={})
assert value.data == "Closed"
device.open_cover()
assert value.data == "Opened"
device.close_cover()
assert value.data == "Closed"
|
from .base import FieldType, indexPredicates
from dedupe import predicates
from affinegap import normalizedAffineGapDistance as affineGap
from highered import CRFEditDistance
from simplecosine.cosine import CosineTextSimilarity
from typing import Optional
crfEd = CRFEditDistance()
base_predicates = (predicates.wholeFieldPredicate,
predicates.firstTokenPredicate,
predicates.commonIntegerPredicate,
predicates.nearIntegersPredicate,
predicates.firstIntegerPredicate,
predicates.hundredIntegerPredicate,
predicates.hundredIntegersOddPredicate,
predicates.alphaNumericPredicate,
predicates.sameThreeCharStartPredicate,
predicates.sameFiveCharStartPredicate,
predicates.sameSevenCharStartPredicate,
predicates.commonTwoTokens,
predicates.commonThreeTokens,
predicates.fingerprint,
predicates.oneGramFingerprint,
predicates.twoGramFingerprint,
predicates.sortedAcronym
)
class BaseStringType(FieldType):
type: Optional[str] = None
_Predicate = predicates.StringPredicate
def __init__(self, definition):
super(BaseStringType, self).__init__(definition)
self.predicates += indexPredicates((predicates.LevenshteinCanopyPredicate,
predicates.LevenshteinSearchPredicate),
(1, 2, 3, 4),
self.field)
class ShortStringType(BaseStringType):
type = "ShortString"
_predicate_functions = (base_predicates +
(predicates.commonFourGram,
predicates.commonSixGram,
predicates.tokenFieldPredicate,
predicates.suffixArray,
predicates.doubleMetaphone,
predicates.metaphoneToken))
_index_predicates = [predicates.TfidfNGramCanopyPredicate,
predicates.TfidfNGramSearchPredicate]
_index_thresholds = (0.2, 0.4, 0.6, 0.8)
def __init__(self, definition):
super(ShortStringType, self).__init__(definition)
if definition.get('crf', False) is True:
self.comparator = crfEd
else:
self.comparator = affineGap
class StringType(ShortStringType):
type = "String"
_index_predicates = [predicates.TfidfNGramCanopyPredicate,
predicates.TfidfNGramSearchPredicate,
predicates.TfidfTextCanopyPredicate,
predicates.TfidfTextSearchPredicate]
class TextType(BaseStringType):
type = "Text"
_predicate_functions = base_predicates
_index_predicates = [predicates.TfidfTextCanopyPredicate,
predicates.TfidfTextSearchPredicate]
_index_thresholds = (0.2, 0.4, 0.6, 0.8)
def __init__(self, definition):
super(TextType, self).__init__(definition)
if 'corpus' not in definition:
definition['corpus'] = []
self.comparator = CosineTextSimilarity(definition['corpus'])
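# Hypothetical field definitions showing how these types are typically picked
# in a dedupe schema ('crf': True swaps the affine gap comparator for the CRF
# edit distance; TextType reads an optional 'corpus' for TF-IDF cosine):
#     fields = [
#         {'field': 'name', 'type': 'String'},
#         {'field': 'address', 'type': 'ShortString', 'crf': True},
#         {'field': 'description', 'type': 'Text', 'corpus': []},
#     ]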
|
from collections import defaultdict
import json
from typing import Dict
from .model import Config, Integration
BASE = """
\"\"\"Automatically generated by hassfest.
To update, run python3 -m script.hassfest
\"\"\"
# fmt: off
MQTT = {}
""".strip()
def generate_and_validate(integrations: Dict[str, Integration]):
"""Validate and generate MQTT data."""
data = defaultdict(list)
for domain in sorted(integrations):
integration = integrations[domain]
if not integration.manifest:
continue
mqtt = integration.manifest.get("mqtt")
if not mqtt:
continue
for topic in mqtt:
data[domain].append(topic)
return BASE.format(json.dumps(data, indent=4))
def validate(integrations: Dict[str, Integration], config: Config):
"""Validate MQTT file."""
mqtt_path = config.root / "homeassistant/generated/mqtt.py"
config.cache["mqtt"] = content = generate_and_validate(integrations)
if config.specific_integrations:
return
with open(str(mqtt_path)) as fp:
if fp.read().strip() != content:
config.add_error(
"mqtt",
"File mqtt.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
def generate(integrations: Dict[str, Integration], config: Config):
"""Generate MQTT file."""
mqtt_path = config.root / "homeassistant/generated/mqtt.py"
with open(str(mqtt_path), "w") as fp:
fp.write(f"{config.cache['mqtt']}\n")
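# For reference, the generated mqtt.py comes out shaped roughly like this
# (domain and topic here are illustrative, not taken from a real manifest):
#     MQTT = {
#         "some_domain": [
#             "some_domain/discovery/#"
#         ]
#     }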
|
from russound_rio import Russound
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
SUPPORT_RUSSOUND = (
SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=9621): cv.port,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Russound RIO platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
russ = Russound(hass.loop, host, port)
await russ.connect()
# Discover sources and zones
sources = await russ.enumerate_sources()
valid_zones = await russ.enumerate_zones()
devices = []
for zone_id, name in valid_zones:
await russ.watch_zone(zone_id)
dev = RussoundZoneDevice(russ, zone_id, name, sources)
devices.append(dev)
@callback
def on_stop(event):
"""Shutdown cleanly when hass stops."""
hass.loop.create_task(russ.close())
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_stop)
async_add_entities(devices)
class RussoundZoneDevice(MediaPlayerEntity):
"""Representation of a Russound Zone."""
def __init__(self, russ, zone_id, name, sources):
"""Initialize the zone device."""
super().__init__()
self._name = name
self._russ = russ
self._zone_id = zone_id
self._sources = sources
def _zone_var(self, name, default=None):
return self._russ.get_cached_zone_variable(self._zone_id, name, default)
def _source_var(self, name, default=None):
current = int(self._zone_var("currentsource", 0))
if current:
return self._russ.get_cached_source_variable(current, name, default)
return default
def _source_na_var(self, name):
"""Will replace invalid values with None."""
current = int(self._zone_var("currentsource", 0))
if current:
value = self._russ.get_cached_source_variable(current, name, None)
if value in (None, "", "------"):
return None
return value
return None
def _zone_callback_handler(self, zone_id, *args):
if zone_id == self._zone_id:
self.schedule_update_ha_state()
def _source_callback_handler(self, source_id, *args):
current = int(self._zone_var("currentsource", 0))
if source_id == current:
self.schedule_update_ha_state()
async def async_added_to_hass(self):
"""Register callback handlers."""
self._russ.add_zone_callback(self._zone_callback_handler)
self._russ.add_source_callback(self._source_callback_handler)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the zone."""
return self._zone_var("name", self._name)
@property
def state(self):
"""Return the state of the device."""
status = self._zone_var("status", "OFF")
if status == "ON":
return STATE_ON
if status == "OFF":
return STATE_OFF
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_RUSSOUND
@property
def source(self):
"""Get the currently selected source."""
return self._source_na_var("name")
@property
def source_list(self):
"""Return a list of available input sources."""
return [x[1] for x in self._sources]
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_title(self):
"""Title of current playing media."""
return self._source_na_var("songname")
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self._source_na_var("artistname")
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
return self._source_na_var("albumname")
@property
def media_image_url(self):
"""Image url of current playing media."""
return self._source_na_var("coverarturl")
@property
def volume_level(self):
"""Volume level of the media player (0..1).
Value is returned based on a range (0..50).
Therefore float divide by 50 to get to the required range.
"""
return float(self._zone_var("volume", 0)) / 50.0
async def async_turn_off(self):
"""Turn off the zone."""
await self._russ.send_zone_event(self._zone_id, "ZoneOff")
async def async_turn_on(self):
"""Turn on the zone."""
await self._russ.send_zone_event(self._zone_id, "ZoneOn")
async def async_set_volume_level(self, volume):
"""Set the volume level."""
rvol = int(volume * 50.0)
await self._russ.send_zone_event(self._zone_id, "KeyPress", "Volume", rvol)
async def async_select_source(self, source):
"""Select the source input for this zone."""
for source_id, name in self._sources:
if name.lower() != source.lower():
continue
await self._russ.send_zone_event(self._zone_id, "SelectSource", source_id)
break
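# Example configuration.yaml entry for this platform (a sketch; host and name
# are placeholders, port falls back to the 9621 default above):
#     media_player:
#       - platform: russound_rio
#         host: 192.168.1.10
#         name: Whole House Audio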
|
from app import app
from app import my_signals
from app.intents.models import Intent
from app.nlu import spacy_tokenizer
from app.nlu.classifiers.starspace_intent_classifier import \
EmbeddingIntentClassifier
from app.nlu.entity_extractor import EntityExtractor
model_updated_signal = my_signals.signal('model-updated')
def train_models():
"""
Initiate NER and Intent Classification training
:return:
"""
# generate intent classifier training data
intents = Intent.objects
if not intents:
raise Exception("NO_DATA")
# train intent classifier on all intents
train_intent_classifier(intents)
# train ner model for each Stories
for intent in intents:
train_all_ner(intent.intentId, intent.trainingData)
model_updated_signal.send(app, message="Training Completed.")
def train_intent_classifier(intents):
"""
Train intent classifier model
:param intents:
:return:
"""
X = []
y = []
for intent in intents:
training_data = intent.trainingData
for example in training_data:
if example.get("text").strip() == "":
continue
X.append(example.get("text"))
y.append(intent.intentId)
intent_classifier = EmbeddingIntentClassifier(use_word_vectors=app.config['USE_WORD_VECTORS'])
intent_classifier.train(X, y)
intent_classifier.persist(model_dir=app.config["MODELS_DIR"])
def train_all_ner(story_id, training_data):
"""
Train NER model for single Story
:param story_id:
:param training_data:
:return:
"""
entityExtraction = EntityExtractor()
# generate crf training data
ner_training_data = entityExtraction.json2crf(training_data)
# train and store ner model
entityExtraction.train(ner_training_data, story_id)
# Load and initialize Perceptron tagger
def pos_tagger(sentence):
"""
    Perform POS tagging on a given sentence
:param sentence:
:return:
"""
doc = spacy_tokenizer(sentence)
    tagged_sentence = []
    for token in doc:
        tagged_sentence.append((token.text, token.tag_))
    return tagged_sentence
def pos_tag_and_label(sentence):
"""
Perform POS tagging and BIO labeling on given sentence
:param sentence:
:return:
"""
tagged_sentence = pos_tagger(sentence)
tagged_sentence_json = []
for token, postag in tagged_sentence:
tagged_sentence_json.append([token, postag, "O"])
return tagged_sentence_json
def sentence_tokenize(sentences):
"""
Sentence tokenizer
:param sentences:
:return:
"""
doc = spacy_tokenizer(sentences)
words = [token.text for token in doc]
return " ".join(words)
|
import os
import re
from paasta_tools.cli.cmds.validate import paasta_validate_soa_configs
from paasta_tools.cli.utils import figure_out_service_name
from paasta_tools.cli.utils import get_file_contents
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.cli.utils import is_file_in_dir
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.cli.utils import NoSuchService
from paasta_tools.cli.utils import PaastaCheckMessages
from paasta_tools.cli.utils import success
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.cli.utils import x_mark
from paasta_tools.marathon_tools import get_all_namespaces_for_service
from paasta_tools.monitoring_tools import get_team
from paasta_tools.utils import _run
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_git_url
from paasta_tools.utils import get_pipeline_config
from paasta_tools.utils import get_pipeline_deploy_groups
from paasta_tools.utils import get_service_instance_list
from paasta_tools.utils import INSTANCE_TYPES
from paasta_tools.utils import list_clusters
from paasta_tools.utils import list_services
from paasta_tools.utils import PaastaColors
def add_subparser(subparsers):
help_text = (
"Determine whether service in pwd is 'paasta ready', checking for common "
"mistakes in the soa-configs directory and the local service directory. This "
"command is designed to be run from the 'root' of a service directory."
)
check_parser = subparsers.add_parser("check", description=help_text, help=help_text)
check_parser.add_argument(
"-s",
"--service",
help="The name of the service you wish to inspect. Defaults to autodetect.",
).completer = lazy_choices_completer(list_services)
check_parser.add_argument(
"-y",
"--yelpsoa-config-root",
dest="yelpsoa_config_root",
help="A directory from which yelpsoa-configs should be read from",
default=DEFAULT_SOA_DIR,
)
check_parser.set_defaults(command=paasta_check)
def deploy_check(service_path):
"""Check whether deploy.yaml exists in service directory. Prints success or
error message.
:param service_path: path to a directory containing deploy.yaml"""
if is_file_in_dir("deploy.yaml", service_path):
print(PaastaCheckMessages.DEPLOY_YAML_FOUND)
else:
print(PaastaCheckMessages.DEPLOY_YAML_MISSING)
def deploy_has_security_check(service, soa_dir):
pipeline = get_pipeline_config(service=service, soa_dir=soa_dir)
steps = [step["step"] for step in pipeline]
if "security-check" in steps:
print(PaastaCheckMessages.DEPLOY_SECURITY_FOUND)
return True
else:
print(PaastaCheckMessages.DEPLOY_SECURITY_MISSING)
return False
def docker_check():
"""Check whether Dockerfile exists in service directory, and is valid.
Prints suitable message depending on outcome"""
docker_file_path = is_file_in_dir("Dockerfile", os.getcwd())
if docker_file_path:
print(PaastaCheckMessages.DOCKERFILE_FOUND)
else:
print(PaastaCheckMessages.DOCKERFILE_MISSING)
def makefile_responds_to(target):
"""Runs `make --dry-run <target>` to detect if a makefile responds to the
specified target."""
cmd = "make --dry-run %s" % target
# According to http://www.gnu.org/software/make/manual/make.html#index-exit-status-of-make,
# 0 means OK, and 2 means error
returncode, _ = _run(cmd, timeout=5)
return returncode == 0
def makefile_has_a_tab(makefile_path):
contents = get_file_contents(makefile_path)
return "\t" in contents
def makefile_has_docker_tag(makefile_path):
contents = get_file_contents(makefile_path)
return re.search(r"DOCKER_TAG\s*\?=", contents, re.MULTILINE) is not None
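# For example, a Makefile line like "DOCKER_TAG ?= myservice-$(USER)" matches
# the pattern above, while "DOCKER_TAG = fixed" (plain "=", not "?=") does not.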
def makefile_check():
"""Detects if you have a makefile and runs some sanity tests against
it to ensure it is paasta-ready"""
makefile_path = is_file_in_dir("Makefile", os.getcwd())
if makefile_path:
print(PaastaCheckMessages.MAKEFILE_FOUND)
if makefile_has_a_tab(makefile_path):
print(PaastaCheckMessages.MAKEFILE_HAS_A_TAB)
else:
print(PaastaCheckMessages.MAKEFILE_HAS_NO_TABS)
if makefile_has_docker_tag(makefile_path):
print(PaastaCheckMessages.MAKEFILE_HAS_DOCKER_TAG)
else:
print(PaastaCheckMessages.MAKEFILE_HAS_NO_DOCKER_TAG)
if makefile_responds_to("cook-image"):
print(PaastaCheckMessages.MAKEFILE_RESPONDS_BUILD_IMAGE)
else:
print(PaastaCheckMessages.MAKEFILE_RESPONDS_BUILD_IMAGE_FAIL)
if makefile_responds_to("itest"):
print(PaastaCheckMessages.MAKEFILE_RESPONDS_ITEST)
else:
print(PaastaCheckMessages.MAKEFILE_RESPONDS_ITEST_FAIL)
if makefile_responds_to("test"):
print(PaastaCheckMessages.MAKEFILE_RESPONDS_TEST)
else:
print(PaastaCheckMessages.MAKEFILE_RESPONDS_TEST_FAIL)
else:
print(PaastaCheckMessages.MAKEFILE_MISSING)
def git_repo_check(service, soa_dir):
git_url = get_git_url(service, soa_dir)
cmd = "git ls-remote %s" % git_url
returncode, _ = _run(cmd, timeout=5)
if returncode == 0:
print(PaastaCheckMessages.GIT_REPO_FOUND)
else:
print(PaastaCheckMessages.git_repo_missing(git_url))
def get_deploy_groups_used_by_framework(instance_type, service, soa_dir):
"""This is a kind of funny function that gets all the instances for specified
service and framework, and massages it into a form that matches up with what
deploy.yaml's steps look like. This is only so we can compare it 1-1
with what deploy.yaml has for linting.
:param instance_type: one of 'marathon', 'adhoc'
:param service: the service name
:param soa_dir: The SOA configuration directory to read from
:returns: a list of deploy group names used by the service.
"""
deploy_groups = []
for cluster in list_clusters(service, soa_dir):
for _, instance in get_service_instance_list(
service=service,
cluster=cluster,
instance_type=instance_type,
soa_dir=soa_dir,
):
try:
config = get_instance_config(
service=service,
instance=instance,
cluster=cluster,
soa_dir=soa_dir,
load_deployments=False,
instance_type=instance_type,
)
deploy_groups.append(config.get_deploy_group())
except NotImplementedError:
pass
return set(filter(None, deploy_groups))
def deployments_check(service, soa_dir):
"""Checks for consistency between deploy.yaml and the marathon yamls"""
the_return = True
pipeline_deploy_groups = get_pipeline_deploy_groups(
service=service, soa_dir=soa_dir
)
framework_deploy_groups = {}
in_deploy_not_frameworks = set(pipeline_deploy_groups)
for it in INSTANCE_TYPES:
framework_deploy_groups[it] = get_deploy_groups_used_by_framework(
it, service, soa_dir
)
in_framework_not_deploy = set(framework_deploy_groups[it]) - set(
pipeline_deploy_groups
)
in_deploy_not_frameworks -= set(framework_deploy_groups[it])
if len(in_framework_not_deploy) > 0:
print(
"{} There are some instance(s) you have asked to run in {} that".format(
x_mark(), it
)
)
print(" do not have a corresponding entry in deploy.yaml:")
print(" %s" % PaastaColors.bold(", ".join(in_framework_not_deploy)))
print(" You should probably configure these to use a 'deploy_group' or")
print(
" add entries to deploy.yaml for them so they are deployed to those clusters."
)
the_return = False
if len(in_deploy_not_frameworks) > 0:
print(
"%s There are some instance(s) in deploy.yaml that are not referenced"
% x_mark()
)
print(" by any marathon or adhoc instance:")
print(" %s" % PaastaColors.bold((", ".join(in_deploy_not_frameworks))))
print(
" You should probably delete these deploy.yaml entries if they are unused."
)
the_return = False
if the_return is True:
print(success("All entries in deploy.yaml correspond to a paasta instance"))
for it in INSTANCE_TYPES:
if len(framework_deploy_groups[it]) > 0:
print(
success(
"All %s instances have a corresponding deploy.yaml entry" % it
)
)
return the_return
def sensu_check(service, service_path, soa_dir):
"""Check whether monitoring.yaml exists in service directory,
and that the team name is declared.
:param service: name of service currently being examined
:param service_path: path to location of monitoring.yaml file"""
if is_file_in_dir("monitoring.yaml", service_path):
print(PaastaCheckMessages.SENSU_MONITORING_FOUND)
team = get_team(service=service, overrides={}, soa_dir=soa_dir)
if team is None:
print(PaastaCheckMessages.SENSU_TEAM_MISSING)
else:
print(PaastaCheckMessages.sensu_team_found(team))
else:
print(PaastaCheckMessages.SENSU_MONITORING_MISSING)
def service_dir_check(service, soa_dir):
"""Check whether directory service exists in /nail/etc/services
:param service: string of service name we wish to inspect
"""
try:
validate_service_name(service, soa_dir)
print(PaastaCheckMessages.service_dir_found(service, soa_dir))
except NoSuchService:
print(PaastaCheckMessages.service_dir_missing(service, soa_dir))
def smartstack_check(service, service_path, soa_dir):
"""Check whether smartstack.yaml exists in service directory, and the proxy
ports are declared. Print appropriate message depending on outcome.
:param service: name of service currently being examined
:param service_path: path to location of smartstack.yaml file"""
if is_file_in_dir("smartstack.yaml", service_path):
print(PaastaCheckMessages.SMARTSTACK_YAML_FOUND)
instances = get_all_namespaces_for_service(service=service, soa_dir=soa_dir)
if len(instances) > 0:
for namespace, config in get_all_namespaces_for_service(
service=service, soa_dir=soa_dir, full_name=False
):
if "proxy_port" in config:
print(
PaastaCheckMessages.smartstack_port_found(
namespace, config.get("proxy_port")
)
)
else:
print(PaastaCheckMessages.SMARTSTACK_PORT_MISSING)
else:
print(PaastaCheckMessages.SMARTSTACK_PORT_MISSING)
def paasta_check(args):
"""Analyze the service in the PWD to determine if it is paasta ready
:param args: argparse.Namespace obj created from sys.args by cli"""
soa_dir = args.yelpsoa_config_root
service = figure_out_service_name(args, soa_dir)
service_path = os.path.join(soa_dir, service)
service_dir_check(service, soa_dir)
deploy_check(service_path)
deploy_has_security_check(service, soa_dir)
git_repo_check(service, soa_dir)
docker_check()
makefile_check()
deployments_check(service, soa_dir)
sensu_check(service, service_path, soa_dir)
smartstack_check(service, service_path, soa_dir)
paasta_validate_soa_configs(service, service_path)
def read_dockerfile_lines(path):
with open(path, "r") as dockerfile:
return dockerfile.readlines()
|
import numpy as np
import pytest
from numpy.testing import (assert_array_almost_equal, assert_array_equal)
from mne import io
from mne.time_frequency import psd_array_welch
from mne.decoding.ssd import SSD
from mne.utils import requires_sklearn
from mne.filter import filter_data
from mne import create_info
from mne.decoding import CSP
freqs_sig = 9, 12
freqs_noise = 8, 13
def simulate_data(freqs_sig=[9, 12], n_trials=100, n_channels=20,
n_samples=500, samples_per_second=250,
n_components=5, SNR=0.05, random_state=42):
"""Simulate data according to an instantaneous mixin model.
Data are simulated in the statistical source space, where n=n_components
sources contain the peak of interest.
"""
rng = np.random.RandomState(random_state)
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1,
fir_design='firwin')
    # generate an orthogonal mixing matrix
mixing_mat = np.linalg.svd(rng.randn(n_channels, n_channels))[0]
# define sources
S_s = rng.randn(n_trials * n_samples, n_components)
# filter source in the specific freq. band of interest
S_s = filter_data(S_s.T, samples_per_second, **filt_params_signal).T
S_n = rng.randn(n_trials * n_samples, n_channels - n_components)
S = np.hstack((S_s, S_n))
# mix data
X_s = np.dot(mixing_mat[:, :n_components], S_s.T).T
X_n = np.dot(mixing_mat[:, n_components:], S_n.T).T
# add noise
X_s = X_s / np.linalg.norm(X_s, 'fro')
X_n = X_n / np.linalg.norm(X_n, 'fro')
X = SNR * X_s + (1 - SNR) * X_n
X = X.T
S = S.T
return X, mixing_mat, S
@pytest.mark.slowtest
def test_ssd():
"""Test Common Spatial Patterns algorithm on raw data."""
X, A, S = simulate_data()
sf = 250
n_channels = X.shape[0]
info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg')
n_components_true = 5
# Init
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
ssd = SSD(info, filt_params_signal, filt_params_noise)
# freq no int
freq = 'foo'
filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
with pytest.raises(TypeError, match='must be an instance '):
ssd = SSD(info, filt_params_signal, filt_params_noise)
# Wrongly specified noise band
freq = 2
filt_params_signal = dict(l_freq=freq, h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
with pytest.raises(ValueError, match='Wrongly specified '):
ssd = SSD(info, filt_params_signal, filt_params_noise)
# filt param no dict
filt_params_signal = freqs_sig
filt_params_noise = freqs_noise
with pytest.raises(ValueError, match='must be defined'):
ssd = SSD(info, filt_params_signal, filt_params_noise)
# Data type
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
ssd = SSD(info, filt_params_signal, filt_params_noise)
raw = io.RawArray(X, info)
pytest.raises(TypeError, ssd.fit, raw)
# More than 1 channel type
ch_types = np.reshape([['mag'] * 10, ['eeg'] * 10], n_channels)
info_2 = create_info(ch_names=n_channels, sfreq=sf, ch_types=ch_types)
with pytest.raises(ValueError, match='At this point SSD'):
ssd = SSD(info_2, filt_params_signal, filt_params_noise)
# Number of channels
info_3 = create_info(ch_names=n_channels + 1, sfreq=sf, ch_types='eeg')
ssd = SSD(info_3, filt_params_signal, filt_params_noise)
pytest.raises(ValueError, ssd.fit, X)
# Fit
n_components = 10
ssd = SSD(info, filt_params_signal, filt_params_noise,
n_components=n_components)
# Call transform before fit
pytest.raises(AttributeError, ssd.transform, X)
# Check outputs
ssd.fit(X)
assert (ssd.filters_.shape == (n_channels, n_channels))
assert (ssd.patterns_.shape == (n_channels, n_channels))
# Transform
X_ssd = ssd.fit_transform(X)
assert (X_ssd.shape[0] == n_components)
# back and forward
ssd = SSD(info, filt_params_signal, filt_params_noise,
n_components=None, sort_by_spectral_ratio=False)
ssd.fit(X)
X_denoised = ssd.apply(X)
assert_array_almost_equal(X_denoised, X)
# Power ratio ordering
spec_ratio, _ = ssd.get_spectral_ratio(ssd.transform(X))
    # since we know that the number of true components is 5, the relative
    # difference should be low for the first 5 components and then increase
index_diff = np.argmax(-np.diff(spec_ratio))
assert index_diff == n_components_true - 1
# Check detected peaks
# fit ssd
n_components = n_components_true
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=1, h_trans_bandwidth=1)
ssd = SSD(info, filt_params_signal, filt_params_noise,
n_components=n_components, sort_by_spectral_ratio=False)
ssd.fit(X)
out = ssd.transform(X)
psd_out, _ = psd_array_welch(out[0], sfreq=250, n_fft=250)
psd_S, _ = psd_array_welch(S[0], sfreq=250, n_fft=250)
corr = np.abs(np.corrcoef((psd_out, psd_S))[0, 1])
assert np.abs(corr) > 0.95
# Check pattern estimation
# Since there is no exact ordering of the recovered patterns
# a pair-wise greedy search will be done
error = list()
for ii in range(n_channels):
corr = np.abs(np.corrcoef(ssd.patterns_[ii, :].T, A[:, 0])[0, 1])
error.append(1 - corr)
min_err = np.min(error)
assert min_err < 0.3 # threshold taken from SSD original paper
def test_ssd_epoched_data():
"""Test Common Spatial Patterns algorithm on epoched data.
Compare the outputs when raw data is used.
"""
X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500)
sf = 250
n_channels = X.shape[0]
info = create_info(ch_names=n_channels, sfreq=sf, ch_types='eeg')
n_components_true = 5
# Build epochs as sliding windows over the continuous raw file
# Epoch length is 1 second
X_e = np.reshape(X, (100, 20, 500))
# Fit
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=4, h_trans_bandwidth=4)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=4, h_trans_bandwidth=4)
# ssd on epochs
ssd_e = SSD(info, filt_params_signal, filt_params_noise)
ssd_e.fit(X_e)
# ssd on raw
ssd = SSD(info, filt_params_signal, filt_params_noise)
ssd.fit(X)
    # Check that the first 5 components are the same for both
_, sorter_spec_e = ssd_e.get_spectral_ratio(ssd_e.transform(X_e))
_, sorter_spec = ssd.get_spectral_ratio(ssd.transform(X))
assert_array_equal(sorter_spec_e[:n_components_true],
sorter_spec[:n_components_true])
@requires_sklearn
def test_ssd_pipeline():
"""Test if SSD works in a pipeline."""
from sklearn.pipeline import Pipeline
sf = 250
X, A, S = simulate_data(n_trials=100, n_channels=20, n_samples=500)
X_e = np.reshape(X, (100, 20, 500))
    # define binary random output
y = np.random.randint(2, size=100)
info = create_info(ch_names=20, sfreq=sf, ch_types='eeg')
filt_params_signal = dict(l_freq=freqs_sig[0], h_freq=freqs_sig[1],
l_trans_bandwidth=4, h_trans_bandwidth=4)
filt_params_noise = dict(l_freq=freqs_noise[0], h_freq=freqs_noise[1],
l_trans_bandwidth=4, h_trans_bandwidth=4)
ssd = SSD(info, filt_params_signal, filt_params_noise)
csp = CSP()
pipe = Pipeline([('SSD', ssd), ('CSP', csp)])
pipe.set_params(SSD__n_components=5)
pipe.set_params(CSP__n_components=2)
out = pipe.fit_transform(X_e, y)
assert (out.shape == (100, 2))
assert (pipe.get_params()['SSD__n_components'] == 5)
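# Minimal usage sketch under the same assumptions as these tests (synthetic
# 250 Hz data, a 9-12 Hz signal band flanked by an 8-13 Hz noise band):
#     ssd = SSD(info, filt_params_signal, filt_params_noise, n_components=5)
#     X_ssd = ssd.fit_transform(X)  # -> (n_components, n_times)
#     spec_ratio, sorter = ssd.get_spectral_ratio(ssd.transform(X))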
|
import logging
from typing import Union
from pymata_express.pymata_express import PymataExpress
from pymata_express.pymata_express_serial import serial
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_LIGHTS,
CONF_NAME,
CONF_SENSORS,
CONF_SWITCHES,
)
from .const import (
CONF_ARDUINO_INSTANCE_ID,
CONF_ARDUINO_WAIT,
CONF_SAMPLING_INTERVAL,
CONF_SERIAL_BAUD_RATE,
CONF_SERIAL_PORT,
CONF_SLEEP_TUNE,
PIN_TYPE_ANALOG,
PIN_TYPE_DIGITAL,
)
_LOGGER = logging.getLogger(__name__)
FirmataPinType = Union[int, str]
class FirmataBoard:
"""Manages a single Firmata board."""
def __init__(self, config: dict):
"""Initialize the board."""
self.config = config
self.api = None
self.firmware_version = None
self.protocol_version = None
self.name = self.config[CONF_NAME]
self.switches = []
self.lights = []
self.binary_sensors = []
self.sensors = []
self.used_pins = []
if CONF_SWITCHES in self.config:
self.switches = self.config[CONF_SWITCHES]
if CONF_LIGHTS in self.config:
self.lights = self.config[CONF_LIGHTS]
if CONF_BINARY_SENSORS in self.config:
self.binary_sensors = self.config[CONF_BINARY_SENSORS]
if CONF_SENSORS in self.config:
self.sensors = self.config[CONF_SENSORS]
async def async_setup(self, tries=0) -> bool:
"""Set up a Firmata instance."""
try:
_LOGGER.debug("Connecting to Firmata %s", self.name)
self.api = await get_board(self.config)
except RuntimeError as err:
_LOGGER.error("Error connecting to PyMata board %s: %s", self.name, err)
return False
except serial.serialutil.SerialTimeoutException as err:
_LOGGER.error(
"Timeout writing to serial port for PyMata board %s: %s", self.name, err
)
return False
except serial.serialutil.SerialException as err:
_LOGGER.error(
"Error connecting to serial port for PyMata board %s: %s",
self.name,
err,
)
return False
self.firmware_version = await self.api.get_firmware_version()
if not self.firmware_version:
_LOGGER.error(
"Error retrieving firmware version from Firmata board %s", self.name
)
return False
if CONF_SAMPLING_INTERVAL in self.config:
try:
await self.api.set_sampling_interval(
self.config[CONF_SAMPLING_INTERVAL]
)
except RuntimeError as err:
_LOGGER.error(
"Error setting sampling interval for PyMata \
board %s: %s",
self.name,
err,
)
return False
_LOGGER.debug("Firmata connection successful for %s", self.name)
return True
async def async_reset(self) -> bool:
"""Reset the board to default state."""
_LOGGER.debug("Shutting down board %s", self.name)
# If the board was never setup, continue.
if self.api is None:
return True
await self.api.shutdown()
self.api = None
return True
def mark_pin_used(self, pin: FirmataPinType) -> bool:
"""Test if a pin is used already on the board or mark as used."""
if pin in self.used_pins:
return False
self.used_pins.append(pin)
return True
def get_pin_type(self, pin: FirmataPinType) -> tuple:
"""Return the type and Firmata location of a pin on the board."""
if isinstance(pin, str):
pin_type = PIN_TYPE_ANALOG
firmata_pin = int(pin[1:])
firmata_pin += self.api.first_analog_pin
else:
pin_type = PIN_TYPE_DIGITAL
firmata_pin = pin
return (pin_type, firmata_pin)
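# Worked example for get_pin_type above: if the board's first analog pin is
# Firmata pin 14, "A2" maps to (PIN_TYPE_ANALOG, 16), while a plain digital
# pin 13 stays (PIN_TYPE_DIGITAL, 13).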
async def get_board(data: dict) -> PymataExpress:
"""Create a Pymata board object."""
board_data = {}
if CONF_SERIAL_PORT in data:
board_data["com_port"] = data[CONF_SERIAL_PORT]
if CONF_SERIAL_BAUD_RATE in data:
board_data["baud_rate"] = data[CONF_SERIAL_BAUD_RATE]
if CONF_ARDUINO_INSTANCE_ID in data:
board_data["arduino_instance_id"] = data[CONF_ARDUINO_INSTANCE_ID]
if CONF_ARDUINO_WAIT in data:
board_data["arduino_wait"] = data[CONF_ARDUINO_WAIT]
if CONF_SLEEP_TUNE in data:
board_data["sleep_tune"] = data[CONF_SLEEP_TUNE]
board_data["autostart"] = False
board_data["shutdown_on_exception"] = True
board_data["close_loop_on_shutdown"] = False
board = PymataExpress(**board_data)
await board.start_aio()
return board
|
from datetime import timedelta
import logging
import pocketcasts
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:rss"
SENSOR_NAME = "Pocketcasts unlistened episodes"
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the pocketcasts platform for sensors."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
try:
api = pocketcasts.Api(username, password)
_LOGGER.debug("Found %d podcasts", len(api.my_podcasts()))
add_entities([PocketCastsSensor(api)], True)
except OSError as err:
_LOGGER.error("Connection to server failed: %s", err)
return False
class PocketCastsSensor(Entity):
"""Representation of a pocket casts sensor."""
def __init__(self, api):
"""Initialize the sensor."""
self._api = api
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return SENSOR_NAME
@property
def state(self):
"""Return the sensor state."""
return self._state
@property
def icon(self):
"""Return the icon for the sensor."""
return ICON
def update(self):
"""Update sensor values."""
try:
self._state = len(self._api.new_episodes_released())
_LOGGER.debug("Found %d new episodes", self._state)
except OSError as err:
_LOGGER.warning("Failed to contact server: %s", err)
|