import asyncio
import logging
from pyvesync import VeSync
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .common import async_process_devices
from .config_flow import configured_instances
from .const import (
DOMAIN,
SERVICE_UPDATE_DEVS,
VS_DISCOVERY,
VS_DISPATCHERS,
VS_FANS,
VS_MANAGER,
VS_SWITCHES,
)
PLATFORMS = ["switch", "fan"]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
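# For reference, the schema above accepts a configuration.yaml block of the
# form (credentials are illustrative):
#
#   vesync:
#     username: user@example.com
#     password: my-password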
async def async_setup(hass, config):
"""Set up the VeSync component."""
conf = config.get(DOMAIN)
if conf is None:
return True
if not configured_instances(hass):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: conf[CONF_USERNAME],
CONF_PASSWORD: conf[CONF_PASSWORD],
},
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up Vesync as config entry."""
username = config_entry.data[CONF_USERNAME]
password = config_entry.data[CONF_PASSWORD]
time_zone = str(hass.config.time_zone)
manager = VeSync(username, password, time_zone)
login = await hass.async_add_executor_job(manager.login)
if not login:
_LOGGER.error("Unable to login to the VeSync server")
return False
device_dict = await async_process_devices(hass, manager)
forward_setup = hass.config_entries.async_forward_entry_setup
hass.data[DOMAIN] = {}
hass.data[DOMAIN][VS_MANAGER] = manager
switches = hass.data[DOMAIN][VS_SWITCHES] = []
fans = hass.data[DOMAIN][VS_FANS] = []
hass.data[DOMAIN][VS_DISPATCHERS] = []
if device_dict[VS_SWITCHES]:
switches.extend(device_dict[VS_SWITCHES])
hass.async_create_task(forward_setup(config_entry, "switch"))
if device_dict[VS_FANS]:
fans.extend(device_dict[VS_FANS])
hass.async_create_task(forward_setup(config_entry, "fan"))
async def async_new_device_discovery(service):
"""Discover if new devices should be added."""
manager = hass.data[DOMAIN][VS_MANAGER]
switches = hass.data[DOMAIN][VS_SWITCHES]
fans = hass.data[DOMAIN][VS_FANS]
dev_dict = await async_process_devices(hass, manager)
switch_devs = dev_dict.get(VS_SWITCHES, [])
fan_devs = dev_dict.get(VS_FANS, [])
switch_set = set(switch_devs)
new_switches = list(switch_set.difference(switches))
if new_switches and switches:
switches.extend(new_switches)
async_dispatcher_send(hass, VS_DISCOVERY.format(VS_SWITCHES), new_switches)
return
if new_switches and not switches:
switches.extend(new_switches)
hass.async_create_task(forward_setup(config_entry, "switch"))
fan_set = set(fan_devs)
new_fans = list(fan_set.difference(fans))
if new_fans and fans:
fans.extend(new_fans)
async_dispatcher_send(hass, VS_DISCOVERY.format(VS_FANS), new_fans)
return
if new_fans and not fans:
fans.extend(new_fans)
hass.async_create_task(forward_setup(config_entry, "fan"))
hass.services.async_register(
DOMAIN, SERVICE_UPDATE_DEVS, async_new_device_discovery
)
return True
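# The SERVICE_UPDATE_DEVS service registered above rescans the VeSync account
# and forwards any newly found switches or fans to their platforms; it can be
# invoked from automations or Developer Tools.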
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
        hass.data.pop(DOMAIN)
return unload_ok
|
import argparse
import asyncio
import logging
import os
from homeassistant import runner
from homeassistant.auth import auth_manager_from_config
from homeassistant.auth.providers import homeassistant as hass_auth
from homeassistant.config import get_default_config_dir
from homeassistant.core import HomeAssistant
# mypy: allow-untyped-calls, allow-untyped-defs
def run(args):
"""Handle Home Assistant auth provider script."""
parser = argparse.ArgumentParser(description="Manage Home Assistant users")
parser.add_argument("--script", choices=["auth"])
parser.add_argument(
"-c",
"--config",
default=get_default_config_dir(),
help="Directory that contains the Home Assistant configuration",
)
subparsers = parser.add_subparsers(dest="func")
subparsers.required = True
parser_list = subparsers.add_parser("list")
parser_list.set_defaults(func=list_users)
parser_add = subparsers.add_parser("add")
parser_add.add_argument("username", type=str)
parser_add.add_argument("password", type=str)
parser_add.set_defaults(func=add_user)
parser_validate_login = subparsers.add_parser("validate")
parser_validate_login.add_argument("username", type=str)
parser_validate_login.add_argument("password", type=str)
parser_validate_login.set_defaults(func=validate_login)
parser_change_pw = subparsers.add_parser("change_password")
parser_change_pw.add_argument("username", type=str)
parser_change_pw.add_argument("new_password", type=str)
parser_change_pw.set_defaults(func=change_password)
asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
asyncio.run(run_command(parser.parse_args(args)))
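# Example invocations (config path and credentials are illustrative):
#   hass --script auth -c /config list
#   hass --script auth -c /config add my_user my_password
#   hass --script auth -c /config validate my_user my_password
#   hass --script auth -c /config change_password my_user new_password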
async def run_command(args):
"""Run the command."""
hass = HomeAssistant()
hass.config.config_dir = os.path.join(os.getcwd(), args.config)
hass.auth = await auth_manager_from_config(hass, [{"type": "homeassistant"}], [])
provider = hass.auth.auth_providers[0]
await provider.async_initialize()
await args.func(hass, provider, args)
# Triggers save on used storage helpers with delay (core auth)
logging.getLogger("homeassistant.core").setLevel(logging.WARNING)
await hass.async_stop()
async def list_users(hass, provider, args):
"""List the users."""
count = 0
for user in provider.data.users:
count += 1
print(user["username"])
print()
print("Total users:", count)
async def add_user(hass, provider, args):
"""Create a user."""
try:
provider.data.add_auth(args.username, args.password)
except hass_auth.InvalidUser:
print("Username already exists!")
return
# Save username/password
await provider.data.async_save()
print("Auth created")
async def validate_login(hass, provider, args):
"""Validate a login."""
try:
provider.data.validate_login(args.username, args.password)
print("Auth valid")
except hass_auth.InvalidAuth:
print("Auth invalid")
async def change_password(hass, provider, args):
"""Change password."""
try:
provider.data.change_password(args.username, args.new_password)
await provider.data.async_save()
print("Password changed")
except hass_auth.InvalidUser:
print("User not found")
|
from __future__ import print_function
import errno
import os
import sys
import rospkg
from .xmlrunner import XMLTestRunner
XML_OUTPUT_FLAG = '--gtest_output=xml:' # use gtest-compatible flag
def printlog(msg, *args):
if args:
msg = msg % args
print('[ROSUNIT]' + msg)
def printlog_bold(msg, *args):
if args:
msg = msg % args
print('\033[1m[ROSUNIT]' + msg + '\033[0m')
def printerrlog(msg, *args):
if args:
msg = msg % args
print('[ROSUNIT]' + msg, file=sys.stderr)
# this is a copy of the roslogging utility. it's been moved here as it is a common
# routine for programs accessing ROS directories
def makedirs_with_parent_perms(p):
"""
Create the directory using the permissions of the nearest
(existing) parent directory. This is useful for logging, where a
root process sometimes has to log in the user's space.
@param p: directory to create
@type p: str
"""
p = os.path.abspath(p)
parent = os.path.dirname(p)
# recurse upwards, checking to make sure we haven't reached the
# top
if not os.path.exists(p) and p and parent != p:
makedirs_with_parent_perms(parent)
s = os.stat(parent)
try:
os.mkdir(p)
except OSError as e:
if e.errno != errno.EEXIST:
raise
# if perms of new dir don't match, set anew
s2 = os.stat(p)
if s.st_uid != s2.st_uid or s.st_gid != s2.st_gid:
os.chown(p, s.st_uid, s.st_gid)
if s.st_mode != s2.st_mode:
os.chmod(p, s.st_mode)
def xml_results_file(test_pkg, test_name, is_rostest=False, env=None):
"""
@param test_pkg: name of test's package
@type test_pkg: str
    @param test_name: name of test
@type test_name: str
@param is_rostest: True if the results file is for a rostest-generated unit instance
@type is_rostest: bool
@return: name of xml results file for specified test
@rtype: str
"""
test_dir = os.path.join(rospkg.get_test_results_dir(env=env), test_pkg)
if not os.path.exists(test_dir):
try:
makedirs_with_parent_perms(test_dir)
except OSError as error:
raise IOError('cannot create test results directory [%s]: %s' % (test_dir, str(error)))
# #576: strip out chars that would bork the filename
# this is fairly primitive, but for now just trying to catch some common cases
for c in ' "\'&$!`/\\':
if c in test_name:
test_name = test_name.replace(c, '_')
if is_rostest:
return os.path.join(test_dir, 'rostest-%s.xml' % test_name)
else:
return os.path.join(test_dir, 'rosunit-%s.xml' % test_name)
def rostest_name_from_path(pkg_dir, test_file):
"""
Derive name of rostest based on file name/path. rostest follows a
certain convention defined above.
@return: name of test
@rtype: str
"""
test_file_abs = os.path.abspath(test_file)
if test_file_abs.startswith(pkg_dir):
# compute package-relative path
test_file = test_file_abs[len(pkg_dir):]
if test_file[0] == os.sep:
test_file = test_file[1:]
outname = test_file.replace(os.sep, '_')
if '.' in outname:
outname = outname[:outname.rfind('.')]
return outname
def create_xml_runner(test_pkg, test_name, results_file=None, is_rostest=False):
"""
Create the unittest test runner with XML output
@param test_pkg: package name
@type test_pkg: str
@param test_name: test name
@type test_name: str
@param is_rostest: if True, use naming scheme for rostest itself instead of individual unit test naming
@type is_rostest: bool
"""
test_name = os.path.basename(test_name)
# determine output xml file name
if not results_file:
results_file = xml_results_file(test_pkg, test_name, is_rostest)
test_dir = os.path.abspath(os.path.dirname(results_file))
if not os.path.exists(test_dir):
try:
makedirs_with_parent_perms(test_dir) # NOTE: this will pass up an error exception if it fails
except OSError as error:
raise IOError('cannot create test results directory [%s]: %s' % (test_dir, str(error)))
elif os.path.isfile(test_dir):
raise Exception('ERROR: cannot run test suite, file is preventing creation of test dir: %s' % test_dir)
print('[ROSUNIT] Outputting test results to ' + results_file)
outstream = open(results_file, 'w')
outstream.write('<?xml version="1.0" encoding="utf-8"?>\n')
return XMLTestRunner(stream=outstream)
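# Minimal usage sketch (package and test names are illustrative):
#   import unittest
#   suite = unittest.TestLoader().loadTestsFromTestCase(MyTestCase)
#   runner = create_xml_runner('my_pkg', 'my_test')
#   runner.run(suite)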
|
from homeassistant import config_entries
from homeassistant.components.zwave import const, lock
from tests.async_mock import MagicMock, patch
from tests.mock.zwave import MockEntityValues, MockNode, MockValue, value_changed
def test_get_device_detects_lock(mock_openzwave):
"""Test get_device returns a Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=None,
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert isinstance(device, lock.ZwaveLock)
def test_lock_turn_on_and_off(mock_openzwave):
"""Test turning on a Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=None,
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert not values.primary.data
device.lock()
assert values.primary.data
device.unlock()
assert not values.primary.data
def test_lock_value_changed(mock_openzwave):
"""Test value changed for Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=None,
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert not device.is_locked
values.primary.data = True
value_changed(values.primary)
assert device.is_locked
def test_lock_state_workaround(mock_openzwave):
"""Test value changed for Z-Wave lock using notification state."""
node = MockNode(manufacturer_id="0090", product_id="0440")
values = MockEntityValues(
primary=MockValue(data=True, node=node),
access_control=MockValue(data=1, node=node),
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values)
assert device.is_locked
values.access_control.data = 2
value_changed(values.access_control)
assert not device.is_locked
def test_track_message_workaround(mock_openzwave):
"""Test value changed for Z-Wave lock by alarm-clearing workaround."""
node = MockNode(
manufacturer_id="003B",
product_id="5044",
stats={"lastReceivedMessage": [0] * 6},
)
values = MockEntityValues(
primary=MockValue(data=True, node=node),
access_control=None,
alarm_type=None,
alarm_level=None,
)
# Here we simulate an RF lock. The first lock.get_device will call
# update properties, simulating the first DoorLock report. We then trigger
# a change, simulating the openzwave automatic refreshing behavior (which
# is enabled for at least the lock that needs this workaround)
node.stats["lastReceivedMessage"][5] = const.COMMAND_CLASS_DOOR_LOCK
device = lock.get_device(node=node, values=values)
value_changed(values.primary)
assert device.is_locked
assert device.device_state_attributes[lock.ATTR_NOTIFICATION] == "RF Lock"
# Simulate a keypad unlock. We trigger a value_changed() which simulates
# the Alarm notification received from the lock. Then, we trigger
# value_changed() to simulate the automatic refreshing behavior.
values.access_control = MockValue(data=6, node=node)
values.alarm_type = MockValue(data=19, node=node)
values.alarm_level = MockValue(data=3, node=node)
node.stats["lastReceivedMessage"][5] = const.COMMAND_CLASS_ALARM
value_changed(values.access_control)
node.stats["lastReceivedMessage"][5] = const.COMMAND_CLASS_DOOR_LOCK
values.primary.data = False
value_changed(values.primary)
assert not device.is_locked
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS]
== "Unlocked with Keypad by user 3"
)
# Again, simulate an RF lock.
device.lock()
node.stats["lastReceivedMessage"][5] = const.COMMAND_CLASS_DOOR_LOCK
value_changed(values.primary)
assert device.is_locked
assert device.device_state_attributes[lock.ATTR_NOTIFICATION] == "RF Lock"
def test_v2btze_value_changed(mock_openzwave):
"""Test value changed for v2btze Z-Wave lock."""
node = MockNode(manufacturer_id="010e", product_id="0002")
values = MockEntityValues(
primary=MockValue(data=None, node=node),
v2btze_advanced=MockValue(data="Advanced", node=node),
access_control=MockValue(data=19, node=node),
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert device._v2btze
assert not device.is_locked
values.access_control.data = 24
value_changed(values.primary)
assert device.is_locked
def test_alarm_type_workaround(mock_openzwave):
"""Test value changed for Z-Wave lock using alarm type."""
node = MockNode(manufacturer_id="0109", product_id="0000")
values = MockEntityValues(
primary=MockValue(data=True, node=node),
access_control=None,
alarm_type=MockValue(data=16, node=node),
alarm_level=None,
)
device = lock.get_device(node=node, values=values)
assert not device.is_locked
values.alarm_type.data = 18
value_changed(values.alarm_type)
assert device.is_locked
values.alarm_type.data = 19
value_changed(values.alarm_type)
assert not device.is_locked
values.alarm_type.data = 21
value_changed(values.alarm_type)
assert device.is_locked
values.alarm_type.data = 22
value_changed(values.alarm_type)
assert not device.is_locked
values.alarm_type.data = 24
value_changed(values.alarm_type)
assert device.is_locked
values.alarm_type.data = 25
value_changed(values.alarm_type)
assert not device.is_locked
values.alarm_type.data = 27
value_changed(values.alarm_type)
assert device.is_locked
def test_lock_access_control(mock_openzwave):
"""Test access control for Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=MockValue(data=11, node=node),
alarm_type=None,
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert device.device_state_attributes[lock.ATTR_NOTIFICATION] == "Lock Jammed"
def test_lock_alarm_type(mock_openzwave):
"""Test alarm type for Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=None,
alarm_type=MockValue(data=None, node=node),
alarm_level=None,
)
device = lock.get_device(node=node, values=values, node_config={})
assert lock.ATTR_LOCK_STATUS not in device.device_state_attributes
values.alarm_type.data = 21
value_changed(values.alarm_type)
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS] == "Manually Locked None"
)
values.alarm_type.data = 18
value_changed(values.alarm_type)
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS]
== "Locked with Keypad by user None"
)
values.alarm_type.data = 161
value_changed(values.alarm_type)
assert device.device_state_attributes[lock.ATTR_LOCK_STATUS] == "Tamper Alarm: None"
values.alarm_type.data = 9
value_changed(values.alarm_type)
assert device.device_state_attributes[lock.ATTR_LOCK_STATUS] == "Deadbolt Jammed"
def test_lock_alarm_level(mock_openzwave):
"""Test alarm level for Z-Wave lock."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(data=None, node=node),
access_control=None,
alarm_type=MockValue(data=None, node=node),
alarm_level=MockValue(data=None, node=node),
)
device = lock.get_device(node=node, values=values, node_config={})
assert lock.ATTR_LOCK_STATUS not in device.device_state_attributes
values.alarm_type.data = 21
values.alarm_level.data = 1
value_changed(values.alarm_type)
value_changed(values.alarm_level)
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS]
== "Manually Locked by Key Cylinder or Inside thumb turn"
)
values.alarm_type.data = 18
values.alarm_level.data = "alice"
value_changed(values.alarm_type)
value_changed(values.alarm_level)
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS]
== "Locked with Keypad by user alice"
)
values.alarm_type.data = 161
values.alarm_level.data = 1
value_changed(values.alarm_type)
value_changed(values.alarm_level)
assert (
device.device_state_attributes[lock.ATTR_LOCK_STATUS]
== "Tamper Alarm: Too many keypresses"
)
async def setup_ozw(hass, mock_openzwave):
"""Set up the mock ZWave config entry."""
hass.config.components.add("zwave")
config_entry = config_entries.ConfigEntry(
1,
"zwave",
"Mock Title",
{"usb_path": "mock-path", "network_key": "mock-key"},
"test",
config_entries.CONN_CLASS_LOCAL_PUSH,
system_options={},
)
await hass.config_entries.async_forward_entry_setup(config_entry, "lock")
await hass.async_block_till_done()
async def test_lock_set_usercode_service(hass, mock_openzwave):
"""Test the zwave lock set_usercode service."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode(node_id=12)
value0 = MockValue(data=" ", node=node, index=0)
value1 = MockValue(data=" ", node=node, index=1)
node.get_values.return_value = {value0.value_id: value0, value1.value_id: value1}
mock_network.nodes = {node.node_id: node}
await setup_ozw(hass, mock_openzwave)
await hass.async_block_till_done()
await hass.services.async_call(
lock.DOMAIN,
lock.SERVICE_SET_USERCODE,
{
const.ATTR_NODE_ID: node.node_id,
lock.ATTR_USERCODE: "1234",
lock.ATTR_CODE_SLOT: 1,
},
)
await hass.async_block_till_done()
assert value1.data == "1234"
mock_network.nodes = {node.node_id: node}
await hass.services.async_call(
lock.DOMAIN,
lock.SERVICE_SET_USERCODE,
{
const.ATTR_NODE_ID: node.node_id,
lock.ATTR_USERCODE: "123",
lock.ATTR_CODE_SLOT: 1,
},
)
await hass.async_block_till_done()
assert value1.data == "1234"
async def test_lock_get_usercode_service(hass, mock_openzwave):
"""Test the zwave lock get_usercode service."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode(node_id=12)
value0 = MockValue(data=None, node=node, index=0)
value1 = MockValue(data="1234", node=node, index=1)
node.get_values.return_value = {value0.value_id: value0, value1.value_id: value1}
await setup_ozw(hass, mock_openzwave)
await hass.async_block_till_done()
with patch.object(lock, "_LOGGER") as mock_logger:
mock_network.nodes = {node.node_id: node}
await hass.services.async_call(
lock.DOMAIN,
lock.SERVICE_GET_USERCODE,
{const.ATTR_NODE_ID: node.node_id, lock.ATTR_CODE_SLOT: 1},
)
await hass.async_block_till_done()
# This service only seems to write to the log
assert mock_logger.info.called
assert len(mock_logger.info.mock_calls) == 1
assert mock_logger.info.mock_calls[0][1][2] == "1234"
async def test_lock_clear_usercode_service(hass, mock_openzwave):
"""Test the zwave lock clear_usercode service."""
mock_network = hass.data[const.DATA_NETWORK] = MagicMock()
node = MockNode(node_id=12)
value0 = MockValue(data=None, node=node, index=0)
value1 = MockValue(data="123", node=node, index=1)
node.get_values.return_value = {value0.value_id: value0, value1.value_id: value1}
mock_network.nodes = {node.node_id: node}
await setup_ozw(hass, mock_openzwave)
await hass.async_block_till_done()
await hass.services.async_call(
lock.DOMAIN,
lock.SERVICE_CLEAR_USERCODE,
{const.ATTR_NODE_ID: node.node_id, lock.ATTR_CODE_SLOT: 1},
)
await hass.async_block_till_done()
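    # Clearing a slot overwrites each character of the stored code with "\0",
    # so the three-character code "123" becomes "\0\0\0".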
assert value1.data == "\0\0\0"
|
import logging
from typing import Any, Callable, Optional
import voluptuous as vol
from .const import CAT_ENTITIES
from .entities import ENTITY_POLICY_SCHEMA, compile_entities
from .merge import merge_policies # noqa: F401
from .models import PermissionLookup
from .types import PolicyType
from .util import test_all
POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
_LOGGER = logging.getLogger(__name__)
class AbstractPermissions:
"""Default permissions class."""
_cached_entity_func: Optional[Callable[[str, str], bool]] = None
def _entity_func(self) -> Callable[[str, str], bool]:
"""Return a function that can test entity access."""
raise NotImplementedError
def access_all_entities(self, key: str) -> bool:
"""Check if we have a certain access to all entities."""
raise NotImplementedError
def check_entity(self, entity_id: str, key: str) -> bool:
"""Check if we can access entity."""
entity_func = self._cached_entity_func
if entity_func is None:
entity_func = self._cached_entity_func = self._entity_func()
return entity_func(entity_id, key)
class PolicyPermissions(AbstractPermissions):
"""Handle permissions."""
def __init__(self, policy: PolicyType, perm_lookup: PermissionLookup) -> None:
"""Initialize the permission class."""
self._policy = policy
self._perm_lookup = perm_lookup
def access_all_entities(self, key: str) -> bool:
"""Check if we have a certain access to all entities."""
return test_all(self._policy.get(CAT_ENTITIES), key)
def _entity_func(self) -> Callable[[str, str], bool]:
"""Return a function that can test entity access."""
return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)
def __eq__(self, other: Any) -> bool:
"""Equals check."""
return isinstance(other, PolicyPermissions) and other._policy == self._policy
class _OwnerPermissions(AbstractPermissions):
"""Owner permissions."""
def access_all_entities(self, key: str) -> bool:
"""Check if we have a certain access to all entities."""
return True
def _entity_func(self) -> Callable[[str, str], bool]:
"""Return a function that can test entity access."""
return lambda entity_id, key: True
OwnerPermissions = _OwnerPermissions()
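# Minimal usage sketch (the policy shape follows POLICY_SCHEMA; the entity ids,
# access key and perm_lookup value are illustrative):
#   policy = {"entities": {"entity_ids": {"light.kitchen": True}}}
#   perms = PolicyPermissions(policy, perm_lookup)
#   perms.check_entity("light.kitchen", "read")  # True
#   perms.check_entity("light.other", "read")    # False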
|
from Handler import Handler
import time
try:
import pika
except ImportError:
pika = None
class rmqHandler (Handler):
"""
Implements the abstract Handler class
Sending data to a RabbitMQ pub/sub channel
"""
def __init__(self, config=None):
"""
Create a new instance of rmqHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
if pika is None:
self.log.error('pika import failed. Handler disabled')
self.enabled = False
return
# Initialize Data
self.connections = {}
self.channels = {}
self.reconnect_interval = 1
# Initialize Options
tmp_rmq_server = self.config['rmq_server']
if type(tmp_rmq_server) is list:
self.rmq_server = tmp_rmq_server
else:
self.rmq_server = [tmp_rmq_server]
self.rmq_port = 5672
self.rmq_exchange = self.config['rmq_exchange']
self.rmq_user = None
self.rmq_password = None
self.rmq_vhost = '/'
self.rmq_exchange_type = 'fanout'
self.rmq_durable = True
self.rmq_heartbeat_interval = 300
self.get_config()
# Create rabbitMQ pub socket and bind
try:
self._bind_all()
except pika.exceptions.AMQPConnectionError:
self.log.error('Failed to bind to rabbitMQ pub socket')
def get_config(self):
""" Get and set config options from config file """
if 'rmq_port' in self.config:
self.rmq_port = int(self.config['rmq_port'])
if 'rmq_user' in self.config:
self.rmq_user = self.config['rmq_user']
if 'rmq_password' in self.config:
self.rmq_password = self.config['rmq_password']
if 'rmq_vhost' in self.config:
self.rmq_vhost = self.config['rmq_vhost']
if 'rmq_exchange_type' in self.config:
self.rmq_exchange_type = self.config['rmq_exchange_type']
if 'rmq_durable' in self.config:
self.rmq_durable = bool(self.config['rmq_durable'])
if 'rmq_heartbeat_interval' in self.config:
self.rmq_heartbeat_interval = int(
self.config['rmq_heartbeat_interval'])
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(rmqHandler, self).get_default_config_help()
config.update({
            'rmq_server': '',
'rmq_exchange': '',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(rmqHandler, self).get_default_config()
config.update({
            'rmq_server': '127.0.0.1',
'rmq_exchange': 'diamond',
})
return config
def _bind_all(self):
"""
Bind all RMQ servers defined in config
"""
for rmq_server in self.rmq_server:
self._bind(rmq_server)
def _bind(self, rmq_server):
"""
Create PUB socket and bind
"""
if ((rmq_server in self.connections.keys() and
self.connections[rmq_server] is not None and
self.connections[rmq_server].is_open)):
# It seems we already have this server, so let's try _unbind just
# to be safe.
self._unbind(rmq_server)
credentials = None
if self.rmq_user and self.rmq_password:
credentials = pika.PlainCredentials(
self.rmq_user,
self.rmq_password)
parameters = pika.ConnectionParameters(
host=rmq_server,
port=self.rmq_port,
virtual_host=self.rmq_vhost,
credentials=credentials,
heartbeat_interval=self.rmq_heartbeat_interval,
retry_delay=5,
connection_attempts=3)
self.connections[rmq_server] = None
while (self.connections[rmq_server] is None or
self.connections[rmq_server].is_open is False):
try:
self.connections[rmq_server] = pika.BlockingConnection(
parameters)
self.channels[rmq_server] = self.connections[
rmq_server].channel()
self.channels[rmq_server].exchange_declare(
exchange=self.rmq_exchange,
type=self.rmq_exchange_type,
durable=self.rmq_durable)
# Reset reconnect_interval after a successful connection
self.reconnect_interval = 1
except Exception as exception:
self.log.debug("Caught exception in _bind: %s", exception)
if rmq_server in self.connections.keys():
self._unbind(rmq_server)
if self.reconnect_interval >= 16:
break
if self.reconnect_interval < 16:
self.reconnect_interval = self.reconnect_interval * 2
time.sleep(self.reconnect_interval)
def _unbind(self, rmq_server=None):
""" Close AMQP connection and unset channel """
try:
self.connections[rmq_server].close()
except AttributeError:
pass
self.connections[rmq_server] = None
self.channels[rmq_server] = None
def __del__(self):
"""
Destroy instance of the rmqHandler class
"""
if hasattr(self, 'connections'):
for rmq_server in self.connections.keys():
self._unbind(rmq_server)
def process(self, metric):
"""
Process a metric and send it to RMQ pub socket
"""
for rmq_server in self.connections.keys():
try:
if ((self.connections[rmq_server] is None or
self.connections[rmq_server].is_open is False)):
self._bind(rmq_server)
channel = self.channels[rmq_server]
channel.basic_publish(exchange=self.rmq_exchange,
routing_key='', body="%s" % metric)
except Exception as exception:
self.log.error(
"Failed publishing to %s, attempting reconnect",
rmq_server)
self.log.debug("Caught exception: %s", exception)
self._unbind(rmq_server)
self._bind(rmq_server)
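# Example diamond.conf handler section (host names are illustrative):
#   [[rmqHandler]]
#   rmq_server = rabbit1.example.com, rabbit2.example.com
#   rmq_exchange = diamond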
|
from datetime import timedelta
from requests.exceptions import HTTPError
from homeassistant.components.fritzbox.const import (
ATTR_STATE_DEVICE_LOCKED,
ATTR_STATE_LOCKED,
ATTR_TEMPERATURE_UNIT,
ATTR_TOTAL_CONSUMPTION,
ATTR_TOTAL_CONSUMPTION_UNIT,
DOMAIN as FB_DOMAIN,
)
from homeassistant.components.switch import ATTR_CURRENT_POWER_W, DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_TEMPERATURE,
ENERGY_KILO_WATT_HOUR,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
TEMP_CELSIUS,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import MOCK_CONFIG, FritzDeviceSwitchMock
from tests.async_mock import Mock
from tests.common import async_fire_time_changed
ENTITY_ID = f"{DOMAIN}.fake_name"
async def setup_fritzbox(hass: HomeAssistantType, config: dict):
"""Set up mock AVM Fritz!Box."""
assert await async_setup_component(hass, FB_DOMAIN, config)
await hass.async_block_till_done()
async def test_setup(hass: HomeAssistantType, fritz: Mock):
"""Test setup of platform."""
device = FritzDeviceSwitchMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.state == STATE_ON
assert state.attributes[ATTR_CURRENT_POWER_W] == 5.678
assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name"
assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device"
assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked"
assert state.attributes[ATTR_TEMPERATURE] == "135"
assert state.attributes[ATTR_TEMPERATURE_UNIT] == TEMP_CELSIUS
assert state.attributes[ATTR_TOTAL_CONSUMPTION] == "1.234"
assert state.attributes[ATTR_TOTAL_CONSUMPTION_UNIT] == ENERGY_KILO_WATT_HOUR
async def test_turn_on(hass: HomeAssistantType, fritz: Mock):
"""Test turn device on."""
device = FritzDeviceSwitchMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
)
assert device.set_switch_state_on.call_count == 1
async def test_turn_off(hass: HomeAssistantType, fritz: Mock):
"""Test turn device off."""
device = FritzDeviceSwitchMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
)
assert device.set_switch_state_off.call_count == 1
async def test_update(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceSwitchMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert device.update.call_count == 0
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert device.update.call_count == 1
assert fritz().login.call_count == 1
async def test_update_error(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceSwitchMock()
device.update.side_effect = HTTPError("Boom")
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert device.update.call_count == 0
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert device.update.call_count == 1
assert fritz().login.call_count == 2
|
from flexx import flx
class Example(flx.Widget):
CSS = '''
.flx-TreeWidget {
background: #000;
color: #afa;
}
'''
def init(self):
with flx.HSplit():
self.label = flx.Label(flex=1, style='overflow-y: scroll;')
with flx.TreeWidget(flex=1, max_selected=1) as self.tree:
for t in ['foo', 'bar', 'spam', 'eggs']:
with flx.TreeItem(text=t, checked=None):
for i in range(4):
item2 = flx.TreeItem(text=t + ' %i' % i, checked=False)
if i == 2:
with item2:
flx.TreeItem(title='A', text='more info on A')
flx.TreeItem(title='B', text='more info on B')
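    # The '**' in the connection strings below makes the reaction connect to
    # matching events on all current and future items anywhere in the tree.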
@flx.reaction('tree.children**.checked', 'tree.children**.selected',
'tree.children**.collapsed')
def on_event(self, *events):
for ev in events:
id = ev.source.title or ev.source.text
if ev.new_value:
text = id + ' was ' + ev.type
else:
text = id + ' was ' + 'un-' + ev.type
self.label.set_html(text + '<br />' + self.label.html)
if __name__ == '__main__':
m = flx.launch(Example)
flx.run()
|
import unittest
from mock import Mock, call
from trashcli.restore import RestoreAskingTheUser
class TestRestoreAskingTheUser(unittest.TestCase):
def setUp(self):
self.input = Mock(spec=[''])
self.println = Mock(spec=[''])
self.restore = Mock(spec=[''])
self.die = Mock(spec=[''])
self.restorer = RestoreAskingTheUser(self.input,
self.println,
self.restore,
self.die)
def test(self):
self.input.return_value = '0'
self.restorer.restore_asking_the_user(['trashed_file1',
'trashed_file2'])
self.assertEqual([call('What file to restore [0..1]: ')],
self.input.mock_calls)
self.assertEqual([], self.println.mock_calls)
        self.assertEqual([call('trashed_file1')],
self.restore.mock_calls)
self.assertEqual([], self.die.mock_calls)
def test2(self):
self.input.side_effect = KeyboardInterrupt
self.restorer.restore_asking_the_user(['trashed_file1',
'trashed_file2'])
self.assertEqual([call('What file to restore [0..1]: ')],
self.input.mock_calls)
self.assertEqual([], self.println.mock_calls)
self.assertEqual([], self.restore.mock_calls)
self.assertEqual([call('')], self.die.mock_calls)
|
from homeassistant import data_entry_flow
from homeassistant.components.axis import config_flow
from homeassistant.components.axis.const import (
CONF_EVENTS,
CONF_MODEL,
CONF_STREAM_PROFILE,
DEFAULT_STREAM_PROFILE,
DOMAIN as AXIS_DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import (
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from .test_device import MAC, MODEL, NAME, setup_axis_integration, vapix_request
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_flow_manual_configuration(hass):
"""Test that config flow works."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"M1065-LW - {MAC}"
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
CONF_MAC: MAC,
CONF_MODEL: "M1065-LW",
CONF_NAME: "M1065-LW 0",
}
async def test_manual_configuration_update_configuration(hass):
"""Test that config flow fails on already configured device."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch(
"homeassistant.components.axis.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "2.3.4.5",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert device.host == "2.3.4.5"
assert len(mock_setup_entry.mock_calls) == 1
async def test_flow_fails_already_configured(hass):
"""Test that config flow fails on already configured device."""
await setup_axis_integration(hass)
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_fails_faulty_credentials(hass):
"""Test that config flow fails on faulty credentials."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch(
"homeassistant.components.axis.config_flow.get_device",
side_effect=config_flow.AuthenticationRequired,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["errors"] == {"base": "invalid_auth"}
async def test_flow_fails_cannot_connect(hass):
"""Test that config flow fails on cannot connect."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch(
"homeassistant.components.axis.config_flow.get_device",
side_effect=config_flow.CannotConnect,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_create_entry_multiple_existing_entries_of_same_model(hass):
"""Test that create entry can generate a name with other entries."""
entry = MockConfigEntry(
domain=AXIS_DOMAIN,
data={CONF_NAME: "M1065-LW 0", CONF_MODEL: "M1065-LW"},
)
entry.add_to_hass(hass)
entry2 = MockConfigEntry(
domain=AXIS_DOMAIN,
data={CONF_NAME: "M1065-LW 1", CONF_MODEL: "M1065-LW"},
)
entry2.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"M1065-LW - {MAC}"
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
CONF_MAC: MAC,
CONF_MODEL: "M1065-LW",
CONF_NAME: "M1065-LW 2",
}
assert result["data"][CONF_NAME] == "M1065-LW 2"
async def test_zeroconf_flow(hass):
"""Test that zeroconf discovery for new devices work."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN,
data={
CONF_HOST: "1.2.3.4",
CONF_PORT: 80,
"hostname": "name",
"properties": {"macaddress": MAC},
},
context={"source": SOURCE_ZEROCONF},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
with patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"M1065-LW - {MAC}"
assert result["data"] == {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_PORT: 80,
CONF_MAC: MAC,
CONF_MODEL: "M1065-LW",
CONF_NAME: "M1065-LW 0",
}
assert result["data"][CONF_NAME] == "M1065-LW 0"
async def test_zeroconf_flow_already_configured(hass):
"""Test that zeroconf doesn't setup already configured devices."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
assert device.host == "1.2.3.4"
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN,
data={
CONF_HOST: "1.2.3.4",
CONF_PORT: 80,
"hostname": "name",
"properties": {"macaddress": MAC},
},
context={"source": SOURCE_ZEROCONF},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert device.host == "1.2.3.4"
async def test_zeroconf_flow_updated_configuration(hass):
"""Test that zeroconf update configuration with new parameters."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
assert device.host == "1.2.3.4"
assert device.config_entry.data == {
CONF_HOST: "1.2.3.4",
CONF_PORT: 80,
CONF_USERNAME: "root",
CONF_PASSWORD: "pass",
CONF_MAC: MAC,
CONF_MODEL: MODEL,
CONF_NAME: NAME,
}
with patch(
"homeassistant.components.axis.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch("axis.vapix.Vapix.request", new=vapix_request):
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN,
data={
CONF_HOST: "2.3.4.5",
CONF_PORT: 8080,
"hostname": "name",
"properties": {"macaddress": MAC},
},
context={"source": SOURCE_ZEROCONF},
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert device.config_entry.data == {
CONF_HOST: "2.3.4.5",
CONF_PORT: 8080,
CONF_USERNAME: "root",
CONF_PASSWORD: "pass",
CONF_MAC: MAC,
CONF_MODEL: MODEL,
CONF_NAME: NAME,
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf_flow_ignore_non_axis_device(hass):
"""Test that zeroconf doesn't setup devices with link local addresses."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN,
data={CONF_HOST: "169.254.3.4", "properties": {"macaddress": "01234567890"}},
context={"source": SOURCE_ZEROCONF},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "not_axis_device"
async def test_zeroconf_flow_ignore_link_local_address(hass):
"""Test that zeroconf doesn't setup devices with link local addresses."""
result = await hass.config_entries.flow.async_init(
AXIS_DOMAIN,
data={CONF_HOST: "169.254.3.4", "properties": {"macaddress": MAC}},
context={"source": SOURCE_ZEROCONF},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "link_local_address"
async def test_option_flow(hass):
"""Test config flow options."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
assert device.option_stream_profile == DEFAULT_STREAM_PROFILE
result = await hass.config_entries.options.async_init(device.config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "configure_stream"
assert set(result["data_schema"].schema[CONF_STREAM_PROFILE].container) == {
DEFAULT_STREAM_PROFILE,
"profile_1",
"profile_2",
}
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_STREAM_PROFILE: "profile_1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
CONF_EVENTS: True,
CONF_STREAM_PROFILE: "profile_1",
}
assert device.option_stream_profile == "profile_1"
|
import numpy as np
import tempfile
import unittest
from PIL import Image
import chainer
from chainer import testing
from chainercv.utils import read_image
from chainercv.utils import write_image
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
def _write_rgba_image(rgba, file, format):
rgba = rgba.transpose((1, 2, 0))
rgba = Image.fromarray(rgba, 'RGBA')
canvas = Image.new('RGBA', rgba.size, (255, 255, 255, 255))
    # Paste the image onto the canvas, using its alpha channel as mask
canvas.paste(rgba, mask=rgba)
canvas.save(file, format)
def _create_parameters():
params = testing.product({
'file_obj': [False, True],
'size': [(48, 32)],
'dtype': [np.float32, np.uint8, bool]})
no_color_params = testing.product({
'format': ['bmp', 'jpeg', 'png'],
'color': [False],
'alpha': [None]})
no_alpha_params = testing.product({
'format': ['bmp', 'jpeg', 'png'],
'color': [True],
'alpha': [None]})
alpha_params = testing.product({
        # writing an alpha image with JPEG encoding does not work,
        # since JPEG has no alpha channel
'format': ['png'],
'color': [True],
'alpha': ['ignore', 'blend_with_white', 'blend_with_black']})
params = testing.product_dict(
params,
no_color_params + no_alpha_params + alpha_params)
return params
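# The grid above combines each base case with grayscale, RGB and RGBA inputs;
# RGBA is limited to PNG because JPEG cannot encode an alpha channel.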
@testing.parameterize(*testing.product_dict(
_create_parameters(),
[{'backend': 'cv2'}, {'backend': 'PIL'}, {'backend': None}]))
class TestReadImage(unittest.TestCase):
def setUp(self):
if self.file_obj:
self.f = tempfile.TemporaryFile()
self.file = self.f
format = self.format
else:
if self.format == 'jpeg':
suffix = '.jpg'
else:
suffix = '.' + self.format
self.f = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
self.file = self.f.name
format = None
if self.alpha is None:
if self.color:
self.img = np.random.randint(
0, 255, size=(3,) + self.size, dtype=np.uint8)
else:
self.img = np.random.randint(
0, 255, size=(1,) + self.size, dtype=np.uint8)
write_image(self.img, self.file, format=format)
else:
self.img = np.random.randint(
0, 255, size=(4,) + self.size, dtype=np.uint8)
_write_rgba_image(self.img, self.file, format=format)
if self.file_obj:
self.file.seek(0)
def test_read_image_as_color(self):
if self.backend == 'cv2' and not _cv2_available:
return
with chainer.using_config('cv_read_image_backend', self.backend):
img = read_image(self.file, dtype=self.dtype, alpha=self.alpha)
self.assertEqual(img.shape, (3,) + self.size)
self.assertEqual(img.dtype, self.dtype)
if self.format in {'bmp', 'png'} and self.alpha is None:
np.testing.assert_equal(
img,
np.broadcast_to(self.img, (3,) + self.size).astype(self.dtype))
def test_read_image_as_grayscale(self):
if self.backend == 'cv2' and not _cv2_available:
return
with chainer.using_config('cv_read_image_backend', self.backend):
img = read_image(
self.file, dtype=self.dtype, color=False, alpha=self.alpha)
self.assertEqual(img.shape, (1,) + self.size)
self.assertEqual(img.dtype, self.dtype)
if (self.format in {'bmp', 'png'}
and not self.color and self.alpha is None):
np.testing.assert_equal(img, self.img.astype(self.dtype))
def test_read_image_mutable(self):
if self.backend == 'cv2' and not _cv2_available:
return
with chainer.using_config('cv_read_image_backend', self.backend):
img = read_image(self.file, dtype=self.dtype, alpha=self.alpha)
img[:] = 0
np.testing.assert_equal(img, 0)
def test_read_image_raise_error_with_cv2(self):
if self.backend == 'cv2' and not _cv2_available:
with chainer.using_config('cv_read_image_backend', self.backend):
with self.assertRaises(ValueError):
read_image(self.file)
@testing.parameterize(*_create_parameters())
class TestReadImageDifferentBackends(unittest.TestCase):
def setUp(self):
if self.file_obj:
self.f = tempfile.TemporaryFile()
self.file = self.f
format = self.format
else:
if self.format == 'jpeg':
suffix = '.jpg'
else:
suffix = '.' + self.format
self.f = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
self.file = self.f.name
format = None
if self.alpha is None:
if self.color:
self.img = np.random.randint(
0, 255, size=(3,) + self.size, dtype=np.uint8)
else:
self.img = np.random.randint(
0, 255, size=(1,) + self.size, dtype=np.uint8)
write_image(self.img, self.file, format=format)
else:
self.img = np.random.randint(
0, 255, size=(4,) + self.size, dtype=np.uint8)
_write_rgba_image(self.img, self.file, format=format)
if self.file_obj:
self.file.seek(0)
@unittest.skipUnless(_cv2_available, 'cv2 is not installed')
def test_read_image_different_backends_as_color(self):
with chainer.using_config('cv_read_image_backend', 'cv2'):
cv2_img = read_image(self.file, dtype=self.dtype,
color=self.color, alpha=self.alpha)
with chainer.using_config('cv_read_image_backend', 'PIL'):
pil_img = read_image(self.file, dtype=self.dtype,
color=self.color, alpha=self.alpha)
if self.format != 'jpeg':
if self.dtype == np.float32 and self.alpha is not None:
np.testing.assert_almost_equal(cv2_img, pil_img, decimal=4)
else:
np.testing.assert_equal(cv2_img, pil_img)
else:
            # jpeg decoders are different, so they produce different results
assert np.mean(cv2_img == pil_img) > 0.99
testing.run_module(__name__, __file__)
|
import pysonos
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
async def _async_has_devices(hass):
"""Return if there are devices that can be discovered."""
return await hass.async_add_executor_job(pysonos.discover)
config_entry_flow.register_discovery_flow(
DOMAIN, "Sonos", _async_has_devices, config_entries.CONN_CLASS_LOCAL_PUSH
)
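# register_discovery_flow is a Home Assistant helper that creates a minimal
# config flow: it runs _async_has_devices and aborts the flow when no Sonos
# devices are discovered.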
|
from abc import abstractmethod
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import as_timestamp, now, parse_datetime, utc_from_timestamp
from .const import (
CONF_MANUAL_RUN_MINS,
DEFAULT_MANUAL_RUN_MINS,
DOMAIN as DOMAIN_RACHIO,
KEY_CUSTOM_CROP,
KEY_CUSTOM_SHADE,
KEY_CUSTOM_SLOPE,
KEY_DEVICE_ID,
KEY_DURATION,
KEY_ENABLED,
KEY_ID,
KEY_IMAGE_URL,
KEY_NAME,
KEY_ON,
KEY_RAIN_DELAY,
KEY_RAIN_DELAY_END,
KEY_SCHEDULE_ID,
KEY_SUBTYPE,
KEY_SUMMARY,
KEY_TYPE,
KEY_ZONE_ID,
KEY_ZONE_NUMBER,
SCHEDULE_TYPE_FIXED,
SCHEDULE_TYPE_FLEX,
SERVICE_SET_ZONE_MOISTURE,
SERVICE_START_MULTIPLE_ZONES,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
SIGNAL_RACHIO_SCHEDULE_UPDATE,
SIGNAL_RACHIO_ZONE_UPDATE,
SLOPE_FLAT,
SLOPE_MODERATE,
SLOPE_SLIGHT,
SLOPE_STEEP,
)
from .entity import RachioDevice
from .webhooks import (
SUBTYPE_RAIN_DELAY_OFF,
SUBTYPE_RAIN_DELAY_ON,
SUBTYPE_SCHEDULE_COMPLETED,
SUBTYPE_SCHEDULE_STARTED,
SUBTYPE_SCHEDULE_STOPPED,
SUBTYPE_SLEEP_MODE_OFF,
SUBTYPE_SLEEP_MODE_ON,
SUBTYPE_ZONE_COMPLETED,
SUBTYPE_ZONE_PAUSED,
SUBTYPE_ZONE_STARTED,
SUBTYPE_ZONE_STOPPED,
)
_LOGGER = logging.getLogger(__name__)
ATTR_DURATION = "duration"
ATTR_ID = "id"
ATTR_PERCENT = "percent"
ATTR_SCHEDULE_SUMMARY = "Summary"
ATTR_SCHEDULE_ENABLED = "Enabled"
ATTR_SCHEDULE_DURATION = "Duration"
ATTR_SCHEDULE_TYPE = "Type"
ATTR_SORT_ORDER = "sortOrder"
ATTR_ZONE_NUMBER = "Zone number"
ATTR_ZONE_SHADE = "Shade"
ATTR_ZONE_SLOPE = "Slope"
ATTR_ZONE_SUMMARY = "Summary"
ATTR_ZONE_TYPE = "Type"
START_MULTIPLE_ZONES_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_DURATION): cv.ensure_list_csv,
}
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Rachio switches."""
zone_entities = []
has_flex_sched = False
entities = await hass.async_add_executor_job(_create_entities, hass, config_entry)
for entity in entities:
if isinstance(entity, RachioZone):
zone_entities.append(entity)
if isinstance(entity, RachioSchedule) and entity.type == SCHEDULE_TYPE_FLEX:
has_flex_sched = True
async_add_entities(entities)
_LOGGER.info("%d Rachio switch(es) added", len(entities))
def start_multiple(service):
"""Service to start multiple zones in sequence."""
zones_list = []
person = hass.data[DOMAIN_RACHIO][config_entry.entry_id]
entity_id = service.data[ATTR_ENTITY_ID]
duration = iter(service.data[ATTR_DURATION])
default_time = service.data[ATTR_DURATION][0]
entity_to_zone_id = {
entity.entity_id: entity.zone_id for entity in zone_entities
}
for (count, data) in enumerate(entity_id):
if data in entity_to_zone_id:
# Time can be passed as a list per zone,
# or one time for all zones
time = int(next(duration, default_time)) * 60
zones_list.append(
{
ATTR_ID: entity_to_zone_id.get(data),
ATTR_DURATION: time,
ATTR_SORT_ORDER: count,
}
)
if len(zones_list) != 0:
person.start_multiple_zones(zones_list)
_LOGGER.debug("Starting zone(s) %s", entity_id)
else:
raise HomeAssistantError("No matching zones found in given entity_ids")
hass.services.async_register(
DOMAIN_RACHIO,
SERVICE_START_MULTIPLE_ZONES,
start_multiple,
schema=START_MULTIPLE_ZONES_SCHEMA,
)
if has_flex_sched:
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_SET_ZONE_MOISTURE,
{vol.Required(ATTR_PERCENT): cv.positive_int},
"set_moisture_percent",
)
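# Example service data for the multi-zone service registered above (entity ids
# are illustrative; durations are minutes, one per zone or a single value
# applied to all zones):
#   entity_id: [switch.front_yard, switch.back_yard]
#   duration: [5, 10]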
def _create_entities(hass, config_entry):
entities = []
person = hass.data[DOMAIN_RACHIO][config_entry.entry_id]
# Fetch the schedule once at startup
# in order to avoid every zone doing it
for controller in person.controllers:
entities.append(RachioStandbySwitch(controller))
entities.append(RachioRainDelay(controller))
zones = controller.list_zones()
schedules = controller.list_schedules()
flex_schedules = controller.list_flex_schedules()
current_schedule = controller.current_schedule
for zone in zones:
entities.append(RachioZone(person, controller, zone, current_schedule))
for sched in schedules + flex_schedules:
entities.append(RachioSchedule(person, controller, sched, current_schedule))
_LOGGER.debug("Added %s", entities)
return entities
class RachioSwitch(RachioDevice, SwitchEntity):
"""Represent a Rachio state that can be toggled."""
def __init__(self, controller):
"""Initialize a new Rachio switch."""
super().__init__(controller)
self._state = None
@property
def name(self) -> str:
"""Get a name for this switch."""
return f"Switch on {self._controller.name}"
@property
def is_on(self) -> bool:
"""Return whether the switch is currently on."""
return self._state
@callback
def _async_handle_any_update(self, *args, **kwargs) -> None:
"""Determine whether an update event applies to this device."""
if args[0][KEY_DEVICE_ID] != self._controller.controller_id:
# For another device
return
# For this device
self._async_handle_update(args, kwargs)
@abstractmethod
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle incoming webhook data."""
class RachioStandbySwitch(RachioSwitch):
"""Representation of a standby status/button."""
@property
def name(self) -> str:
"""Return the name of the standby switch."""
return f"{self._controller.name} in standby mode"
@property
def unique_id(self) -> str:
"""Return a unique id by combining controller id and purpose."""
return f"{self._controller.controller_id}-standby"
@property
def icon(self) -> str:
"""Return an icon for the standby switch."""
return "mdi:power"
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Update the state using webhook data."""
if args[0][0][KEY_SUBTYPE] == SUBTYPE_SLEEP_MODE_ON:
self._state = True
elif args[0][0][KEY_SUBTYPE] == SUBTYPE_SLEEP_MODE_OFF:
self._state = False
self.async_write_ha_state()
def turn_on(self, **kwargs) -> None:
"""Put the controller in standby mode."""
self._controller.rachio.device.turn_off(self._controller.controller_id)
def turn_off(self, **kwargs) -> None:
"""Resume controller functionality."""
self._controller.rachio.device.turn_on(self._controller.controller_id)
async def async_added_to_hass(self):
"""Subscribe to updates."""
if KEY_ON in self._controller.init_data:
self._state = not self._controller.init_data[KEY_ON]
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
self._async_handle_any_update,
)
)
class RachioRainDelay(RachioSwitch):
"""Representation of a rain delay status/switch."""
def __init__(self, controller):
"""Set up a Rachio rain delay switch."""
self._cancel_update = None
super().__init__(controller)
@property
def name(self) -> str:
"""Return the name of the switch."""
return f"{self._controller.name} rain delay"
@property
def unique_id(self) -> str:
"""Return a unique id by combining controller id and purpose."""
return f"{self._controller.controller_id}-delay"
@property
def icon(self) -> str:
"""Return an icon for rain delay."""
return "mdi:camera-timer"
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Update the state using webhook data."""
if self._cancel_update:
self._cancel_update()
self._cancel_update = None
if args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_ON:
endtime = parse_datetime(args[0][0][KEY_RAIN_DELAY_END])
_LOGGER.debug("Rain delay expires at %s", endtime)
self._state = True
self._cancel_update = async_track_point_in_utc_time(
self.hass, self._delay_expiration, endtime
)
elif args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_OFF:
self._state = False
self.async_write_ha_state()
@callback
def _delay_expiration(self, *args) -> None:
"""Trigger when a rain delay expires."""
self._state = False
self._cancel_update = None
self.async_write_ha_state()
def turn_on(self, **kwargs) -> None:
"""Activate a 24 hour rain delay on the controller."""
self._controller.rachio.device.rain_delay(self._controller.controller_id, 86400)
_LOGGER.debug("Starting rain delay for 24 hours")
def turn_off(self, **kwargs) -> None:
"""Resume controller functionality."""
self._controller.rachio.device.rain_delay(self._controller.controller_id, 0)
_LOGGER.debug("Canceling rain delay")
async def async_added_to_hass(self):
"""Subscribe to updates."""
if KEY_RAIN_DELAY in self._controller.init_data:
self._state = self._controller.init_data[
KEY_RAIN_DELAY
] / 1000 > as_timestamp(now())
# If the controller was in a rain delay state during a reboot, this re-sets the timer
if self._state is True:
delay_end = utc_from_timestamp(
self._controller.init_data[KEY_RAIN_DELAY] / 1000
)
_LOGGER.debug("Re-setting rain delay timer for %s", delay_end)
self._cancel_update = async_track_point_in_utc_time(
self.hass, self._delay_expiration, delay_end
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
self._async_handle_any_update,
)
)
class RachioZone(RachioSwitch):
"""Representation of one zone of sprinklers connected to the Rachio Iro."""
def __init__(self, person, controller, data, current_schedule):
"""Initialize a new Rachio Zone."""
self.id = data[KEY_ID]
self._zone_name = data[KEY_NAME]
self._zone_number = data[KEY_ZONE_NUMBER]
self._zone_enabled = data[KEY_ENABLED]
self._entity_picture = data.get(KEY_IMAGE_URL)
self._person = person
self._shade_type = data.get(KEY_CUSTOM_SHADE, {}).get(KEY_NAME)
self._zone_type = data.get(KEY_CUSTOM_CROP, {}).get(KEY_NAME)
self._slope_type = data.get(KEY_CUSTOM_SLOPE, {}).get(KEY_NAME)
self._summary = ""
self._current_schedule = current_schedule
super().__init__(controller)
def __str__(self):
"""Display the zone as a string."""
        return f'Rachio Zone "{self.name}" on {self._controller}'
@property
def zone_id(self) -> str:
"""How the Rachio API refers to the zone."""
return self.id
@property
def name(self) -> str:
"""Return the friendly name of the zone."""
return self._zone_name
@property
def unique_id(self) -> str:
"""Return a unique id by combining controller id and zone number."""
return f"{self._controller.controller_id}-zone-{self.zone_id}"
@property
def icon(self) -> str:
"""Return the icon to display."""
return "mdi:water"
@property
def zone_is_enabled(self) -> bool:
"""Return whether the zone is allowed to run."""
return self._zone_enabled
@property
def entity_picture(self):
"""Return the entity picture to use in the frontend, if any."""
return self._entity_picture
@property
def device_state_attributes(self) -> dict:
"""Return the optional state attributes."""
props = {ATTR_ZONE_NUMBER: self._zone_number, ATTR_ZONE_SUMMARY: self._summary}
if self._shade_type:
props[ATTR_ZONE_SHADE] = self._shade_type
if self._zone_type:
props[ATTR_ZONE_TYPE] = self._zone_type
if self._slope_type:
if self._slope_type == SLOPE_FLAT:
props[ATTR_ZONE_SLOPE] = "Flat"
elif self._slope_type == SLOPE_SLIGHT:
props[ATTR_ZONE_SLOPE] = "Slight"
elif self._slope_type == SLOPE_MODERATE:
props[ATTR_ZONE_SLOPE] = "Moderate"
elif self._slope_type == SLOPE_STEEP:
props[ATTR_ZONE_SLOPE] = "Steep"
return props
def turn_on(self, **kwargs) -> None:
"""Start watering this zone."""
# Stop other zones first
self.turn_off()
# Start this zone
manual_run_time = timedelta(
minutes=self._person.config_entry.options.get(
CONF_MANUAL_RUN_MINS, DEFAULT_MANUAL_RUN_MINS
)
)
self._controller.rachio.zone.start(self.zone_id, manual_run_time.seconds)
_LOGGER.debug(
"Watering %s on %s for %s",
self.name,
self._controller.name,
str(manual_run_time),
)
def turn_off(self, **kwargs) -> None:
"""Stop watering all zones."""
self._controller.stop_watering()
def set_moisture_percent(self, percent) -> None:
"""Set the zone moisture percent."""
_LOGGER.debug("Setting %s moisture to %s percent", self._zone_name, percent)
self._controller.rachio.zone.set_moisture_percent(self.id, percent / 100)
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle incoming webhook zone data."""
if args[0][KEY_ZONE_ID] != self.zone_id:
return
self._summary = args[0][KEY_SUMMARY]
if args[0][KEY_SUBTYPE] == SUBTYPE_ZONE_STARTED:
self._state = True
elif args[0][KEY_SUBTYPE] in [
SUBTYPE_ZONE_STOPPED,
SUBTYPE_ZONE_COMPLETED,
SUBTYPE_ZONE_PAUSED,
]:
self._state = False
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to updates."""
self._state = self.zone_id == self._current_schedule.get(KEY_ZONE_ID)
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_RACHIO_ZONE_UPDATE, self._async_handle_update
)
)
class RachioSchedule(RachioSwitch):
"""Representation of one fixed schedule on the Rachio Iro."""
def __init__(self, person, controller, data, current_schedule):
"""Initialize a new Rachio Schedule."""
self._schedule_id = data[KEY_ID]
self._schedule_name = data[KEY_NAME]
self._duration = data[KEY_DURATION]
self._schedule_enabled = data[KEY_ENABLED]
self._summary = data[KEY_SUMMARY]
self.type = data.get(KEY_TYPE, SCHEDULE_TYPE_FIXED)
self._current_schedule = current_schedule
super().__init__(controller)
@property
def name(self) -> str:
"""Return the friendly name of the schedule."""
return f"{self._schedule_name} Schedule"
@property
def unique_id(self) -> str:
"""Return a unique id by combining controller id and schedule."""
return f"{self._controller.controller_id}-schedule-{self._schedule_id}"
@property
def icon(self) -> str:
"""Return the icon to display."""
return "mdi:water" if self.schedule_is_enabled else "mdi:water-off"
@property
def device_state_attributes(self) -> dict:
"""Return the optional state attributes."""
return {
ATTR_SCHEDULE_SUMMARY: self._summary,
ATTR_SCHEDULE_ENABLED: self.schedule_is_enabled,
ATTR_SCHEDULE_DURATION: f"{round(self._duration / 60)} minutes",
ATTR_SCHEDULE_TYPE: self.type,
}
@property
def schedule_is_enabled(self) -> bool:
"""Return whether the schedule is allowed to run."""
return self._schedule_enabled
def turn_on(self, **kwargs) -> None:
"""Start this schedule."""
self._controller.rachio.schedulerule.start(self._schedule_id)
_LOGGER.debug(
"Schedule %s started on %s",
self.name,
self._controller.name,
)
def turn_off(self, **kwargs) -> None:
"""Stop watering all zones."""
self._controller.stop_watering()
@callback
def _async_handle_update(self, *args, **kwargs) -> None:
"""Handle incoming webhook schedule data."""
# Schedule ID not passed when running individual zones, so we catch that error
try:
if args[0][KEY_SCHEDULE_ID] == self._schedule_id:
if args[0][KEY_SUBTYPE] in [SUBTYPE_SCHEDULE_STARTED]:
self._state = True
elif args[0][KEY_SUBTYPE] in [
SUBTYPE_SCHEDULE_STOPPED,
SUBTYPE_SCHEDULE_COMPLETED,
]:
self._state = False
except KeyError:
pass
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to updates."""
self._state = self._schedule_id == self._current_schedule.get(KEY_SCHEDULE_ID)
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_RACHIO_SCHEDULE_UPDATE, self._async_handle_update
)
)
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker.providers.aws import aws_vpc_endpoint
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
SERVICE_NAME = 's3'
REGION = 'us-west-1'
FULL_SERVICE_NAME = 'com.amazonaws.{}.s3'.format(REGION)
VPC_ID = 'vpc-1234'
ENDPOINT_ID = 'vpce-1234'
ROUTE_TABLE_ID = 'rtb-1234'
CREATE_RES = {'VpcEndpoint': {'VpcEndpointId': ENDPOINT_ID}}
DELETE_RES = {'Unsuccessful': []}
QUERY_ENDPOINTS_CMD = [
'describe-vpc-endpoints', '--filters',
'Name=service-name,Values={}'.format(FULL_SERVICE_NAME),
'Name=vpc-id,Values={}'.format(VPC_ID), '--query',
'VpcEndpoints[].VpcEndpointId'
]
DESCRIBE_ROUTES_CMD = [
'describe-route-tables', '--filters',
'Name=vpc-id,Values={}'.format(VPC_ID), '--query',
'RouteTables[].RouteTableId'
]
CREATE_ENDPOINT_CMD = [
'create-vpc-endpoint', '--vpc-endpoint-type', 'Gateway', '--vpc-id', VPC_ID,
'--service-name', FULL_SERVICE_NAME, '--route-table-ids', ROUTE_TABLE_ID
]
DELETE_ENDPOINT_CMD = [
'delete-vpc-endpoints', '--vpc-endpoint-ids', ENDPOINT_ID
]
class AwsVpcS3EndpointTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AwsVpcS3EndpointTest, self).setUp()
self.mock_vpc = mock.Mock()
self.mock_vpc.region = REGION
self.mock_run_cmd = self.enter_context(
mock.patch.object(aws_vpc_endpoint.AwsVpcS3Endpoint, '_RunCommand'))
def _InitEndpoint(self, vpc_id):
self.mock_vpc.id = vpc_id
return aws_vpc_endpoint.CreateEndpointService(SERVICE_NAME, self.mock_vpc)
def testEndPointIdNoVpc(self):
    # Initializing with no VPC means no immediate lookups are done
endpoint = self._InitEndpoint(None)
self.assertIsNone(endpoint.id)
endpoint._RunCommand.assert_not_called()
def testEndPointIdHasVpc(self):
    # Initializing with a VPC triggers an immediate call to find existing endpoints
endpoint = self._InitEndpoint(VPC_ID)
self.assertIsNone(endpoint.id, 'Endpoint id always None on initialization')
self.mock_run_cmd.reset_mock()
self.mock_run_cmd.side_effect = [[ENDPOINT_ID]]
self.assertEqual(ENDPOINT_ID, endpoint.endpoint_id)
endpoint._RunCommand.assert_called_with(QUERY_ENDPOINTS_CMD)
def testCreate(self):
# shows that a call to .Create() will get the routing table info followed
# by the create-vpc-endpoint call
endpoint = self._InitEndpoint(VPC_ID)
self.mock_run_cmd.reset_mock()
self.mock_run_cmd.side_effect = [
[], # query for endpoint id
[ROUTE_TABLE_ID], # query for route tables
CREATE_RES, # _Create()
[ENDPOINT_ID], # _Exists()
]
endpoint.Create()
calls = endpoint._RunCommand.call_args_list
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[0])
self.assertEqual(mock.call(DESCRIBE_ROUTES_CMD), calls[1])
self.assertEqual(mock.call(CREATE_ENDPOINT_CMD), calls[2])
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[3])
self.assertEqual(ENDPOINT_ID, endpoint.id)
def testDelete(self):
endpoint = self._InitEndpoint(VPC_ID)
self.mock_run_cmd.reset_mock()
endpoint.id = ENDPOINT_ID
self.mock_run_cmd.side_effect = [DELETE_RES, []]
endpoint.Delete()
calls = endpoint._RunCommand.call_args_list
self.assertEqual(mock.call(DELETE_ENDPOINT_CMD), calls[0])
self.assertEqual(mock.call(QUERY_ENDPOINTS_CMD), calls[1])
if __name__ == '__main__':
unittest.main()
|
from diamond.collector import Collector
from diamond import convertor
import os
class UptimeCollector(Collector):
PROC = '/proc/uptime'
def get_default_config(self):
config = super(UptimeCollector, self).get_default_config()
config.update({
'path': 'uptime',
'metric_name': 'minutes'
})
return config
def collect(self):
if not os.path.exists(self.PROC):
self.log.error('Input path %s does not exist' % self.PROC)
return {}
v = self.read()
if v is not None:
self.publish(self.config['metric_name'], v)
def read(self):
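        # The first field of /proc/uptime is the system uptime in seconds.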
        try:
            with open(self.PROC) as fd:
                uptime = fd.readline()
            v = float(uptime.split()[0].strip())
return convertor.time.convert(v, 's', self.config['metric_name'])
except Exception as e:
self.log.error('Unable to read uptime from %s: %s' % (self.PROC,
e))
return None
|
from datetime import timedelta
from functools import partial
import logging
from i2csense.htu21d import HTU21D # pylint: disable=import-error
import smbus # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, PERCENTAGE, TEMP_FAHRENHEIT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util.temperature import celsius_to_fahrenheit
_LOGGER = logging.getLogger(__name__)
CONF_I2C_BUS = "i2c_bus"
DEFAULT_I2C_BUS = 1
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
DEFAULT_NAME = "HTU21D Sensor"
SENSOR_TEMPERATURE = "temperature"
SENSOR_HUMIDITY = "humidity"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): vol.Coerce(int),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the HTU21D sensor."""
name = config.get(CONF_NAME)
bus_number = config.get(CONF_I2C_BUS)
temp_unit = hass.config.units.temperature_unit
    bus = smbus.SMBus(bus_number)
sensor = await hass.async_add_executor_job(partial(HTU21D, bus, logger=_LOGGER))
if not sensor.sample_ok:
_LOGGER.error("HTU21D sensor not detected in bus %s", bus_number)
return False
sensor_handler = await hass.async_add_executor_job(HTU21DHandler, sensor)
dev = [
HTU21DSensor(sensor_handler, name, SENSOR_TEMPERATURE, temp_unit),
HTU21DSensor(sensor_handler, name, SENSOR_HUMIDITY, PERCENTAGE),
]
async_add_entities(dev)
class HTU21DHandler:
"""Implement HTU21D communication."""
def __init__(self, sensor):
"""Initialize the sensor handler."""
self.sensor = sensor
self.sensor.update()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Read raw data and calculate temperature and humidity."""
self.sensor.update()
class HTU21DSensor(Entity):
"""Implementation of the HTU21D sensor."""
def __init__(self, htu21d_client, name, variable, unit):
"""Initialize the sensor."""
self._name = f"{name}_{variable}"
self._variable = variable
self._unit_of_measurement = unit
self._client = htu21d_client
self._state = None
@property
def name(self) -> str:
"""Return the name of the sensor."""
return self._name
@property
    def state(self) -> float:
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
async def async_update(self):
"""Get the latest data from the HTU21D sensor and update the state."""
await self.hass.async_add_executor_job(self._client.update)
if self._client.sensor.sample_ok:
if self._variable == SENSOR_TEMPERATURE:
value = round(self._client.sensor.temperature, 1)
if self.unit_of_measurement == TEMP_FAHRENHEIT:
value = celsius_to_fahrenheit(value)
else:
value = round(self._client.sensor.humidity, 1)
self._state = value
else:
_LOGGER.warning("Bad sample")
|
import os
import sys
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
os.environ['is_test_suite'] = 'True'
import pandas as pd
from sklearn.datasets import load_boston
from sklearn.metrics import brier_score_loss, mean_squared_error
from sklearn.model_selection import train_test_split
from auto_ml import Predictor
def get_boston_regression_dataset():
boston = load_boston()
df_boston = pd.DataFrame(boston.data)
df_boston.columns = boston.feature_names
df_boston['MEDV'] = boston['target']
df_boston_train, df_boston_test = train_test_split(df_boston, test_size=0.33, random_state=42)
return df_boston_train, df_boston_test
def get_titanic_binary_classification_dataset(basic=True):
dir_name = os.path.abspath(os.path.dirname(__file__))
file_name = os.path.join(dir_name, 'titanic.csv')
print('file_name')
print(file_name)
print('dir_name')
print(dir_name)
try:
df_titanic = pd.read_csv(file_name)
except Exception as e:
print('Error')
print(e)
dataset_url = 'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic3.csv'
df_titanic = pd.read_csv(dataset_url)
# Do not write the index that pandas automatically creates
df_titanic.to_csv(file_name, index=False)
df_titanic = df_titanic.drop(['boat', 'body'], axis=1)
    if basic:
df_titanic = df_titanic.drop(['name', 'ticket', 'cabin', 'home.dest'], axis=1)
df_titanic_train, df_titanic_test = train_test_split(df_titanic, test_size=0.33, random_state=42)
return df_titanic_train, df_titanic_test
def train_basic_binary_classifier(df_titanic_train):
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train)
return ml_predictor
def train_basic_regressor(df_boston_train):
column_descriptions = {
'MEDV': 'output'
, 'CHAS': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
ml_predictor.train(df_boston_train, verbose=False)
return ml_predictor
def calculate_rmse(actuals, preds):
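    # The square root of MSE is the RMSE; negate it so that larger is better,
    # matching the scorer convention of calculate_brier_score_loss below.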
return mean_squared_error(actuals, preds)**0.5 * -1
def calculate_brier_score_loss(actuals, probas):
return -1 * brier_score_loss(actuals, probas)
def get_twitter_sentiment_multilabel_classification_dataset():
file_name = os.path.join('tests', 'twitter_sentiment.h5')
try:
df_twitter = pd.read_hdf(file_name)
except Exception as e:
print('Error')
print(e)
dataset_url = 'https://raw.githubusercontent.com/ClimbsRocks/sample_datasets/master/twitter_airline_sentiment.csv'
df_twitter = pd.read_csv(dataset_url, encoding='latin-1')
# Do not write the index that pandas automatically creates
df_twitter.to_hdf(file_name, key='df', format='fixed')
    # Grab only 10% of the dataset - it runs much faster this way
df_twitter = df_twitter.sample(frac=0.1)
df_twitter['tweet_created'] = pd.to_datetime(df_twitter.tweet_created)
df_twitter_train, df_twitter_test = train_test_split(df_twitter, test_size=0.33, random_state=42)
return df_twitter_train, df_twitter_test
def train_basic_multilabel_classifier(df_twitter_train):
column_descriptions = {
'airline_sentiment': 'output'
, 'airline': 'categorical'
, 'text': 'ignore'
, 'tweet_location': 'categorical'
, 'user_timezone': 'categorical'
, 'tweet_created': 'date'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_twitter_train)
return ml_predictor
import pandas as pd
import datetime
def make_test_df():
today = datetime.datetime.today()
raw_input = {
'a': [1,2,3,4,5]
, 'b': [6,7,8,9,10]
, 'text_col': ['hi', 'there', 'mesmerizingly', 'intriguing', 'world']
, 'date_col': [today, today - datetime.timedelta(days=1), today - datetime.timedelta(days=2), today - datetime.timedelta(days=3), today - datetime.timedelta(days=4)]
}
df = pd.DataFrame(raw_input)
return df
|
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import gettext_lazy as _
from django_fsm import transition, RETURN_VALUE
from shop.models.order import BaseOrder
class ManualPaymentWorkflowMixin:
"""
Add this class to `settings.SHOP_ORDER_WORKFLOWS` to mix it into your `OrderModel`.
It adds all the methods required for state transitions, when used with the
`ForwardFundPayment` provider from above.
"""
TRANSITION_TARGETS = {
'awaiting_payment': _("Awaiting a forward fund payment"),
'prepayment_deposited': _("Prepayment deposited"),
'no_payment_required': _("No Payment Required"),
}
_manual_payment_transitions = TRANSITION_TARGETS.keys()
def __init__(self, *args, **kwargs):
if not isinstance(self, BaseOrder):
raise ImproperlyConfigured("class 'ManualPaymentWorkflowMixin' is not of type 'BaseOrder'")
CancelOrderWorkflowMixin.CANCELABLE_SOURCES.update(self._manual_payment_transitions)
super().__init__(*args, **kwargs)
@transition(field='status', source=['created'], target='no_payment_required')
def no_payment_required(self):
"""
Signals that an Order can proceed directly, by confirming a payment of value zero.
"""
@transition(field='status', source=['created'], target='awaiting_payment')
def awaiting_payment(self):
"""
Signals that the current Order awaits a payment.
Invoked by ForwardFundPayment.get_payment_request.
"""
def payment_deposited(self):
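        # amount_paid is presumably a cached property on the order model; deleting
        # it drops the cached value so the sum below is recomputed from fresh data.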
if hasattr(self, 'amount_paid'):
del self.amount_paid
return self.amount_paid > 0
@transition(field='status', source=['awaiting_payment'],
target=RETURN_VALUE('awaiting_payment', 'prepayment_deposited'),
conditions=[payment_deposited],
custom=dict(admin=True, button_name=_("Payment Received")))
def prepayment_deposited(self):
"""
Signals that the current Order received a payment.
"""
return 'prepayment_deposited' if self.is_fully_paid() else 'awaiting_payment'
@transition(field='status', source=['prepayment_deposited', 'no_payment_required'],
custom=dict(auto=True))
def acknowledge_prepayment(self):
"""
Acknowledge the payment. This method is invoked automatically.
"""
self.acknowledge_payment()
@transition(field='status', source='refund_payment', target=RETURN_VALUE('refund_payment', 'order_canceled'),
custom=dict(admin=True, button_name=_("Mark as Refunded")))
def payment_refunded(self):
"""
Signals that the payment for this Order has been refunded manually.
"""
return 'refund_payment' if self.amount_paid else 'order_canceled'
class CancelOrderWorkflowMixin:
"""
Add this class to `settings.SHOP_ORDER_WORKFLOWS` to mix it into your `OrderModel`.
It adds all the methods required for state transitions, to cancel an order.
"""
CANCELABLE_SOURCES = {'new', 'created', 'payment_confirmed', 'payment_declined', 'ready_for_delivery'}
TRANSITION_TARGETS = {
'refund_payment': _("Refund payment"),
'order_canceled': _("Order Canceled"),
}
def cancelable(self):
return super().cancelable() or self.status in self.CANCELABLE_SOURCES
@transition(field='status', target=RETURN_VALUE(*TRANSITION_TARGETS.keys()),
conditions=[cancelable], custom=dict(admin=True, button_name=_("Cancel Order")))
def cancel_order(self):
"""
Signals that an Order shall be canceled.
"""
self.withdraw_from_delivery()
if self.amount_paid:
self.refund_payment()
return 'refund_payment' if self.amount_paid else 'order_canceled'
|
from datetime import datetime, timedelta
import pytest
from homeassistant import util
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, patch
def test_sanitize_filename():
"""Test sanitize_filename."""
assert util.sanitize_filename("test") == "test"
assert util.sanitize_filename("/test") == "test"
assert util.sanitize_filename("..test") == "test"
assert util.sanitize_filename("\\test") == "test"
assert util.sanitize_filename("\\../test") == "test"
def test_sanitize_path():
"""Test sanitize_path."""
assert util.sanitize_path("test/path") == "test/path"
assert util.sanitize_path("~test/path") == "test/path"
assert util.sanitize_path("~/../test/path") == "//test/path"
def test_slugify():
"""Test slugify."""
assert util.slugify("T-!@#$!#@$!$est") == "t_est"
assert util.slugify("Test More") == "test_more"
assert util.slugify("Test_(More)") == "test_more"
assert util.slugify("Tèst_Mörê") == "test_more"
assert util.slugify("B8:27:EB:00:00:00") == "b8_27_eb_00_00_00"
assert util.slugify("test.com") == "test_com"
assert util.slugify("greg_phone - exp_wayp1") == "greg_phone_exp_wayp1"
assert (
util.slugify("We are, we are, a... Test Calendar")
== "we_are_we_are_a_test_calendar"
)
assert util.slugify("Tèst_äöüß_ÄÖÜ") == "test_aouss_aou"
assert util.slugify("影師嗎") == "ying_shi_ma"
assert util.slugify("けいふぉんと") == "keihuonto"
def test_repr_helper():
"""Test repr_helper."""
assert util.repr_helper("A") == "A"
assert util.repr_helper(5) == "5"
assert util.repr_helper(True) == "True"
assert util.repr_helper({"test": 1}) == "test=1"
assert (
util.repr_helper(datetime(1986, 7, 9, 12, 0, 0)) == "1986-07-09T12:00:00+00:00"
)
def test_convert():
"""Test convert."""
assert util.convert("5", int) == 5
assert util.convert("5", float) == 5.0
assert util.convert("True", bool) is True
assert util.convert("NOT A NUMBER", int, 1) == 1
assert util.convert(None, int, 1) == 1
assert util.convert(object, int, 1) == 1
def test_ensure_unique_string():
"""Test ensure_unique_string."""
assert util.ensure_unique_string("Beer", ["Beer", "Beer_2"]) == "Beer_3"
assert util.ensure_unique_string("Beer", ["Wine", "Soda"]) == "Beer"
def test_ordered_enum():
"""Test the ordered enum class."""
class TestEnum(util.OrderedEnum):
"""Test enum that can be ordered."""
FIRST = 1
SECOND = 2
THIRD = 3
assert TestEnum.SECOND >= TestEnum.FIRST
assert TestEnum.SECOND >= TestEnum.SECOND
assert TestEnum.SECOND < TestEnum.THIRD
assert TestEnum.SECOND > TestEnum.FIRST
assert TestEnum.SECOND <= TestEnum.SECOND
assert TestEnum.SECOND <= TestEnum.THIRD
# Python will raise a TypeError if the <, <=, >, >= methods
# raise a NotImplemented error.
with pytest.raises(TypeError):
TestEnum.FIRST < 1
with pytest.raises(TypeError):
TestEnum.FIRST <= 1
with pytest.raises(TypeError):
TestEnum.FIRST > 1
with pytest.raises(TypeError):
TestEnum.FIRST >= 1
def test_throttle():
"""Test the add cooldown decorator."""
calls1 = []
calls2 = []
@util.Throttle(timedelta(seconds=4))
def test_throttle1():
calls1.append(1)
@util.Throttle(timedelta(seconds=4), timedelta(seconds=2))
def test_throttle2():
calls2.append(1)
now = dt_util.utcnow()
plus3 = now + timedelta(seconds=3)
plus5 = plus3 + timedelta(seconds=2)
# Call first time and ensure methods got called
test_throttle1()
test_throttle2()
assert len(calls1) == 1
assert len(calls2) == 1
# Call second time. Methods should not get called
test_throttle1()
test_throttle2()
assert len(calls1) == 1
assert len(calls2) == 1
# Call again, overriding throttle, only first one should fire
test_throttle1(no_throttle=True)
test_throttle2(no_throttle=True)
assert len(calls1) == 2
assert len(calls2) == 1
with patch("homeassistant.util.utcnow", return_value=plus3):
test_throttle1()
test_throttle2()
assert len(calls1) == 2
assert len(calls2) == 1
with patch("homeassistant.util.utcnow", return_value=plus5):
test_throttle1()
test_throttle2()
assert len(calls1) == 3
assert len(calls2) == 2
def test_throttle_per_instance():
"""Test that the throttle method is done per instance of a class."""
class Tester:
"""A tester class for the throttle."""
@util.Throttle(timedelta(seconds=1))
def hello(self):
"""Test the throttle."""
return True
assert Tester().hello()
assert Tester().hello()
def test_throttle_on_method():
"""Test that throttle works when wrapping a method."""
class Tester:
"""A tester class for the throttle."""
def hello(self):
"""Test the throttle."""
return True
tester = Tester()
throttled = util.Throttle(timedelta(seconds=1))(tester.hello)
assert throttled()
assert throttled() is None
def test_throttle_on_two_method():
"""Test that throttle works when wrapping two methods."""
class Tester:
"""A test class for the throttle."""
@util.Throttle(timedelta(seconds=1))
def hello(self):
"""Test the throttle."""
return True
@util.Throttle(timedelta(seconds=1))
def goodbye(self):
"""Test the throttle."""
return True
tester = Tester()
assert tester.hello()
assert tester.goodbye()
@patch.object(util, "random")
def test_get_random_string(mock_random):
"""Test get random string."""
results = ["A", "B", "C"]
def mock_choice(choices):
return results.pop(0)
generator = MagicMock()
generator.choice.side_effect = mock_choice
mock_random.SystemRandom.return_value = generator
assert util.get_random_string(length=3) == "ABC"
async def test_throttle_async():
"""Test Throttle decorator with async method."""
@util.Throttle(timedelta(seconds=2))
async def test_method():
"""Only first call should return a value."""
return True
assert (await test_method()) is True
assert (await test_method()) is None
@util.Throttle(timedelta(seconds=2), timedelta(seconds=0.1))
async def test_method2():
"""Only first call should return a value."""
return True
assert (await test_method2()) is True
assert (await test_method2()) is None
|
from august.activity import ActivityType
from august.util import update_doorbell_image_from_activity
from homeassistant.components.camera import Camera
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from .const import DATA_AUGUST, DEFAULT_NAME, DEFAULT_TIMEOUT, DOMAIN
from .entity import AugustEntityMixin
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up August cameras."""
data = hass.data[DOMAIN][config_entry.entry_id][DATA_AUGUST]
devices = []
for doorbell in data.doorbells:
devices.append(AugustCamera(data, doorbell, DEFAULT_TIMEOUT))
async_add_entities(devices, True)
class AugustCamera(AugustEntityMixin, Camera):
"""An implementation of a August security camera."""
def __init__(self, data, device, timeout):
"""Initialize a August security camera."""
super().__init__(data, device)
self._data = data
self._device = device
self._timeout = timeout
self._image_url = None
self._image_content = None
@property
def name(self):
"""Return the name of this device."""
return f"{self._device.device_name} Camera"
@property
def is_recording(self):
"""Return true if the device is recording."""
return self._device.has_subscription
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return True
@property
def brand(self):
"""Return the camera brand."""
return DEFAULT_NAME
@property
def model(self):
"""Return the camera model."""
return self._detail.model
@callback
def _update_from_data(self):
"""Get the latest state of the sensor."""
doorbell_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, [ActivityType.DOORBELL_MOTION]
)
if doorbell_activity is not None:
update_doorbell_image_from_activity(self._detail, doorbell_activity)
async def async_camera_image(self):
"""Return bytes of camera image."""
self._update_from_data()
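        # Fetch new image bytes only when the image URL has changed; otherwise
        # serve the cached content.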
        if self._image_url != self._detail.image_url:
self._image_url = self._detail.image_url
self._image_content = await self._detail.async_get_doorbell_image(
aiohttp_client.async_get_clientsession(self.hass), timeout=self._timeout
)
return self._image_content
@property
def unique_id(self) -> str:
"""Get the unique id of the camera."""
return f"{self._device_id:s}_camera"
|
import asyncio
from datetime import timedelta
import logging
from griddypower.async_api import LOAD_ZONES, AsyncGriddy
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_LOADZONE, DOMAIN, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_LOADZONE): vol.In(LOAD_ZONES)})},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Griddy Power component."""
hass.data.setdefault(DOMAIN, {})
conf = config.get(DOMAIN)
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_LOADZONE: conf.get(CONF_LOADZONE)},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Griddy Power from a config entry."""
entry_data = entry.data
async_griddy = AsyncGriddy(
aiohttp_client.async_get_clientsession(hass),
settlement_point=entry_data[CONF_LOADZONE],
)
async def async_update_data():
"""Fetch data from API endpoint."""
return await async_griddy.async_getnow()
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="Griddy getnow",
update_method=async_update_data,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data[DOMAIN][entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import numpy as np
from bokeh.plotting import figure
from flexx import flx
# Plot 1
N = 1000
x = np.random.normal(0, np.pi, N)
y = np.sin(x) + np.random.normal(0, 0.2, N)
TOOLS = "pan,wheel_zoom,box_zoom,reset,box_select"
p1 = figure(tools=TOOLS)
p1.scatter(x, y, alpha=0.1, nonselection_alpha=0.1)
# Plot2
t = np.linspace(0, 6.5, 100)
p2 = figure(tools=TOOLS, sizing_mode='scale_width')
p2.line(t, np.sin(t))
p3 = figure(tools=TOOLS, sizing_mode='scale_width')
p3.line(t, np.cos(t))
class BokehExample(flx.PyComponent):
def init(self):
with flx.HSplit(minsize=300) as self.widget:
self.plot1 = flx.BokehWidget.from_plot(p1, title='Scatter')
with flx.VFix(title='Sine'):
Controls()
with flx.Widget(style='overflow-y:auto;', flex=1):
self.plot2 = flx.BokehWidget.from_plot(p2)
self.plot3 = flx.BokehWidget.from_plot(p3)
class Controls(flx.FormLayout):
def init(self):
self.amp = flx.Slider(title='Amplitude', max=2, value=1)
self.freq = flx.Slider(title='Frequency', max=10, value=5)
self.phase = flx.Slider(title='Phase', max=3, value=1)
@flx.reaction
def _update_sine(self):
global window
amp, freq, phase = self.amp.value, self.freq.value, self.phase.value
# Get reference to data source
ds = None
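        # self.parent is the VFix; its second child is the scroll container
        # whose first child is the plot2 widget.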
plot2 = self.parent.children[1].children[0]
plot = plot2.plot
if plot:
for ren in plot.model.renderers.values():
if ren.data_source:
ds = ren.data_source
break
# Update
if ds:
ds.data.y = [amp*window.Math.sin(x*freq+phase) for x in ds.data.x]
ds.change.emit() # or trigger('change') in older versions
if __name__ == '__main__':
m = flx.launch(BokehExample, 'app')
flx.run()
|
import logging
from requests.exceptions import RequestException
import thingspeak
import voluptuous as vol
from homeassistant.const import (
CONF_API_KEY,
CONF_ID,
CONF_WHITELIST,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.helpers import event, state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "thingspeak"
TIMEOUT = 5
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ID): int,
vol.Required(CONF_WHITELIST): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Thingspeak environment."""
conf = config[DOMAIN]
api_key = conf.get(CONF_API_KEY)
channel_id = conf.get(CONF_ID)
entity = conf.get(CONF_WHITELIST)
try:
channel = thingspeak.Channel(channel_id, api_key=api_key, timeout=TIMEOUT)
channel.get()
except RequestException:
_LOGGER.error(
"Error while accessing the ThingSpeak channel. "
"Please check that the channel exists and your API key is correct"
)
return False
def thingspeak_listener(entity_id, old_state, new_state):
"""Listen for new events and send them to Thingspeak."""
if new_state is None or new_state.state in (
STATE_UNKNOWN,
"",
STATE_UNAVAILABLE,
):
return
try:
if new_state.entity_id != entity:
return
_state = state_helper.state_as_number(new_state)
except ValueError:
return
try:
channel.update({"field1": _state})
except RequestException:
_LOGGER.error("Error while sending value '%s' to Thingspeak", _state)
event.track_state_change(hass, entity, thingspeak_listener)
return True
|
import argparse
import chainer
import tensorflow as tf
from chainercv.links import MobileNetV2
def load_expanded_conv(econv, expand_params, depthwise_params, project_params):
if hasattr(econv, 'expand'):
assert expand_params is not None
c_to_p = [(econv.expand.conv, econv.expand.bn, expand_params)]
else:
assert expand_params is None
c_to_p = []
c_to_p.extend([(econv.depthwise.conv, econv.depthwise.bn,
depthwise_params), (econv.project.conv, econv.project.bn,
project_params)])
for conv, bn, params in c_to_p:
init_conv_with_tf_weights(conv, params["weights"])
init_bn_with_tf_params(bn, params["beta"], params["gamma"],
params["moving_mean"],
params["moving_variance"])
def init_conv_with_tf_weights(conv, weights, bias=None):
# Shifting input and output channel dimensions.
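    # TF stores conv weights as (H, W, in_ch, out_ch); Chainer expects
    # (out_ch, in_ch, H, W).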
weights = weights.transpose((3, 2, 0, 1))
if conv.W.shape != weights.shape: # for depthwise conv
weights = weights.transpose((1, 0, 2, 3))
conv.W.data[:] = weights.data[:]
if bias is not None:
conv.b.data[:] = bias.data[:]
def init_bn_with_tf_params(bn, beta, gamma, moving_mean, moving_variance):
beta = beta.flatten().astype(chainer.config.dtype)
bn.beta.initializer = chainer.initializers.Constant(
beta, dtype=chainer.config.dtype)
bn.beta.initialize(shape=beta.shape)
gamma = gamma.flatten().astype(chainer.config.dtype)
bn.gamma.initializer = chainer.initializers.Constant(
gamma, dtype=chainer.config.dtype)
bn.gamma.initialize(shape=gamma.shape)
bn.avg_mean = moving_mean.flatten().astype(chainer.config.dtype)
bn.avg_var = moving_variance.flatten().astype(chainer.config.dtype)
def get_tensor(ckpt_reader, name, ema_ratio=0.999):
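    # If the checkpoint stores an exponential-moving-average shadow variable,
    # blend the raw value with its EMA using ema_ratio (weighted toward the EMA).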
if (name + '/ExponentialMovingAverage'
) in ckpt_reader.get_variable_to_shape_map().keys():
base = ckpt_reader.get_tensor(name)
ema = ckpt_reader.get_tensor(name + '/ExponentialMovingAverage')
return (1.0 - ema_ratio) * base + ema_ratio * ema
else:
return ckpt_reader.get_tensor(name)
def load_mobilenetv2_from_tensorflow_checkpoint(model, checkpoint_filename):
ckpt_reader = tf.train.NewCheckpointReader(checkpoint_filename)
# Loading weights for the expanded convolutions.
tf_scope_to_expanded_conv = {
"MobilenetV2/expanded_conv": model.expanded_conv,
}
for i in range(16):
tf_scope_to_expanded_conv["MobilenetV2/expanded_conv_{}".format(
i + 1)] = getattr(model, "expanded_conv_{}".format(i + 1))
for tf_scope, expanded_conv in tf_scope_to_expanded_conv.items():
print("Loading weights for %s" % tf_scope)
# Expand convolution parameters
if hasattr(expanded_conv, 'expand'):
expand_params = {
"weights":
get_tensor(ckpt_reader, tf_scope + '/expand/weights'),
"beta":
get_tensor(ckpt_reader, tf_scope + '/expand/BatchNorm/beta'),
"gamma":
get_tensor(ckpt_reader, tf_scope + '/expand/BatchNorm/gamma'),
"moving_mean":
get_tensor(ckpt_reader,
tf_scope + '/expand/BatchNorm/moving_mean'),
"moving_variance":
get_tensor(ckpt_reader,
tf_scope + '/expand/BatchNorm/moving_variance')
}
else:
print("Skipping expanded convolution for {}".format(tf_scope))
expand_params = None
# Depthwise convolution parameters
depthwise_params = {
"weights":
get_tensor(ckpt_reader, tf_scope + '/depthwise/depthwise_weights'),
"beta":
get_tensor(ckpt_reader, tf_scope + '/depthwise/BatchNorm/beta'),
"gamma":
get_tensor(ckpt_reader, tf_scope + '/depthwise/BatchNorm/gamma'),
"moving_mean":
get_tensor(ckpt_reader,
tf_scope + '/depthwise/BatchNorm/moving_mean'),
"moving_variance":
get_tensor(ckpt_reader,
tf_scope + '/depthwise/BatchNorm/moving_variance')
}
# Project convolution parameters
project_params = {
"weights":
get_tensor(ckpt_reader, tf_scope + '/project/weights'),
"beta":
get_tensor(ckpt_reader, tf_scope + '/project/BatchNorm/beta'),
"gamma":
get_tensor(ckpt_reader, tf_scope + '/project/BatchNorm/gamma'),
"moving_mean":
get_tensor(ckpt_reader,
tf_scope + '/project/BatchNorm/moving_mean'),
"moving_variance":
get_tensor(ckpt_reader,
tf_scope + '/project/BatchNorm/moving_variance'),
}
load_expanded_conv(
expanded_conv,
expand_params=expand_params,
depthwise_params=depthwise_params,
project_params=project_params,
)
# Similarly loading the vanilla convolutions.
# Initial convolution
init_conv_with_tf_weights(
model.conv.conv,
weights=get_tensor(ckpt_reader, 'MobilenetV2/Conv/weights'))
init_bn_with_tf_params(
model.conv.bn,
beta=get_tensor(ckpt_reader, 'MobilenetV2/Conv/BatchNorm/beta'),
gamma=get_tensor(ckpt_reader, 'MobilenetV2/Conv/BatchNorm/gamma'),
moving_mean=get_tensor(ckpt_reader,
'MobilenetV2/Conv/BatchNorm/moving_mean'),
moving_variance=get_tensor(
ckpt_reader, 'MobilenetV2/Conv/BatchNorm/moving_variance'))
# Final convolution before dropout (conv1)
init_conv_with_tf_weights(
model.conv1.conv,
weights=get_tensor(ckpt_reader, 'MobilenetV2/Conv_1/weights'))
init_bn_with_tf_params(
model.conv1.bn,
beta=get_tensor(ckpt_reader, 'MobilenetV2/Conv_1/BatchNorm/beta'),
gamma=get_tensor(ckpt_reader, 'MobilenetV2/Conv_1/BatchNorm/gamma'),
moving_mean=get_tensor(ckpt_reader,
'MobilenetV2/Conv_1/BatchNorm/moving_mean'),
moving_variance=get_tensor(
ckpt_reader, 'MobilenetV2/Conv_1/BatchNorm/moving_variance'))
# Logits convolution
init_conv_with_tf_weights(
model.logits_conv,
weights=get_tensor(ckpt_reader,
'MobilenetV2/Logits/Conv2d_1c_1x1/weights'),
bias=get_tensor(ckpt_reader,
'MobilenetV2/Logits/Conv2d_1c_1x1/biases'))
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'model_name', choices=('mobilenetv2', ), default='mobilenetv2')
parser.add_argument('pretrained_model')
parser.add_argument('--n-class', type=int, default=1001)
parser.add_argument('--depth-multiplier', type=float, default=1.0)
parser.add_argument('output', nargs='?', default=None)
args = parser.parse_args()
model = MobileNetV2(args.n_class, depth_multiplier=args.depth_multiplier)
load_mobilenetv2_from_tensorflow_checkpoint(model, args.pretrained_model)
if args.output is None:
output = '{}_{}_imagenet_convert.npz'.format(args.model_name,
args.depth_multiplier)
else:
output = args.output
model.conv.conv.W.array /= 255.0 # scaling [0, 255] -> [0, 1.0]
chainer.serializers.save_npz(output, model)
print("output: ", output)
if __name__ == '__main__':
main()
|
import logging
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import CONF_FOR, CONF_PLATFORM, CONF_VALUE_TEMPLATE
from homeassistant.core import HassJob, callback
from homeassistant.helpers import config_validation as cv, template
from homeassistant.helpers.event import (
TrackTemplate,
async_call_later,
async_track_template_result,
)
from homeassistant.helpers.template import result_as_boolean
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
TRIGGER_SCHEMA = IF_ACTION_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "template",
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_FOR): cv.positive_time_period_template,
}
)
async def async_attach_trigger(
hass, config, action, automation_info, *, platform_type="template"
):
"""Listen for state changes based on configuration."""
value_template = config.get(CONF_VALUE_TEMPLATE)
value_template.hass = hass
time_delta = config.get(CONF_FOR)
template.attach(hass, time_delta)
delay_cancel = None
job = HassJob(action)
@callback
def template_listener(event, updates):
"""Listen for state changes and calls action."""
nonlocal delay_cancel
result = updates.pop().result
if delay_cancel:
# pylint: disable=not-callable
delay_cancel()
delay_cancel = None
if not result_as_boolean(result):
return
entity_id = event.data.get("entity_id")
from_s = event.data.get("old_state")
to_s = event.data.get("new_state")
@callback
def call_action(*_):
"""Call action with right context."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "template",
"entity_id": entity_id,
"from_state": from_s,
"to_state": to_s,
"for": time_delta if not time_delta else period,
"description": f"{entity_id} via template",
}
},
(to_s.context if to_s else None),
)
if not time_delta:
call_action()
return
variables = {
"trigger": {
"platform": platform_type,
"entity_id": entity_id,
"from_state": from_s,
"to_state": to_s,
}
}
try:
period = cv.positive_time_period(
template.render_complex(time_delta, variables)
)
except (exceptions.TemplateError, vol.Invalid) as ex:
_LOGGER.error(
"Error rendering '%s' for template: %s", automation_info["name"], ex
)
return
delay_cancel = async_call_later(hass, period.seconds, call_action)
info = async_track_template_result(
hass,
[TrackTemplate(value_template, automation_info["variables"])],
template_listener,
)
unsub = info.async_remove
@callback
def async_remove():
"""Remove state listeners async."""
unsub()
if delay_cancel:
# pylint: disable=not-callable
delay_cancel()
return async_remove
|
from django.contrib.sites.models import Site
from django.test import TestCase
from zinnia.managers import PUBLISHED
from zinnia.models.category import Category
from zinnia.models.entry import Entry
from zinnia.signals import disconnect_entry_signals
class CategoryTestCase(TestCase):
def setUp(self):
disconnect_entry_signals()
self.site = Site.objects.get_current()
self.categories = [Category.objects.create(title='Category 1',
slug='category-1'),
Category.objects.create(title='Category 2',
slug='category-2')]
params = {'title': 'My entry',
'content': 'My content',
'tags': 'zinnia, test',
'slug': 'my-entry'}
self.entry = Entry.objects.create(**params)
self.entry.categories.add(*self.categories)
self.entry.sites.add(self.site)
def test_entries_published(self):
category = self.categories[0]
self.assertEqual(category.entries_published().count(), 0)
self.entry.status = PUBLISHED
self.entry.save()
self.assertEqual(category.entries_published().count(), 1)
params = {'title': 'My second entry',
'content': 'My second content',
'tags': 'zinnia, test',
'status': PUBLISHED,
'slug': 'my-second-entry'}
new_entry = Entry.objects.create(**params)
new_entry.sites.add(self.site)
new_entry.categories.add(self.categories[0])
self.assertEqual(self.categories[0].entries_published().count(), 2)
self.assertEqual(self.categories[1].entries_published().count(), 1)
def test_entries_tree_path(self):
self.categories.extend([Category.objects.create(title='Category 3',
slug='category-3'),
Category.objects.create(title='Category 4',
slug='category-4')])
with self.assertNumQueries(0):
self.assertEqual(self.categories[0].tree_path, 'category-1')
self.assertEqual(self.categories[1].tree_path, 'category-2')
self.categories[1].parent = self.categories[0]
self.categories[1].save()
self.categories[2].parent = self.categories[1]
self.categories[2].save()
self.categories[3].parent = self.categories[2]
self.categories[3].save()
category = Category.objects.get(slug='category-2')
with self.assertNumQueries(1):
self.assertEqual(category.tree_path, 'category-1/category-2')
category = Category.objects.get(slug='category-4')
with self.assertNumQueries(1):
self.assertEqual(category.tree_path,
'category-1/category-2/category-3/category-4')
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import math
from numpy.random import randn
def get_radar(dt):
""" Simulate radar range to object at 1K altidue and moving at 100m/s.
Adds about 5% measurement noise. Returns slant range to the object.
Call once for each new measurement at dt time from last call.
"""
if not hasattr(get_radar, "posp"):
get_radar.posp = 0
vel = 100 + .5 * randn()
alt = 1000 + 10 * randn()
pos = get_radar.posp + vel*dt
    v = pos * 0.05 * randn()
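    # Slant range: straight-line distance from the radar at the origin to the
    # object, with the range-proportional noise added.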
slant_range = math.sqrt(pos**2 + alt**2) + v
get_radar.posp = pos
return slant_range
if __name__ == "__main__":
for i in range(100):
print(get_radar(0.1))
|
from __future__ import absolute_import
version='0.20.12.26'
import os
import logging
logger=logging.getLogger('trashcli.trash')
logger.setLevel(logging.WARNING)
logger.addHandler(logging.StreamHandler())
# Error codes (from os on *nix, hard coded for Windows):
EX_OK = getattr(os, 'EX_OK' , 0)
EX_USAGE = getattr(os, 'EX_USAGE', 64)
EX_IOERR = getattr(os, 'EX_IOERR', 74)
def backup_file_path_from(trashinfo_file_path):
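    # Per the FreeDesktop trash spec, <trash-dir>/info/<name>.trashinfo pairs
    # with its backup copy at <trash-dir>/files/<name>.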
trashinfo_basename = os.path.basename(trashinfo_file_path)
backupfile_basename = trashinfo_basename[:-len('.trashinfo')]
info_dir = os.path.dirname(trashinfo_file_path)
trash_dir = os.path.dirname(info_dir)
files_dir = os.path.join(trash_dir, 'files')
return os.path.join(files_dir, backupfile_basename)
def home_trash_dir_path(environ):
if 'XDG_DATA_HOME' in environ:
return ['%(XDG_DATA_HOME)s/Trash' % environ]
elif 'HOME' in environ:
return ['%(HOME)s/.local/share/Trash' % environ]
return []
def home_trash_dir(environ, volume_of):
paths = home_trash_dir_path(environ)
for path in paths:
yield path, volume_of(path)
def volume_trash_dir1(volume, getuid):
path = os.path.join(volume, '.Trash/%s' % getuid())
yield path, volume
def volume_trash_dir2(volume, getuid):
path = os.path.join(volume, ".Trash-%s" % getuid())
yield path, volume
def do_nothing(*argv, **argvk): pass
class Parser:
def __init__(self):
self.default_action = do_nothing
self.argument_action = do_nothing
self.short_options = ''
self.long_options = []
self.actions = dict()
self._on_invalid_option = do_nothing
def __call__(self, argv):
program_name = argv[0]
from getopt import getopt, GetoptError
try:
options, arguments = getopt(argv[1:],
self.short_options,
self.long_options)
except GetoptError as e:
invalid_option = e.opt
self._on_invalid_option(program_name, invalid_option)
else:
for option, value in options:
if option in ('--help', '-h', '--version'):
self.actions[option](program_name)
return
if option in self.actions:
self.actions[option](value)
return
for argument in arguments:
self.argument_action(argument)
self.default_action()
def on_invalid_option(self, action):
self._on_invalid_option = action
def on_help(self, action):
self.add_option('help', action, 'h')
def on_version(self, action):
self.add_option('version', action)
def add_option(self, long_option, action, short_aliases=''):
self.long_options.append(long_option)
if long_option.endswith('='):
import re
long_option = re.sub('=$', '', long_option)
self.actions['--' + long_option] = action
for short_alias in short_aliases:
self.add_short_option(short_alias, action)
def add_short_option(self, short_option, action):
self.short_options += short_option
self.actions['-' + short_option] = action
def on_argument(self, argument_action):
self.argument_action = argument_action
def as_default(self, default_action):
self.default_action = default_action
class TrashDirs:
def __init__(self, environ, getuid, list_volumes, top_trashdir_rules):
self.getuid = getuid
self.mount_points = list_volumes
self.top_trashdir_rules = top_trashdir_rules
self.environ = environ
# events
self.on_trash_dir_found = lambda trashdir, volume: None
self.on_trashdir_skipped_because_parent_not_sticky = lambda trashdir: None
self.on_trashdir_skipped_because_parent_is_symlink = lambda trashdir: None
def list_trashdirs(self):
self.emit_home_trashcan()
self._for_each_volume_trashcan()
def emit_home_trashcan(self):
home_trash_dir_paths = home_trash_dir_path(self.environ)
for path in home_trash_dir_paths:
self.on_trash_dir_found(path, '/')
def _for_each_volume_trashcan(self):
for volume in self.mount_points():
self.emit_trashcans_for(volume)
def emit_trashcans_for(self, volume):
self.emit_trashcan_1_for(volume)
self.emit_trashcan_2_for(volume)
def emit_trashcan_1_for(self,volume):
top_trashdir_path = os.path.join(volume, '.Trash/%s' % self.getuid())
class IsValidOutput:
def not_valid_parent_should_not_be_a_symlink(_):
self.on_trashdir_skipped_because_parent_is_symlink(top_trashdir_path)
def not_valid_parent_should_be_sticky(_):
self.on_trashdir_skipped_because_parent_not_sticky(top_trashdir_path)
def is_valid(_):
self.on_trash_dir_found(top_trashdir_path, volume)
self.top_trashdir_rules.valid_to_be_read(top_trashdir_path, IsValidOutput())
def emit_trashcan_2_for(self, volume):
alt_top_trashdir = os.path.join(volume, '.Trash-%s' % self.getuid())
self.on_trash_dir_found(alt_top_trashdir, volume)
class Harvester:
def __init__(self, file_reader):
self.file_reader = file_reader
self.on_orphan_found = do_nothing
self.on_trashinfo_found = do_nothing
self.on_volume = do_nothing
def analize_trash_directory(self, trash_dir_path, volume_path):
self.on_volume(volume_path)
trashdir = TrashDir(self.file_reader)
trashdir.open(trash_dir_path, volume_path)
trashdir.each_trashinfo(self.on_trashinfo_found)
trashdir.each_orphan(self.on_orphan_found)
class PrintHelp:
def __init__(self, description, println):
class Printer:
def __init__(self, println):
self.println = println
def usage(self, usage):
self.println(usage)
self.println('')
def summary(self, summary):
self.println(summary)
self.println('')
def options(self, *line_describing_option):
self.println('Options:')
for line in line_describing_option:
self.println(line)
self.println('')
def bug_reporting(self):
self.println("Report bugs to https://github.com/andreafrancia/trash-cli/issues")
self.description = description
self.printer = Printer(println)
def __call__(self, program_name):
self.description(program_name, self.printer)
class PrintVersion:
def __init__(self, println, version):
self.println = println
self.version = version
def __call__(self, program_name):
self.println("%s %s" % (program_name, self.version))
class TopTrashDirRules:
def __init__(self, fs):
self.fs = fs
def valid_to_be_read(self, path, output):
parent_trashdir = os.path.dirname(path)
if not self.fs.exists(path):
return
if not self.fs.is_sticky_dir(parent_trashdir):
output.not_valid_parent_should_be_sticky()
return
        if self.fs.is_symlink(parent_trashdir):
            output.not_valid_parent_should_not_be_a_symlink()
            return
        output.is_valid()
class Dir:
def __init__(self, path, entries_if_dir_exists):
self.path = path
self.entries_if_dir_exists = entries_if_dir_exists
def entries(self):
return self.entries_if_dir_exists(self.path)
def full_path(self, entry):
return os.path.join(self.path, entry)
class TrashDir:
def __init__(self, file_reader):
self.file_reader = file_reader
def open(self, path, volume_path):
self.trash_dir_path = path
self.volume_path = volume_path
self.files_dir = Dir(self._files_dir(),
self.file_reader.entries_if_dir_exists)
def each_orphan(self, action):
for entry in self.files_dir.entries():
trashinfo_path = self._trashinfo_path_from_file(entry)
file_path = self.files_dir.full_path(entry)
if not self.file_reader.exists(trashinfo_path): action(file_path)
def _entries_if_dir_exists(self, path):
return self.file_reader.entries_if_dir_exists(path)
def each_trashinfo(self, action):
for entry in self._trashinfo_entries():
action(os.path.join(self._info_dir(), entry))
def _info_dir(self):
return os.path.join(self.trash_dir_path, 'info')
def _trashinfo_path_from_file(self, file_entry):
return os.path.join(self._info_dir(), file_entry + '.trashinfo')
def _files_dir(self):
return os.path.join(self.trash_dir_path, 'files')
def _trashinfo_entries(self, on_non_trashinfo=do_nothing):
for entry in self._entries_if_dir_exists(self._info_dir()):
if entry.endswith('.trashinfo'):
yield entry
else:
on_non_trashinfo()
class ParseError(ValueError): pass
def maybe_parse_deletion_date(contents):
result = Basket(unknown_date())
ParseTrashInfo(
on_deletion_date = lambda date: result.collect(date),
on_invalid_date = lambda: result.collect(unknown_date())
)(contents)
return result.collected
def unknown_date():
return '????-??-?? ??:??:??'
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
class ParseTrashInfo:
def __init__(self,
on_deletion_date = do_nothing,
on_invalid_date = do_nothing,
on_path = do_nothing):
self.found_deletion_date = on_deletion_date
self.found_invalid_date = on_invalid_date
self.found_path = on_path
def __call__(self, contents):
from datetime import datetime
for line in contents.split('\n'):
if line.startswith('DeletionDate='):
try:
date = datetime.strptime(line, "DeletionDate=%Y-%m-%dT%H:%M:%S")
except ValueError:
self.found_invalid_date()
else:
self.found_deletion_date(date)
if line.startswith('Path='):
path=unquote(line[len('Path='):])
self.found_path(path)
class Basket:
def __init__(self, initial_value = None):
self.collected = initial_value
def collect(self, value):
self.collected = value
def parse_deletion_date(contents):
result = Basket()
ParseTrashInfo(on_deletion_date=result.collect)(contents)
return result.collected
def parse_path(contents):
for line in contents.split('\n'):
if line.startswith('Path='):
return unquote(line[len('Path='):])
raise ParseError('Unable to parse Path')
|
import sys
import time
import mne
from mne.report import Report
from mne.utils import verbose, logger
@verbose
def log_elapsed(t, verbose=None):
"""Log elapsed time."""
logger.info('Report complete in %s seconds' % round(t, 1))
def run():
"""Run command."""
from mne.commands.utils import get_optparser, _add_verbose_flag
parser = get_optparser(__file__)
parser.add_option("-p", "--path", dest="path",
help="Path to folder who MNE-Report must be created")
parser.add_option("-i", "--info", dest="info_fname",
help="File from which info dictionary is to be read",
metavar="FILE")
parser.add_option("-c", "--cov", dest="cov_fname",
help="File from which noise covariance is to be read",
metavar="FILE")
parser.add_option("--bmin", dest="bmin",
help="Time at which baseline correction starts for "
"evokeds", default=None)
parser.add_option("--bmax", dest="bmax",
help="Time at which baseline correction stops for "
"evokeds", default=None)
parser.add_option("-d", "--subjects-dir", dest="subjects_dir",
help="The subjects directory")
parser.add_option("-s", "--subject", dest="subject",
help="The subject name")
parser.add_option("--no-browser", dest="no_browser", action='store_false',
help="Do not open MNE-Report in browser")
parser.add_option("--overwrite", dest="overwrite", action='store_false',
help="Overwrite html report if it already exists")
parser.add_option("-j", "--jobs", dest="n_jobs", help="Number of jobs to"
" run in parallel")
parser.add_option("-m", "--mri-decim", type="int", dest="mri_decim",
default=2, help="Integer factor used to decimate "
"BEM plots")
parser.add_option("--image-format", type="str", dest="image_format",
default='png', help="Image format to use "
"(can be 'png' or 'svg')")
_add_verbose_flag(parser)
options, args = parser.parse_args()
path = options.path
if path is None:
parser.print_help()
sys.exit(1)
info_fname = options.info_fname
cov_fname = options.cov_fname
subjects_dir = options.subjects_dir
subject = options.subject
image_format = options.image_format
mri_decim = int(options.mri_decim)
    # --no-browser and --overwrite use action='store_false' with no default,
    # so each dest is None unless the flag was passed on the command line.
    verbose = options.verbose is not None
    open_browser = options.no_browser is None
    overwrite = options.overwrite is not None
n_jobs = int(options.n_jobs) if options.n_jobs is not None else 1
bmin = float(options.bmin) if options.bmin is not None else None
bmax = float(options.bmax) if options.bmax is not None else None
# XXX: this means (None, None) cannot be specified through command line
if bmin is None and bmax is None:
baseline = None
else:
baseline = (bmin, bmax)
t0 = time.time()
report = Report(info_fname, subjects_dir=subjects_dir,
subject=subject, baseline=baseline,
cov_fname=cov_fname, verbose=verbose,
image_format=image_format)
report.parse_folder(path, verbose=verbose, n_jobs=n_jobs,
mri_decim=mri_decim)
log_elapsed(time.time() - t0, verbose=verbose)
report.save(open_browser=open_browser, overwrite=overwrite)
mne.utils.run_command_if_main()
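# Invocation sketch (hypothetical paths; the flags map onto the options
# parsed in run() above):
#
#   $ mne report --path MNE-sample-data/ --info sample_audvis-ave.fif \
#         --subject sample --subjects-dir subjects/ --no-browser --overwrite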
|
from elkm1_lib.const import ThermostatFan, ThermostatMode, ThermostatSetting
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import PRECISION_WHOLE, STATE_ON
from . import ElkEntity, create_elk_entities
from .const import DOMAIN
SUPPORT_HVAC = [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_AUTO,
HVAC_MODE_FAN_ONLY,
]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Create the Elk-M1 thermostat platform."""
elk_data = hass.data[DOMAIN][config_entry.entry_id]
entities = []
elk = elk_data["elk"]
create_elk_entities(
elk_data, elk.thermostats, "thermostat", ElkThermostat, entities
)
async_add_entities(entities, True)
class ElkThermostat(ElkEntity, ClimateEntity):
"""Representation of an Elk-M1 Thermostat."""
def __init__(self, element, elk, elk_data):
"""Initialize climate entity."""
super().__init__(element, elk, elk_data)
self._state = None
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FAN_MODE | SUPPORT_AUX_HEAT | SUPPORT_TARGET_TEMPERATURE_RANGE
@property
def temperature_unit(self):
"""Return the temperature unit."""
return self._temperature_unit
@property
def current_temperature(self):
"""Return the current temperature."""
return self._element.current_temp
@property
def target_temperature(self):
"""Return the temperature we are trying to reach."""
if (self._element.mode == ThermostatMode.HEAT.value) or (
self._element.mode == ThermostatMode.EMERGENCY_HEAT.value
):
return self._element.heat_setpoint
if self._element.mode == ThermostatMode.COOL.value:
return self._element.cool_setpoint
return None
@property
def target_temperature_high(self):
"""Return the high target temperature."""
return self._element.cool_setpoint
@property
def target_temperature_low(self):
"""Return the low target temperature."""
return self._element.heat_setpoint
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 1
@property
def current_humidity(self):
"""Return the current humidity."""
return self._element.humidity
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self._state
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return SUPPORT_HVAC
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_WHOLE
@property
def is_aux_heat(self):
"""Return if aux heater is on."""
return self._element.mode == ThermostatMode.EMERGENCY_HEAT.value
@property
def min_temp(self):
"""Return the minimum temperature supported."""
return 1
@property
def max_temp(self):
"""Return the maximum temperature supported."""
return 99
@property
def fan_mode(self):
"""Return the fan setting."""
if self._element.fan == ThermostatFan.AUTO.value:
return HVAC_MODE_AUTO
if self._element.fan == ThermostatFan.ON.value:
return STATE_ON
return None
def _elk_set(self, mode, fan):
if mode is not None:
self._element.set(ThermostatSetting.MODE.value, mode)
if fan is not None:
self._element.set(ThermostatSetting.FAN.value, fan)
async def async_set_hvac_mode(self, hvac_mode):
"""Set thermostat operation mode."""
settings = {
HVAC_MODE_OFF: (ThermostatMode.OFF.value, ThermostatFan.AUTO.value),
HVAC_MODE_HEAT: (ThermostatMode.HEAT.value, None),
HVAC_MODE_COOL: (ThermostatMode.COOL.value, None),
HVAC_MODE_AUTO: (ThermostatMode.AUTO.value, None),
HVAC_MODE_FAN_ONLY: (ThermostatMode.OFF.value, ThermostatFan.ON.value),
}
        self._elk_set(*settings[hvac_mode])
async def async_turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
self._elk_set(ThermostatMode.EMERGENCY_HEAT.value, None)
async def async_turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
self._elk_set(ThermostatMode.HEAT.value, None)
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return [HVAC_MODE_AUTO, STATE_ON]
async def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
if fan_mode == HVAC_MODE_AUTO:
self._elk_set(None, ThermostatFan.AUTO.value)
elif fan_mode == STATE_ON:
self._elk_set(None, ThermostatFan.ON.value)
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if low_temp is not None:
self._element.set(ThermostatSetting.HEAT_SETPOINT.value, round(low_temp))
if high_temp is not None:
self._element.set(ThermostatSetting.COOL_SETPOINT.value, round(high_temp))
def _element_changed(self, element, changeset):
mode_to_state = {
ThermostatMode.OFF.value: HVAC_MODE_OFF,
ThermostatMode.COOL.value: HVAC_MODE_COOL,
ThermostatMode.HEAT.value: HVAC_MODE_HEAT,
ThermostatMode.EMERGENCY_HEAT.value: HVAC_MODE_HEAT,
ThermostatMode.AUTO.value: HVAC_MODE_AUTO,
}
self._state = mode_to_state.get(self._element.mode)
if self._state == HVAC_MODE_OFF and self._element.fan == ThermostatFan.ON.value:
self._state = HVAC_MODE_FAN_ONLY
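# Mapping sketch: the settings table in async_set_hvac_mode pairs each HA
# HVAC mode with an (Elk thermostat mode, fan) tuple, so selecting
# HVAC_MODE_FAN_ONLY writes ThermostatMode.OFF plus ThermostatFan.ON to the
# panel, while _element_changed reverses the mapping and reports
# HVAC_MODE_FAN_ONLY whenever the panel says OFF with the fan running.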
|
import logging
from typing import Optional
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, HTTP_OK
from .const import (
KEY_DEVICES,
KEY_ENABLED,
KEY_EXTERNAL_ID,
KEY_FLEX_SCHEDULES,
KEY_ID,
KEY_MAC_ADDRESS,
KEY_MODEL,
KEY_NAME,
KEY_SCHEDULES,
KEY_SERIAL_NUMBER,
KEY_STATUS,
KEY_USERNAME,
KEY_ZONES,
)
from .webhooks import LISTEN_EVENT_TYPES, WEBHOOK_CONST_ID
_LOGGER = logging.getLogger(__name__)
class RachioPerson:
"""Represent a Rachio user."""
def __init__(self, rachio, config_entry):
"""Create an object from the provided API instance."""
# Use API token to get user ID
self.rachio = rachio
self.config_entry = config_entry
self.username = None
self._id = None
self._controllers = []
def setup(self, hass):
"""Rachio device setup."""
response = self.rachio.person.info()
assert int(response[0][KEY_STATUS]) == HTTP_OK, "API key error"
self._id = response[1][KEY_ID]
# Use user ID to get user data
data = self.rachio.person.get(self._id)
assert int(data[0][KEY_STATUS]) == HTTP_OK, "User ID error"
self.username = data[1][KEY_USERNAME]
devices = data[1][KEY_DEVICES]
for controller in devices:
webhooks = self.rachio.notification.get_device_webhook(controller[KEY_ID])[
1
]
            # The API does not provide a way to tell if a controller is
            # shared with the user or owned by them. To work around this we
            # fetch the webhooks before we set up the device so we can skip
            # it instead of failing.
            # Webhooks are normally a list; however, if there is an error
            # Rachio hands us back a dict.
if isinstance(webhooks, dict):
_LOGGER.error(
"Failed to add rachio controller '%s' because of an error: %s",
controller[KEY_NAME],
webhooks.get("error", "Unknown Error"),
)
continue
rachio_iro = RachioIro(hass, self.rachio, controller, webhooks)
rachio_iro.setup()
self._controllers.append(rachio_iro)
_LOGGER.info('Using Rachio API as user "%s"', self.username)
@property
def user_id(self) -> str:
"""Get the user ID as defined by the Rachio API."""
return self._id
@property
def controllers(self) -> list:
"""Get a list of controllers managed by this account."""
return self._controllers
def start_multiple_zones(self, zones) -> None:
"""Start multiple zones."""
self.rachio.zone.start_multiple(zones)
class RachioIro:
"""Represent a Rachio Iro."""
def __init__(self, hass, rachio, data, webhooks):
"""Initialize a Rachio device."""
self.hass = hass
self.rachio = rachio
self._id = data[KEY_ID]
self.name = data[KEY_NAME]
self.serial_number = data[KEY_SERIAL_NUMBER]
self.mac_address = data[KEY_MAC_ADDRESS]
self.model = data[KEY_MODEL]
self._zones = data[KEY_ZONES]
self._schedules = data[KEY_SCHEDULES]
self._flex_schedules = data[KEY_FLEX_SCHEDULES]
self._init_data = data
self._webhooks = webhooks
_LOGGER.debug('%s has ID "%s"', str(self), self.controller_id)
def setup(self):
"""Rachio Iro setup for webhooks."""
# Listen for all updates
self._init_webhooks()
def _init_webhooks(self) -> None:
"""Start getting updates from the Rachio API."""
current_webhook_id = None
# First delete any old webhooks that may have stuck around
def _deinit_webhooks(_) -> None:
"""Stop getting updates from the Rachio API."""
if not self._webhooks:
                # We fetched the webhooks when we created the device;
                # however, if we call _init_webhooks again we need to
                # fetch them again.
self._webhooks = self.rachio.notification.get_device_webhook(
self.controller_id
)[1]
for webhook in self._webhooks:
if (
webhook[KEY_EXTERNAL_ID].startswith(WEBHOOK_CONST_ID)
or webhook[KEY_ID] == current_webhook_id
):
self.rachio.notification.delete(webhook[KEY_ID])
self._webhooks = None
_deinit_webhooks(None)
# Choose which events to listen for and get their IDs
event_types = []
for event_type in self.rachio.notification.get_webhook_event_type()[1]:
if event_type[KEY_NAME] in LISTEN_EVENT_TYPES:
event_types.append({"id": event_type[KEY_ID]})
# Register to listen to these events from the device
url = self.rachio.webhook_url
auth = WEBHOOK_CONST_ID + self.rachio.webhook_auth
new_webhook = self.rachio.notification.add(
self.controller_id, auth, url, event_types
)
# Save ID for deletion at shutdown
current_webhook_id = new_webhook[1][KEY_ID]
self.hass.bus.listen(EVENT_HOMEASSISTANT_STOP, _deinit_webhooks)
def __str__(self) -> str:
"""Display the controller as a string."""
return f'Rachio controller "{self.name}"'
@property
def controller_id(self) -> str:
"""Return the Rachio API controller ID."""
return self._id
@property
def current_schedule(self) -> str:
"""Return the schedule that the device is running right now."""
return self.rachio.device.current_schedule(self.controller_id)[1]
@property
def init_data(self) -> dict:
"""Return the information used to set up the controller."""
return self._init_data
def list_zones(self, include_disabled=False) -> list:
"""Return a list of the zone dicts connected to the device."""
# All zones
if include_disabled:
return self._zones
# Only enabled zones
return [z for z in self._zones if z[KEY_ENABLED]]
def get_zone(self, zone_id) -> Optional[dict]:
"""Return the zone with the given ID."""
for zone in self.list_zones(include_disabled=True):
if zone[KEY_ID] == zone_id:
return zone
return None
def list_schedules(self) -> list:
"""Return a list of fixed schedules."""
return self._schedules
def list_flex_schedules(self) -> list:
"""Return a list of flex schedules."""
return self._flex_schedules
def stop_watering(self) -> None:
"""Stop watering all zones connected to this controller."""
self.rachio.device.stop_water(self.controller_id)
_LOGGER.info("Stopped watering of all zones on %s", str(self))
|
import sys
from collections import Counter
from unittest import TestCase
from scattertext import whitespace_nlp
from scattertext.features.FeatsFromOnlyEmpath import FeatsFromOnlyEmpath
from scattertext.features.FeatsFromSpacyDocAndEmpath import FeatsFromSpacyDocAndEmpath
def mock_empath_analyze(doc, **kwargs):
    # Splitting verifies the input is a plain string; the dict below is a
    # canned Empath-style category response used as the mock's return value.
    doc.split()
return {'disappointment': 0.0, 'violence': 0.0, 'ugliness': 0.0, 'legend': 0.0, 'farming': 0.0, 'hygiene': 0.0,
'help': 0.0, 'payment': 0.0, 'pride': 0.0, 'prison': 0.0, 'night': 2.0, 'warmth': 0.0, 'magic': 0.0,
'tourism': 0.0, 'play': 0.0, 'fight': 0.0, 'sympathy': 0.0, 'competing': 0.0, 'speaking': 2.0,
'politeness': 0.0, 'hipster': 0.0, 'blue_collar_job': 1.0, 'musical': 0.0, 'optimism': 0.0, 'power': 0.0,
'reading': 0.0, 'royalty': 0.0, 'noise': 0.0, 'rage': 0.0, 'work': 2.0, 'smell': 0.0, 'shame': 0.0,
'medieval': 0.0, 'terrorism': 1.0, 'health': 0.0, 'school': 0.0, 'poor': 1.0, 'money': 0.0, 'politics': 0.0,
'pain': 0.0, 'hearing': 0.0, 'rural': 0.0, 'economics': 1.0, 'eating': 0.0, 'leader': 0.0, 'hiking': 0.0,
'shape_and_size': 0.0, 'weakness': 0.0, 'friends': 0.0, 'strength': 1.0, 'ocean': 0.0, 'lust': 0.0,
'medical_emergency': 0.0, 'restaurant': 0.0, 'death': 0.0, 'morning': 1.0, 'cooking': 0.0, 'banking': 0.0,
'dominant_heirarchical': 0.0, 'party': 1.0, 'weapon': 0.0, 'nervousness': 0.0, 'anticipation': 0.0,
'hate': 0.0, 'vehicle': 4.0, 'art': 0.0, 'car': 4.0, 'leisure': 0.0, 'air_travel': 0.0, 'traveling': 0.0,
'animal': 0.0, 'dispute': 0.0, 'shopping': 1.0, 'monster': 0.0, 'pet': 0.0, 'science': 0.0, 'children': 0.0,
'ridicule': 1.0, 'affection': 0.0, 'superhero': 0.0, 'sexual': 0.0, 'celebration': 0.0, 'gain': 0.0,
'government': 1.0, 'beach': 0.0, 'law': 0.0, 'childish': 0.0, 'philosophy': 0.0, 'liquid': 0.0, 'fire': 0.0,
'war': 0.0, 'timidity': 0.0, 'love': 0.0, 'occupation': 1.0, 'achievement': 0.0, 'worship': 0.0, 'crime': 0.0,
'cheerfulness': 0.0, 'cold': 0.0, 'weather': 0.0, 'disgust': 0.0, 'phone': 0.0, 'journalism': 0.0,
'sadness': 0.0, 'contentment': 0.0, 'sound': 0.0, 'breaking': 0.0, 'neglect': 0.0, 'listen': 0.0,
'divine': 0.0, 'internet': 0.0, 'confusion': 0.0, 'religion': 0.0, 'exotic': 0.0, 'white_collar_job': 1.0,
'computer': 0.0, 'envy': 0.0, 'wealthy': 0.0, 'swimming': 0.0, 'ship': 0.0, 'suffering': 0.0, 'college': 0.0,
'sleep': 2.0, 'valuable': 0.0, 'real_estate': 0.0, 'sailing': 0.0, 'programming': 0.0, 'zest': 0.0,
'anger': 0.0, 'sports': 0.0, 'irritability': 0.0, 'exasperation': 0.0, 'independence': 0.0, 'torment': 0.0,
'dance': 0.0, 'order': 0.0, 'urban': 0.0, 'tool': 0.0, 'exercise': 0.0, 'negotiate': 0.0, 'wedding': 0.0,
'healing': 0.0, 'business': 1.0, 'social_media': 0.0, 'messaging': 1.0, 'swearing_terms': 0.0,
'stealing': 0.0, 'fabric': 0.0, 'driving': 4.0, 'fear': 0.0, 'fun': 1.0, 'office': 1.0, 'communication': 2.0,
'vacation': 0.0, 'emotional': 0.0, 'ancient': 0.0, 'music': 0.0, 'domestic_work': 1.0, 'giving': 1.0,
'deception': 0.0, 'beauty': 0.0, 'movement': 1.0, 'meeting': 0.0, 'alcohol': 0.0, 'heroic': 0.0, 'plant': 0.0,
'technology': 0.0, 'anonymity': 0.0, 'writing': 0.0, 'feminine': 0.0, 'surprise': 0.0, 'kill': 0.0,
'water': 0.0, 'joy': 0.0, 'dominant_personality': 0.0, 'toy': 0.0, 'positive_emotion': 1.0, 'appearance': 0.0,
'military': 0.0, 'aggression': 0.0, 'negative_emotion': 1.0, 'youth': 0.0, 'injury': 0.0, 'body': 0.0,
'clothing': 0.0, 'home': 0.0, 'family': 0.0, 'fashion': 0.0, 'furniture': 0.0, 'attractive': 0.0,
'trust': 0.0, 'cleaning': 0.0, 'masculine': 0.0, 'horror': 0.0}
class TestFeatsFromSpacyDocAndEmpath(TestCase):
def test_main(self):
try:
from mock import Mock
        except ImportError:
from unittest.mock import Mock
feat_getter = FeatsFromSpacyDocAndEmpath(empath_analyze_function=mock_empath_analyze)
sys.modules['empath'] = Mock(analyze=mock_empath_analyze)
FeatsFromSpacyDocAndEmpath()
doc = whitespace_nlp('Hello this is a document.')
term_freq = feat_getter.get_feats(doc)
self.assertEqual(set(term_freq.items()),
set({'document': 1, 'hello': 1, 'is': 1, 'this': 1,
'a document': 1, 'hello this': 1, 'is a': 1,
'a': 1, 'this is': 1}.items()))
metadata_freq = feat_getter.get_doc_metadata(doc)
self.assertEqual(metadata_freq['ridicule'], 1)
self.assertNotIn('empath_fashion', metadata_freq)
    def test_empath_not_present(self):
sys.modules['empath'] = None
if sys.version_info.major == 3:
with self.assertRaisesRegex(Exception,
"Please install the empath library to use FeatsFromSpacyDocAndEmpath."):
FeatsFromSpacyDocAndEmpath()
else:
with self.assertRaises(Exception):
FeatsFromSpacyDocAndEmpath()
class TestFeatsFromOnlyEmpath(TestCase):
def test_main(self):
try:
from mock import Mock
        except ImportError:
from unittest.mock import Mock
sys.modules['empath'] = Mock(analyze=mock_empath_analyze)
FeatsFromOnlyEmpath()
feat_getter = FeatsFromOnlyEmpath(empath_analyze_function=mock_empath_analyze)
doc = whitespace_nlp('Hello this is a document.')
term_freq = feat_getter.get_feats(doc)
metadata_freq = feat_getter.get_doc_metadata(doc)
self.assertEqual(term_freq, Counter())
self.assertEqual(metadata_freq['ridicule'], 1)
self.assertNotIn('fashion', metadata_freq)
self.assertNotIn('document', metadata_freq)
self.assertNotIn('a document', metadata_freq)
    def test_empath_not_present(self):
sys.modules['empath'] = None
if sys.version_info.major == 3:
with self.assertRaisesRegex(Exception,
"Please install the empath library to use FeatsFromSpacyDocAndEmpath."):
                FeatsFromOnlyEmpath()
else:
with self.assertRaises(Exception):
                FeatsFromOnlyEmpath()
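# Testing pattern sketch: injecting a Mock into sys.modules['empath'] (or None
# to simulate a missing dependency) exercises the extractors' import-time
# behaviour without installing empath. A minimal standalone version:
#
#   import sys
#   from unittest.mock import Mock
#   sys.modules['empath'] = Mock(analyze=lambda doc, **kw: {'night': 2.0})
#   # ... code under test may now 'import empath' and call empath.analyze ...
#   del sys.modules['empath']  # clean up so later imports see the real module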
|
from homeassistant.const import (
CONF_DEVICES,
CONF_NAME,
CONF_SENSORS,
CONF_TYPE,
CONF_ZONE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DOMAIN as KONNECTED_DOMAIN, SIGNAL_DS18B20_NEW
SENSOR_TYPES = {
DEVICE_CLASS_TEMPERATURE: ["Temperature", TEMP_CELSIUS],
DEVICE_CLASS_HUMIDITY: ["Humidity", PERCENTAGE],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up sensors attached to a Konnected device from a config entry."""
data = hass.data[KONNECTED_DOMAIN]
device_id = config_entry.data["id"]
sensors = []
# Initialize all DHT sensors.
dht_sensors = [
sensor
for sensor in data[CONF_DEVICES][device_id][CONF_SENSORS]
if sensor[CONF_TYPE] == "dht"
]
for sensor in dht_sensors:
sensors.append(KonnectedSensor(device_id, sensor, DEVICE_CLASS_TEMPERATURE))
sensors.append(KonnectedSensor(device_id, sensor, DEVICE_CLASS_HUMIDITY))
async_add_entities(sensors)
@callback
def async_add_ds18b20(attrs):
"""Add new KonnectedSensor representing a ds18b20 sensor."""
sensor_config = next(
(
s
for s in data[CONF_DEVICES][device_id][CONF_SENSORS]
if s[CONF_TYPE] == "ds18b20" and s[CONF_ZONE] == attrs.get(CONF_ZONE)
),
None,
)
async_add_entities(
[
KonnectedSensor(
device_id,
sensor_config,
DEVICE_CLASS_TEMPERATURE,
addr=attrs.get("addr"),
initial_state=attrs.get("temp"),
)
],
True,
)
# DS18B20 sensors entities are initialized when they report for the first
# time. Set up a listener for that signal from the Konnected component.
async_dispatcher_connect(hass, SIGNAL_DS18B20_NEW, async_add_ds18b20)
class KonnectedSensor(Entity):
"""Represents a Konnected DHT Sensor."""
def __init__(self, device_id, data, sensor_type, addr=None, initial_state=None):
"""Initialize the entity for a single sensor_type."""
self._addr = addr
self._data = data
self._device_id = device_id
self._type = sensor_type
self._zone_num = self._data.get(CONF_ZONE)
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._unique_id = addr or f"{device_id}-{self._zone_num}-{sensor_type}"
# set initial state if known at initialization
self._state = initial_state
if self._state:
self._state = round(float(self._state), 1)
# set entity name if given
self._name = self._data.get(CONF_NAME)
if self._name:
self._name += f" {SENSOR_TYPES[sensor_type][0]}"
@property
def unique_id(self) -> str:
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def device_info(self):
"""Return the device info."""
return {"identifiers": {(KONNECTED_DOMAIN, self._device_id)}}
async def async_added_to_hass(self):
"""Store entity_id and register state change callback."""
entity_id_key = self._addr or self._type
self._data[entity_id_key] = self.entity_id
async_dispatcher_connect(
self.hass, f"konnected.{self.entity_id}.update", self.async_set_state
)
@callback
def async_set_state(self, state):
"""Update the sensor's state."""
if self._type == DEVICE_CLASS_HUMIDITY:
self._state = int(float(state))
else:
self._state = round(float(state), 1)
self.async_write_ha_state()
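# Identity sketch: DHT entities derive unique_id as
# f"{device_id}-{zone}-{sensor_type}", while DS18B20 entities use the sensor's
# bus address instead, so one physical zone can host both a temperature and a
# humidity entity without unique_id collisions.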
|
import asyncio
from datetime import timedelta
import logging
from RMVtransport import RMVtransport
from RMVtransport.rmvtransport import RMVtransportApiConnectionError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_NEXT_DEPARTURE = "next_departure"
CONF_STATION = "station"
CONF_DESTINATIONS = "destinations"
CONF_DIRECTION = "direction"
CONF_LINES = "lines"
CONF_PRODUCTS = "products"
CONF_TIME_OFFSET = "time_offset"
CONF_MAX_JOURNEYS = "max_journeys"
CONF_TIMEOUT = "timeout"
DEFAULT_NAME = "RMV Journey"
VALID_PRODUCTS = ["U-Bahn", "Tram", "Bus", "S", "RB", "RE", "EC", "IC", "ICE"]
ICONS = {
"U-Bahn": "mdi:subway",
"Tram": "mdi:tram",
"Bus": "mdi:bus",
"S": "mdi:train",
"RB": "mdi:train",
"RE": "mdi:train",
"EC": "mdi:train",
"IC": "mdi:train",
"ICE": "mdi:train",
"SEV": "mdi:checkbox-blank-circle-outline",
None: "mdi:clock",
}
ATTRIBUTION = "Data provided by opendata.rmv.de"
SCAN_INTERVAL = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NEXT_DEPARTURE): [
{
vol.Required(CONF_STATION): cv.string,
vol.Optional(CONF_DESTINATIONS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_DIRECTION): cv.string,
vol.Optional(CONF_LINES, default=[]): vol.All(
cv.ensure_list, [cv.positive_int, cv.string]
),
vol.Optional(CONF_PRODUCTS, default=VALID_PRODUCTS): vol.All(
cv.ensure_list, [vol.In(VALID_PRODUCTS)]
),
vol.Optional(CONF_TIME_OFFSET, default=0): cv.positive_int,
vol.Optional(CONF_MAX_JOURNEYS, default=5): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
],
vol.Optional(CONF_TIMEOUT, default=10): cv.positive_int,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the RMV departure sensor."""
timeout = config.get(CONF_TIMEOUT)
session = async_get_clientsession(hass)
sensors = []
for next_departure in config.get(CONF_NEXT_DEPARTURE):
sensors.append(
RMVDepartureSensor(
session,
next_departure[CONF_STATION],
next_departure.get(CONF_DESTINATIONS),
next_departure.get(CONF_DIRECTION),
next_departure.get(CONF_LINES),
next_departure.get(CONF_PRODUCTS),
next_departure.get(CONF_TIME_OFFSET),
next_departure.get(CONF_MAX_JOURNEYS),
next_departure.get(CONF_NAME),
timeout,
)
)
tasks = [sensor.async_update() for sensor in sensors]
if tasks:
await asyncio.wait(tasks)
if not any(sensor.data for sensor in sensors):
raise PlatformNotReady
async_add_entities(sensors)
class RMVDepartureSensor(Entity):
"""Implementation of an RMV departure sensor."""
def __init__(
self,
session,
station,
destinations,
direction,
lines,
products,
time_offset,
max_journeys,
name,
timeout,
):
"""Initialize the sensor."""
self._station = station
self._name = name
self._state = None
self.data = RMVDepartureData(
session,
station,
destinations,
direction,
lines,
products,
time_offset,
max_journeys,
timeout,
)
self._icon = ICONS[None]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def available(self):
"""Return True if entity is available."""
return self._state is not None
@property
def state(self):
"""Return the next departure time."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes."""
try:
return {
"next_departures": self.data.departures[1:],
"direction": self.data.departures[0].get("direction"),
"line": self.data.departures[0].get("line"),
"minutes": self.data.departures[0].get("minutes"),
"departure_time": self.data.departures[0].get("departure_time"),
"product": self.data.departures[0].get("product"),
ATTR_ATTRIBUTION: ATTRIBUTION,
}
except IndexError:
return {}
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_MINUTES
async def async_update(self):
"""Get the latest data and update the state."""
await self.data.async_update()
if self._name == DEFAULT_NAME:
self._name = self.data.station
self._station = self.data.station
if not self.data.departures:
self._state = None
self._icon = ICONS[None]
return
self._state = self.data.departures[0].get("minutes")
self._icon = ICONS[self.data.departures[0].get("product")]
class RMVDepartureData:
"""Pull data from the opendata.rmv.de web page."""
def __init__(
self,
session,
station_id,
destinations,
direction,
lines,
products,
time_offset,
max_journeys,
timeout,
):
"""Initialize the sensor."""
self.station = None
self._station_id = station_id
self._destinations = destinations
self._direction = direction
self._lines = lines
self._products = products
self._time_offset = time_offset
self._max_journeys = max_journeys
self.rmv = RMVtransport(session, timeout)
self.departures = []
self._error_notification = False
@Throttle(SCAN_INTERVAL)
async def async_update(self):
"""Update the connection data."""
try:
_data = await self.rmv.get_departures(
self._station_id,
products=self._products,
direction_id=self._direction,
max_journeys=50,
)
except RMVtransportApiConnectionError:
self.departures = []
_LOGGER.warning("Could not retrieve data from rmv.de")
return
self.station = _data.get("station")
_deps = []
_deps_not_found = set(self._destinations)
for journey in _data["journeys"]:
# find the first departure meeting the criteria
_nextdep = {}
if self._destinations:
dest_found = False
for dest in self._destinations:
if dest in journey["stops"]:
dest_found = True
if dest in _deps_not_found:
_deps_not_found.remove(dest)
_nextdep["destination"] = dest
if not dest_found:
continue
if self._lines and journey["number"] not in self._lines:
continue
if journey["minutes"] < self._time_offset:
continue
for attr in ["direction", "departure_time", "product", "minutes"]:
_nextdep[attr] = journey.get(attr, "")
_nextdep["line"] = journey.get("number", "")
_deps.append(_nextdep)
if len(_deps) > self._max_journeys:
break
if not self._error_notification and _deps_not_found:
self._error_notification = True
_LOGGER.info("Destination(s) %s not found", ", ".join(_deps_not_found))
self.departures = _deps
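# Configuration sketch (illustrative values; platform name assumed to be the
# integration's) showing a mapping the PLATFORM_SCHEMA above would accept:
#
#   {
#       "platform": "rmvtransport",
#       "next_departure": [
#           {
#               "station": "3000010",
#               "destinations": ["Frankfurt (Main) Hauptbahnhof"],
#               "lines": [12, "S8"],
#               "time_offset": 5,
#               "max_journeys": 5,
#           }
#       ],
#       "timeout": 10,
#   }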
|
import argparse
from datetime import datetime
import re
import subprocess
from packaging.version import Version
from homeassistant import const
def _bump_release(release, bump_type):
"""Bump a release tuple consisting of 3 numbers."""
major, minor, patch = release
if bump_type == "patch":
patch += 1
elif bump_type == "minor":
minor += 1
patch = 0
return major, minor, patch
def bump_version(version, bump_type):
"""Return a new version given a current version and action."""
to_change = {}
if bump_type == "minor":
# Convert 0.67.3 to 0.68.0
# Convert 0.67.3.b5 to 0.68.0
# Convert 0.67.3.dev0 to 0.68.0
# Convert 0.67.0.b5 to 0.67.0
# Convert 0.67.0.dev0 to 0.67.0
to_change["dev"] = None
to_change["pre"] = None
if not version.is_prerelease or version.release[2] != 0:
to_change["release"] = _bump_release(version.release, "minor")
elif bump_type == "patch":
# Convert 0.67.3 to 0.67.4
# Convert 0.67.3.b5 to 0.67.3
# Convert 0.67.3.dev0 to 0.67.3
to_change["dev"] = None
to_change["pre"] = None
if not version.is_prerelease:
to_change["release"] = _bump_release(version.release, "patch")
elif bump_type == "dev":
# Convert 0.67.3 to 0.67.4.dev0
# Convert 0.67.3.b5 to 0.67.4.dev0
# Convert 0.67.3.dev0 to 0.67.3.dev1
if version.is_devrelease:
to_change["dev"] = ("dev", version.dev + 1)
else:
to_change["pre"] = ("dev", 0)
to_change["release"] = _bump_release(version.release, "minor")
elif bump_type == "beta":
# Convert 0.67.5 to 0.67.6b0
# Convert 0.67.0.dev0 to 0.67.0b0
# Convert 0.67.5.b4 to 0.67.5b5
if version.is_devrelease:
to_change["dev"] = None
to_change["pre"] = ("b", 0)
elif version.is_prerelease:
if version.pre[0] == "a":
to_change["pre"] = ("b", 0)
if version.pre[0] == "b":
to_change["pre"] = ("b", version.pre[1] + 1)
else:
to_change["pre"] = ("b", 0)
to_change["release"] = _bump_release(version.release, "patch")
else:
to_change["release"] = _bump_release(version.release, "patch")
to_change["pre"] = ("b", 0)
elif bump_type == "nightly":
        # Convert 0.70.0.dev0 to 0.70.0.dev20190424;
        # fails when run on a non-dev release
if not version.is_devrelease:
raise ValueError("Can only be run on dev release")
to_change["dev"] = (
"dev",
datetime.utcnow().date().isoformat().replace("-", ""),
)
else:
assert False, f"Unsupported type: {bump_type}"
temp = Version("0")
temp._version = version._version._replace(**to_change)
return Version(str(temp))
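# Behaviour sketch (mirrors the cases exercised in test_bump_version below):
#
#   bump_version(Version("0.67.3"), "minor")  # -> Version("0.68.0")
#   bump_version(Version("0.67.3"), "beta")   # -> Version("0.67.4b0")
#   bump_version(Version("0.67.3"), "dev")    # -> Version("0.68.0.dev0")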
def write_version(version):
"""Update Home Assistant constant file with new version."""
with open("homeassistant/const.py") as fil:
content = fil.read()
major, minor, patch = str(version).split(".", 2)
content = re.sub("MAJOR_VERSION = .*\n", f"MAJOR_VERSION = {major}\n", content)
content = re.sub("MINOR_VERSION = .*\n", f"MINOR_VERSION = {minor}\n", content)
content = re.sub("PATCH_VERSION = .*\n", f'PATCH_VERSION = "{patch}"\n', content)
with open("homeassistant/const.py", "wt") as fil:
        fil.write(content)
def main():
"""Execute script."""
parser = argparse.ArgumentParser(description="Bump version of Home Assistant")
parser.add_argument(
"type",
help="The type of the bump the version to.",
choices=["beta", "dev", "patch", "minor", "nightly"],
)
parser.add_argument(
"--commit", action="store_true", help="Create a version bump commit."
)
arguments = parser.parse_args()
if arguments.commit and subprocess.run(["git", "diff", "--quiet"]).returncode == 1:
print("Cannot use --commit because git is dirty.")
return
current = Version(const.__version__)
bumped = bump_version(current, arguments.type)
assert bumped > current, "BUG! New version is not newer than old version"
write_version(bumped)
if not arguments.commit:
return
subprocess.run(["git", "commit", "-nam", f"Bumped version to {bumped}"])
def test_bump_version():
"""Make sure it all works."""
import pytest
assert bump_version(Version("0.56.0"), "beta") == Version("0.56.1b0")
assert bump_version(Version("0.56.0b3"), "beta") == Version("0.56.0b4")
assert bump_version(Version("0.56.0.dev0"), "beta") == Version("0.56.0b0")
assert bump_version(Version("0.56.3"), "dev") == Version("0.57.0.dev0")
assert bump_version(Version("0.56.0b3"), "dev") == Version("0.57.0.dev0")
assert bump_version(Version("0.56.0.dev0"), "dev") == Version("0.56.0.dev1")
assert bump_version(Version("0.56.3"), "patch") == Version("0.56.4")
assert bump_version(Version("0.56.3.b3"), "patch") == Version("0.56.3")
assert bump_version(Version("0.56.0.dev0"), "patch") == Version("0.56.0")
assert bump_version(Version("0.56.0"), "minor") == Version("0.57.0")
assert bump_version(Version("0.56.3"), "minor") == Version("0.57.0")
assert bump_version(Version("0.56.0.b3"), "minor") == Version("0.56.0")
assert bump_version(Version("0.56.3.b3"), "minor") == Version("0.57.0")
assert bump_version(Version("0.56.0.dev0"), "minor") == Version("0.56.0")
assert bump_version(Version("0.56.2.dev0"), "minor") == Version("0.57.0")
today = datetime.utcnow().date().isoformat().replace("-", "")
assert bump_version(Version("0.56.0.dev0"), "nightly") == Version(
f"0.56.0.dev{today}"
)
with pytest.raises(ValueError):
assert bump_version(Version("0.56.0"), "nightly")
if __name__ == "__main__":
main()
|
from django.urls import reverse
from weblate.auth.models import User
from weblate.trans.tests.test_views import RegistrationTestMixin, ViewTestCase
class AccountRemovalTest(ViewTestCase, RegistrationTestMixin):
def test_page(self):
response = self.client.get(reverse("remove"))
self.assertContains(response, "Account removal deletes all your private data.")
def verify_removal(self, response):
self.assertRedirects(response, reverse("email-sent"))
# Get confirmation URL
url = self.assert_registration_mailbox("[Weblate] Account removal on Weblate")
# Verify confirmation URL
response = self.client.get(url, follow=True)
self.assertContains(
response, "By pressing following button, your will no longer be able to use"
)
# Confirm removal
response = self.client.post(reverse("remove"), follow=True)
self.assertContains(response, "Your account has been removed.")
self.assertFalse(User.objects.filter(username="testuser").exists())
def test_removal(self):
response = self.client.post(
reverse("remove"), {"password": "testpassword"}, follow=True
)
self.verify_removal(response)
def test_removal_failed(self):
response = self.client.post(
reverse("remove"), {"password": "invalidpassword"}, follow=True
)
self.assertContains(response, "You have entered an invalid password.")
self.assertTrue(User.objects.filter(username="testuser").exists())
def test_removal_nopass(self):
        # Set an unusable password for the test user.
self.user.set_unusable_password()
self.user.save()
        # Need to force login as the user has no password now.
        # In the app they would log in via third-party auth.
self.client.force_login(self.user)
response = self.client.post(reverse("remove"), {"password": ""}, follow=True)
self.verify_removal(response)
def test_removal_change(self):
self.edit_unit("Hello, world!\n", "Nazdar svete!\n")
# We should have some change to commit
self.assertTrue(self.component.needs_commit())
# Remove account
self.test_removal()
# Changes should be committed
self.assertFalse(self.component.needs_commit())
|
import pytest
from homeassistant.components.lock import (
DOMAIN as LOCK_DOMAIN,
SERVICE_LOCK,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
DEFAULT_CONFIG = {
LOCK_DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"}
}
async def test_controlling_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "LOCKED")
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", "UNLOCKED")
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_non_default_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "closed",
"state_unlocked": "open",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "closed")
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", "open")
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
"value_template": "{{ value_json.val }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"LOCKED"}')
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"UNLOCKED"}')
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_controlling_non_default_state_via_topic_and_json_message(
hass, mqtt_mock
):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "closed",
"state_unlocked": "open",
"value_template": "{{ value_json.val }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"closed"}')
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
async_fire_mqtt_message(hass, "state-topic", '{"val":"open"}')
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
"""Test optimistic mode without state topic."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_sending_mqtt_commands_and_explicit_optimistic(hass, mqtt_mock):
"""Test optimistic mode without state topic."""
assert await async_setup_component(
hass,
LOCK_DOMAIN,
{
LOCK_DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_lock": "LOCK",
"payload_unlock": "UNLOCK",
"state_locked": "LOCKED",
"state_unlocked": "UNLOCKED",
"optimistic": True,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "LOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_LOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.test"}, blocking=True
)
mqtt_mock.async_publish.assert_called_once_with("command-topic", "UNLOCK", 0, False)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("lock.test")
assert state.state is STATE_UNLOCKED
assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
await help_test_default_availability_payload(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
await help_test_custom_availability_payload(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_json(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one lock per unique_id."""
config = {
LOCK_DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"command_topic": "test_topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"command_topic": "test_topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, LOCK_DOMAIN, config)
async def test_discovery_removal_lock(hass, mqtt_mock, caplog):
"""Test removal of discovered lock."""
data = '{ "name": "test",' ' "command_topic": "test_topic" }'
await help_test_discovery_removal(hass, mqtt_mock, caplog, LOCK_DOMAIN, data)
async def test_discovery_update_lock(hass, mqtt_mock, caplog):
"""Test update of discovered lock."""
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "command_topic": "command_topic",'
' "availability_topic": "availability_topic1" }'
)
data2 = (
'{ "name": "Milk",'
' "state_topic": "test_topic2",'
' "command_topic": "command_topic",'
' "availability_topic": "availability_topic2" }'
)
await help_test_discovery_update(hass, mqtt_mock, caplog, LOCK_DOMAIN, data1, data2)
async def test_discovery_update_unchanged_lock(hass, mqtt_mock, caplog):
"""Test update of discovered lock."""
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "command_topic": "command_topic" }'
)
with patch(
"homeassistant.components.mqtt.lock.MqttLock.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, LOCK_DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer" }'
data2 = '{ "name": "Milk",' ' "command_topic": "test_topic" }'
await help_test_discovery_broken(hass, mqtt_mock, caplog, LOCK_DOMAIN, data1, data2)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT lock device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT lock device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass, mqtt_mock, LOCK_DOMAIN, DEFAULT_CONFIG
)
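# Pattern note: the availability, attribute, and device-registry tests above
# all pass DEFAULT_CONFIG (a minimal command-topic-only lock) into the shared
# help_test_* suite from test_common, while the discovery tests instead feed
# raw JSON payloads such as data1/data2 through the MQTT discovery topic.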
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import json
import re
import threading
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import azure_network
from perfkitbenchmarker.providers.azure import flags as azure_flags
from perfkitbenchmarker.providers.azure import util
from six.moves import range
FLAGS = flags.FLAGS
MAX_DRIVE_SUFFIX_LENGTH = 2 # Last allowable device is /dev/sdzz.
PREMIUM_STORAGE = 'Premium_LRS'
STANDARD_DISK = 'Standard_LRS'
DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE}
HOST_CACHING = 'host_caching'
AZURE = 'Azure'
disk.RegisterDiskTypeMap(AZURE, DISK_TYPE)
AZURE_REPLICATION_MAP = {
azure_flags.LRS: disk.ZONE,
azure_flags.ZRS: disk.REGION,
    # Deliberately omitting PLRS, because that is set explicitly in __init__,
# and (RA)GRS, because those are asynchronously replicated.
}
LOCAL_SSD_PREFIXES = {'Standard_D', 'Standard_G', 'Standard_L'}
AZURE_NVME_TYPES = [
r'(Standard_L[0-9]+s_v2)',
]
def _ProductWithIncreasingLength(iterable, max_length):
"""Yields increasing length cartesian products of iterable."""
for length in range(1, max_length + 1):
for p in itertools.product(iterable, repeat=length):
yield p
def _GenerateDrivePathSuffixes():
"""Yields drive path suffix strings.
Drive path suffixes in the form 'c', 'd', ..., 'z', 'aa', 'ab', etc.
Note that because we need the first suffix to be 'c', we need to
fast-forward the iterator by two before yielding. Why start at 'c'?
The os-disk will be /dev/sda, and the temporary disk will be /dev/sdb:
https://docs.microsoft.com/en-us/azure/virtual-machines/linux/faq#can-i-use-the-temporary-disk-devsdb1-to-store-data
Therefore, any additional remote disks will need to begin at 'c'.
The linux kernel code that determines this naming can be found here:
https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/drivers/scsi/sd.c?h=v2.6.37#n2262
Quoting the link from above:
SCSI disk names starts at sda. The 26th device is sdz and the 27th is sdaa.
The last one for two lettered suffix is sdzz which is followed by sdaaa.
"""
character_range = range(ord('a'), ord('z') + 1)
products = _ProductWithIncreasingLength(
character_range, MAX_DRIVE_SUFFIX_LENGTH)
# We want to start at 'c', so fast-forward the iterator by two.
next(products)
next(products)
for p in products:
yield ''.join(chr(c) for c in p)
REMOTE_DRIVE_PATH_SUFFIXES = list(_GenerateDrivePathSuffixes())
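# Sanity sketch: the generator above yields 'c', 'd', ..., 'z', then 'aa',
# 'ab', ..., 'zz', so REMOTE_DRIVE_PATH_SUFFIXES[0] maps lun 0 to /dev/sdc in
# GetDevicePath and the list covers every two-letter device name.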
class TooManyAzureDisksError(Exception):
"""Exception raised when too many disks are attached."""
pass
def LocalDiskIsSSD(machine_type):
"""Check whether the local disk is an SSD drive."""
return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES))
def LocalDriveIsNvme(machine_type):
"""Check if the machine type uses NVMe driver."""
return any(
re.search(machine_series, machine_type)
for machine_series in AZURE_NVME_TYPES)
class AzureDisk(disk.BaseDisk):
"""Object representing an Azure Disk."""
_lock = threading.Lock()
def __init__(self,
disk_spec,
vm_name,
machine_type,
storage_account,
lun,
is_image=False):
super(AzureDisk, self).__init__(disk_spec)
self.host_caching = FLAGS.azure_host_caching
self.name = vm_name + str(lun)
self.vm_name = vm_name
self.resource_group = azure_network.GetResourceGroup()
self.storage_account = storage_account
# lun is Azure's abbreviation for "logical unit number"
self.lun = lun
self.is_image = is_image
self._deleted = False
self.machine_type = machine_type
if self.disk_type == PREMIUM_STORAGE:
self.metadata.update({
disk.MEDIA: disk.SSD,
disk.REPLICATION: disk.ZONE,
HOST_CACHING: self.host_caching,
})
elif self.disk_type == STANDARD_DISK:
self.metadata.update({
disk.MEDIA: disk.HDD,
disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type],
HOST_CACHING: self.host_caching,
})
elif self.disk_type == disk.LOCAL:
media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD
self.metadata.update({
disk.MEDIA: media,
disk.REPLICATION: disk.NONE,
})
def _Create(self):
"""Creates the disk."""
assert not self.is_image
with self._lock:
_, _, retcode = vm_util.IssueCommand([
azure.AZURE_PATH, 'vm', 'disk', 'attach', '--new', '--caching',
self.host_caching, '--name', self.name, '--lun',
str(self.lun), '--sku', self.disk_type, '--vm-name', self.vm_name,
'--size-gb',
str(self.disk_size)
] + self.resource_group.args, raise_on_failure=False)
if retcode:
raise errors.Resource.RetryableCreationError(
'Error creating Azure disk.')
_, _, retcode = vm_util.IssueCommand([
azure.AZURE_PATH, 'disk', 'update', '--name', self.name, '--set',
util.GetTagsJson(self.resource_group.timeout_minutes)
] + self.resource_group.args, raise_on_failure=False)
if retcode:
raise errors.Resource.RetryableCreationError(
'Error tagging Azure disk.')
def _Delete(self):
"""Deletes the disk."""
assert not self.is_image
self._deleted = True
def _Exists(self):
"""Returns true if the disk exists."""
assert not self.is_image
if self._deleted:
return False
stdout, _, _ = vm_util.IssueCommand([
azure.AZURE_PATH, 'disk', 'show', '--output', 'json', '--name',
self.name
] + self.resource_group.args, raise_on_failure=False)
try:
json.loads(stdout)
return True
    except ValueError:
return False
def Attach(self, vm):
"""Attaches the disk to a VM.
Args:
vm: The AzureVirtualMachine instance to which the disk will be attached.
"""
pass # TODO(user): Implement Attach()
# (not critical because disks are attached to VMs when created)
def Detach(self):
"""Detaches the disk from a VM."""
# Not needed since the resource group can be deleted
# without detaching disks.
pass
def GetDevicePath(self):
"""Returns the path to the device inside the VM."""
if self.disk_type == disk.LOCAL:
if LocalDriveIsNvme(self.machine_type):
return '/dev/nvme%sn1' % str(self.lun)
return '/dev/sdb'
else:
try:
return '/dev/sd%s' % REMOTE_DRIVE_PATH_SUFFIXES[self.lun]
except IndexError:
raise TooManyAzureDisksError()
|
import json
import unittest
import pytest
import requests_mock
import voluptuous as vol
from homeassistant.components import vultr as base_vultr
from homeassistant.components.vultr import (
ATTR_ALLOWED_BANDWIDTH,
ATTR_AUTO_BACKUPS,
ATTR_COST_PER_MONTH,
ATTR_CREATED_AT,
ATTR_IPV4_ADDRESS,
ATTR_SUBSCRIPTION_ID,
CONF_SUBSCRIPTION,
switch as vultr,
)
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from tests.async_mock import patch
from tests.common import get_test_home_assistant, load_fixture
from tests.components.vultr.test_init import VALID_CONFIG
class TestVultrSwitchSetup(unittest.TestCase):
"""Test the Vultr switch platform."""
DEVICES = []
def add_entities(self, devices, action):
"""Mock add devices."""
for device in devices:
self.DEVICES.append(device)
def setUp(self):
"""Init values for this testcase class."""
self.hass = get_test_home_assistant()
self.configs = [
{CONF_SUBSCRIPTION: "576965", CONF_NAME: "A Server"},
{CONF_SUBSCRIPTION: "123456", CONF_NAME: "Failed Server"},
{CONF_SUBSCRIPTION: "555555", CONF_NAME: vultr.DEFAULT_NAME},
]
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop our started services."""
self.hass.stop()
@requests_mock.Mocker()
def test_switch(self, mock):
"""Test successful instance."""
mock.get(
"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567",
text=load_fixture("vultr_account_info.json"),
)
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
):
# Setup hub
base_vultr.setup(self.hass, VALID_CONFIG)
# Setup each of our test configs
for config in self.configs:
vultr.setup_platform(self.hass, config, self.add_entities, None)
assert len(self.DEVICES) == 3
tested = 0
for device in self.DEVICES:
if device.subscription == "555555":
assert device.name == "Vultr {}"
tested += 1
device.update()
device_attrs = device.device_state_attributes
if device.subscription == "555555":
assert device.name == "Vultr Another Server"
tested += 1
if device.name == "A Server":
assert device.is_on is True
assert device.state == "on"
assert device.icon == "mdi:server"
assert device_attrs[ATTR_ALLOWED_BANDWIDTH] == "1000"
assert device_attrs[ATTR_AUTO_BACKUPS] == "yes"
assert device_attrs[ATTR_IPV4_ADDRESS] == "123.123.123.123"
assert device_attrs[ATTR_COST_PER_MONTH] == "10.05"
assert device_attrs[ATTR_CREATED_AT] == "2013-12-19 14:45:41"
assert device_attrs[ATTR_SUBSCRIPTION_ID] == "576965"
tested += 1
elif device.name == "Failed Server":
assert device.is_on is False
assert device.state == "off"
assert device.icon == "mdi:server-off"
assert device_attrs[ATTR_ALLOWED_BANDWIDTH] == "1000"
assert device_attrs[ATTR_AUTO_BACKUPS] == "no"
assert device_attrs[ATTR_IPV4_ADDRESS] == "192.168.100.50"
assert device_attrs[ATTR_COST_PER_MONTH] == "73.25"
assert device_attrs[ATTR_CREATED_AT] == "2014-10-13 14:45:41"
assert device_attrs[ATTR_SUBSCRIPTION_ID] == "123456"
tested += 1
assert tested == 4
@requests_mock.Mocker()
def test_turn_on(self, mock):
"""Test turning a subscription on."""
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
), patch("vultr.Vultr.server_start") as mock_start:
for device in self.DEVICES:
if device.name == "Failed Server":
device.turn_on()
# Turn on
assert mock_start.call_count == 1
@requests_mock.Mocker()
def test_turn_off(self, mock):
"""Test turning a subscription off."""
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
), patch("vultr.Vultr.server_halt") as mock_halt:
for device in self.DEVICES:
if device.name == "A Server":
device.turn_off()
# Turn off
assert mock_halt.call_count == 1
def test_invalid_switch_config(self):
"""Test config type failures."""
with pytest.raises(vol.Invalid): # No subscription
vultr.PLATFORM_SCHEMA({CONF_PLATFORM: base_vultr.DOMAIN})
@requests_mock.Mocker()
def test_invalid_switches(self, mock):
"""Test the VultrSwitch fails."""
mock.get(
"https://api.vultr.com/v1/account/info?api_key=ABCDEFG1234567",
text=load_fixture("vultr_account_info.json"),
)
with patch(
"vultr.Vultr.server_list",
return_value=json.loads(load_fixture("vultr_server_list.json")),
):
# Setup hub
base_vultr.setup(self.hass, VALID_CONFIG)
bad_conf = {} # No subscription
no_subs_setup = vultr.setup_platform(
self.hass, bad_conf, self.add_entities, None
)
assert no_subs_setup is not None
bad_conf = {
CONF_NAME: "Missing Server",
CONF_SUBSCRIPTION: "665544",
} # Sub not associated with API key (not in server_list)
wrong_subs_setup = vultr.setup_platform(
self.hass, bad_conf, self.add_entities, None
)
assert wrong_subs_setup is not None
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv, entity_registry
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DOMAIN
CONDITION_TYPES = {"is_on", "is_off"}
CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(CONDITION_TYPES),
}
)
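# Illustrative example of a config this schema accepts (ids are made up):
# {
#     "condition": "device",
#     "device_id": "abc123",
#     "domain": "fan",
#     "entity_id": "fan.living_room",
#     "type": "is_on",
# }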
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions for Fan devices."""
registry = await entity_registry.async_get_registry(hass)
conditions = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_on",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_off",
}
)
return conditions
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config_validation:
config = CONDITION_SCHEMA(config)
if config[CONF_TYPE] == "is_on":
state = STATE_ON
else:
state = STATE_OFF
@callback
def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
return condition.state(hass, config[ATTR_ENTITY_ID], state)
return test_is_state
|
from stash.tests.stashtest import StashTestCase
class ExitTests(StashTestCase):
"""Tests for the 'exit' command."""
def test_help(self):
"""test 'exit --help'."""
output = self.run_command("exit --help", exitcode=0)
self.assertIn("-h", output)
self.assertIn("--help", output)
self.assertIn("status", output)
self.assertIn("exit", output)
def test_exit_default(self):
"""test 'exit'."""
output = self.run_command("exit", exitcode=0).replace("\n", "")
self.assertEqual(output, "")
def test_exit_0(self):
"""test 'exit 0'."""
output = self.run_command("exit 0", exitcode=0).replace("\n", "")
self.assertEqual(output, "")
def test_exit_1(self):
"""test 'exit 1'."""
output = self.run_command("exit 1", exitcode=1).replace("\n", "")
self.assertEqual(output, "")
def test_exit_0_to_255(self):
"""test 'exit {i}' where i = 0, ..., 255."""
for i in range(256):
output = self.run_command("exit " + str(i), exitcode=i).replace("\n", "")
self.assertEqual(output, "")
|
from queue import Empty
import string
from kombu.utils.encoding import safe_str
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
from . import virtual
try:
from azure.storage.queue import QueueService
except ImportError: # pragma: no cover
QueueService = None # noqa
# Azure storage queues allow only alphanumeric and dashes
# so, replace everything with a dash
CHARS_REPLACE_TABLE = {
ord(c): 0x2d for c in string.punctuation
}
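# e.g. entity_name('celery.pidbox') -> 'celery-pidbox' (0x2d is '-')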
class Channel(virtual.Channel):
"""Azure Storage Queues channel."""
domain_format = 'kombu%(vhost)s'
_queue_service = None
_queue_name_cache = {}
no_ack = True
_noack_queues = set()
def __init__(self, *args, **kwargs):
if QueueService is None:
raise ImportError('Azure Storage Queues transport requires the '
'azure-storage-queue library')
super().__init__(*args, **kwargs)
for queue_name in self.queue_service.list_queues():
self._queue_name_cache[queue_name] = queue_name
def basic_consume(self, queue, no_ack, *args, **kwargs):
if no_ack:
self._noack_queues.add(queue)
return super().basic_consume(queue, no_ack,
*args, **kwargs)
def entity_name(self, name, table=CHARS_REPLACE_TABLE):
"""Format AMQP queue name into a valid Azure Storage Queue name."""
return str(safe_str(name)).translate(table)
def _ensure_queue(self, queue):
"""Ensure a queue exists."""
queue = self.entity_name(self.queue_name_prefix + queue)
try:
return self._queue_name_cache[queue]
except KeyError:
self.queue_service.create_queue(queue, fail_on_exist=False)
q = self._queue_name_cache[queue] = queue
return q
def _delete(self, queue, *args, **kwargs):
"""Delete queue by name."""
queue_name = self.entity_name(queue)
self._queue_name_cache.pop(queue_name, None)
self.queue_service.delete_queue(queue_name)
super()._delete(queue_name)
def _put(self, queue, message, **kwargs):
"""Put message onto queue."""
q = self._ensure_queue(queue)
encoded_message = dumps(message)
self.queue_service.put_message(q, encoded_message)
def _get(self, queue, timeout=None):
"""Try to retrieve a single message off ``queue``."""
q = self._ensure_queue(queue)
messages = self.queue_service.get_messages(q, num_messages=1,
timeout=timeout)
if not messages:
raise Empty()
message = messages[0]
raw_content = self.queue_service.decode_function(message.content)
content = loads(raw_content)
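        # Delete immediately after a successful read: with no_ack (at-most-once
        # delivery) the message must not become visible on the queue again.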
self.queue_service.delete_message(q, message.id, message.pop_receipt)
return content
def _size(self, queue):
"""Return the number of messages in a queue."""
q = self._ensure_queue(queue)
metadata = self.queue_service.get_queue_metadata(q)
return metadata.approximate_message_count
def _purge(self, queue):
"""Delete all current messages in a queue."""
q = self._ensure_queue(queue)
        n = self._size(queue)  # _size() applies entity_name/prefix itself, so pass the original name
self.queue_service.clear_messages(q)
return n
@property
def queue_service(self):
if self._queue_service is None:
self._queue_service = QueueService(
account_name=self.conninfo.hostname,
account_key=self.conninfo.password)
return self._queue_service
@property
def conninfo(self):
return self.connection.client
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def queue_name_prefix(self):
return self.transport_options.get('queue_name_prefix', '')
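# Illustrative (assumed) usage: the account name and key come from the
# connection URL (hostname/password, per `conninfo` above), and a prefix can
# namespace the queues:
#   Connection('azurestoragequeues://:<account_key>@<account_name>',
#              transport_options={'queue_name_prefix': 'myapp-'})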
class Transport(virtual.Transport):
"""Azure Storage Queues transport."""
Channel = Channel
polling_interval = 1
default_port = None
|
import logging
import os.path
from notify_events import Message
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
BaseNotificationService,
)
from homeassistant.const import CONF_TOKEN
from .const import DOMAIN
ATTR_LEVEL = "level"
ATTR_PRIORITY = "priority"
ATTR_FILES = "files"
ATTR_IMAGES = "images"
ATTR_FILE_URL = "url"
ATTR_FILE_PATH = "path"
ATTR_FILE_CONTENT = "content"
ATTR_FILE_NAME = "name"
ATTR_FILE_MIME_TYPE = "mime_type"
ATTR_FILE_KIND_FILE = "file"
ATTR_FILE_KIND_IMAGE = "image"
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the Notify.Events notification service."""
return NotifyEventsNotificationService(hass.data[DOMAIN][CONF_TOKEN])
class NotifyEventsNotificationService(BaseNotificationService):
"""Implement the notification service for Notify.Events."""
def __init__(self, token):
"""Initialize the service."""
self.token = token
def file_exists(self, filename) -> bool:
"""Check if a file exists on disk and is in authorized path."""
if not self.hass.config.is_allowed_path(filename):
return False
return os.path.isfile(filename)
def attach_file(self, msg: Message, item: dict, kind: str = ATTR_FILE_KIND_FILE):
"""Append a file or image to message."""
file_name = None
mime_type = None
if ATTR_FILE_NAME in item:
file_name = item[ATTR_FILE_NAME]
if ATTR_FILE_MIME_TYPE in item:
mime_type = item[ATTR_FILE_MIME_TYPE]
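        # Source precedence: a URL wins over inline content, which wins over a
        # local file path; only the first matching source is attached.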
if ATTR_FILE_URL in item:
if kind == ATTR_FILE_KIND_IMAGE:
msg.add_image_from_url(item[ATTR_FILE_URL], file_name, mime_type)
else:
msg.add_file_from_url(item[ATTR_FILE_URL], file_name, mime_type)
elif ATTR_FILE_CONTENT in item:
if kind == ATTR_FILE_KIND_IMAGE:
msg.add_image_from_content(
item[ATTR_FILE_CONTENT], file_name, mime_type
)
else:
msg.add_file_from_content(item[ATTR_FILE_CONTENT], file_name, mime_type)
elif ATTR_FILE_PATH in item:
file_exists = self.file_exists(item[ATTR_FILE_PATH])
if file_exists:
if kind == ATTR_FILE_KIND_IMAGE:
msg.add_image(item[ATTR_FILE_PATH], file_name, mime_type)
else:
msg.add_file(item[ATTR_FILE_PATH], file_name, mime_type)
else:
_LOGGER.error("File does not exist: %s", item[ATTR_FILE_PATH])
def prepare_message(self, message, data) -> Message:
"""Prepare a message to send."""
msg = Message(message)
if ATTR_TITLE in data:
msg.set_title(data[ATTR_TITLE])
if ATTR_LEVEL in data:
try:
msg.set_level(data[ATTR_LEVEL])
except ValueError as error:
_LOGGER.warning("Setting level error: %s", error)
if ATTR_PRIORITY in data:
try:
msg.set_priority(data[ATTR_PRIORITY])
except ValueError as error:
_LOGGER.warning("Setting priority error: %s", error)
if ATTR_IMAGES in data:
for image in data[ATTR_IMAGES]:
self.attach_file(msg, image, ATTR_FILE_KIND_IMAGE)
if ATTR_FILES in data:
for file in data[ATTR_FILES]:
self.attach_file(msg, file)
return msg
def send_message(self, message, **kwargs):
"""Send a message."""
data = kwargs.get(ATTR_DATA) or {}
msg = self.prepare_message(message, data)
msg.send(self.token)
|
import pysmarthab
from homeassistant import config_entries, setup
from homeassistant.components.smarthab import DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from tests.async_mock import patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch("pysmarthab.SmartHab.async_login"), patch(
"pysmarthab.SmartHab.is_logged_in", return_value=True
), patch(
"homeassistant.components.smarthab.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.smarthab.async_setup_entry", return_value=True
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "[email protected]", CONF_PASSWORD: "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "[email protected]"
assert result2["data"] == {
CONF_EMAIL: "[email protected]",
CONF_PASSWORD: "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("pysmarthab.SmartHab.async_login"), patch(
"pysmarthab.SmartHab.is_logged_in", return_value=False
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "[email protected]", CONF_PASSWORD: "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_service_error(hass):
"""Test we handle service errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"pysmarthab.SmartHab.async_login",
side_effect=pysmarthab.RequestFailedException(42),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "[email protected]", CONF_PASSWORD: "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "service"}
async def test_form_unknown_error(hass):
"""Test we handle unknown errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"pysmarthab.SmartHab.async_login",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "[email protected]", CONF_PASSWORD: "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
async def test_import(hass):
"""Test import."""
await setup.async_setup_component(hass, "persistent_notification", {})
imported_conf = {
CONF_EMAIL: "[email protected]",
CONF_PASSWORD: "test-password",
}
with patch("pysmarthab.SmartHab.async_login"), patch(
"pysmarthab.SmartHab.is_logged_in", return_value=True
), patch(
"homeassistant.components.smarthab.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.smarthab.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_conf
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "[email protected]"
assert result["data"] == {
CONF_EMAIL: "[email protected]",
CONF_PASSWORD: "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
import asyncio
from typing import Dict
from aioguardian import Client
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION, CONF_IP_ADDRESS, CONF_PORT
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import (
API_SENSOR_PAIR_DUMP,
API_SENSOR_PAIRED_SENSOR_STATUS,
API_SYSTEM_DIAGNOSTICS,
API_SYSTEM_ONBOARD_SENSOR_STATUS,
API_VALVE_STATUS,
API_WIFI_STATUS,
CONF_UID,
DATA_CLIENT,
DATA_COORDINATOR,
DATA_PAIRED_SENSOR_MANAGER,
DATA_UNSUB_DISPATCHER_CONNECT,
DOMAIN,
LOGGER,
SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED,
)
from .util import GuardianDataUpdateCoordinator
DATA_LAST_SENSOR_PAIR_DUMP = "last_sensor_pair_dump"
PLATFORMS = ["binary_sensor", "sensor", "switch"]
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the Elexa Guardian component."""
hass.data[DOMAIN] = {
DATA_CLIENT: {},
DATA_COORDINATOR: {},
DATA_LAST_SENSOR_PAIR_DUMP: {},
DATA_PAIRED_SENSOR_MANAGER: {},
DATA_UNSUB_DISPATCHER_CONNECT: {},
}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Elexa Guardian from a config entry."""
client = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id] = Client(
entry.data[CONF_IP_ADDRESS], port=entry.data[CONF_PORT]
)
hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id] = {
API_SENSOR_PAIRED_SENSOR_STATUS: {}
}
hass.data[DOMAIN][DATA_UNSUB_DISPATCHER_CONNECT][entry.entry_id] = []
# The valve controller's UDP-based API can't handle concurrent requests very well,
# so we use a lock to ensure that only one API request is reaching it at a time:
api_lock = asyncio.Lock()
# Set up DataUpdateCoordinators for the valve controller:
init_valve_controller_tasks = []
for api, api_coro in [
(API_SENSOR_PAIR_DUMP, client.sensor.pair_dump),
(API_SYSTEM_DIAGNOSTICS, client.system.diagnostics),
(API_SYSTEM_ONBOARD_SENSOR_STATUS, client.system.onboard_sensor_status),
(API_VALVE_STATUS, client.valve.status),
(API_WIFI_STATUS, client.wifi.status),
]:
coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
api
] = GuardianDataUpdateCoordinator(
hass,
client=client,
api_name=api,
api_coro=api_coro,
api_lock=api_lock,
valve_controller_uid=entry.data[CONF_UID],
)
init_valve_controller_tasks.append(coordinator.async_refresh())
await asyncio.gather(*init_valve_controller_tasks)
# Set up an object to evaluate each batch of paired sensor UIDs and add/remove
# devices as appropriate:
paired_sensor_manager = hass.data[DOMAIN][DATA_PAIRED_SENSOR_MANAGER][
entry.entry_id
] = PairedSensorManager(hass, entry, client, api_lock)
await paired_sensor_manager.async_process_latest_paired_sensor_uids()
@callback
def async_process_paired_sensor_uids():
"""Define a callback for when new paired sensor data is received."""
hass.async_create_task(
paired_sensor_manager.async_process_latest_paired_sensor_uids()
)
hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
API_SENSOR_PAIR_DUMP
].async_add_listener(async_process_paired_sensor_uids)
# Set up all of the Guardian entity platforms:
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][DATA_CLIENT].pop(entry.entry_id)
hass.data[DOMAIN][DATA_COORDINATOR].pop(entry.entry_id)
hass.data[DOMAIN][DATA_LAST_SENSOR_PAIR_DUMP].pop(entry.entry_id)
for unsub in hass.data[DOMAIN][DATA_UNSUB_DISPATCHER_CONNECT][entry.entry_id]:
unsub()
hass.data[DOMAIN][DATA_UNSUB_DISPATCHER_CONNECT].pop(entry.entry_id)
return unload_ok
class PairedSensorManager:
"""Define an object that manages the addition/removal of paired sensors."""
def __init__(
self,
hass: HomeAssistant,
entry: ConfigEntry,
client: Client,
api_lock: asyncio.Lock,
) -> None:
"""Initialize."""
self._api_lock = api_lock
self._client = client
self._entry = entry
self._hass = hass
self._listeners = []
self._paired_uids = set()
async def async_pair_sensor(self, uid: str) -> None:
"""Add a new paired sensor coordinator."""
LOGGER.debug("Adding paired sensor: %s", uid)
self._paired_uids.add(uid)
coordinator = self._hass.data[DOMAIN][DATA_COORDINATOR][self._entry.entry_id][
API_SENSOR_PAIRED_SENSOR_STATUS
][uid] = GuardianDataUpdateCoordinator(
self._hass,
client=self._client,
api_name=f"{API_SENSOR_PAIRED_SENSOR_STATUS}_{uid}",
api_coro=lambda: self._client.sensor.paired_sensor_status(uid),
api_lock=self._api_lock,
valve_controller_uid=self._entry.data[CONF_UID],
)
await coordinator.async_request_refresh()
async_dispatcher_send(
self._hass,
SIGNAL_PAIRED_SENSOR_COORDINATOR_ADDED.format(self._entry.data[CONF_UID]),
uid,
)
async def async_process_latest_paired_sensor_uids(self) -> None:
"""Process a list of new UIDs."""
try:
uids = set(
self._hass.data[DOMAIN][DATA_COORDINATOR][self._entry.entry_id][
API_SENSOR_PAIR_DUMP
].data["paired_uids"]
)
except KeyError:
# Sometimes the paired_uids key can fail to exist; the user can't do anything
# about it, so in this case, we quietly abort and return:
return
if uids == self._paired_uids:
return
old = self._paired_uids
new = self._paired_uids = set(uids)
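        # e.g. old={"a", "b"}, new={"b", "c"} -> pair "c" and unpair "a"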
tasks = [self.async_pair_sensor(uid) for uid in new.difference(old)]
tasks += [self.async_unpair_sensor(uid) for uid in old.difference(new)]
if tasks:
await asyncio.gather(*tasks)
async def async_unpair_sensor(self, uid: str) -> None:
"""Remove a paired sensor coordinator."""
LOGGER.debug("Removing paired sensor: %s", uid)
# Clear out objects related to this paired sensor:
self._paired_uids.remove(uid)
self._hass.data[DOMAIN][DATA_COORDINATOR][self._entry.entry_id][
API_SENSOR_PAIRED_SENSOR_STATUS
].pop(uid)
# Remove the paired sensor device from the device registry (which will
# clean up entities and the entity registry):
dev_reg = await self._hass.helpers.device_registry.async_get_registry()
device = dev_reg.async_get_or_create(
config_entry_id=self._entry.entry_id, identifiers={(DOMAIN, uid)}
)
dev_reg.async_remove_device(device.id)
class GuardianEntity(CoordinatorEntity):
"""Define a base Guardian entity."""
def __init__( # pylint: disable=super-init-not-called
self, entry: ConfigEntry, kind: str, name: str, device_class: str, icon: str
) -> None:
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: "Data provided by Elexa"}
self._available = True
self._entry = entry
self._device_class = device_class
self._device_info = {"manufacturer": "Elexa"}
self._icon = icon
self._kind = kind
self._name = name
@property
def device_class(self) -> str:
"""Return the device class."""
return self._device_class
@property
def device_info(self) -> dict:
"""Return device registry information for this entity."""
return self._device_info
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return self._attrs
@property
def icon(self) -> str:
"""Return the icon."""
return self._icon
@callback
def _async_update_from_latest_data(self):
"""Update the entity.
This should be extended by Guardian platforms.
"""
raise NotImplementedError
@callback
def _async_update_state_callback(self):
"""Update the entity's state."""
self._async_update_from_latest_data()
self.async_write_ha_state()
class PairedSensorEntity(GuardianEntity):
"""Define a Guardian paired sensor entity."""
def __init__(
self,
entry: ConfigEntry,
coordinator: DataUpdateCoordinator,
kind: str,
name: str,
device_class: str,
icon: str,
) -> None:
"""Initialize."""
super().__init__(entry, kind, name, device_class, icon)
self.coordinator = coordinator
self._paired_sensor_uid = coordinator.data["uid"]
self._device_info["identifiers"] = {(DOMAIN, self._paired_sensor_uid)}
self._device_info["name"] = f"Guardian Paired Sensor {self._paired_sensor_uid}"
self._device_info["via_device"] = (DOMAIN, self._entry.data[CONF_UID])
@property
def name(self) -> str:
"""Return the name of the entity."""
return f"Guardian Paired Sensor {self._paired_sensor_uid}: {self._name}"
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return f"{self._paired_sensor_uid}_{self._kind}"
async def async_added_to_hass(self) -> None:
"""Perform tasks when the entity is added."""
self._async_update_from_latest_data()
class ValveControllerEntity(GuardianEntity):
"""Define a Guardian valve controller entity."""
def __init__(
self,
entry: ConfigEntry,
coordinators: Dict[str, DataUpdateCoordinator],
kind: str,
name: str,
device_class: str,
icon: str,
) -> None:
"""Initialize."""
super().__init__(entry, kind, name, device_class, icon)
self.coordinators = coordinators
self._device_info["identifiers"] = {(DOMAIN, self._entry.data[CONF_UID])}
self._device_info[
"name"
] = f"Guardian Valve Controller {self._entry.data[CONF_UID]}"
self._device_info["model"] = self.coordinators[API_SYSTEM_DIAGNOSTICS].data[
"firmware"
]
@property
    def available(self) -> bool:
        """Return whether the entity is available."""
        # self.coordinators maps API names to coordinators, so iterate the values:
        return any(
            coordinator.last_update_success
            for coordinator in self.coordinators.values()
        )
@property
def name(self) -> str:
"""Return the name of the entity."""
return f"Guardian {self._entry.data[CONF_UID]}: {self._name}"
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return f"{self._entry.data[CONF_UID]}_{self._kind}"
async def _async_continue_entity_setup(self):
"""Perform additional, internal tasks when the entity is about to be added.
This should be extended by Guardian platforms.
"""
raise NotImplementedError
@callback
def async_add_coordinator_update_listener(self, api: str) -> None:
"""Add a listener to a DataUpdateCoordinator based on the API referenced."""
self.async_on_remove(
self.coordinators[api].async_add_listener(self._async_update_state_callback)
)
async def async_added_to_hass(self) -> None:
"""Perform tasks when the entity is added."""
await self._async_continue_entity_setup()
self.async_add_coordinator_update_listener(API_SYSTEM_DIAGNOSTICS)
self._async_update_from_latest_data()
async def async_update(self) -> None:
"""Update the entity.
Only used by the generic entity update service.
"""
# Ignore manual update requests if the entity is disabled
if not self.enabled:
return
        refresh_tasks = [
            coordinator.async_request_refresh()
            for coordinator in self.coordinators.values()
        ]
await asyncio.gather(*refresh_tasks)
|
import chainer
from chainer.backends import cuda
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
import numpy as np
import unittest
from chainercv import functions
def _outsize(x):
if isinstance(x, chainer.utils.collections_abc.Iterable):
if len(x) == 2:
return (None, ) + x
else:
return x
return None, x, x
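# Worked examples: _outsize(4) -> (None, 4, 4); _outsize((4, 4)) -> (None, 4, 4);
# _outsize((2, 4, 4)) -> (2, 4, 4).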
@testing.parameterize(*testing.product({
'spatial_scale': [np.float(0.6), np.int(1), 0.6, 1.0, 2.0],
'outsize': [(np.int(2), np.int(4), np.int(4)), (2, 4, 4), (4, 4), 4],
}))
class TestPSROIAveragePooling2D(unittest.TestCase):
def setUp(self):
self.N = 3
self.group_size = 2
self.out_c, self.out_h, self.out_w = _outsize(self.outsize)
if self.out_c is None:
self.out_c = 2
self.n_channels = self.group_size * self.group_size * self.out_c
self.x = np.arange(
self.N * self.n_channels * 10 * 12,
dtype=np.float32).reshape((self.N, self.n_channels, 10, 12))
np.random.shuffle(self.x)
self.x = 2 * self.x / self.x.size - 1
self.x = self.x.astype(np.float32)
self.rois = np.array(
[[0, 0, 7, 7],
[1, 0, 5, 12],
[0, 1, 10, 5],
[3, 3, 4, 4]],
dtype=np.float32
)
self.roi_indices = np.array([0, 2, 1, 0], dtype=np.int32)
self.n_roi = self.rois.shape[0]
self.gy = np.random.uniform(
-1, 1, (self.n_roi, self.out_c, self.out_h, self.out_w))
self.gy = self.gy.astype(np.float32)
self.check_backward_options = {'atol': 5e-4, 'rtol': 5e-3}
def check_forward(self, x_data, roi_data, roi_index_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
y = functions.ps_roi_average_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
self.assertEqual(y.data.dtype, np.float32)
y_data = cuda.to_cpu(y.data)
self.assertEqual(
(self.n_roi, self.out_c, self.out_h, self.out_w), y_data.shape)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x, self.rois, self.roi_indices)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices))
def check_backward(self, x_data, roi_data, roi_index_data, y_grad_data):
def f(x, rois, roi_indices):
return functions.ps_roi_average_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
gradient_check.check_backward(
f, (x_data, roi_data, roi_index_data), y_grad_data,
no_grads=[False, True, True], **self.check_backward_options)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.rois, self.roi_indices, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices), cuda.to_gpu(self.gy))
def apply_backward(self, x_data, roi_data, roi_index_data, y_grad_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
y = functions.ps_roi_average_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
x.cleargrad()
y.grad = y_grad_data
y.backward()
return x, y
@attr.gpu
@condition.retry(3)
def test_consistency_with_gpu(self):
x_cpu, y_cpu = self.apply_backward(
self.x, self.rois, self.roi_indices, self.gy)
x_gpu, y_gpu = self.apply_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices), cuda.to_gpu(self.gy))
testing.assert_allclose(y_cpu.data, y_gpu.data)
testing.assert_allclose(x_cpu.grad, x_gpu.grad)
@testing.parameterize(*testing.product({
'outsize': [(2, 4, 4), (4, 4), 4]
}))
class TestPSROIAveragePooling2DFailure(unittest.TestCase):
def setUp(self):
self.N = 3
self.group_size = 2
self.spatial_scale = 0.6
out_c, _, _ = _outsize(self.outsize)
if out_c is None:
self.n_channels = self.group_size * self.group_size * 2 - 1
else:
self.n_channels = self.group_size * self.group_size * (out_c + 1)
self.x = np.arange(
self.N * self.n_channels * 10 * 12,
dtype=np.float32).reshape((self.N, self.n_channels, 10, 12))
np.random.shuffle(self.x)
self.x = 2 * self.x / self.x.size - 1
self.x = self.x.astype(np.float32)
self.rois = np.array(
[[0, 0, 7, 7],
[1, 0, 5, 12],
[0, 1, 10, 5],
[3, 3, 4, 4]],
dtype=np.float32
)
self.roi_indices = np.array([0, 2, 1, 0], dtype=np.int32)
self.n_roi = self.rois.shape[0]
def check_forward(self, x_data, roi_data, roi_index_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
functions.ps_roi_average_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
@condition.retry(3)
def test_invalid_outsize_cpu(self):
with self.assertRaises(ValueError):
self.check_forward(self.x, self.rois, self.roi_indices)
@attr.gpu
@condition.retry(3)
def test_invalid_outsize_gpu(self):
with self.assertRaises(ValueError):
self.check_forward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices))
testing.run_module(__name__, __file__)
|
from datetime import datetime, timedelta
from typing import Any, Callable, Dict, Optional
from starline import StarlineApi, StarlineDevice
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.event import async_track_time_interval
from .const import (
_LOGGER,
DATA_EXPIRES,
DATA_SLID_TOKEN,
DATA_SLNET_TOKEN,
DATA_USER_ID,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
)
class StarlineAccount:
"""StarLine Account class."""
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry):
"""Initialize StarLine account."""
self._hass: HomeAssistant = hass
self._config_entry: ConfigEntry = config_entry
self._update_interval: int = DEFAULT_SCAN_INTERVAL
self._unsubscribe_auto_updater: Optional[Callable] = None
self._api: StarlineApi = StarlineApi(
config_entry.data[DATA_USER_ID], config_entry.data[DATA_SLNET_TOKEN]
)
def _check_slnet_token(self) -> None:
"""Check SLNet token expiration and update if needed."""
now = datetime.now().timestamp()
slnet_token_expires = self._config_entry.data[DATA_EXPIRES]
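        # Refresh proactively: renew the token if it would expire before the
        # next scheduled update, rather than letting a request fail first.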
if now + self._update_interval > slnet_token_expires:
self._update_slnet_token()
def _update_slnet_token(self) -> None:
"""Update SLNet token."""
slid_token = self._config_entry.data[DATA_SLID_TOKEN]
try:
slnet_token, slnet_token_expires, user_id = self._api.get_user_id(
slid_token
)
self._api.set_slnet_token(slnet_token)
self._api.set_user_id(user_id)
self._hass.config_entries.async_update_entry(
self._config_entry,
data={
**self._config_entry.data,
DATA_SLNET_TOKEN: slnet_token,
DATA_EXPIRES: slnet_token_expires,
DATA_USER_ID: user_id,
},
)
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error updating SLNet token: %s", err)
def _update_data(self):
"""Update StarLine data."""
self._check_slnet_token()
self._api.update()
@property
def api(self) -> StarlineApi:
"""Return the instance of the API."""
return self._api
async def update(self, unused=None):
"""Update StarLine data."""
await self._hass.async_add_executor_job(self._update_data)
def set_update_interval(self, interval: int) -> None:
"""Set StarLine API update interval."""
_LOGGER.debug("Setting update interval: %ds", interval)
self._update_interval = interval
if self._unsubscribe_auto_updater is not None:
self._unsubscribe_auto_updater()
delta = timedelta(seconds=interval)
self._unsubscribe_auto_updater = async_track_time_interval(
self._hass, self.update, delta
)
def unload(self):
"""Unload StarLine API."""
_LOGGER.debug("Unloading StarLine API.")
if self._unsubscribe_auto_updater is not None:
self._unsubscribe_auto_updater()
self._unsubscribe_auto_updater = None
@staticmethod
def device_info(device: StarlineDevice) -> Dict[str, Any]:
"""Device information for entities."""
return {
"identifiers": {(DOMAIN, device.device_id)},
"manufacturer": "StarLine",
"name": device.name,
"sw_version": device.fw_version,
"model": device.typename,
}
@staticmethod
def gps_attrs(device: StarlineDevice) -> Dict[str, Any]:
"""Attributes for device tracker."""
return {
"updated": datetime.utcfromtimestamp(device.position["ts"]).isoformat(),
"online": device.online,
}
@staticmethod
def balance_attrs(device: StarlineDevice) -> Dict[str, Any]:
"""Attributes for balance sensor."""
return {
"operator": device.balance.get("operator"),
"state": device.balance.get("state"),
"updated": device.balance.get("ts"),
}
@staticmethod
def gsm_attrs(device: StarlineDevice) -> Dict[str, Any]:
"""Attributes for GSM sensor."""
return {
"raw": device.gsm_level,
"imei": device.imei,
"phone": device.phone,
"online": device.online,
}
@staticmethod
def engine_attrs(device: StarlineDevice) -> Dict[str, Any]:
"""Attributes for engine switch."""
return {
"autostart": device.car_state.get("r_start"),
"ignition": device.car_state.get("run"),
}
|
import os
import pytest
from molecule import config
from molecule.driver import openstack
# NOTE(retr0h): The `patched_config_validate` fixture prevents
# config.Config._validate from executing, avoiding odd side effects
# in the patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return openstack.Openstack(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'openstack' == _instance.name
def test_options_property(_instance):
x = {'managed': True}
assert x == _instance.options
def test_login_cmd_template_property(_instance):
x = ('ssh {address} -l {user} -p {port} -i {identity_file} '
'-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no')
assert x == _instance.login_cmd_template
def test_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.safe_files
def test_default_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.default_ssh_connection_options
def test_login_options(mocker, _instance):
m = mocker.patch(
'molecule.driver.openstack.Openstack._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
assert x == _instance.login_options('foo')
def test_ansible_connection_options(mocker, _instance):
m = mocker.patch(
'molecule.driver.openstack.Openstack._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'ansible_host':
'172.16.0.2',
'ansible_port':
22,
'ansible_user':
'cloud-user',
'ansible_private_key_file':
'/foo/bar',
'connection':
'ssh',
'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no'),
}
assert x == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
def test_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'openstack'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'openstack'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
x = {
'instance': 'foo',
}
assert x == _instance._get_instance_config('foo')
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
|
from typing import List, Optional
from xknx.devices import Climate as XknxClimate
from xknx.dpt import HVACOperationMode
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from .const import DOMAIN, OPERATION_MODES, PRESET_MODES
from .knx_entity import KnxEntity
OPERATION_MODES_INV = dict(reversed(item) for item in OPERATION_MODES.items())
PRESET_MODES_INV = dict(reversed(item) for item in PRESET_MODES.items())
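# Inverted lookups: OPERATION_MODES/PRESET_MODES map KNX values to Home
# Assistant names; these map the Home Assistant names back to KNX values.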
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up climate(s) for KNX platform."""
entities = []
for device in hass.data[DOMAIN].xknx.devices:
if isinstance(device, XknxClimate):
entities.append(KNXClimate(device))
async_add_entities(entities)
class KNXClimate(KnxEntity, ClimateEntity):
"""Representation of a KNX climate device."""
def __init__(self, device: XknxClimate):
"""Initialize of a KNX climate device."""
super().__init__(device)
self._unit_of_measurement = TEMP_CELSIUS
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
async def async_update(self):
"""Request a state update from KNX bus."""
await self._device.sync()
await self._device.mode.sync()
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def current_temperature(self):
"""Return the current temperature."""
return self._device.temperature.value
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return self._device.temperature_step
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._device.target_temperature.value
@property
def min_temp(self):
"""Return the minimum temperature."""
return self._device.target_temperature_min
@property
def max_temp(self):
"""Return the maximum temperature."""
return self._device.target_temperature_max
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
await self._device.set_target_temperature(temperature)
self.async_write_ha_state()
@property
def hvac_mode(self) -> Optional[str]:
"""Return current operation ie. heat, cool, idle."""
if self._device.supports_on_off and not self._device.is_on:
return HVAC_MODE_OFF
if self._device.mode.supports_operation_mode:
return OPERATION_MODES.get(
self._device.mode.operation_mode.value, HVAC_MODE_HEAT
)
# default to "heat"
return HVAC_MODE_HEAT
@property
def hvac_modes(self) -> Optional[List[str]]:
"""Return the list of available operation modes."""
_operations = [
OPERATION_MODES.get(operation_mode.value)
for operation_mode in self._device.mode.operation_modes
]
if self._device.supports_on_off:
if not _operations:
_operations.append(HVAC_MODE_HEAT)
_operations.append(HVAC_MODE_OFF)
_modes = list(set(filter(None, _operations)))
# default to ["heat"]
return _modes if _modes else [HVAC_MODE_HEAT]
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set operation mode."""
if self._device.supports_on_off and hvac_mode == HVAC_MODE_OFF:
await self._device.turn_off()
else:
if self._device.supports_on_off and not self._device.is_on:
await self._device.turn_on()
if self._device.mode.supports_operation_mode:
knx_operation_mode = HVACOperationMode(
OPERATION_MODES_INV.get(hvac_mode)
)
await self._device.mode.set_operation_mode(knx_operation_mode)
self.async_write_ha_state()
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
if self._device.mode.supports_operation_mode:
return PRESET_MODES.get(self._device.mode.operation_mode.value, PRESET_AWAY)
return None
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
_presets = [
PRESET_MODES.get(operation_mode.value)
for operation_mode in self._device.mode.operation_modes
]
return list(filter(None, _presets))
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self._device.mode.supports_operation_mode:
knx_operation_mode = HVACOperationMode(PRESET_MODES_INV.get(preset_mode))
await self._device.mode.set_operation_mode(knx_operation_mode)
self.async_write_ha_state()
|
from aiohomekit.model.characteristics import (
CharacteristicsTypes,
InUseValues,
IsConfiguredValues,
)
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
OUTLET_IN_USE = "outlet_in_use"
ATTR_IN_USE = "in_use"
ATTR_IS_CONFIGURED = "is_configured"
ATTR_REMAINING_DURATION = "remaining_duration"
class HomeKitSwitch(HomeKitEntity, SwitchEntity):
"""Representation of a Homekit switch."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [CharacteristicsTypes.ON, CharacteristicsTypes.OUTLET_IN_USE]
@property
def is_on(self):
"""Return true if device is on."""
return self.service.value(CharacteristicsTypes.ON)
async def async_turn_on(self, **kwargs):
"""Turn the specified switch on."""
await self.async_put_characteristics({CharacteristicsTypes.ON: True})
async def async_turn_off(self, **kwargs):
"""Turn the specified switch off."""
await self.async_put_characteristics({CharacteristicsTypes.ON: False})
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
outlet_in_use = self.service.value(CharacteristicsTypes.OUTLET_IN_USE)
if outlet_in_use is not None:
return {OUTLET_IN_USE: outlet_in_use}
class HomeKitValve(HomeKitEntity, SwitchEntity):
"""Represents a valve in an irrigation system."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.ACTIVE,
CharacteristicsTypes.IN_USE,
CharacteristicsTypes.IS_CONFIGURED,
CharacteristicsTypes.REMAINING_DURATION,
]
async def async_turn_on(self, **kwargs):
"""Turn the specified valve on."""
await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: True})
async def async_turn_off(self, **kwargs):
"""Turn the specified valve off."""
await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: False})
@property
def icon(self) -> str:
"""Return the icon."""
return "mdi:water"
@property
def is_on(self):
"""Return true if device is on."""
return self.service.value(CharacteristicsTypes.ACTIVE)
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
attrs = {}
in_use = self.service.value(CharacteristicsTypes.IN_USE)
if in_use is not None:
attrs[ATTR_IN_USE] = in_use == InUseValues.IN_USE
is_configured = self.service.value(CharacteristicsTypes.IS_CONFIGURED)
if is_configured is not None:
attrs[ATTR_IS_CONFIGURED] = is_configured == IsConfiguredValues.CONFIGURED
remaining = self.service.value(CharacteristicsTypes.REMAINING_DURATION)
if remaining is not None:
attrs[ATTR_REMAINING_DURATION] = remaining
return attrs
ENTITY_TYPES = {
"switch": HomeKitSwitch,
"outlet": HomeKitSwitch,
"valve": HomeKitValve,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit switches."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
entity_class = ENTITY_TYPES.get(service["stype"])
if not entity_class:
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
|
import pytest
from homeassistant import setup
from homeassistant.components import lock
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from tests.common import assert_setup_component, async_mock_service
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_template_state(hass):
"""Test template."""
with assert_setup_component(1, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"name": "Test template lock",
"value_template": "{{ states.switch.test_state.state }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
state = hass.states.get("lock.test_template_lock")
assert state.state == lock.STATE_LOCKED
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
state = hass.states.get("lock.test_template_lock")
assert state.state == lock.STATE_UNLOCKED
async def test_template_state_boolean_on(hass):
"""Test the setting of the state with boolean on."""
with assert_setup_component(1, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ 1 == 1 }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_LOCKED
async def test_template_state_boolean_off(hass):
"""Test the setting of the state with off."""
with assert_setup_component(1, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ 1 == 2 }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_UNLOCKED
async def test_template_syntax_error(hass):
"""Test templating syntax error."""
with assert_setup_component(0, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{% if rubbish %}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
async def test_invalid_name_does_not_create(hass):
"""Test invalid name."""
with assert_setup_component(0, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"switch": {
"platform": "lock",
"name": "{{%}",
"value_template": "{{ rubbish }",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
async def test_invalid_lock_does_not_create(hass):
"""Test invalid lock."""
with assert_setup_component(0, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{"lock": {"platform": "template", "value_template": "Invalid"}},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
async def test_missing_template_does_not_create(hass):
"""Test missing template."""
with assert_setup_component(0, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"not_value_template": "{{ states.switch.test_state.state }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
async def test_template_static(hass, caplog):
"""Test that we allow static templates."""
with assert_setup_component(1, lock.DOMAIN):
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ 1 + 1 }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_UNLOCKED
hass.states.async_set("lock.template_lock", lock.STATE_LOCKED)
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_LOCKED
async def test_lock_action(hass, calls):
"""Test lock action."""
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ states.switch.test_state.state }}",
"lock": {"service": "test.automation"},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_UNLOCKED
await hass.services.async_call(
lock.DOMAIN, lock.SERVICE_LOCK, {ATTR_ENTITY_ID: "lock.template_lock"}
)
await hass.async_block_till_done()
assert len(calls) == 1
async def test_unlock_action(hass, calls):
"""Test unlock action."""
assert await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ states.switch.test_state.state }}",
"lock": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"unlock": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
state = hass.states.get("lock.template_lock")
assert state.state == lock.STATE_LOCKED
await hass.services.async_call(
lock.DOMAIN, lock.SERVICE_UNLOCK, {ATTR_ENTITY_ID: "lock.template_lock"}
)
await hass.async_block_till_done()
assert len(calls) == 1
async def test_available_template_with_entities(hass):
"""Test availability templates with values from other entities."""
await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ states('switch.test_state') }}",
"lock": {"service": "switch.turn_on", "entity_id": "switch.test_state"},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
"availability_template": "{{ is_state('availability_state.state', 'on') }}",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# When template returns true..
hass.states.async_set("availability_state.state", STATE_ON)
await hass.async_block_till_done()
# Device State should not be unavailable
assert hass.states.get("lock.template_lock").state != STATE_UNAVAILABLE
# When Availability template returns false
hass.states.async_set("availability_state.state", STATE_OFF)
await hass.async_block_till_done()
# device state should be unavailable
assert hass.states.get("lock.template_lock").state == STATE_UNAVAILABLE
async def test_invalid_availability_template_keeps_component_available(hass, caplog):
"""Test that an invalid availability keeps the device available."""
await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"value_template": "{{ 1 + 1 }}",
"availability_template": "{{ x - 12 }}",
"lock": {"service": "switch.turn_on", "entity_id": "switch.test_state"},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("lock.template_lock").state != STATE_UNAVAILABLE
assert ("UndefinedError: 'x' is undefined") in caplog.text
async def test_unique_id(hass):
"""Test unique_id option only creates one lock per id."""
await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"name": "test_template_lock_01",
"unique_id": "not-so-unique-anymore",
"value_template": "{{ true }}",
"lock": {"service": "switch.turn_on", "entity_id": "switch.test_state"},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
},
},
)
await setup.async_setup_component(
hass,
lock.DOMAIN,
{
"lock": {
"platform": "template",
"name": "test_template_lock_02",
"unique_id": "not-so-unique-anymore",
"value_template": "{{ false }}",
"lock": {"service": "switch.turn_on", "entity_id": "switch.test_state"},
"unlock": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
},
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
|
from glob import glob
import logging
import os
from pi1wire import InvalidCRCException, UnsupportResponseException
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_MOUNT_DIR,
CONF_NAMES,
CONF_TYPE_OWFS,
CONF_TYPE_OWSERVER,
CONF_TYPE_SYSBUS,
DEFAULT_OWSERVER_PORT,
DEFAULT_SYSBUS_MOUNT_DIR,
DOMAIN,
)
from .onewire_entities import OneWire, OneWireProxy
from .onewirehub import OneWireHub
_LOGGER = logging.getLogger(__name__)
DEVICE_SENSORS = {
# Family : { SensorType: owfs path }
"10": {"temperature": "temperature"},
"12": {"temperature": "TAI8570/temperature", "pressure": "TAI8570/pressure"},
"22": {"temperature": "temperature"},
"26": {
"temperature": "temperature",
"humidity": "humidity",
"humidity_hih3600": "HIH3600/humidity",
"humidity_hih4000": "HIH4000/humidity",
"humidity_hih5030": "HIH5030/humidity",
"humidity_htm1735": "HTM1735/humidity",
"pressure": "B1-R1-A/pressure",
"illuminance": "S3-R1-A/illuminance",
"voltage_VAD": "VAD",
"voltage_VDD": "VDD",
"current": "IAD",
},
"28": {"temperature": "temperature"},
"3B": {"temperature": "temperature"},
"42": {"temperature": "temperature"},
"1D": {"counter_a": "counter.A", "counter_b": "counter.B"},
"EF": {"HobbyBoard": "special"},
}
DEVICE_SUPPORT_SYSBUS = ["10", "22", "28", "3B", "42"]
# EF sensors are usually specialized HobbyBoards sensors.
# These can only be read by OWFS. Currently this driver only supports them
# via owserver (network protocol)
HOBBYBOARD_EF = {
"HobbyBoards_EF": {
"humidity": "humidity/humidity_corrected",
"humidity_raw": "humidity/humidity_raw",
"temperature": "humidity/temperature",
},
"HB_MOISTURE_METER": {
"moisture_0": "moisture/sensor.0",
"moisture_1": "moisture/sensor.1",
"moisture_2": "moisture/sensor.2",
"moisture_3": "moisture/sensor.3",
},
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAMES): {cv.string: cv.string},
vol.Optional(CONF_MOUNT_DIR, default=DEFAULT_SYSBUS_MOUNT_DIR): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_OWSERVER_PORT): cv.port,
}
)
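# A minimal example configuration (an illustrative assumption, not taken from
# this file) showing how the options in PLATFORM_SCHEMA map to YAML; with
# CONF_HOST set, async_setup_platform below selects the owserver type:
#
#   sensor:
#     - platform: onewire
#       host: 192.168.1.10     # CONF_HOST -> owserver (network) mode
#       port: 4304             # CONF_PORT, defaults to DEFAULT_OWSERVER_PORT
#       names:
#         28-00000123456a: "Living room"   # CONF_NAMES: sensor id -> name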
def hb_info_from_type(dev_type="std"):
"""Return the proper info array for the device type."""
if "std" in dev_type:
return DEVICE_SENSORS
if "HobbyBoard" in dev_type:
return HOBBYBOARD_EF
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up 1-Wire platform."""
if config.get(CONF_HOST):
config[CONF_TYPE] = CONF_TYPE_OWSERVER
elif config[CONF_MOUNT_DIR] == DEFAULT_SYSBUS_MOUNT_DIR:
config[CONF_TYPE] = CONF_TYPE_SYSBUS
else: # pragma: no cover
        # This part of the implementation does not conform to policy regarding 3rd-party libraries, and will no longer be updated.
# https://developers.home-assistant.io/docs/creating_platform_code_review/#5-communication-with-devicesservices
config[CONF_TYPE] = CONF_TYPE_OWFS
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up 1-Wire platform."""
onewirehub = hass.data[DOMAIN][config_entry.unique_id]
entities = await hass.async_add_executor_job(
get_entities, onewirehub, config_entry.data
)
async_add_entities(entities, True)
def get_entities(onewirehub: OneWireHub, config):
"""Get a list of entities."""
entities = []
device_names = {}
if CONF_NAMES in config:
if isinstance(config[CONF_NAMES], dict):
device_names = config[CONF_NAMES]
conf_type = config[CONF_TYPE]
    # We have an owserver on a remote (or local) host/port
if conf_type == CONF_TYPE_OWSERVER:
for device in onewirehub.devices:
family = device["family"]
device_type = device["type"]
sensor_id = os.path.split(os.path.split(device["path"])[0])[1]
dev_type = "std"
if "EF" in family:
dev_type = "HobbyBoard"
family = device_type
if family not in hb_info_from_type(dev_type):
_LOGGER.warning(
"Ignoring unknown family (%s) of sensor found for device: %s",
family,
sensor_id,
)
continue
device_info = {
"identifiers": {(DOMAIN, sensor_id)},
"manufacturer": "Maxim Integrated",
"model": device_type,
"name": sensor_id,
}
for sensor_key, sensor_value in hb_info_from_type(dev_type)[family].items():
if "moisture" in sensor_key:
s_id = sensor_key.split("_")[1]
is_leaf = int(
onewirehub.owproxy.read(
f"{device['path']}moisture/is_leaf.{s_id}"
).decode()
)
if is_leaf:
sensor_key = f"wetness_{s_id}"
device_file = os.path.join(
os.path.split(device["path"])[0], sensor_value
)
entities.append(
OneWireProxy(
device_names.get(sensor_id, sensor_id),
device_file,
sensor_key,
device_info,
onewirehub.owproxy,
)
)
# We have a raw GPIO ow sensor on a Pi
elif conf_type == CONF_TYPE_SYSBUS:
base_dir = config[CONF_MOUNT_DIR]
_LOGGER.debug("Initializing using SysBus %s", base_dir)
for p1sensor in onewirehub.devices:
family = p1sensor.mac_address[:2]
sensor_id = f"{family}-{p1sensor.mac_address[2:]}"
if family not in DEVICE_SUPPORT_SYSBUS:
_LOGGER.warning(
"Ignoring unknown family (%s) of sensor found for device: %s",
family,
sensor_id,
)
continue
device_info = {
"identifiers": {(DOMAIN, sensor_id)},
"manufacturer": "Maxim Integrated",
"model": family,
"name": sensor_id,
}
device_file = f"/sys/bus/w1/devices/{sensor_id}/w1_slave"
entities.append(
OneWireDirect(
device_names.get(sensor_id, sensor_id),
device_file,
"temperature",
device_info,
p1sensor,
)
)
if not entities:
_LOGGER.error(
"No onewire sensor found. Check if dtoverlay=w1-gpio "
"is in your /boot/config.txt. "
"Check the mount_dir parameter if it's defined"
)
# We have an owfs mounted
else: # pragma: no cover
        # This part of the implementation does not conform to policy regarding 3rd-party libraries, and will no longer be updated.
# https://developers.home-assistant.io/docs/creating_platform_code_review/#5-communication-with-devicesservices
base_dir = config[CONF_MOUNT_DIR]
_LOGGER.debug("Initializing using OWFS %s", base_dir)
_LOGGER.warning(
"The OWFS implementation of 1-Wire sensors is deprecated, "
"and should be migrated to OWServer (on localhost:4304). "
"If migration to OWServer is not feasible on your installation, "
"please raise an issue at https://github.com/home-assistant/core/issues/new"
"?title=Unable%20to%20migrate%20onewire%20from%20OWFS%20to%20OWServer",
)
for family_file_path in glob(os.path.join(base_dir, "*", "family")):
with open(family_file_path) as family_file:
family = family_file.read()
if "EF" in family:
continue
if family in DEVICE_SENSORS:
for sensor_key, sensor_value in DEVICE_SENSORS[family].items():
sensor_id = os.path.split(os.path.split(family_file_path)[0])[1]
device_file = os.path.join(
os.path.split(family_file_path)[0], sensor_value
)
entities.append(
OneWireOWFS(
device_names.get(sensor_id, sensor_id),
device_file,
sensor_key,
)
)
return entities
class OneWireDirect(OneWire):
"""Implementation of a 1-Wire sensor directly connected to RPI GPIO."""
def __init__(self, name, device_file, sensor_type, device_info, owsensor):
"""Initialize the sensor."""
super().__init__(name, device_file, sensor_type, device_info)
self._owsensor = owsensor
def update(self):
"""Get the latest data from the device."""
value = None
try:
self._value_raw = self._owsensor.get_temperature()
value = round(float(self._value_raw), 1)
except (
FileNotFoundError,
InvalidCRCException,
UnsupportResponseException,
) as ex:
_LOGGER.warning("Cannot read from sensor %s: %s", self._device_file, ex)
self._state = value
class OneWireOWFS(OneWire): # pragma: no cover
"""Implementation of a 1-Wire sensor through owfs.
    This part of the implementation does not conform to policy regarding 3rd-party libraries, and will no longer be updated.
https://developers.home-assistant.io/docs/creating_platform_code_review/#5-communication-with-devicesservices
"""
def _read_value_raw(self):
"""Read the value as it is returned by the sensor."""
with open(self._device_file) as ds_device_file:
lines = ds_device_file.readlines()
return lines
def update(self):
"""Get the latest data from the device."""
value = None
try:
value_read = self._read_value_raw()
if len(value_read) == 1:
value = round(float(value_read[0]), 1)
self._value_raw = float(value_read[0])
except ValueError:
_LOGGER.warning("Invalid value read from %s", self._device_file)
except FileNotFoundError:
_LOGGER.warning("Cannot read from sensor: %s", self._device_file)
self._state = value
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from math import sin, cos, radians
import matplotlib.pyplot as plt
from numpy import array
from numpy.random import randn
from filterpy.kalman import KalmanFilter, MMAEFilterBank
from filterpy.common import Q_discrete_white_noise, Saver
DO_PLOT = False
class NoisySensor(object):
def __init__(self, noise_factor=1):
self.noise_factor = noise_factor
def sense(self, pos):
return (pos[0] + randn()*self.noise_factor,
pos[1] + randn()*self.noise_factor)
def angle_between(x, y):
    # Smallest signed difference between two headings in degrees, taking the
    # 0/360 wrap-around into account.
    return min(y-x, y-x+360, y-x-360, key=abs)
class ManeuveringTarget(object):
def __init__(self, x0, y0, v0, heading):
self.x = x0
self.y = y0
self.vel = v0
self.hdg = heading
self.cmd_vel = v0
self.cmd_hdg = heading
self.vel_step = 0
self.hdg_step = 0
self.vel_delta = 0
self.hdg_delta = 0
def update(self):
vx = self.vel * cos(radians(90-self.hdg))
vy = self.vel * sin(radians(90-self.hdg))
self.x += vx
self.y += vy
if self.hdg_step > 0:
self.hdg_step -= 1
self.hdg += self.hdg_delta
if self.vel_step > 0:
self.vel_step -= 1
self.vel += self.vel_delta
return (self.x, self.y)
def set_commanded_heading(self, hdg_degrees, steps):
self.cmd_hdg = hdg_degrees
self.hdg_delta = angle_between(self.cmd_hdg,
self.hdg) / steps
if abs(self.hdg_delta) > 0:
self.hdg_step = steps
else:
self.hdg_step = 0
def set_commanded_speed(self, speed, steps):
self.cmd_vel = speed
self.vel_delta = (self.cmd_vel - self.vel) / steps
if abs(self.vel_delta) > 0:
self.vel_step = steps
else:
self.vel_step = 0
def make_cv_filter(dt, noise_factor):
    cvfilter = KalmanFilter(dim_x=2, dim_z=1)
cvfilter.x = array([0., 0.])
cvfilter.P *= 3
cvfilter.R *= noise_factor**2
cvfilter.F = array([[1, dt],
[0, 1]], dtype=float)
cvfilter.H = array([[1, 0]], dtype=float)
cvfilter.Q = Q_discrete_white_noise(dim=2, dt=dt, var=0.02)
return cvfilter
def make_ca_filter(dt, noise_factor):
cafilter = KalmanFilter(dim_x=3, dim_z=1)
cafilter.x = array([0., 0., 0.])
cafilter.P *= 3
cafilter.R *= noise_factor**2
cafilter.Q = Q_discrete_white_noise(dim=3, dt=dt, var=0.02)
cafilter.F = array([[1, dt, 0.5*dt*dt],
[0, 1, dt],
[0, 0, 1]], dtype=float)
cafilter.H = array([[1, 0, 0]], dtype=float)
return cafilter
def generate_data(steady_count, noise_factor):
t = ManeuveringTarget(x0=0, y0=0, v0=0.3, heading=0)
xs = []
ys = []
for i in range(30):
x, y = t.update()
xs.append(x)
ys.append(y)
t.set_commanded_heading(310, 25)
t.set_commanded_speed(1, 15)
for i in range(steady_count):
x, y = t.update()
xs.append(x)
ys.append(y)
ns = NoisySensor(noise_factor=noise_factor)
pos = array(list(zip(xs, ys)))
zs = array([ns.sense(p) for p in pos])
return pos, zs
def test_MMAE2():
dt = 0.1
pos, zs = generate_data(120, noise_factor=0.6)
z_xs = zs[:, 0]
    ca = make_ca_filter(dt, noise_factor=0.6)
    # Build the CV filter from the CA constructor so that both filters have
    # the same state dimension as the bank (dim_x=3 below), then zero out
    # the acceleration terms.
    cv = make_ca_filter(dt, noise_factor=0.6)
    cv.F[:, 2] = 0  # remove acceleration term
    cv.P[2, 2] = 0
    cv.Q[2, 2] = 0
filters = [cv, ca]
bank = MMAEFilterBank(filters, (0.5, 0.5), dim_x=3, H=ca.H)
xs, probs = [], []
cvxs, caxs = [], []
s = Saver(bank)
for i, z in enumerate(z_xs):
bank.predict()
bank.update(z)
xs.append(bank.x[0])
cvxs.append(cv.x[0])
caxs.append(ca.x[0])
print(i, cv.likelihood, ca.likelihood, bank.p)
s.save()
probs.append(bank.p[0] / bank.p[1])
s.to_array()
if DO_PLOT:
plt.subplot(121)
plt.plot(xs)
plt.plot(pos[:, 0])
plt.subplot(122)
plt.plot(probs)
plt.title('probability ratio p(cv)/p(ca)')
plt.figure()
plt.plot(cvxs, label='CV')
plt.plot(caxs, label='CA')
plt.plot(pos[:, 0])
plt.legend()
plt.figure()
plt.plot(xs)
plt.plot(pos[:, 0])
return bank
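# Background note (a general MMAE sketch, an assumption rather than a quote
# from filterpy's documentation): after each update the bank rescales every
# filter's probability by its measurement likelihood, roughly
#     p_i <- p_i * L_i / sum_j(p_j * L_j),
# and bank.x is the probability-weighted combination of the filters' states.
# The probs list above therefore tracks how strongly the data favors the
# constant-velocity model over the constant-acceleration one.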
if __name__ == '__main__':
DO_PLOT = True
test_MMAE2()
|
from datetime import datetime, timedelta
import json
import logging
import os
import pytest
from homeassistant.components import zone
import homeassistant.components.device_tracker as device_tracker
from homeassistant.components.device_tracker import const, legacy
from homeassistant.const import (
ATTR_ENTITY_PICTURE,
ATTR_FRIENDLY_NAME,
ATTR_GPS_ACCURACY,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_PLATFORM,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.core import State, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import discovery
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import Mock, call, patch
from tests.common import (
assert_setup_component,
async_fire_time_changed,
mock_registry,
mock_restore_cache,
patch_yaml_files,
)
from tests.components.device_tracker import common
TEST_PLATFORM = {device_tracker.DOMAIN: {CONF_PLATFORM: "test"}}
_LOGGER = logging.getLogger(__name__)
@pytest.fixture(name="yaml_devices")
def mock_yaml_devices(hass):
"""Get a path for storing yaml devices."""
yaml_devices = hass.config.path(legacy.YAML_DEVICES)
if os.path.isfile(yaml_devices):
os.remove(yaml_devices)
yield yaml_devices
if os.path.isfile(yaml_devices):
os.remove(yaml_devices)
async def test_is_on(hass):
"""Test is_on method."""
entity_id = f"{const.DOMAIN}.test"
hass.states.async_set(entity_id, STATE_HOME)
assert device_tracker.is_on(hass, entity_id)
hass.states.async_set(entity_id, STATE_NOT_HOME)
assert not device_tracker.is_on(hass, entity_id)
async def test_reading_broken_yaml_config(hass):
"""Test when known devices contains invalid data."""
files = {
"empty.yaml": "",
"nodict.yaml": "100",
"badkey.yaml": "@:\n name: Device",
"noname.yaml": "my_device:\n",
"allok.yaml": "My Device:\n name: Device",
"oneok.yaml": ("My Device!:\n name: Device\nbad_device:\n nme: Device"),
}
args = {"hass": hass, "consider_home": timedelta(seconds=60)}
with patch_yaml_files(files):
assert await legacy.async_load_config("empty.yaml", **args) == []
assert await legacy.async_load_config("nodict.yaml", **args) == []
assert await legacy.async_load_config("noname.yaml", **args) == []
assert await legacy.async_load_config("badkey.yaml", **args) == []
res = await legacy.async_load_config("allok.yaml", **args)
assert len(res) == 1
assert res[0].name == "Device"
assert res[0].dev_id == "my_device"
res = await legacy.async_load_config("oneok.yaml", **args)
assert len(res) == 1
assert res[0].name == "Device"
assert res[0].dev_id == "my_device"
async def test_reading_yaml_config(hass, yaml_devices):
"""Test the rendering of the YAML configuration."""
dev_id = "test"
device = legacy.Device(
hass,
timedelta(seconds=180),
True,
dev_id,
"AB:CD:EF:GH:IJ",
"Test name",
picture="http://test.picture",
icon="mdi:kettle",
)
await hass.async_add_executor_job(
legacy.update_config, yaml_devices, dev_id, device
)
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
config = (await legacy.async_load_config(yaml_devices, hass, device.consider_home))[
0
]
assert device.dev_id == config.dev_id
assert device.track == config.track
assert device.mac == config.mac
assert device.config_picture == config.config_picture
assert device.consider_home == config.consider_home
assert device.icon == config.icon
@patch("homeassistant.components.device_tracker.const.LOGGER.warning")
async def test_duplicate_mac_dev_id(mock_warning, hass):
"""Test adding duplicate MACs or device IDs to DeviceTracker."""
devices = [
legacy.Device(
hass, True, True, "my_device", "AB:01", "My device", None, None, False
),
legacy.Device(
hass, True, True, "your_device", "AB:01", "Your device", None, None, False
),
]
legacy.DeviceTracker(hass, False, True, {}, devices)
_LOGGER.debug(mock_warning.call_args_list)
assert (
mock_warning.call_count == 1
), "The only warning call should be duplicates (check DEBUG)"
args, _ = mock_warning.call_args
assert "Duplicate device MAC" in args[0], "Duplicate MAC warning expected"
mock_warning.reset_mock()
devices = [
legacy.Device(
hass, True, True, "my_device", "AB:01", "My device", None, None, False
),
legacy.Device(
hass, True, True, "my_device", None, "Your device", None, None, False
),
]
legacy.DeviceTracker(hass, False, True, {}, devices)
_LOGGER.debug(mock_warning.call_args_list)
assert (
mock_warning.call_count == 1
), "The only warning call should be duplicates (check DEBUG)"
args, _ = mock_warning.call_args
assert "Duplicate device IDs" in args[0], "Duplicate device IDs warning expected"
async def test_setup_without_yaml_file(hass):
"""Test with no YAML file."""
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
async def test_gravatar(hass):
"""Test the Gravatar generation."""
dev_id = "test"
device = legacy.Device(
hass,
timedelta(seconds=180),
True,
dev_id,
"AB:CD:EF:GH:IJ",
"Test name",
gravatar="[email protected]",
)
gravatar_url = (
"https://www.gravatar.com/avatar/"
"55502f40dc8b7c769880b10874abc9d0.jpg?s=80&d=wavatar"
)
assert device.config_picture == gravatar_url
async def test_gravatar_and_picture(hass):
"""Test that Gravatar overrides picture."""
dev_id = "test"
device = legacy.Device(
hass,
timedelta(seconds=180),
True,
dev_id,
"AB:CD:EF:GH:IJ",
"Test name",
picture="http://test.picture",
gravatar="[email protected]",
)
gravatar_url = (
"https://www.gravatar.com/avatar/"
"55502f40dc8b7c769880b10874abc9d0.jpg?s=80&d=wavatar"
)
assert device.config_picture == gravatar_url
@patch("homeassistant.components.device_tracker.legacy.DeviceTracker.see")
@patch("homeassistant.components.demo.device_tracker.setup_scanner", autospec=True)
async def test_discover_platform(mock_demo_setup_scanner, mock_see, hass):
"""Test discovery of device_tracker demo platform."""
await discovery.async_load_platform(
hass, device_tracker.DOMAIN, "demo", {"test_key": "test_val"}, {"bla": {}}
)
await hass.async_block_till_done()
assert device_tracker.DOMAIN in hass.config.components
assert mock_demo_setup_scanner.called
assert mock_demo_setup_scanner.call_args[0] == (
hass,
{},
mock_see,
{"test_key": "test_val"},
)
async def test_update_stale(hass, mock_device_tracker_conf):
"""Test stalled update."""
scanner = getattr(hass.components, "test.device_tracker").SCANNER
scanner.reset()
scanner.come_home("DEV1")
now = dt_util.utcnow()
register_time = datetime(now.year + 1, 9, 15, 23, tzinfo=dt_util.UTC)
scan_time = datetime(now.year + 1, 9, 15, 23, 1, tzinfo=dt_util.UTC)
with patch(
"homeassistant.components.device_tracker.legacy.dt_util.utcnow",
return_value=register_time,
):
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(
hass,
device_tracker.DOMAIN,
{
device_tracker.DOMAIN: {
CONF_PLATFORM: "test",
device_tracker.CONF_CONSIDER_HOME: 59,
}
},
)
await hass.async_block_till_done()
assert STATE_HOME == hass.states.get("device_tracker.dev1").state
scanner.leave_home("DEV1")
with patch(
"homeassistant.components.device_tracker.legacy.dt_util.utcnow",
return_value=scan_time,
):
async_fire_time_changed(hass, scan_time)
await hass.async_block_till_done()
assert STATE_NOT_HOME == hass.states.get("device_tracker.dev1").state
async def test_entity_attributes(hass, mock_device_tracker_conf):
"""Test the entity attributes."""
devices = mock_device_tracker_conf
dev_id = "test_entity"
entity_id = f"{const.DOMAIN}.{dev_id}"
friendly_name = "Paulus"
picture = "http://placehold.it/200x200"
icon = "mdi:kettle"
device = legacy.Device(
hass,
timedelta(seconds=180),
True,
dev_id,
None,
friendly_name,
picture,
icon=icon,
)
devices.append(device)
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
attrs = hass.states.get(entity_id).attributes
assert friendly_name == attrs.get(ATTR_FRIENDLY_NAME)
assert icon == attrs.get(ATTR_ICON)
assert picture == attrs.get(ATTR_ENTITY_PICTURE)
@patch("homeassistant.components.device_tracker.legacy." "DeviceTracker.async_see")
async def test_see_service(mock_see, hass):
"""Test the see service with a unicode dev_id and NO MAC."""
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
params = {
"dev_id": "some_device",
"host_name": "example.com",
"location_name": "Work",
"gps": [0.3, 0.8],
"attributes": {"test": "test"},
}
common.async_see(hass, **params)
await hass.async_block_till_done()
assert mock_see.call_count == 1
assert mock_see.call_args == call(**params)
mock_see.reset_mock()
params["dev_id"] += chr(233) # e' acute accent from icloud
common.async_see(hass, **params)
await hass.async_block_till_done()
assert mock_see.call_count == 1
assert mock_see.call_args == call(**params)
async def test_see_service_guard_config_entry(hass, mock_device_tracker_conf):
"""Test the guard if the device is registered in the entity registry."""
mock_entry = Mock()
dev_id = "test"
entity_id = f"{const.DOMAIN}.{dev_id}"
mock_registry(hass, {entity_id: mock_entry})
devices = mock_device_tracker_conf
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
params = {"dev_id": dev_id, "gps": [0.3, 0.8]}
common.async_see(hass, **params)
await hass.async_block_till_done()
assert not devices
async def test_new_device_event_fired(hass, mock_device_tracker_conf):
"""Test that the device tracker will fire an event."""
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
test_events = []
@callback
def listener(event):
"""Record that our event got called."""
test_events.append(event)
hass.bus.async_listen("device_tracker_new_device", listener)
common.async_see(hass, "mac_1", host_name="hello")
common.async_see(hass, "mac_1", host_name="hello")
await hass.async_block_till_done()
assert len(test_events) == 1
# Assert we can serialize the event
json.dumps(test_events[0].as_dict(), cls=JSONEncoder)
assert test_events[0].data == {
"entity_id": "device_tracker.hello",
"host_name": "hello",
"mac": "MAC_1",
}
async def test_duplicate_yaml_keys(hass, mock_device_tracker_conf):
"""Test that the device tracker will not generate invalid YAML."""
devices = mock_device_tracker_conf
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
common.async_see(hass, "mac_1", host_name="hello")
common.async_see(hass, "mac_2", host_name="hello")
await hass.async_block_till_done()
assert len(devices) == 2
assert devices[0].dev_id != devices[1].dev_id
async def test_invalid_dev_id(hass, mock_device_tracker_conf):
"""Test that the device tracker will not allow invalid dev ids."""
devices = mock_device_tracker_conf
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
common.async_see(hass, dev_id="hello-world")
await hass.async_block_till_done()
assert not devices
async def test_see_state(hass, yaml_devices):
"""Test device tracker see records state correctly."""
assert await async_setup_component(hass, device_tracker.DOMAIN, TEST_PLATFORM)
params = {
"mac": "AA:BB:CC:DD:EE:FF",
"dev_id": "some_device",
"host_name": "example.com",
"location_name": "Work",
"gps": [0.3, 0.8],
"gps_accuracy": 1,
"battery": 100,
"attributes": {"test": "test", "number": 1},
}
common.async_see(hass, **params)
await hass.async_block_till_done()
config = await legacy.async_load_config(yaml_devices, hass, timedelta(seconds=0))
assert len(config) == 1
state = hass.states.get("device_tracker.example_com")
attrs = state.attributes
assert state.state == "Work"
assert state.object_id == "example_com"
assert state.name == "example.com"
assert attrs["friendly_name"] == "example.com"
assert attrs["battery"] == 100
assert attrs["latitude"] == 0.3
assert attrs["longitude"] == 0.8
assert attrs["test"] == "test"
assert attrs["gps_accuracy"] == 1
assert attrs["source_type"] == "gps"
assert attrs["number"] == 1
async def test_see_passive_zone_state(hass, mock_device_tracker_conf):
"""Test that the device tracker sets gps for passive trackers."""
now = dt_util.utcnow()
register_time = datetime(now.year + 1, 9, 15, 23, tzinfo=dt_util.UTC)
scan_time = datetime(now.year + 1, 9, 15, 23, 1, tzinfo=dt_util.UTC)
with assert_setup_component(1, zone.DOMAIN):
zone_info = {
"name": "Home",
"latitude": 1,
"longitude": 2,
"radius": 250,
"passive": False,
}
await async_setup_component(hass, zone.DOMAIN, {"zone": zone_info})
scanner = getattr(hass.components, "test.device_tracker").SCANNER
scanner.reset()
scanner.come_home("dev1")
with patch(
"homeassistant.components.device_tracker.legacy.dt_util.utcnow",
return_value=register_time,
):
with assert_setup_component(1, device_tracker.DOMAIN):
assert await async_setup_component(
hass,
device_tracker.DOMAIN,
{
device_tracker.DOMAIN: {
CONF_PLATFORM: "test",
device_tracker.CONF_CONSIDER_HOME: 59,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("device_tracker.dev1")
attrs = state.attributes
assert STATE_HOME == state.state
assert state.object_id == "dev1"
assert state.name == "dev1"
assert attrs.get("friendly_name") == "dev1"
assert attrs.get("latitude") == 1
assert attrs.get("longitude") == 2
assert attrs.get("gps_accuracy") == 0
assert attrs.get("source_type") == device_tracker.SOURCE_TYPE_ROUTER
scanner.leave_home("dev1")
with patch(
"homeassistant.components.device_tracker.legacy.dt_util.utcnow",
return_value=scan_time,
):
async_fire_time_changed(hass, scan_time)
await hass.async_block_till_done()
state = hass.states.get("device_tracker.dev1")
attrs = state.attributes
assert STATE_NOT_HOME == state.state
assert state.object_id == "dev1"
assert state.name == "dev1"
assert attrs.get("friendly_name") == "dev1"
assert attrs.get("latitude") is None
assert attrs.get("longitude") is None
assert attrs.get("gps_accuracy") is None
assert attrs.get("source_type") == device_tracker.SOURCE_TYPE_ROUTER
@patch("homeassistant.components.device_tracker.const.LOGGER.warning")
async def test_see_failures(mock_warning, hass, mock_device_tracker_conf):
"""Test that the device tracker see failures."""
devices = mock_device_tracker_conf
tracker = legacy.DeviceTracker(hass, timedelta(seconds=60), 0, {}, [])
# MAC is not a string (but added)
await tracker.async_see(mac=567, host_name="Number MAC")
    # No device id or MAC (not added)
with pytest.raises(HomeAssistantError):
await tracker.async_see()
assert mock_warning.call_count == 0
# Ignore gps on invalid GPS (both added & warnings)
await tracker.async_see(mac="mac_1_bad_gps", gps=1)
await tracker.async_see(mac="mac_2_bad_gps", gps=[1])
await tracker.async_see(mac="mac_3_bad_gps", gps="gps")
await hass.async_block_till_done()
assert mock_warning.call_count == 3
assert len(devices) == 4
async def test_async_added_to_hass(hass):
"""Test restoring state."""
attr = {
ATTR_LONGITUDE: 18,
ATTR_LATITUDE: -33,
const.ATTR_SOURCE_TYPE: "gps",
ATTR_GPS_ACCURACY: 2,
const.ATTR_BATTERY: 100,
}
mock_restore_cache(hass, [State("device_tracker.jk", "home", attr)])
path = hass.config.path(legacy.YAML_DEVICES)
files = {path: "jk:\n name: JK Phone\n track: True"}
with patch_yaml_files(files):
assert await async_setup_component(hass, device_tracker.DOMAIN, {})
state = hass.states.get("device_tracker.jk")
assert state
assert state.state == "home"
for key, val in attr.items():
atr = state.attributes.get(key)
assert atr == val, f"{key}={atr} expected: {val}"
async def test_bad_platform(hass):
"""Test bad platform."""
config = {"device_tracker": [{"platform": "bad_platform"}]}
with assert_setup_component(0, device_tracker.DOMAIN):
assert await async_setup_component(hass, device_tracker.DOMAIN, config)
async def test_adding_unknown_device_to_config(mock_device_tracker_conf, hass):
"""Test the adding of unknown devices to configuration file."""
scanner = getattr(hass.components, "test.device_tracker").SCANNER
scanner.reset()
scanner.come_home("DEV1")
await async_setup_component(
hass, device_tracker.DOMAIN, {device_tracker.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
assert len(mock_device_tracker_conf) == 1
device = mock_device_tracker_conf[0]
assert device.dev_id == "dev1"
assert device.track
async def test_picture_and_icon_on_see_discovery(mock_device_tracker_conf, hass):
"""Test that picture and icon are set in initial see."""
tracker = legacy.DeviceTracker(hass, timedelta(seconds=60), False, {}, [])
await tracker.async_see(dev_id=11, picture="pic_url", icon="mdi:icon")
await hass.async_block_till_done()
assert len(mock_device_tracker_conf) == 1
assert mock_device_tracker_conf[0].icon == "mdi:icon"
assert mock_device_tracker_conf[0].entity_picture == "pic_url"
async def test_backward_compatibility_for_track_new(mock_device_tracker_conf, hass):
"""Test backward compatibility for track new."""
tracker = legacy.DeviceTracker(
hass, timedelta(seconds=60), False, {device_tracker.CONF_TRACK_NEW: True}, []
)
await tracker.async_see(dev_id=13)
await hass.async_block_till_done()
assert len(mock_device_tracker_conf) == 1
assert mock_device_tracker_conf[0].track is False
async def test_old_style_track_new_is_skipped(mock_device_tracker_conf, hass):
"""Test old style config is skipped."""
tracker = legacy.DeviceTracker(
hass, timedelta(seconds=60), None, {device_tracker.CONF_TRACK_NEW: False}, []
)
await tracker.async_see(dev_id=14)
await hass.async_block_till_done()
assert len(mock_device_tracker_conf) == 1
assert mock_device_tracker_conf[0].track is False
def test_see_schema_allowing_ios_calls():
"""Test SEE service schema allows extra keys.
    Temporary workaround because the iOS app sends incorrect data.
"""
device_tracker.SERVICE_SEE_PAYLOAD_SCHEMA(
{
"dev_id": "Test",
"battery": 35,
"battery_status": "Not Charging",
"gps": [10.0, 10.0],
"gps_accuracy": 300,
"hostname": "beer",
}
)
|
from homeassistant.components import songpal
from homeassistant.setup import async_setup_component
from . import (
CONF_DATA,
_create_mocked_device,
_patch_config_flow_device,
_patch_media_player_device,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
def _patch_media_setup():
"""Patch media_player.async_setup_entry."""
async def _async_return():
return True
return patch(
"homeassistant.components.songpal.media_player.async_setup_entry",
side_effect=_async_return,
)
async def test_setup_empty(hass):
"""Test setup without any configuration."""
with _patch_media_setup() as setup:
assert await async_setup_component(hass, songpal.DOMAIN, {}) is True
await hass.async_block_till_done()
setup.assert_not_called()
async def test_setup(hass):
"""Test setup the platform."""
mocked_device = _create_mocked_device()
with _patch_config_flow_device(mocked_device), _patch_media_setup() as setup:
assert (
await async_setup_component(
hass, songpal.DOMAIN, {songpal.DOMAIN: [CONF_DATA]}
)
is True
)
await hass.async_block_till_done()
mocked_device.get_supported_methods.assert_called_once()
setup.assert_called_once()
async def test_unload(hass):
"""Test unload entity."""
entry = MockConfigEntry(domain=songpal.DOMAIN, data=CONF_DATA)
entry.add_to_hass(hass)
mocked_device = _create_mocked_device()
with _patch_config_flow_device(mocked_device), _patch_media_player_device(
mocked_device
):
assert await async_setup_component(hass, songpal.DOMAIN, {}) is True
await hass.async_block_till_done()
mocked_device.listen_notifications.assert_called_once()
assert await songpal.async_unload_entry(hass, entry)
await hass.async_block_till_done()
mocked_device.stop_listen_notifications.assert_called_once()
|
import sys
import json
import time
import signal
import os
import threading
from http import HTTPStatus
import cheroot.wsgi
import flask
app = flask.Flask(__name__)
_redirect_later_event = None
@app.route('/')
def root():
"""Show simple text."""
return flask.Response(b'qutebrowser test webserver, '
b'<a href="/user-agent">user agent</a>')
@app.route('/data/<path:path>')
@app.route('/data2/<path:path>') # for per-URL settings
def send_data(path):
"""Send a given data file to qutebrowser.
If a directory is requested, its index.html is sent.
"""
if hasattr(sys, 'frozen'):
basedir = os.path.realpath(os.path.dirname(sys.executable))
data_dir = os.path.join(basedir, 'end2end', 'data')
else:
basedir = os.path.join(os.path.realpath(os.path.dirname(__file__)),
'..')
data_dir = os.path.join(basedir, 'data')
if os.path.isdir(os.path.join(data_dir, path)):
path += '/index.html'
return flask.send_from_directory(data_dir, path)
@app.route('/redirect-later')
def redirect_later():
"""302 redirect to / after the given delay.
If delay is -1, wait until a request on redirect-later-continue is done.
"""
global _redirect_later_event
delay = float(flask.request.args.get('delay', '1'))
if delay == -1:
_redirect_later_event = threading.Event()
        # threading.Event.wait takes its timeout in seconds.
        ok = _redirect_later_event.wait(timeout=30)
assert ok
_redirect_later_event = None
else:
time.sleep(delay)
x = flask.redirect('/')
return x
@app.route('/redirect-later-continue')
def redirect_later_continue():
"""Continue a redirect-later request."""
if _redirect_later_event is None:
return flask.Response(b'Timed out or no redirect pending.')
else:
_redirect_later_event.set()
return flask.Response(b'Continued redirect.')
@app.route('/redirect-self')
def redirect_self():
"""302 Redirects to itself."""
return app.make_response(flask.redirect(flask.url_for('redirect_self')))
@app.route('/redirect/<int:n>')
def redirect_n_times(n):
    """302 Redirects n times, ending at /."""
    assert n > 0
    if n == 1:
        # Finish at the root page rather than redirecting to /redirect/0,
        # which would trip the assertion above.
        return flask.redirect(flask.url_for('root'))
    return flask.redirect(flask.url_for('redirect_n_times', n=n-1))
@app.route('/relative-redirect')
def relative_redirect():
"""302 Redirect once."""
response = app.make_response('')
response.status_code = HTTPStatus.FOUND
response.headers['Location'] = flask.url_for('root')
return response
@app.route('/absolute-redirect')
def absolute_redirect():
"""302 Redirect once."""
response = app.make_response('')
response.status_code = HTTPStatus.FOUND
response.headers['Location'] = flask.url_for('root', _external=True)
return response
@app.route('/redirect-to')
def redirect_to():
"""302/3XX Redirects to the given URL."""
# We need to build the response manually and convert to UTF-8 to prevent
# werkzeug from "fixing" the URL. This endpoint should set the Location
# header to the exact string supplied.
response = app.make_response('')
response.status_code = HTTPStatus.FOUND
response.headers['Location'] = flask.request.args['url'].encode('utf-8')
return response
@app.route('/content-size')
def content_size():
"""Send two bytes of data without a content-size."""
def generate_bytes():
yield b'*'
time.sleep(0.2)
yield b'*'
response = flask.Response(generate_bytes(), headers={
"Content-Type": "application/octet-stream",
})
response.status_code = HTTPStatus.OK
return response
@app.route('/twenty-mb')
def twenty_mb():
"""Send 20MB of data."""
def generate_bytes():
yield b'*' * 20 * 1024 * 1024
response = flask.Response(generate_bytes(), headers={
"Content-Type": "application/octet-stream",
"Content-Length": str(20 * 1024 * 1024),
})
response.status_code = HTTPStatus.OK
return response
@app.route('/500-inline')
def internal_error_attachment():
"""A 500 error with Content-Disposition: inline."""
response = flask.Response(b"", headers={
"Content-Type": "application/octet-stream",
"Content-Disposition": 'inline; filename="attachment.jpg"',
})
response.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
return response
@app.route('/500')
def internal_error():
"""A normal 500 error."""
r = flask.make_response()
r.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
return r
@app.route('/cookies')
def view_cookies():
"""Show cookies."""
return flask.jsonify(cookies=flask.request.cookies)
@app.route('/cookies/set')
def set_cookies():
"""Set cookie(s) as provided by the query string."""
r = app.make_response(flask.redirect(flask.url_for('view_cookies')))
for key, value in flask.request.args.items():
r.set_cookie(key=key, value=value)
return r
@app.route('/basic-auth/<user>/<passwd>')
def basic_auth(user='user', passwd='passwd'):
"""Prompt the user for authorization using HTTP Basic Auth."""
auth = flask.request.authorization
if not auth or auth.username != user or auth.password != passwd:
r = flask.make_response()
r.status_code = HTTPStatus.UNAUTHORIZED
r.headers = {'WWW-Authenticate': 'Basic realm="Fake Realm"'}
return r
return flask.jsonify(authenticated=True, user=user)
@app.route('/drip')
def drip():
"""Drip data over a duration."""
duration = float(flask.request.args.get('duration'))
numbytes = int(flask.request.args.get('numbytes'))
pause = duration / numbytes
def generate_bytes():
for _ in range(numbytes):
yield "*".encode('utf-8')
time.sleep(pause)
response = flask.Response(generate_bytes(), headers={
"Content-Type": "application/octet-stream",
"Content-Length": str(numbytes),
})
response.status_code = HTTPStatus.OK
return response
@app.route('/404')
def status_404():
    """A plain 404 error."""
r = flask.make_response()
r.status_code = HTTPStatus.NOT_FOUND
return r
@app.route('/headers')
def view_headers():
"""Return HTTP headers."""
return flask.jsonify(headers=dict(flask.request.headers))
@app.route('/response-headers')
def response_headers():
"""Return a set of response headers from the query string."""
headers = flask.request.args
response = flask.jsonify(headers)
    response.headers.extend(headers)
    # Serializing the headers changes Content-Length, so rebuild the body once
    # from the now-complete header set and re-apply the requested headers.
    response = flask.jsonify(dict(response.headers))
    response.headers.extend(headers)
return response
@app.route('/query')
def query():
return flask.jsonify(flask.request.args)
@app.route('/user-agent')
def view_user_agent():
"""Return User-Agent."""
return flask.jsonify({'user-agent': flask.request.headers['user-agent']})
@app.route('/favicon.ico')
def favicon():
basedir = os.path.join(os.path.realpath(os.path.dirname(__file__)),
'..', '..', '..')
return flask.send_from_directory(os.path.join(basedir, 'icons'),
'qutebrowser.ico',
mimetype='image/vnd.microsoft.icon')
@app.after_request
def log_request(response):
"""Log a webserver request."""
request = flask.request
data = {
'verb': request.method,
'path': request.full_path if request.query_string else request.path,
'status': response.status_code,
}
print(json.dumps(data), file=sys.stderr, flush=True)
return response
class WSGIServer(cheroot.wsgi.Server):
"""A custom WSGIServer that prints a line on stderr when it's ready.
Attributes:
_ready: Internal state for the 'ready' property.
_printed_ready: Whether the initial ready message was printed.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._ready = False
self._printed_ready = False
@property
def ready(self):
return self._ready
@ready.setter
def ready(self, value):
if value and not self._printed_ready:
print(' * Running on http://127.0.0.1:{}/ (Press CTRL+C to quit)'
.format(self.bind_addr[1]), file=sys.stderr, flush=True)
self._printed_ready = True
self._ready = value
def main():
if hasattr(sys, 'frozen'):
basedir = os.path.realpath(os.path.dirname(sys.executable))
app.template_folder = os.path.join(basedir, 'end2end', 'templates')
port = int(sys.argv[1])
server = WSGIServer(('127.0.0.1', port), app)
signal.signal(signal.SIGTERM, lambda *args: server.stop())
try:
server.start()
except KeyboardInterrupt:
server.stop()
if __name__ == '__main__':
main()
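# Usage sketch (the module name below is an assumption for illustration;
# main() only requires the port as sys.argv[1]):
#
#   python webserver_sub.py 8080
#
# Once ready, WSGIServer prints its "Running on http://127.0.0.1:8080/" line
# on stderr, which a test harness can wait for before issuing requests.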
|
import pytest
from homeassistant.components.wilight.config_flow import (
CONF_MODEL_NAME,
CONF_SERIAL_NUMBER,
)
from homeassistant.components.wilight.const import DOMAIN
from homeassistant.config_entries import SOURCE_SSDP
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.wilight import (
CONF_COMPONENTS,
HOST,
MOCK_SSDP_DISCOVERY_INFO_LIGHT_FAN,
MOCK_SSDP_DISCOVERY_INFO_MISSING_MANUFACTORER,
MOCK_SSDP_DISCOVERY_INFO_P_B,
MOCK_SSDP_DISCOVERY_INFO_WRONG_MANUFACTORER,
UPNP_MODEL_NAME_P_B,
UPNP_SERIAL,
WILIGHT_ID,
)
@pytest.fixture(name="dummy_get_components_from_model_clear")
def mock_dummy_get_components_from_model():
"""Mock a clear components list."""
components = []
with patch(
"pywilight.get_components_from_model",
return_value=components,
):
yield components
async def test_show_ssdp_form(hass: HomeAssistantType) -> None:
"""Test that the ssdp confirmation form is served."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_P_B.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "confirm"
assert result["description_placeholders"] == {
CONF_NAME: f"WL{WILIGHT_ID}",
CONF_COMPONENTS: "light",
}
async def test_ssdp_not_wilight_abort_1(hass: HomeAssistantType) -> None:
"""Test that the ssdp aborts not_wilight."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_WRONG_MANUFACTORER.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "not_wilight_device"
async def test_ssdp_not_wilight_abort_2(hass: HomeAssistantType) -> None:
"""Test that the ssdp aborts not_wilight."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_MISSING_MANUFACTORER.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "not_wilight_device"
async def test_ssdp_not_wilight_abort_3(
hass: HomeAssistantType, dummy_get_components_from_model_clear
) -> None:
"""Test that the ssdp aborts not_wilight."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_P_B.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "not_wilight_device"
async def test_ssdp_not_supported_abort(hass: HomeAssistantType) -> None:
"""Test that the ssdp aborts not_supported."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_LIGHT_FAN.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "not_supported_device"
async def test_ssdp_device_exists_abort(hass: HomeAssistantType) -> None:
"""Test abort SSDP flow if WiLight already configured."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=WILIGHT_ID,
data={
CONF_HOST: HOST,
CONF_SERIAL_NUMBER: UPNP_SERIAL,
CONF_MODEL_NAME: UPNP_MODEL_NAME_P_B,
},
)
entry.add_to_hass(hass)
discovery_info = MOCK_SSDP_DISCOVERY_INFO_P_B.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_SSDP},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_ssdp_flow_implementation(hass: HomeAssistantType) -> None:
"""Test the full SSDP flow from start to finish."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO_P_B.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "confirm"
assert result["description_placeholders"] == {
CONF_NAME: f"WL{WILIGHT_ID}",
"components": "light",
}
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"WL{WILIGHT_ID}"
assert result["data"]
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_SERIAL_NUMBER] == UPNP_SERIAL
assert result["data"][CONF_MODEL_NAME] == UPNP_MODEL_NAME_P_B
|
import asyncio
from redbot.core import commands
from redbot.core.i18n import Translator
from redbot.core.utils.predicates import MessagePredicate
__all__ = ["do_install_agreement"]
T_ = Translator("DownloaderChecks", __file__)
# Temporarily make `_` an identity function so the message below is extracted
# for translation without being translated at import time.
_ = lambda s: s
REPO_INSTALL_MSG = _(
"You're about to add a 3rd party repository. The creator of Red"
" and its community have no responsibility for any potential "
"damage that the content of 3rd party repositories might cause."
"\n\nBy typing '**I agree**' you declare that you have read and"
" fully understand the above message. This message won't be "
"shown again until the next reboot.\n\nYou have **30** seconds"
" to reply to this message."
)
# Restore `_` to the real translator now that the message is defined.
_ = T_
async def do_install_agreement(ctx: commands.Context) -> bool:
downloader = ctx.cog
if downloader is None or downloader.already_agreed:
return True
await ctx.send(T_(REPO_INSTALL_MSG))
try:
await ctx.bot.wait_for(
"message", check=MessagePredicate.lower_equal_to("i agree", ctx), timeout=30
)
except asyncio.TimeoutError:
await ctx.send(_("Your response has timed out, please try again."))
return False
downloader.already_agreed = True
return True
|
from __future__ import print_function
import sys
import colorama
from molecule import logger
def test_info(capsys):
log = logger.get_logger(__name__)
log.info('foo')
stdout, _ = capsys.readouterr()
print('--> {}{}{}'.format(colorama.Fore.CYAN, 'foo'.rstrip(),
colorama.Style.RESET_ALL))
x, _ = capsys.readouterr()
assert x == stdout
def test_out(capsys):
log = logger.get_logger(__name__)
log.out('foo')
stdout, _ = capsys.readouterr()
assert ' foo\n' == stdout
def test_warn(capsys):
log = logger.get_logger(__name__)
log.warn('foo')
stdout, _ = capsys.readouterr()
print('{}{}{}'.format(colorama.Fore.YELLOW, 'foo'.rstrip(),
colorama.Style.RESET_ALL))
x, _ = capsys.readouterr()
assert x == stdout
def test_error(capsys):
log = logger.get_logger(__name__)
log.error('foo')
_, stderr = capsys.readouterr()
print(
'{}{}{}'.format(colorama.Fore.RED, 'foo'.rstrip(),
colorama.Style.RESET_ALL),
file=sys.stderr)
_, x = capsys.readouterr()
assert x in stderr
def test_critical(capsys):
log = logger.get_logger(__name__)
log.critical('foo')
_, stderr = capsys.readouterr()
print(
'{}ERROR: {}{}'.format(colorama.Fore.RED, 'foo'.rstrip(),
colorama.Style.RESET_ALL),
file=sys.stderr)
_, x = capsys.readouterr()
assert x in stderr
def test_success(capsys):
log = logger.get_logger(__name__)
log.success('foo')
stdout, _ = capsys.readouterr()
print('{}{}{}'.format(colorama.Fore.GREEN, 'foo'.rstrip(),
colorama.Style.RESET_ALL))
x, _ = capsys.readouterr()
assert x == stdout
def test_red_text():
x = '{}{}{}'.format(colorama.Fore.RED, 'foo', colorama.Style.RESET_ALL)
assert x == logger.red_text('foo')
def test_yellow_text():
x = '{}{}{}'.format(colorama.Fore.YELLOW, 'foo', colorama.Style.RESET_ALL)
assert x == logger.yellow_text('foo')
def test_green_text():
x = '{}{}{}'.format(colorama.Fore.GREEN, 'foo', colorama.Style.RESET_ALL)
assert x == logger.green_text('foo')
def test_cyan_text():
x = '{}{}{}'.format(colorama.Fore.CYAN, 'foo', colorama.Style.RESET_ALL)
assert x == logger.cyan_text('foo')
def test_markup_detection_pycolors0(monkeypatch):
monkeypatch.setenv('PY_COLORS', '0')
assert not logger.should_do_markup()
def test_markup_detection_pycolors1(monkeypatch):
monkeypatch.setenv('PY_COLORS', '1')
assert logger.should_do_markup()
def test_markup_detection_tty_yes(mocker):
mocker.patch('sys.stdout.isatty', return_value=True)
mocker.patch('os.environ', {'TERM': 'xterm'})
assert logger.should_do_markup()
mocker.resetall()
mocker.stopall()
def test_markup_detection_tty_no(mocker):
mocker.patch('os.environ', {})
mocker.patch('sys.stdout.isatty', return_value=False)
assert not logger.should_do_markup()
mocker.resetall()
mocker.stopall()
|
import os
import subprocess
import argparse
import sys
import pathlib
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
def find_pyqt_bundle():
"""Try to find the pyqt-bundle executable next to the current Python.
We do this instead of using $PATH so that the script can be used via
.venv/bin/python.
"""
bin_path = pathlib.Path(sys.executable).parent
path = bin_path / 'pyqt-bundle'
if not path.exists():
raise FileNotFoundError("Can't find pyqt-bundle at {}".format(path))
return path
def main():
parser = argparse.ArgumentParser()
parser.add_argument('qt_location', help='Qt compiler directory')
parser.add_argument('--wheels-dir', help='Directory to use for wheels',
default='wheels')
args = parser.parse_args()
old_cwd = pathlib.Path.cwd()
try:
pyqt_bundle = find_pyqt_bundle()
except FileNotFoundError as e:
utils.print_error(str(e))
sys.exit(1)
qt_dir = pathlib.Path(args.qt_location)
bin_dir = qt_dir / 'bin'
if not bin_dir.exists():
utils.print_error("Can't find {}".format(bin_dir))
sys.exit(1)
wheels_dir = pathlib.Path(args.wheels_dir).resolve()
wheels_dir.mkdir(exist_ok=True)
if list(wheels_dir.glob('*')):
utils.print_col("Wheels directory is not empty, "
"unexpected behavior might occur!", 'yellow')
os.chdir(wheels_dir)
utils.print_title("Downloading wheels")
subprocess.run([sys.executable, '-m', 'pip', 'download',
'--no-deps', '--only-binary', 'PyQt5,PyQtWebEngine',
'PyQt5', 'PyQtWebEngine'], check=True)
utils.print_title("Patching wheels")
input_files = wheels_dir.glob('*.whl')
for wheel in input_files:
utils.print_subtitle(wheel.stem.split('-')[0])
subprocess.run([str(pyqt_bundle),
'--qt-dir', args.qt_location,
'--ignore-missing',
str(wheel)],
check=True)
wheel.unlink()
print("Done, output files:")
for wheel in wheels_dir.glob('*.whl'):
print(wheel.relative_to(old_cwd))
if __name__ == '__main__':
main()
|
from abc import ABCMeta, abstractmethod
class TermRanker(metaclass=ABCMeta):
def __init__(self, term_doc_matrix):
'''Initialize TermRanker
Parameters
----------
term_doc_matrix : TermDocMatrix
TermDocMatrix from which to find term ranks.
'''
self._term_doc_matrix = term_doc_matrix
self._use_non_text_features = False
    def use_non_text_features(self):
        '''
        Returns
        -------
        TermRanker

        Side Effect
        -----------
        Ranks non-text (metadata) features instead of text terms.
        '''
        self._use_non_text_features = True
        return self
def are_non_text_features_in_use(self):
return self._use_non_text_features
def get_X(self):
'''
:return: term freq matrix or metadata freq matrix
'''
if self._use_non_text_features:
return self._term_doc_matrix._mX
else:
return self._term_doc_matrix._X
def _get_freq_df(self, X, label_append=' freq'):
if self._use_non_text_features:
return self._term_doc_matrix._metadata_freq_df_from_matrix(X, label_append=label_append)
else:
return self._term_doc_matrix._term_freq_df_from_matrix(X, label_append=label_append)
def _get_row_category_ids(self):
if self._use_non_text_features:
return self._term_doc_matrix._row_category_ids_for_meta()
else:
return self._term_doc_matrix._row_category_ids()
@abstractmethod
    def get_ranks(self, label_append=' freq'):
pass
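# A minimal sketch of a concrete subclass (for illustration; the class name
# AbsoluteFrequencyRanker is assumed, not defined in this file): rank terms
# by their raw frequency per category, using the hooks defined above.
class AbsoluteFrequencyRanker(TermRanker):
    def get_ranks(self, label_append=' freq'):
        # Sum of term (or metadata) frequencies per category.
        return self._get_freq_df(self.get_X(), label_append=label_append)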
|
import diamond.collector
import diamond.convertor
import os
import re
try:
import psutil
except ImportError:
psutil = None
class DiskSpaceCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(DiskSpaceCollector, self).get_default_config_help()
config_help.update({
'filesystems': "filesystems to examine",
'exclude_filters':
"A list of regex patterns. Any filesystem" +
" matching any of these patterns will be excluded from disk" +
" space metrics collection",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(DiskSpaceCollector, self).get_default_config()
config.update({
'path': 'diskspace',
# filesystems to examine
            'filesystems': 'ext2, ext3, ext4, xfs, glusterfs, nfs, nfs4, ' +
                           'ntfs, hfs, fat32, fat16, btrfs',
# exclude_filters
# A list of regex patterns
# A filesystem matching any of these patterns will be excluded
# from disk space metrics collection.
#
# Examples:
# exclude_filters =,
# no exclude filters at all
# exclude_filters = ^/boot, ^/mnt
# exclude everything that begins /boot or /mnt
# exclude_filters = m,
# exclude everything that includes the letter "m"
'exclude_filters': ['^/export/home'],
# Default numeric output
'byte_unit': ['byte']
})
return config
def process_config(self):
super(DiskSpaceCollector, self).process_config()
# Precompile things
self.exclude_filters = self.config['exclude_filters']
if isinstance(self.exclude_filters, basestring):
self.exclude_filters = [self.exclude_filters]
        if not self.exclude_filters:
            # A regex that can never match a mount point (they do not contain
            # '!'), so nothing gets excluded.
            self.exclude_reg = re.compile('!.*')
        else:
            self.exclude_reg = re.compile('|'.join(self.exclude_filters))
self.filesystems = []
if isinstance(self.config['filesystems'], basestring):
for filesystem in self.config['filesystems'].split(','):
self.filesystems.append(filesystem.strip())
elif isinstance(self.config['filesystems'], list):
self.filesystems = self.config['filesystems']
def get_disk_labels(self):
"""
Creates a mapping of device nodes to filesystem labels
"""
path = '/dev/disk/by-label/'
labels = {}
if not os.path.isdir(path):
return labels
for label in os.listdir(path):
label = label.replace('\\x2f', '/')
device = os.path.realpath(path + '/' + label)
labels[device] = label
return labels
def get_file_systems(self):
"""
Creates a map of mounted filesystems on the machine.
iostat(1): Each sector has size of 512 bytes.
Returns:
st_dev -> FileSystem(device, mount_point)
"""
result = {}
if os.access('/proc/mounts', os.R_OK):
file = open('/proc/mounts')
for line in file:
try:
mount = line.split()
device = mount[0]
mount_point = mount[1]
fs_type = mount[2]
except (IndexError, ValueError):
continue
# Skip the filesystem if it is not in the list of valid
# filesystems
if fs_type not in self.filesystems:
self.log.debug("Ignoring %s since it is of type %s " +
" which is not in the list of filesystems.",
mount_point, fs_type)
continue
# Process the filters
if self.exclude_reg.search(mount_point):
self.log.debug("Ignoring %s since it is in the " +
"exclude_filter list.", mount_point)
continue
                if (('/' in device or device == 'tmpfs') and
                        mount_point.startswith('/')):
try:
stat = os.stat(mount_point)
except OSError:
self.log.debug("Path %s is not mounted - skipping.",
mount_point)
continue
if stat.st_dev in result:
continue
result[stat.st_dev] = {
'device': os.path.realpath(device),
'mount_point': mount_point,
'fs_type': fs_type
}
file.close()
else:
if not psutil:
self.log.error('Unable to import psutil')
return None
partitions = psutil.disk_partitions(False)
for partition in partitions:
result[len(result)] = {
'device': os.path.realpath(partition.device),
'mount_point': partition.mountpoint,
'fs_type': partition.fstype
}
return result
def collect(self):
labels = self.get_disk_labels()
results = self.get_file_systems()
if not results:
self.log.error('No diskspace metrics retrieved')
return None
for info in results.itervalues():
if info['device'] in labels:
name = labels[info['device']]
else:
name = info['mount_point'].replace('/', '_')
name = name.replace('.', '_').replace('\\', '')
if name == '_':
name = 'root'
if name == '_tmp':
name = 'tmp'
if hasattr(os, 'statvfs'): # POSIX
try:
data = os.statvfs(info['mount_point'])
except OSError as e:
self.log.exception(e)
continue
# Changed from data.f_bsize as f_frsize seems to be a more
# accurate representation of block size on multiple POSIX
# operating systems.
block_size = data.f_frsize
blocks_total = data.f_blocks
blocks_free = data.f_bfree
blocks_avail = data.f_bavail
inodes_total = data.f_files
inodes_free = data.f_ffree
inodes_avail = data.f_favail
elif os.name == 'nt': # Windows
# fixme: used still not exact compared to disk_usage.py
# from psutil
raw_data = psutil.disk_usage(info['mount_point'])
block_size = 1 # fixme: ?
blocks_total = raw_data.total
blocks_free = raw_data.free
else:
raise NotImplementedError("platform not supported")
for unit in self.config['byte_unit']:
metric_name = '%s.%s_percentfree' % (name, unit)
                metric_value = float(blocks_free) / float(blocks_total) * 100
self.publish_gauge(metric_name, metric_value, 2)
metric_name = '%s.%s_used' % (name, unit)
metric_value = float(block_size) * float(
blocks_total - blocks_free)
metric_value = diamond.convertor.binary.convert(
value=metric_value, oldUnit='byte', newUnit=unit)
self.publish_gauge(metric_name, metric_value, 2)
metric_name = '%s.%s_free' % (name, unit)
metric_value = float(block_size) * float(blocks_free)
metric_value = diamond.convertor.binary.convert(
value=metric_value, oldUnit='byte', newUnit=unit)
self.publish_gauge(metric_name, metric_value, 2)
if os.name != 'nt':
metric_name = '%s.%s_avail' % (name, unit)
metric_value = float(block_size) * float(blocks_avail)
metric_value = diamond.convertor.binary.convert(
value=metric_value, oldUnit='byte', newUnit=unit)
self.publish_gauge(metric_name, metric_value, 2)
if os.name != 'nt':
if float(inodes_total) > 0:
self.publish_gauge(
'%s.inodes_percentfree' % name,
float(inodes_free) / float(inodes_total) * 100)
self.publish_gauge('%s.inodes_used' % name,
inodes_total - inodes_free)
self.publish_gauge('%s.inodes_free' % name, inodes_free)
self.publish_gauge('%s.inodes_avail' % name, inodes_avail)
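# Worked example (illustrative numbers, not output from this collector): if
# statvfs reports f_frsize=4096, f_blocks=1000 and f_bfree=250, the gauges
# published for byte_unit=['byte'] would be:
#   <name>.byte_percentfree = 250 / 1000 * 100    = 25.0
#   <name>.byte_used        = 4096 * (1000 - 250) = 3072000
#   <name>.byte_free        = 4096 * 250          = 1024000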
|
from paasta_tools.autoscaling.utils import get_autoscaling_component
from paasta_tools.autoscaling.utils import register_autoscaling_component
FORECAST_POLICY_KEY = "forecast_policy"
def get_forecast_policy(name):
"""
Returns a forecast policy matching the given name. Only used by decision policies that try to forecast load, like
the proportional decision policy.
"""
return get_autoscaling_component(name, FORECAST_POLICY_KEY)
@register_autoscaling_component("current", FORECAST_POLICY_KEY)
def current_value_forecast_policy(historical_load, **kwargs):
"""A prediction policy that assumes that the value any time in the future will be the same as the current value.
:param historical_load: a list of (timestamp, value)s, where timestamp is a unix timestamp and value is load.
"""
return historical_load[-1][1]
def window_historical_load(historical_load, window_begin, window_end):
"""Filter historical_load down to just the datapoints lying between times window_begin and window_end, inclusive."""
filtered = []
for timestamp, value in historical_load:
if timestamp >= window_begin and timestamp <= window_end:
filtered.append((timestamp, value))
return filtered
def trailing_window_historical_load(historical_load, window_size):
window_end, _ = historical_load[-1]
window_begin = window_end - window_size
return window_historical_load(historical_load, window_begin, window_end)
@register_autoscaling_component("moving_average", FORECAST_POLICY_KEY)
def moving_average_forecast_policy(
historical_load, moving_average_window_seconds=1800, **kwargs
):
"""Does a simple average of all historical load data points within the moving average window. Weights all data
points within the window equally."""
windowed_data = trailing_window_historical_load(
historical_load, moving_average_window_seconds
)
windowed_values = [value for timestamp, value in windowed_data]
return sum(windowed_values) / len(windowed_values)
@register_autoscaling_component("linreg", FORECAST_POLICY_KEY)
def linreg_forecast_policy(
historical_load,
linreg_window_seconds,
linreg_extrapolation_seconds,
linreg_default_slope=0,
**kwargs,
):
"""Does a linear regression on the load data within the last linreg_window_seconds. For every time delta in
linreg_extrapolation_seconds, forecasts the value at that time delta from now, and returns the maximum of these
predicted values. (With linear extrapolation, it doesn't make sense to forecast at more than two points, as the max
load will always be at the first or last time delta.)
:param linreg_window_seconds: Consider all data from this many seconds ago until now.
:param linreg_extrapolation_seconds: A list of floats representing a number of seconds in the future at which to
predict the load. The highest prediction will be returned.
:param linreg_default_slope: If there is only one data point within the window, the equation for slope is undefined,
so we use this value (expressed in load/second) for prediction instead. Default is
0.
"""
window = trailing_window_historical_load(historical_load, linreg_window_seconds)
loads = [load for timestamp, load in window]
times = [timestamp for timestamp, load in window]
mean_time = sum(times) / len(times)
mean_load = sum(loads) / len(loads)
if len(window) > 1:
slope = sum((t - mean_time) * (l - mean_load) for t, l in window) / sum(
(t - mean_time) ** 2 for t in times
)
else:
slope = linreg_default_slope
intercept = mean_load - slope * mean_time
def predict(timestamp):
return slope * timestamp + intercept
if isinstance(linreg_extrapolation_seconds, (int, float)):
linreg_extrapolation_seconds = [linreg_extrapolation_seconds]
now, _ = historical_load[-1]
forecasted_values = [predict(now + delta) for delta in linreg_extrapolation_seconds]
return max(forecasted_values)
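# --- Hedged usage sketch (illustrative only) ---
# Exercises the three policies on synthetic, linearly rising load. Assumes
# register_autoscaling_component returns the decorated function unchanged,
# as registration decorators typically do.
if __name__ == "__main__":
    load = [(t, 10.0 + 0.5 * t) for t in range(0, 600, 60)]  # t = 0..540
    print(current_value_forecast_policy(load))  # 280.0, the last value
    # Average of the four points in the trailing 180s window: 235.0
    print(moving_average_forecast_policy(load, moving_average_window_seconds=180))
    # Fits slope 0.5 load/s and extrapolates 60s past the last point: 310.0
    print(
        linreg_forecast_policy(
            load, linreg_window_seconds=600, linreg_extrapolation_seconds=[60]
        )
    )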
|
import enum
from gi.repository import Gio, GLib, GObject, Gtk
from meld.conf import _
from meld.melddoc import LabeledObjectMixin, MeldDoc
from meld.recent import recent_comparisons
from meld.ui.util import map_widgets_into_lists
class DiffType(enum.IntEnum):
# TODO: This should probably live in MeldWindow
Unselected = -1
File = 0
Folder = 1
Version = 2
def supports_blank(self):
return self in (self.File, self.Folder)
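# Note: DiffType values double as indexes into NewDiffTab.button_types and,
# offset by one, into choosers_notebook below (page 0 presumably being the
# initial placeholder page shown while nothing is selected).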
@Gtk.Template(resource_path='/org/gnome/meld/ui/new-diff-tab.ui')
class NewDiffTab(Gtk.Alignment, LabeledObjectMixin):
__gtype_name__ = "NewDiffTab"
__gsignals__ = {
'diff-created': (GObject.SignalFlags.RUN_FIRST, None, (object,)),
}
close_signal = MeldDoc.close_signal
label_changed_signal = LabeledObjectMixin.label_changed
label_text = _("New comparison")
button_compare = Gtk.Template.Child()
button_new_blank = Gtk.Template.Child()
button_type_dir = Gtk.Template.Child()
button_type_file = Gtk.Template.Child()
button_type_vc = Gtk.Template.Child()
choosers_notebook = Gtk.Template.Child()
dir_chooser0 = Gtk.Template.Child()
dir_chooser1 = Gtk.Template.Child()
dir_chooser2 = Gtk.Template.Child()
dir_three_way_checkbutton = Gtk.Template.Child()
file_chooser0 = Gtk.Template.Child()
file_chooser1 = Gtk.Template.Child()
file_chooser2 = Gtk.Template.Child()
file_three_way_checkbutton = Gtk.Template.Child()
filechooserdialog0 = Gtk.Template.Child()
filechooserdialog1 = Gtk.Template.Child()
filechooserdialog2 = Gtk.Template.Child()
vc_chooser0 = Gtk.Template.Child()
def __init__(self, parentapp):
super().__init__()
map_widgets_into_lists(
self,
["file_chooser", "dir_chooser", "vc_chooser", "filechooserdialog"]
)
self.button_types = [
self.button_type_file,
self.button_type_dir,
self.button_type_vc,
]
self.diff_methods = {
DiffType.File: parentapp.append_filediff,
DiffType.Folder: parentapp.append_dirdiff,
DiffType.Version: parentapp.append_vcview,
}
self.diff_type = DiffType.Unselected
default_path = GLib.get_home_dir()
for chooser in self.file_chooser:
chooser.set_current_folder(default_path)
self.show()
@Gtk.Template.Callback()
def on_button_type_toggled(self, button, *args):
if not button.get_active():
if not any(b.get_active() for b in self.button_types):
button.set_active(True)
return
for b in self.button_types:
if b is not button:
b.set_active(False)
self.diff_type = DiffType(self.button_types.index(button))
self.choosers_notebook.set_current_page(self.diff_type + 1)
# FIXME: Add support for new blank for VcView
self.button_new_blank.set_sensitive(
self.diff_type.supports_blank())
self.button_compare.set_sensitive(True)
@Gtk.Template.Callback()
def on_three_way_checkbutton_toggled(self, button, *args):
if button is self.file_three_way_checkbutton:
self.file_chooser2.set_sensitive(button.get_active())
else: # button is self.dir_three_way_checkbutton
self.dir_chooser2.set_sensitive(button.get_active())
@Gtk.Template.Callback()
def on_file_set(self, filechooser, *args):
gfile = filechooser.get_file()
if not gfile:
return
parent = gfile.get_parent()
if not parent:
return
if parent.query_file_type(
Gio.FileQueryInfoFlags.NONE, None) == Gio.FileType.DIRECTORY:
for chooser in self.file_chooser:
if not chooser.get_file():
chooser.set_current_folder_file(parent)
# TODO: We could do checks here to prevent errors: check to see if
# we've got binary files; check for null file selections; sniff text
# encodings; check file permissions.
def _get_num_paths(self):
if self.diff_type in (DiffType.File, DiffType.Folder):
three_way_buttons = (
self.file_three_way_checkbutton,
self.dir_three_way_checkbutton,
)
three_way = three_way_buttons[self.diff_type].get_active()
num_paths = 3 if three_way else 2
else: # DiffType.Version
num_paths = 1
return num_paths
@Gtk.Template.Callback()
def on_button_compare_clicked(self, *args):
type_choosers = (self.file_chooser, self.dir_chooser, self.vc_chooser)
choosers = type_choosers[self.diff_type][:self._get_num_paths()]
compare_gfiles = [chooser.get_file() for chooser in choosers]
compare_kwargs = {}
if self.diff_type == DiffType.File:
chooserdialogs = self.filechooserdialog[:self._get_num_paths()]
encodings = [chooser.get_encoding() for chooser in chooserdialogs]
compare_kwargs = {'encodings': encodings}
tab = self.diff_methods[self.diff_type](
compare_gfiles, **compare_kwargs)
recent_comparisons.add(tab)
self.emit('diff-created', tab)
@Gtk.Template.Callback()
def on_button_new_blank_clicked(self, *args):
# TODO: This doesn't work the way I'd like for DirDiff and VCView.
# It should do something similar to FileDiff; give a tab with empty
# file entries and no comparison done.
# File comparison wants None for its paths here. Folder mode
# needs an actual directory.
if self.diff_type == DiffType.File:
gfiles = [None] * self._get_num_paths()
else:
gfiles = [Gio.File.new_for_path("")] * self._get_num_paths()
tab = self.diff_methods[self.diff_type](gfiles)
self.emit('diff-created', tab)
def on_container_switch_in_event(self, *args):
self.label_changed.emit(self.label_text, self.tooltip_text)
def on_container_switch_out_event(self, *args):
pass
def on_delete_event(self, *args):
self.close_signal.emit(0)
return Gtk.ResponseType.OK
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
from compare_gan.architectures import abstract_arch
from compare_gan.architectures import arch_ops as ops
from compare_gan.architectures import resnet_ops
import gin
from six.moves import range
import tensorflow as tf
@gin.configurable
class BigGanResNetBlock(resnet_ops.ResNetBlock):
"""ResNet block with options for various normalizations.
This block uses a 1x1 convolution for the (optional) shortcut connection.
"""
def __init__(self,
add_shortcut=True,
**kwargs):
"""Constructs a new ResNet block for BigGAN.
Args:
add_shortcut: Whether to add a shortcut connection.
**kwargs: Additional arguments for ResNetBlock.
"""
super(BigGanResNetBlock, self).__init__(**kwargs)
self._add_shortcut = add_shortcut
def apply(self, inputs, z, y, is_training):
""""ResNet block containing possible down/up sampling, shared for G / D.
Args:
inputs: a 3d input tensor of feature map.
z: the latent vector for potential self-modulation. Can be None if use_sbn
is set to False.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: boolean, whether or not this is called during training.
Returns:
output: a 3d output tensor of feature map.
"""
if inputs.shape[-1].value != self._in_channels:
raise ValueError(
"Unexpected number of input channels (expected {}, got {}).".format(
self._in_channels, inputs.shape[-1].value))
with tf.variable_scope(self._name, values=[inputs]):
outputs = inputs
outputs = self.batch_norm(
outputs, z=z, y=y, is_training=is_training, name="bn1")
if self._layer_norm:
outputs = ops.layer_norm(outputs, is_training=is_training, scope="ln1")
outputs = tf.nn.relu(outputs)
outputs = self._get_conv(
outputs, self._in_channels, self._out_channels, self._scale1,
suffix="conv1")
outputs = self.batch_norm(
outputs, z=z, y=y, is_training=is_training, name="bn2")
if self._layer_norm:
outputs = ops.layer_norm(outputs, is_training=is_training, scope="ln2")
outputs = tf.nn.relu(outputs)
outputs = self._get_conv(
outputs, self._out_channels, self._out_channels, self._scale2,
suffix="conv2")
# Combine skip-connection with the convolved part.
if self._add_shortcut:
shortcut = self._get_conv(
inputs, self._in_channels, self._out_channels, self._scale,
kernel_size=(1, 1),
suffix="conv_shortcut")
outputs += shortcut
logging.info("[Block] %s (z=%s, y=%s) -> %s", inputs.shape,
None if z is None else z.shape,
None if y is None else y.shape, outputs.shape)
return outputs
@gin.configurable
class Generator(abstract_arch.AbstractGenerator):
"""ResNet-based generator supporting resolutions 32, 64, 128, 256, 512."""
def __init__(self,
ch=96,
blocks_with_attention="B4",
hierarchical_z=True,
embed_z=False,
embed_y=True,
embed_y_dim=128,
embed_bias=False,
**kwargs):
"""Constructor for BigGAN generator.
Args:
ch: Channel multiplier.
blocks_with_attention: Comma-separated list of blocks that are followed by
a non-local block.
hierarchical_z: Split z into chunks and give one chunk to each block.
Each chunk is also concatenated to y, the one-hot encoded labels.
embed_z: If True, replace z with a learnable linear embedding of the
same length.
embed_y: If True, replace y with a learnable linear embedding.
embed_y_dim: Size of the embedding of y.
embed_bias: Whether to use a bias in the embeddings of z and y.
**kwargs: Additional arguments passed on to ResNetGenerator.
"""
super(Generator, self).__init__(**kwargs)
self._ch = ch
self._blocks_with_attention = set(blocks_with_attention.split(","))
self._hierarchical_z = hierarchical_z
self._embed_z = embed_z
self._embed_y = embed_y
self._embed_y_dim = embed_y_dim
self._embed_bias = embed_bias
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["up", "none"]:
raise ValueError(
"Unknown generator ResNet block scaling: {}.".format(scale))
return BigGanResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
is_gen_block=True,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
def _get_in_out_channels(self):
resolution = self._image_shape[0]
if resolution == 512:
channel_multipliers = [16, 16, 8, 8, 4, 2, 1, 1]
elif resolution == 256:
channel_multipliers = [16, 16, 8, 8, 4, 2, 1]
elif resolution == 128:
channel_multipliers = [16, 16, 8, 4, 2, 1]
elif resolution == 64:
channel_multipliers = [16, 16, 8, 4, 2]
elif resolution == 32:
channel_multipliers = [4, 4, 4, 4]
else:
raise ValueError("Unsupported resolution: {}".format(resolution))
in_channels = [self._ch * c for c in channel_multipliers[:-1]]
out_channels = [self._ch * c for c in channel_multipliers[1:]]
return in_channels, out_channels
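# Example (illustrative): with the default ch=96 at resolution 128 this
# returns in_channels  = [1536, 1536, 768, 384, 192] and
#         out_channels = [1536,  768, 384, 192,  96],
# i.e. five up-sampling blocks that take the 4x4 seed up to 128x128.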
def apply(self, z, y, is_training):
"""Build the generator network for the given inputs.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether we are in train or eval mode.
Returns:
A tensor of size [batch_size] + self._image_shape with values in [0, 1].
"""
shape_or_none = lambda t: None if t is None else t.shape
logging.info("[Generator] inputs are z=%s, y=%s", z.shape, shape_or_none(y))
# Each block upscales by a factor of 2.
seed_size = 4
z_dim = z.shape[1].value
in_channels, out_channels = self._get_in_out_channels()
num_blocks = len(in_channels)
if self._embed_z:
z = ops.linear(z, z_dim, scope="embed_z", use_sn=False,
use_bias=self._embed_bias)
if self._embed_y:
y = ops.linear(y, self._embed_y_dim, scope="embed_y", use_sn=False,
use_bias=self._embed_bias)
y_per_block = num_blocks * [y]
if self._hierarchical_z:
z_per_block = tf.split(z, num_blocks + 1, axis=1)
z0, z_per_block = z_per_block[0], z_per_block[1:]
if y is not None:
y_per_block = [tf.concat([zi, y], 1) for zi in z_per_block]
else:
z0 = z
z_per_block = num_blocks * [z]
logging.info("[Generator] z0=%s, z_per_block=%s, y_per_block=%s",
z0.shape, [str(shape_or_none(t)) for t in z_per_block],
[str(shape_or_none(t)) for t in y_per_block])
# Map noise to the actual seed.
net = ops.linear(
z0,
in_channels[0] * seed_size * seed_size,
scope="fc_noise",
use_sn=self._spectral_norm)
# Reshape the seed to be a rank-4 Tensor.
net = tf.reshape(
net,
[-1, seed_size, seed_size, in_channels[0]],
name="fc_reshaped")
for block_idx in range(num_blocks):
name = "B{}".format(block_idx + 1)
block = self._resnet_block(
name=name,
in_channels=in_channels[block_idx],
out_channels=out_channels[block_idx],
scale="up")
net = block(
net,
z=z_per_block[block_idx],
y=y_per_block[block_idx],
is_training=is_training)
if name in self._blocks_with_attention:
logging.info("[Generator] Applying non-local block to %s", net.shape)
net = ops.non_local_block(net, "non_local_block",
use_sn=self._spectral_norm)
# Final processing of the net.
# Use unconditional batch norm.
logging.info("[Generator] before final processing: %s", net.shape)
net = ops.batch_norm(net, is_training=is_training, name="final_norm")
net = tf.nn.relu(net)
net = ops.conv2d(net, output_dim=self._image_shape[2], k_h=3, k_w=3,
d_h=1, d_w=1, name="final_conv",
use_sn=self._spectral_norm)
logging.info("[Generator] after final processing: %s", net.shape)
net = (tf.nn.tanh(net) + 1.0) / 2.0
return net
@gin.configurable
class Discriminator(abstract_arch.AbstractDiscriminator):
"""ResNet-based discriminator supporting resolutions 32, 64, 128, 256, 512."""
def __init__(self,
ch=96,
blocks_with_attention="B1",
project_y=True,
**kwargs):
"""Constructor for BigGAN discriminator.
Args:
ch: Channel multiplier.
blocks_with_attention: Comma-separated list of blocks that are followed by
a non-local block.
project_y: Add an embedding of y in the output layer.
**kwargs: Additional arguments passed on to ResNetDiscriminator.
"""
super(Discriminator, self).__init__(**kwargs)
self._ch = ch
self._blocks_with_attention = set(blocks_with_attention.split(","))
self._project_y = project_y
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["down", "none"]:
raise ValueError(
"Unknown discriminator ResNet block scaling: {}.".format(scale))
return BigGanResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
is_gen_block=False,
add_shortcut=in_channels != out_channels,
layer_norm=self._layer_norm,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
def _get_in_out_channels(self, colors, resolution):
if colors not in [1, 3]:
raise ValueError("Unsupported color channels: {}".format(colors))
if resolution == 512:
channel_multipliers = [1, 1, 2, 4, 8, 8, 16, 16]
elif resolution == 256:
channel_multipliers = [1, 2, 4, 8, 8, 16, 16]
elif resolution == 128:
channel_multipliers = [1, 2, 4, 8, 16, 16]
elif resolution == 64:
channel_multipliers = [2, 4, 8, 16, 16]
elif resolution == 32:
channel_multipliers = [2, 2, 2, 2]
else:
raise ValueError("Unsupported resolution: {}".format(resolution))
out_channels = [self._ch * c for c in channel_multipliers]
in_channels = [colors] + out_channels[:-1]
return in_channels, out_channels
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
Tuple of 3 Tensors: the final prediction of the discriminator, the logits
before the final output activation function, and the logits from the
second-to-last layer.
"""
logging.info("[Discriminator] inputs are x=%s, y=%s", x.shape,
None if y is None else y.shape)
resnet_ops.validate_image_inputs(x)
in_channels, out_channels = self._get_in_out_channels(
colors=x.shape[-1].value, resolution=x.shape[1].value)
num_blocks = len(in_channels)
net = x
for block_idx in range(num_blocks):
name = "B{}".format(block_idx + 1)
is_last_block = block_idx == num_blocks - 1
block = self._resnet_block(
name=name,
in_channels=in_channels[block_idx],
out_channels=out_channels[block_idx],
scale="none" if is_last_block else "down")
net = block(net, z=None, y=y, is_training=is_training)
if name in self._blocks_with_attention:
logging.info("[Discriminator] Applying non-local block to %s",
net.shape)
net = ops.non_local_block(net, "non_local_block",
use_sn=self._spectral_norm)
# Final part
logging.info("[Discriminator] before final processing: %s", net.shape)
net = tf.nn.relu(net)
h = tf.math.reduce_sum(net, axis=[1, 2])
out_logit = ops.linear(h, 1, scope="final_fc", use_sn=self._spectral_norm)
logging.info("[Discriminator] after final processing: %s", net.shape)
if self._project_y:
if y is None:
raise ValueError("You must provide class information y to project.")
with tf.variable_scope("embedding_fc"):
y_embedding_dim = out_channels[-1]
# We do not use ops.linear() below since it does not have an option to
# override the initializer.
kernel = tf.get_variable(
"kernel", [y.shape[1], y_embedding_dim], tf.float32,
initializer=tf.initializers.glorot_normal())
if self._spectral_norm:
kernel = ops.spectral_norm(kernel)
embedded_y = tf.matmul(y, kernel)
logging.info("[Discriminator] embedded_y for projection: %s",
embedded_y.shape)
out_logit += tf.reduce_sum(embedded_y * h, axis=1, keepdims=True)
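# This inner product between the class embedding and the pooled
# features is the "projection discriminator" formulation of
# Miyato & Koyama (2018), as used in BigGAN.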
out = tf.nn.sigmoid(out_logit)
return out, out_logit, h
|
from datetime import timedelta
import logging
from apcaccess import status
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 3551
DOMAIN = "apcupsd"
KEY_STATUS = "STATFLAG"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
VALUE_ONLINE = 8
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Use config values to set up a function enabling status retrieval."""
conf = config[DOMAIN]
host = conf[CONF_HOST]
port = conf[CONF_PORT]
apcups_data = APCUPSdData(host, port)
hass.data[DOMAIN] = apcups_data
# It doesn't really matter why we're not able to get the status, just that
# we can't.
try:
apcups_data.update(no_throttle=True)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Failure while testing APCUPSd status retrieval")
return False
return True
class APCUPSdData:
"""Stores the data retrieved from APCUPSd.
Acts as the single point responsible for fetching updates from the
server, for every entity to use.
"""
def __init__(self, host, port):
"""Initialize the data object."""
self._host = host
self._port = port
self._status = None
self._get = status.get
self._parse = status.parse
@property
def status(self):
"""Get latest update if throttle allows. Return status."""
self.update()
return self._status
def _get_status(self):
"""Get the status from APCUPSd and parse it into a dict."""
return self._parse(self._get(host=self._host, port=self._port))
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self, **kwargs):
"""Fetch the latest status from APCUPSd."""
self._status = self._get_status()
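# Usage note (illustrative): because update() is wrapped in
# @Throttle(MIN_TIME_BETWEEN_UPDATES), repeated reads of the `status`
# property within 60 seconds return the cached parsed dict; only the first
# call, or update(no_throttle=True), actually contacts the server.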
|
import logging
from aladdin_connect import AladdinConnectClient
import voluptuous as vol
from homeassistant.components.cover import (
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
CoverEntity,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
NOTIFICATION_ID = "aladdin_notification"
NOTIFICATION_TITLE = "Aladdin Connect Cover Setup"
STATES_MAP = {
"open": STATE_OPEN,
"opening": STATE_OPENING,
"closed": STATE_CLOSED,
"closing": STATE_CLOSING,
}
SUPPORTED_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Aladdin Connect platform."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
acc = AladdinConnectClient(username, password)
try:
if not acc.login():
raise ValueError("Username or Password is incorrect")
add_entities(AladdinDevice(acc, door) for door in acc.get_doors())
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
hass.components.persistent_notification.create(
"Error: {ex}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
class AladdinDevice(CoverEntity):
"""Representation of Aladdin Connect cover."""
def __init__(self, acc, device):
"""Initialize the cover."""
self._acc = acc
self._device_id = device["device_id"]
self._number = device["door_number"]
self._name = device["name"]
self._status = STATES_MAP.get(device["status"])
@property
def device_class(self):
"""Define this cover as a garage door."""
return "garage"
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORTED_FEATURES
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._device_id}-{self._number}"
@property
def name(self):
"""Return the name of the garage door."""
return self._name
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._status == STATE_OPENING
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._status == STATE_CLOSING
@property
def is_closed(self):
"""Return None if status is unknown, True if closed, else False."""
if self._status is None:
return None
return self._status == STATE_CLOSED
def close_cover(self, **kwargs):
"""Issue close command to cover."""
self._acc.close_door(self._device_id, self._number)
def open_cover(self, **kwargs):
"""Issue open command to cover."""
self._acc.open_door(self._device_id, self._number)
def update(self):
"""Update status of cover."""
acc_status = self._acc.get_door_status(self._device_id, self._number)
self._status = STATES_MAP.get(acc_status)
|
import os
import sys
from .list_mount_points import os_mount_points
from .trash import version, home_trash_dir, volume_trash_dir1, volume_trash_dir2
from .fstab import volume_of
from .fs import contents_of, list_files_in_dir
from .trash import backup_file_path_from
from . import fs, trash
try:
my_range = xrange
except NameError:
my_range = range
class Sequences:
def __init__(self, sequences):
self.sequences = sequences
def __repr__(self):
return "Sequences(%s)" % repr(self.sequences)
def all_indexes(self):
for sequence in self.sequences:
for index in sequence:
yield index
def __eq__(self, other):
if type(other) != type(self):
return False
if self.sequences != other.sequences:
return False
return True
class Single:
def __init__(self, index):
self.index = index
def __eq__(self, other):
if type(other) != type(self):
return False
if self.index != other.index:
return False
return True
def __iter__(self):
return iter([self.index])
def __repr__(self):
return "Single(%s)" % self.index
class Range:
def __init__(self, start, stop):
self.start = start
self.stop = stop
def __eq__(self, other):
if type(other) != type(self):
return False
if self.start != other.start:
return False
if self.stop != other.stop:
return False
return True
def __iter__(self):
return iter(my_range(self.start, self.stop + 1))
def __repr__(self):
return "Range(%s, %s)" % (self.start, self.stop)
class FileSystem:
def path_exists(self, path):
return os.path.exists(path)
def mkdirs(self, path):
return fs.mkdirs(path)
def move(self, path, dest):
return fs.move(path, dest)
def remove_file(self, path):
return fs.remove_file(path)
def main():
try:
input23 = raw_input # Python 2
except NameError:
input23 = input # Python 3
trash_directories = make_trash_directories()
trashed_files = TrashedFiles(trash_directories, TrashDirectory(),
contents_of)
RestoreCmd(
stdout=sys.stdout,
stderr=sys.stderr,
exit=sys.exit,
input=input23,
trashed_files=trashed_files,
mount_points=os_mount_points,
fs=FileSystem()
).run(sys.argv)
def getcwd_as_realpath():
return os.path.realpath(os.curdir)
class Command:
PrintVersion = "Command.PrintVersion"
RunRestore = "Command.RunRestore"
def parse_args(sys_argv, curdir):
import argparse
parser = argparse.ArgumentParser(
description='Restore the chosen file(s) from the trash',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('path',
default=curdir, nargs='?',
help='Restore files from given path instead of current '
'directory')
parser.add_argument('--sort',
choices=['date', 'path', 'none'],
default='date',
help='Sort list of restore candidates by given field')
parser.add_argument('--trash-dir',
action='store',
dest='trash_dir',
help=argparse.SUPPRESS)
parser.add_argument('--version', action='store_true', default=False)
parsed = parser.parse_args(sys_argv[1:])
if parsed.version:
return Command.PrintVersion, None
else:
return Command.RunRestore, {'path': parsed.path,
'sort': parsed.sort,
'trash_dir': parsed.trash_dir}
class TrashedFiles:
def __init__(self, trash_directories, trash_directory, contents_of):
self.trash_directories = trash_directories
self.trash_directory = trash_directory
self.contents_of = contents_of
def all_trashed_files(self, volumes, trash_dir_from_cli):
logger = trash.logger
for path, volume in self.trash_directories.trash_directories_or_user(
volumes, trash_dir_from_cli):
for file_type, info_file in self.trash_directory.all_info_files(path):
if file_type == 'non_trashinfo':
logger.warning("Non .trashinfo file in info dir")
elif file_type == 'trashinfo':
try:
trash_info = TrashInfoParser(self.contents_of(info_file),
volume)
original_location = trash_info.original_location()
deletion_date = trash_info.deletion_date()
backup_file_path = backup_file_path_from(info_file)
trashedfile = TrashedFile(original_location,
deletion_date,
info_file,
backup_file_path)
yield trashedfile
except ValueError:
logger.warning("Non parsable trashinfo file: %s" % info_file)
except IOError as e:
logger.warning(str(e))
else:
logger.error("Unexpected file type: %s: %s",
type, info_file)
class RestoreAskingTheUser(object):
def __init__(self, input, println, restore, die):
self.input = input
self.println = println
self.restore = restore
self.die = die
def restore_asking_the_user(self, trashed_files):
try:
user_input = self.input("What file to restore [0..%d]: " % (len(trashed_files) - 1))
except KeyboardInterrupt:
return self.die("")
if user_input == "":
self.println("Exiting")
else:
try:
sequences = parse_indexes(user_input, len(trashed_files))
except InvalidEntry as e:
self.die("Invalid entry: %s" % e)
else:
try:
for index in sequences.all_indexes():
trashed_file = trashed_files[index]
self.restore(trashed_file)
except IOError as e:
self.die(e)
class InvalidEntry(Exception):
pass
def parse_indexes(user_input, len_trashed_files):
indexes = user_input.split(',')
sequences = []
for index in indexes:
if "-" in index:
first, last = index.split("-", 2)
if first == "" or last == "":
raise InvalidEntry("open interval: %s" % index)
split = list(map(parse_int_index, (first, last)))
sequences.append(Range(split[0], split[1]))
else:
index = parse_int_index(index)
sequences.append(Single(index))
result = Sequences(sequences)
acceptable_values = my_range(0, len_trashed_files)
for index in result.all_indexes():
if index not in acceptable_values:
raise InvalidEntry(
"out of range %s..%s: %s" %
(acceptable_values[0], acceptable_values[-1], index))
return result
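# Example (illustrative): with len_trashed_files=10,
#   parse_indexes("0,2-4", 10) == Sequences([Single(0), Range(2, 4)])
# and iterating all_indexes() yields 0, 2, 3, 4. Entries such as "a",
# "2-" or "12" raise InvalidEntry instead.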
def parse_int_index(text):
try:
return int(text)
except ValueError:
raise InvalidEntry("not an index: %s" % text)
class Restorer(object):
def __init__(self, fs):
self.fs = fs
def restore_trashed_file(self, trashed_file):
restore(trashed_file, self.fs)
class RestoreCmd(object):
def __init__(self, stdout, stderr, exit, input,
curdir=getcwd_as_realpath, version=version,
trashed_files=None, mount_points=None, fs=None):
self.out = stdout
self.err = stderr
self.exit = exit
self.input = input
self.curdir = curdir
self.version = version
self.fs = fs
self.trashed_files = trashed_files
self.mount_points = mount_points
def run(self, argv):
cmd, args = parse_args(argv, self.curdir() + os.path.sep)
if cmd == Command.PrintVersion:
command = os.path.basename(argv[0])
self.println('%s %s' % (command, self.version))
return
elif cmd == Command.RunRestore:
trash_dir_from_cli = args['trash_dir']
trashed_files = list(self.all_files_trashed_from_path(
args['path'], trash_dir_from_cli))
if args['sort'] == 'path':
trashed_files = sorted(trashed_files, key=lambda x: x.original_location + str(x.deletion_date))
elif args['sort'] == 'date':
trashed_files = sorted(trashed_files, key=lambda x: x.deletion_date)
self.handle_trashed_files(trashed_files)
def handle_trashed_files(self, trashed_files):
if not trashed_files:
self.report_no_files_found()
else:
for i, trashedfile in enumerate(trashed_files):
self.println("%4d %s %s" % (i, trashedfile.deletion_date, trashedfile.original_location))
self.restore_asking_the_user(trashed_files)
def restore_asking_the_user(self, trashed_files):
restore_asking_the_user = RestoreAskingTheUser(self.input,
self.println,
self.restore,
self.die)
restore_asking_the_user.restore_asking_the_user(trashed_files)
def die(self, error):
self.printerr(error)
self.exit(1)
def restore(self, trashed_file):
restorer = Restorer(self.fs)
restorer.restore_trashed_file(trashed_file)
def all_files_trashed_from_path(self, path, trash_dir_from_cli):
def is_trashed_from_curdir(trashed_file):
return trashed_file.original_location.startswith(path)
for trashed_file in self.trashed_files.all_trashed_files(
self.mount_points(), trash_dir_from_cli):
if is_trashed_from_curdir(trashed_file):
yield trashed_file
def report_no_files_found(self):
self.println("No files trashed from current dir ('%s')" % self.curdir())
def println(self, line):
self.out.write(line + '\n')
def printerr(self, msg):
self.err.write('%s\n' % msg)
def parse_additional_volumes(volume_from_args):
if not volume_from_args:
return []
return volume_from_args
from .trash import parse_path
from .trash import parse_deletion_date
class TrashInfoParser:
def __init__(self, contents, volume_path):
self.contents = contents
self.volume_path = volume_path
def deletion_date(self):
return parse_deletion_date(self.contents)
def original_location(self):
path = parse_path(self.contents)
return os.path.join(self.volume_path, path)
class TrashDirectories2:
def __init__(self, volume_of, trash_directories):
self.volume_of = volume_of
self.trash_directories = trash_directories
def trash_directories_or_user(self, volumes, trash_dir_from_cli):
if trash_dir_from_cli:
return [(trash_dir_from_cli, self.volume_of(trash_dir_from_cli))]
return self.trash_directories.all_trash_directories(volumes)
def make_trash_directories():
trash_directories = TrashDirectories(volume_of, os.getuid, os.environ)
return TrashDirectories2(volume_of, trash_directories)
class TrashDirectories:
def __init__(self, volume_of, getuid, environ):
self.volume_of = volume_of
self.getuid = getuid
self.environ = environ
def all_trash_directories(self, volumes):
for path1, volume1 in home_trash_dir(self.environ, self.volume_of):
yield path1, volume1
for volume in volumes:
for path1, volume1 in volume_trash_dir1(volume, self.getuid):
yield path1, volume1
for path1, volume1 in volume_trash_dir2(volume, self.getuid):
yield path1, volume1
class TrashedFile:
"""
Represent a trashed file.
Each trashed file is persisted in two files:
- $trash_dir/info/$id.trashinfo
- $trash_dir/files/$id
Properties:
- original_location : the original path from where the file has been
trashed
- deletion_date : the time when the file has been trashed (instance of
datetime)
- info_file : the file that contains information (instance of Path)
- original_file : the path where the trashed file has been placed after
the trash operation (instance of Path)
"""
def __init__(self, original_location,
deletion_date,
info_file,
original_file):
self.original_location = original_location
self.deletion_date = deletion_date
self.info_file = info_file
self.original_file = original_file
def restore(trashed_file, fs):
if fs.path_exists(trashed_file.original_location):
raise IOError('Refusing to overwrite existing file "%s".' % os.path.basename(trashed_file.original_location))
else:
parent = os.path.dirname(trashed_file.original_location)
fs.mkdirs(parent)
fs.move(trashed_file.original_file, trashed_file.original_location)
fs.remove_file(trashed_file.info_file)
class TrashDirectory:
def all_info_files(self, path):
norm_path = os.path.normpath(path)
info_dir = os.path.join(norm_path, 'info')
try:
for info_file in list_files_in_dir(info_dir):
if not os.path.basename(info_file).endswith('.trashinfo'):
yield ('non_trashinfo', info_file)
else:
yield ('trashinfo', info_file)
except OSError:  # when directory does not exist
pass
|
import diamond.collector
try:
import ldap
except ImportError:
ldap = None
class OpenLDAPCollector(diamond.collector.Collector):
STATS = {
'conns.total': {
'base': 'cn=Total,cn=Connections,cn=Monitor',
'attr': 'monitorCounter'},
'conns.current': {
'base': 'cn=Current,cn=Connections,cn=Monitor',
'attr': 'monitorCounter'},
'ops.total': {
'base': 'cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.total_initiated': {
'base': 'cn=Operations,cn=Monitor',
'attr': 'monitorOpInitiated'},
'ops.bind': {
'base': 'cn=Bind,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.unbind': {
'base': 'cn=Unbind,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.delete': {
'base': 'cn=Delete,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.modify': {
'base': 'cn=Modify,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.modrdn': {
'base': 'cn=Modrdn,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.compare': {
'base': 'cn=Compare,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.search': {
'base': 'cn=Search,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.extended': {
'base': 'cn=Extended,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'ops.abandon': {
'base': 'cn=Abandon,cn=Operations,cn=Monitor',
'attr': 'monitorOpCompleted'},
'waiter.read': {
'base': 'cn=Read,cn=Waiters,cn=Monitor',
'attr': 'monitorCounter'},
'waiter.write': {
'base': 'cn=Write,cn=Waiters,cn=Monitor',
'attr': 'monitorCounter'},
'stats.bytes': {
'base': 'cn=Bytes,cn=Statistics,cn=Monitor',
'attr': 'monitorCounter'},
'stats.pdu': {
'base': 'cn=PDU,cn=Statistics,cn=Monitor',
'attr': 'monitorCounter'},
'stats.referrals': {
'base': 'cn=Referrals,cn=Statistics,cn=Monitor',
'attr': 'monitorCounter'},
'stats.entries': {
'base': 'cn=Entries,cn=Statistics,cn=Monitor',
'attr': 'monitorCounter'},
'threads.open': {
'base': 'cn=Open,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.starting': {
'base': 'cn=Starting,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.active': {
'base': 'cn=Active,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.max': {
'base': 'cn=Max,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.max_pending': {
'base': 'cn=Max Pending,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.pending': {
'base': 'cn=Pending,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
'threads.backload': {
'base': 'cn=Backload,cn=Threads,cn=Monitor',
'attr': 'monitoredInfo'},
}
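# Note: these cn=Monitor entries are only present when slapd is built with
# and configured to load the monitor backend (e.g. "database monitor" in
# slapd.conf), and the bind DN needs read access to the cn=Monitor subtree.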
def get_default_config_help(self):
config_help = super(OpenLDAPCollector, self).get_default_config_help()
config_help.update({
'host': 'Hostname to collect from',
'port': 'Port number to collect from',
'username': 'DN of user we connect with',
'password': 'Password of user we connect with',
})
return config_help
def get_default_config(self):
"""
Return default config
:rtype: dict
"""
config = super(OpenLDAPCollector, self).get_default_config()
config.update({
'path': 'openldap',
'host': 'localhost',
'port': 389,
'username': 'cn=monitor',
'password': 'password',
})
return config
def get_datapoints(self, ldap_url, username, password):
datapoints = {}
conn = ldap.initialize(ldap_url)
conn.start_tls_s()
conn.simple_bind_s(username, password)
try:
for key in self.STATS.keys():
base = self.STATS[key]['base']
attr = self.STATS[key]['attr']
num = conn.search(base, ldap.SCOPE_BASE,
'objectClass=*', [attr])
result_type, result_data = conn.result(num, 0)
datapoints[key] = int(result_data[0][1][attr][0])
except Exception:
self.log.warning('Unable to query ldap base=%s, attr=%s'
% (base, attr))
raise
return datapoints
def collect(self):
if ldap is None:
self.log.error('Unable to import module ldap')
return {}
ldap_url = 'ldap://%s:%d' % (self.config['host'],
int(self.config['port']))
try:
datapoints = self.get_datapoints(ldap_url,
self.config['username'],
self.config['password'])
except Exception as e:
self.log.error('Unable to query %s: %s' % (ldap_url, e))
return {}
for name, value in datapoints.items():
self.publish(name, value)
|
from typing import Union, Text
from tensornetwork.backends.tensorflow import tensorflow_backend
from tensornetwork.backends.numpy import numpy_backend
from tensornetwork.backends.jax import jax_backend
from tensornetwork.backends.pytorch import pytorch_backend
from tensornetwork.backends.symmetric import symmetric_backend
from tensornetwork.backends import abstract_backend
_BACKENDS = {
"tensorflow": tensorflow_backend.TensorFlowBackend,
"numpy": numpy_backend.NumPyBackend,
"jax": jax_backend.JaxBackend,
"pytorch": pytorch_backend.PyTorchBackend,
"symmetric": symmetric_backend.SymmetricBackend
}
# We instantiate each backend only once and cache the instance here.
_INSTANTIATED_BACKENDS = dict()
def get_backend(
backend: Union[Text, abstract_backend.AbstractBackend]
) -> abstract_backend.AbstractBackend:
if isinstance(backend, abstract_backend.AbstractBackend):
return backend
if backend not in _BACKENDS:
raise ValueError("Backend '{}' does not exist".format(backend))
if backend in _INSTANTIATED_BACKENDS:
return _INSTANTIATED_BACKENDS[backend]
_INSTANTIATED_BACKENDS[backend] = _BACKENDS[backend]()
return _INSTANTIATED_BACKENDS[backend]
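# --- Hedged usage sketch (illustrative only) ---
# Shows the caching contract of get_backend: the same name always yields
# the same instance, and instances pass through untouched. Restricted to
# the numpy backend to avoid optional dependencies.
if __name__ == "__main__":
    b1 = get_backend("numpy")
    b2 = get_backend("numpy")
    assert b1 is b2  # instantiated once, then served from the cache
    assert get_backend(b1) is b1  # AbstractBackend instances are returned as-is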
|
from datetime import datetime, timedelta
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, HTTP_OK, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_RESOURCE = "https://data.dublinked.ie/cgi-bin/rtpi/realtimebusinformation"
ATTR_STOP_ID = "Stop ID"
ATTR_ROUTE = "Route"
ATTR_DUE_IN = "Due in"
ATTR_DUE_AT = "Due at"
ATTR_NEXT_UP = "Later Bus"
ATTRIBUTION = "Data provided by data.dublinked.ie"
CONF_STOP_ID = "stopid"
CONF_ROUTE = "route"
DEFAULT_NAME = "Next Bus"
ICON = "mdi:bus"
SCAN_INTERVAL = timedelta(minutes=1)
TIME_STR_FORMAT = "%H:%M"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ROUTE, default=""): cv.string,
}
)
def due_in_minutes(timestamp):
"""Get the time in minutes from a timestamp.
The timestamp should be in the format "%d/%m/%Y %H:%M:%S".
"""
diff = datetime.strptime(timestamp, "%d/%m/%Y %H:%M:%S") - dt_util.now().replace(
tzinfo=None
)
return str(int(diff.total_seconds() / 60))
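# Example (illustrative): if dt_util.now() is 25/12/2020 12:00:00 local
# time, due_in_minutes("25/12/2020 12:30:00") returns "30".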
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dublin public transport sensor."""
name = config[CONF_NAME]
stop = config[CONF_STOP_ID]
route = config[CONF_ROUTE]
data = PublicTransportData(stop, route)
add_entities([DublinPublicTransportSensor(data, stop, route, name)], True)
class DublinPublicTransportSensor(Entity):
"""Implementation of an Dublin public transport sensor."""
def __init__(self, data, stop, route, name):
"""Initialize the sensor."""
self.data = data
self._name = name
self._stop = stop
self._route = route
self._times = self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._times is not None:
next_up = "None"
if len(self._times) > 1:
next_up = f"{self._times[1][ATTR_ROUTE]} in "
next_up += self._times[1][ATTR_DUE_IN]
return {
ATTR_DUE_IN: self._times[0][ATTR_DUE_IN],
ATTR_DUE_AT: self._times[0][ATTR_DUE_AT],
ATTR_STOP_ID: self._stop,
ATTR_ROUTE: self._times[0][ATTR_ROUTE],
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_NEXT_UP: next_up,
}
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_MINUTES
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data from opendata.ch and update the states."""
self.data.update()
self._times = self.data.info
try:
self._state = self._times[0][ATTR_DUE_IN]
except TypeError:
pass
class PublicTransportData:
"""The Class for handling the data retrieval."""
def __init__(self, stop, route):
"""Initialize the data object."""
self.stop = stop
self.route = route
self.info = [{ATTR_DUE_AT: "n/a", ATTR_ROUTE: self.route, ATTR_DUE_IN: "n/a"}]
def update(self):
"""Get the latest data from opendata.ch."""
params = {}
params["stopid"] = self.stop
if self.route:
params["routeid"] = self.route
params["maxresults"] = 2
params["format"] = "json"
response = requests.get(_RESOURCE, params, timeout=10)
if response.status_code != HTTP_OK:
self.info = [
{ATTR_DUE_AT: "n/a", ATTR_ROUTE: self.route, ATTR_DUE_IN: "n/a"}
]
return
result = response.json()
if str(result["errorcode"]) != "0":
self.info = [
{ATTR_DUE_AT: "n/a", ATTR_ROUTE: self.route, ATTR_DUE_IN: "n/a"}
]
return
self.info = []
for item in result["results"]:
due_at = item.get("departuredatetime")
route = item.get("route")
if due_at is not None and route is not None:
bus_data = {
ATTR_DUE_AT: due_at,
ATTR_ROUTE: route,
ATTR_DUE_IN: due_in_minutes(due_at),
}
self.info.append(bus_data)
if not self.info:
self.info = [
{ATTR_DUE_AT: "n/a", ATTR_ROUTE: self.route, ATTR_DUE_IN: "n/a"}
]
|
from collections import defaultdict
import json
import logging
import re
from aiohttp.web import json_response
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import mqtt
from homeassistant.const import (
ATTR_GPS_ACCURACY,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_WEBHOOK_ID,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.setup import async_when_setup
from .config_flow import CONF_SECRET
from .const import DOMAIN
from .messages import async_handle_message, encrypt_message
_LOGGER = logging.getLogger(__name__)
CONF_MAX_GPS_ACCURACY = "max_gps_accuracy"
CONF_WAYPOINT_IMPORT = "waypoints"
CONF_WAYPOINT_WHITELIST = "waypoint_whitelist"
CONF_MQTT_TOPIC = "mqtt_topic"
CONF_REGION_MAPPING = "region_mapping"
CONF_EVENTS_ONLY = "events_only"
BEACON_DEV_ID = "beacon"
DEFAULT_OWNTRACKS_TOPIC = "owntracks/#"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN, default={}): {
vol.Optional(CONF_MAX_GPS_ACCURACY): vol.Coerce(float),
vol.Optional(CONF_WAYPOINT_IMPORT, default=True): cv.boolean,
vol.Optional(CONF_EVENTS_ONLY, default=False): cv.boolean,
vol.Optional(
CONF_MQTT_TOPIC, default=DEFAULT_OWNTRACKS_TOPIC
): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WAYPOINT_WHITELIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SECRET): vol.Any(
vol.Schema({vol.Optional(cv.string): cv.string}), cv.string
),
vol.Optional(CONF_REGION_MAPPING, default={}): dict,
vol.Optional(CONF_WEBHOOK_ID): cv.string,
}
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize OwnTracks component."""
hass.data[DOMAIN] = {"config": config[DOMAIN], "devices": {}, "unsub": None}
if not hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up OwnTracks entry."""
config = hass.data[DOMAIN]["config"]
max_gps_accuracy = config.get(CONF_MAX_GPS_ACCURACY)
waypoint_import = config.get(CONF_WAYPOINT_IMPORT)
waypoint_whitelist = config.get(CONF_WAYPOINT_WHITELIST)
secret = config.get(CONF_SECRET) or entry.data[CONF_SECRET]
region_mapping = config.get(CONF_REGION_MAPPING)
events_only = config.get(CONF_EVENTS_ONLY)
mqtt_topic = config.get(CONF_MQTT_TOPIC)
context = OwnTracksContext(
hass,
secret,
max_gps_accuracy,
waypoint_import,
waypoint_whitelist,
region_mapping,
events_only,
mqtt_topic,
)
webhook_id = config.get(CONF_WEBHOOK_ID) or entry.data[CONF_WEBHOOK_ID]
hass.data[DOMAIN]["context"] = context
async_when_setup(hass, "mqtt", async_connect_mqtt)
hass.components.webhook.async_register(
DOMAIN, "OwnTracks", webhook_id, handle_webhook
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "device_tracker")
)
hass.data[DOMAIN]["unsub"] = hass.helpers.dispatcher.async_dispatcher_connect(
DOMAIN, async_handle_message
)
return True
async def async_unload_entry(hass, entry):
"""Unload an OwnTracks config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
await hass.config_entries.async_forward_entry_unload(entry, "device_tracker")
hass.data[DOMAIN]["unsub"]()
return True
async def async_remove_entry(hass, entry):
"""Remove an OwnTracks config entry."""
if not entry.data.get("cloudhook"):
return
await hass.components.cloud.async_delete_cloudhook(entry.data[CONF_WEBHOOK_ID])
async def async_connect_mqtt(hass, component):
"""Subscribe to MQTT topic."""
context = hass.data[DOMAIN]["context"]
async def async_handle_mqtt_message(msg):
"""Handle incoming OwnTracks message."""
try:
message = json.loads(msg.payload)
except ValueError:
# If invalid JSON
_LOGGER.error("Unable to parse payload as JSON: %s", msg.payload)
return
message["topic"] = msg.topic
hass.helpers.dispatcher.async_dispatcher_send(DOMAIN, hass, context, message)
await hass.components.mqtt.async_subscribe(
context.mqtt_topic, async_handle_mqtt_message, 1
)
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle webhook callback.
iOS sets the "topic" as part of the payload.
Android does not set a topic but adds headers to the request.
"""
context = hass.data[DOMAIN]["context"]
topic_base = re.sub("/#$", "", context.mqtt_topic)
try:
message = await request.json()
except ValueError:
_LOGGER.warning("Received invalid JSON from OwnTracks")
return json_response([])
# Android doesn't populate topic
if "topic" not in message:
headers = request.headers
user = headers.get("X-Limit-U")
device = headers.get("X-Limit-D", user)
if user:
message["topic"] = f"{topic_base}/{user}/{device}"
elif message["_type"] != "encrypted":
_LOGGER.warning(
"No topic or user found in message. If on Android,"
" set a username in Connection -> Identification"
)
# Keep it as a 200 response so the incorrect packet is discarded
return json_response([])
hass.helpers.dispatcher.async_dispatcher_send(DOMAIN, hass, context, message)
response = []
for person in hass.states.async_all("person"):
if "latitude" in person.attributes and "longitude" in person.attributes:
response.append(
{
"_type": "location",
"lat": person.attributes["latitude"],
"lon": person.attributes["longitude"],
"tid": "".join(p[0] for p in person.name.split(" ")[:2]),
"tst": int(person.last_updated.timestamp()),
}
)
if message["_type"] == "encrypted" and context.secret:
return json_response(
{
"_type": "encrypted",
"data": encrypt_message(
context.secret, message["topic"], json.dumps(response)
),
}
)
return json_response(response)
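# Example response body returned to the OwnTracks app (illustrative):
# [{"_type": "location", "lat": 52.5, "lon": 13.4, "tid": "JD",
#   "tst": 1609459200}]
# one friend-location object per person entity that has GPS attributes,
# encrypted when the incoming message was encrypted and a secret is set.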
class OwnTracksContext:
"""Hold the current OwnTracks context."""
def __init__(
self,
hass,
secret,
max_gps_accuracy,
import_waypoints,
waypoint_whitelist,
region_mapping,
events_only,
mqtt_topic,
):
"""Initialize an OwnTracks context."""
self.hass = hass
self.secret = secret
self.max_gps_accuracy = max_gps_accuracy
self.mobile_beacons_active = defaultdict(set)
self.regions_entered = defaultdict(list)
self.import_waypoints = import_waypoints
self.waypoint_whitelist = waypoint_whitelist
self.region_mapping = region_mapping
self.events_only = events_only
self.mqtt_topic = mqtt_topic
self._pending_msg = []
@callback
def async_valid_accuracy(self, message):
"""Check if we should ignore this message."""
acc = message.get("acc")
if acc is None:
return False
try:
acc = float(acc)
except ValueError:
return False
if acc == 0:
_LOGGER.warning(
"Ignoring %s update because GPS accuracy is zero: %s",
message["_type"],
message,
)
return False
if self.max_gps_accuracy is not None and acc > self.max_gps_accuracy:
_LOGGER.info(
"Ignoring %s update because expected GPS accuracy %s is not met: %s",
message["_type"],
self.max_gps_accuracy,
message,
)
return False
return True
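# Example (illustrative): with max_gps_accuracy=50, a message with acc=30
# passes; acc=0 and acc=120 are rejected (with a log entry), and a missing
# or non-numeric acc is rejected silently.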
@callback
def set_async_see(self, func):
"""Set a new async_see function."""
self.async_see = func
for msg in self._pending_msg:
func(**msg)
self._pending_msg.clear()
# pylint: disable=method-hidden
@callback
def async_see(self, **data):
"""Send a see message to the device tracker."""
self._pending_msg.append(data)
@callback
def async_see_beacons(self, hass, dev_id, kwargs_param):
"""Set active beacons to the current location."""
kwargs = kwargs_param.copy()
# Mobile beacons should always be set to the location of the
# tracking device, so read the device state and adjust kwargs
# accordingly.
device_tracker_state = hass.states.get(f"device_tracker.{dev_id}")
if device_tracker_state is not None:
acc = device_tracker_state.attributes.get(ATTR_GPS_ACCURACY)
lat = device_tracker_state.attributes.get(ATTR_LATITUDE)
lon = device_tracker_state.attributes.get(ATTR_LONGITUDE)
if lat is not None and lon is not None:
kwargs["gps"] = (lat, lon)
kwargs["gps_accuracy"] = acc
else:
kwargs["gps"] = None
kwargs["gps_accuracy"] = None
# the battery state applies to the tracking device, not the beacon
# kwargs location is the beacon's configured lat/lon
kwargs.pop("battery", None)
for beacon in self.mobile_beacons_active[dev_id]:
kwargs["dev_id"] = f"{BEACON_DEV_ID}_{beacon}"
kwargs["host_name"] = beacon
self.async_see(**kwargs)
|
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.components.device_tracker.const import SOURCE_TYPE_GPS
from homeassistant.helpers.restore_state import RestoreEntity
from .account import StarlineAccount, StarlineDevice
from .const import DOMAIN
from .entity import StarlineEntity
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up StarLine entry."""
account: StarlineAccount = hass.data[DOMAIN][entry.entry_id]
entities = []
for device in account.api.devices.values():
if device.support_position:
entities.append(StarlineDeviceTracker(account, device))
async_add_entities(entities)
class StarlineDeviceTracker(StarlineEntity, TrackerEntity, RestoreEntity):
"""StarLine device tracker."""
def __init__(self, account: StarlineAccount, device: StarlineDevice):
"""Set up StarLine entity."""
super().__init__(account, device, "location", "Location")
@property
def device_state_attributes(self):
"""Return device specific attributes."""
return self._account.gps_attrs(self._device)
@property
def battery_level(self):
"""Return the battery level of the device."""
return self._device.battery_level
@property
def location_accuracy(self):
"""Return the gps accuracy of the device."""
return self._device.position["r"] if "r" in self._device.position else 0
@property
def latitude(self):
"""Return latitude value of the device."""
return self._device.position["x"]
@property
def longitude(self):
"""Return longitude value of the device."""
return self._device.position["y"]
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return "mdi:map-marker-outline"
|
from smart_meter_texas import Meter
from homeassistant.const import CONF_ADDRESS, ENERGY_KILO_WATT_HOUR
from homeassistant.core import callback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import (
DATA_COORDINATOR,
DATA_SMART_METER,
DOMAIN,
ELECTRIC_METER,
ESIID,
METER_NUMBER,
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Smart Meter Texas sensors."""
coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR]
meters = hass.data[DOMAIN][config_entry.entry_id][DATA_SMART_METER].meters
async_add_entities(
[SmartMeterTexasSensor(meter, coordinator) for meter in meters], False
)
class SmartMeterTexasSensor(CoordinatorEntity, RestoreEntity):
"""Representation of an Smart Meter Texas sensor."""
def __init__(self, meter: Meter, coordinator: DataUpdateCoordinator):
"""Initialize the sensor."""
super().__init__(coordinator)
self.meter = meter
self._state = None
self._available = False
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return ENERGY_KILO_WATT_HOUR
@property
def name(self):
"""Device Name."""
return f"{ELECTRIC_METER} {self.meter.meter}"
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self.meter.esiid}_{self.meter.meter}"
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def state(self):
"""Get the latest reading."""
return self._state
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attributes = {
METER_NUMBER: self.meter.meter,
ESIID: self.meter.esiid,
CONF_ADDRESS: self.meter.address,
}
return attributes
@callback
def _state_update(self):
"""Call when the coordinator has an update."""
self._available = self.coordinator.last_update_success
if self._available:
self._state = self.meter.reading
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe to updates."""
self.async_on_remove(self.coordinator.async_add_listener(self._state_update))
# If the background update finished before
# we added the entity, there is no need to restore
# state.
if self.coordinator.last_update_success:
return
last_state = await self.async_get_last_state()
if last_state:
self._state = last_state.state
self._available = True
|
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.owntracks import config_flow
from homeassistant.components.owntracks.config_flow import CONF_CLOUDHOOK, CONF_SECRET
from homeassistant.components.owntracks.const import DOMAIN
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import MockConfigEntry
CONF_WEBHOOK_URL = "webhook_url"
BASE_URL = "http://example.com"
CLOUDHOOK = False
SECRET = "test-secret"
WEBHOOK_ID = "webhook_id"
WEBHOOK_URL = f"{BASE_URL}/api/webhook/webhook_id"
@pytest.fixture(name="webhook_id")
def mock_webhook_id():
"""Mock webhook_id."""
with patch(
"homeassistant.components.webhook.async_generate_id", return_value=WEBHOOK_ID
):
yield
@pytest.fixture(name="secret")
def mock_secret():
"""Mock secret."""
with patch("secrets.token_hex", return_value=SECRET):
yield
@pytest.fixture(name="not_supports_encryption")
def mock_not_supports_encryption():
"""Mock non successful nacl import."""
with patch(
"homeassistant.components.owntracks.config_flow.supports_encryption",
return_value=False,
):
yield
async def init_config_flow(hass):
"""Init a configuration flow."""
await async_process_ha_core_config(
hass,
{"external_url": BASE_URL},
)
flow = config_flow.OwnTracksFlow()
flow.hass = hass
return flow
async def test_user(hass, webhook_id, secret):
"""Test user step."""
flow = await init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user({})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "OwnTracks"
assert result["data"][CONF_WEBHOOK_ID] == WEBHOOK_ID
assert result["data"][CONF_SECRET] == SECRET
assert result["data"][CONF_CLOUDHOOK] == CLOUDHOOK
assert result["description_placeholders"][CONF_WEBHOOK_URL] == WEBHOOK_URL
async def test_import(hass, webhook_id, secret):
"""Test import step."""
flow = await init_config_flow(hass)
result = await flow.async_step_import({})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "OwnTracks"
assert result["data"][CONF_WEBHOOK_ID] == WEBHOOK_ID
assert result["data"][CONF_SECRET] == SECRET
assert result["data"][CONF_CLOUDHOOK] == CLOUDHOOK
assert result["description_placeholders"] is None
async def test_import_setup(hass):
"""Test that we automatically create a config flow."""
await async_process_ha_core_config(
hass,
{"external_url": "http://example.com"},
)
assert not hass.config_entries.async_entries(DOMAIN)
assert await async_setup_component(hass, DOMAIN, {"owntracks": {}})
await hass.async_block_till_done()
assert hass.config_entries.async_entries(DOMAIN)
async def test_abort_if_already_setup(hass):
"""Test that we can't add more than one instance."""
flow = await init_config_flow(hass)
MockConfigEntry(domain=DOMAIN, data={}).add_to_hass(hass)
assert hass.config_entries.async_entries(DOMAIN)
# Should fail, already setup (import)
result = await flow.async_step_import({})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
# Should fail, already setup (flow)
result = await flow.async_step_user({})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_user_not_supports_encryption(hass, not_supports_encryption):
"""Test user step."""
flow = await init_config_flow(hass)
result = await flow.async_step_user({})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert (
result["description_placeholders"]["secret"]
== "Encryption is not supported because nacl is not installed."
)
async def test_unload(hass):
"""Test unloading a config flow."""
await async_process_ha_core_config(
hass,
{"external_url": "http://example.com"},
)
with patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup"
) as mock_forward:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={}
)
assert len(mock_forward.mock_calls) == 1
entry = result["result"]
assert mock_forward.mock_calls[0][1][0] is entry
assert mock_forward.mock_calls[0][1][1] == "device_tracker"
assert entry.data["webhook_id"] in hass.data["webhook"]
with patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_unload",
return_value=None,
) as mock_unload:
assert await hass.config_entries.async_unload(entry.entry_id)
assert len(mock_unload.mock_calls) == 1
    assert mock_unload.mock_calls[0][1][0] is entry
    assert mock_unload.mock_calls[0][1][1] == "device_tracker"
assert entry.data["webhook_id"] not in hass.data["webhook"]
async def test_with_cloud_sub(hass):
"""Test creating a config flow while subscribed."""
hass.config.components.add("cloud")
with patch(
"homeassistant.components.cloud.async_active_subscription", return_value=True
), patch(
"homeassistant.components.cloud.async_create_cloudhook",
return_value="https://hooks.nabu.casa/ABCD",
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data={}
)
entry = result["result"]
assert entry.data["cloudhook"]
assert (
result["description_placeholders"]["webhook_url"]
== "https://hooks.nabu.casa/ABCD"
)
|
from django.utils.translation import gettext_lazy as _, override
from cms.menu_bases import CMSAttachMenu
from menus.base import NavigationNode
from menus.menu_pool import menu_pool
class CatalogMenu(CMSAttachMenu):
name = _("Catalog Menu")
def get_nodes(self, request):
try:
if self.instance.publisher_is_draft:
productpage_set = self.instance.publisher_public.productpage_set
else:
productpage_set = self.instance.productpage_set
except AttributeError:
return []
nodes = []
with override(request.LANGUAGE_CODE):
for id, productpage in enumerate(productpage_set.all(), 1):
node = NavigationNode(
title=productpage.product.product_name,
url=productpage.product.get_absolute_url(),
id=id,
)
if hasattr(productpage.product, 'slug'):
node.path = productpage.product.slug
nodes.append(node)
return nodes
menu_pool.register_menu(CatalogMenu)
|
import pytest
from PyQt5.QtSql import QSqlError
from qutebrowser.misc import sql
pytestmark = pytest.mark.usefixtures('init_sql')
@pytest.mark.parametrize('klass', [sql.KnownError, sql.BugError])
def test_sqlerror(klass):
text = "Hello World"
err = klass(text)
assert str(err) == text
assert err.text() == text
class TestSqlError:
@pytest.mark.parametrize('error_code, exception', [
(sql.SqliteErrorCode.BUSY, sql.KnownError),
(sql.SqliteErrorCode.CONSTRAINT, sql.BugError),
])
def test_known(self, error_code, exception):
sql_err = QSqlError("driver text", "db text", QSqlError.UnknownError,
error_code)
with pytest.raises(exception):
sql.raise_sqlite_error("Message", sql_err)
def test_logging(self, caplog):
sql_err = QSqlError("driver text", "db text", QSqlError.UnknownError, '23')
with pytest.raises(sql.BugError):
sql.raise_sqlite_error("Message", sql_err)
expected = ['SQL error:',
'type: UnknownError',
'database text: db text',
'driver text: driver text',
'error code: 23']
assert caplog.messages == expected
@pytest.mark.parametrize('klass', [sql.KnownError, sql.BugError])
def test_text(self, klass):
sql_err = QSqlError("driver text", "db text")
err = klass("Message", sql_err)
assert err.text() == "db text"
def test_init():
sql.SqlTable('Foo', ['name', 'val', 'lucky'])
# should not error if table already exists
sql.SqlTable('Foo', ['name', 'val', 'lucky'])
def test_insert(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
with qtbot.waitSignal(table.changed):
table.insert({'name': 'one', 'val': 1, 'lucky': False})
with qtbot.waitSignal(table.changed):
table.insert({'name': 'wan', 'val': 1, 'lucky': False})
def test_insert_replace(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'],
constraints={'name': 'PRIMARY KEY'})
with qtbot.waitSignal(table.changed):
table.insert({'name': 'one', 'val': 1, 'lucky': False}, replace=True)
with qtbot.waitSignal(table.changed):
table.insert({'name': 'one', 'val': 11, 'lucky': True}, replace=True)
assert list(table) == [('one', 11, True)]
with pytest.raises(sql.BugError):
table.insert({'name': 'one', 'val': 11, 'lucky': True}, replace=False)
def test_insert_batch(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
with qtbot.waitSignal(table.changed):
table.insert_batch({'name': ['one', 'nine', 'thirteen'],
'val': [1, 9, 13],
'lucky': [False, False, True]})
assert list(table) == [('one', 1, False),
('nine', 9, False),
('thirteen', 13, True)]
def test_insert_batch_replace(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'],
constraints={'name': 'PRIMARY KEY'})
with qtbot.waitSignal(table.changed):
table.insert_batch({'name': ['one', 'nine', 'thirteen'],
'val': [1, 9, 13],
'lucky': [False, False, True]})
with qtbot.waitSignal(table.changed):
table.insert_batch({'name': ['one', 'nine'],
'val': [11, 19],
'lucky': [True, True]},
replace=True)
assert list(table) == [('thirteen', 13, True),
('one', 11, True),
('nine', 19, True)]
with pytest.raises(sql.BugError):
table.insert_batch({'name': ['one', 'nine'],
'val': [11, 19],
'lucky': [True, True]})
def test_iter():
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
table.insert({'name': 'one', 'val': 1, 'lucky': False})
table.insert({'name': 'nine', 'val': 9, 'lucky': False})
table.insert({'name': 'thirteen', 'val': 13, 'lucky': True})
assert list(table) == [('one', 1, False),
('nine', 9, False),
('thirteen', 13, True)]
@pytest.mark.parametrize('rows, sort_by, sort_order, limit, result', [
([{"a": 2, "b": 5}, {"a": 1, "b": 6}, {"a": 3, "b": 4}], 'a', 'asc', 5,
[(1, 6), (2, 5), (3, 4)]),
([{"a": 2, "b": 5}, {"a": 1, "b": 6}, {"a": 3, "b": 4}], 'a', 'desc', 3,
[(3, 4), (2, 5), (1, 6)]),
([{"a": 2, "b": 5}, {"a": 1, "b": 6}, {"a": 3, "b": 4}], 'b', 'desc', 2,
[(1, 6), (2, 5)]),
([{"a": 2, "b": 5}, {"a": 1, "b": 6}, {"a": 3, "b": 4}], 'a', 'asc', -1,
[(1, 6), (2, 5), (3, 4)]),
])
def test_select(rows, sort_by, sort_order, limit, result):
table = sql.SqlTable('Foo', ['a', 'b'])
for row in rows:
table.insert(row)
assert list(table.select(sort_by, sort_order, limit)) == result
def test_delete(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
table.insert({'name': 'one', 'val': 1, 'lucky': False})
table.insert({'name': 'nine', 'val': 9, 'lucky': False})
table.insert({'name': 'thirteen', 'val': 13, 'lucky': True})
with pytest.raises(KeyError):
table.delete('name', 'nope')
with qtbot.waitSignal(table.changed):
table.delete('name', 'thirteen')
assert list(table) == [('one', 1, False), ('nine', 9, False)]
with qtbot.waitSignal(table.changed):
table.delete('lucky', False)
assert not list(table)
def test_len():
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
assert len(table) == 0
table.insert({'name': 'one', 'val': 1, 'lucky': False})
assert len(table) == 1
table.insert({'name': 'nine', 'val': 9, 'lucky': False})
assert len(table) == 2
table.insert({'name': 'thirteen', 'val': 13, 'lucky': True})
assert len(table) == 3
def test_contains():
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
table.insert({'name': 'one', 'val': 1, 'lucky': False})
table.insert({'name': 'nine', 'val': 9, 'lucky': False})
table.insert({'name': 'thirteen', 'val': 13, 'lucky': True})
name_query = table.contains_query('name')
val_query = table.contains_query('val')
lucky_query = table.contains_query('lucky')
assert name_query.run(val='one').value()
assert name_query.run(val='thirteen').value()
assert val_query.run(val=9).value()
assert lucky_query.run(val=False).value()
assert lucky_query.run(val=True).value()
assert not name_query.run(val='oone').value()
assert not name_query.run(val=1).value()
assert not name_query.run(val='*').value()
assert not val_query.run(val=10).value()
def test_delete_all(qtbot):
table = sql.SqlTable('Foo', ['name', 'val', 'lucky'])
table.insert({'name': 'one', 'val': 1, 'lucky': False})
table.insert({'name': 'nine', 'val': 9, 'lucky': False})
table.insert({'name': 'thirteen', 'val': 13, 'lucky': True})
with qtbot.waitSignal(table.changed):
table.delete_all()
assert list(table) == []
def test_version():
assert isinstance(sql.version(), str)
class TestSqlQuery:
def test_prepare_error(self):
with pytest.raises(sql.BugError) as excinfo:
sql.Query('invalid')
expected = ('Failed to prepare query "invalid": "near "invalid": '
'syntax error Unable to execute statement"')
assert str(excinfo.value) == expected
@pytest.mark.parametrize('forward_only', [True, False])
def test_forward_only(self, forward_only):
q = sql.Query('SELECT 0 WHERE 0', forward_only=forward_only)
assert q.query.isForwardOnly() == forward_only
def test_iter_inactive(self):
q = sql.Query('SELECT 0')
with pytest.raises(sql.BugError,
match='Cannot iterate inactive query'):
next(iter(q))
def test_iter_empty(self):
q = sql.Query('SELECT 0 AS col WHERE 0')
q.run()
with pytest.raises(StopIteration):
next(iter(q))
def test_iter(self):
q = sql.Query('SELECT 0 AS col')
q.run()
result = next(iter(q))
assert result.col == 0
def test_iter_multiple(self):
q = sql.Query('VALUES (1), (2), (3);')
res = list(q.run())
assert len(res) == 3
assert res[0].column1 == 1
def test_run_binding(self):
q = sql.Query('SELECT :answer')
q.run(answer=42)
assert q.value() == 42
def test_run_missing_binding(self):
q = sql.Query('SELECT :answer')
with pytest.raises(sql.BugError, match='Missing bound values!'):
q.run()
def test_run_batch(self):
q = sql.Query('SELECT :answer')
q.run_batch(values={'answer': [42]})
assert q.value() == 42
def test_run_batch_missing_binding(self):
q = sql.Query('SELECT :answer')
with pytest.raises(sql.BugError, match='Missing bound values!'):
q.run_batch(values={})
def test_value_missing(self):
q = sql.Query('SELECT 0 WHERE 0')
q.run()
with pytest.raises(sql.BugError,
match='No result for single-result query'):
q.value()
def test_num_rows_affected(self):
q = sql.Query('SELECT 0')
q.run()
assert q.rows_affected() == 0
def test_bound_values(self):
q = sql.Query('SELECT :answer')
q.run(answer=42)
assert q.bound_values() == {':answer': 42}
|
from cerberus import errors
from cerberus.tests import assert_fail
def test_itemsrules():
assert_fail(document={'a_list_of_integers': [34, 'not an integer']})
def test_itemsrules_with_schema(validator):
field = 'a_list_of_dicts'
mapping_schema = {
'sku': {'type': ('string',)},
'price': {'type': ('integer',), 'required': True},
}
itemsrules = {'type': ('dict',), 'schema': mapping_schema}
assert_fail(
schema={field: {'type': 'list', 'itemsrules': itemsrules}},
document={field: [{'sku': 'KT123', 'price': '100'}]},
validator=validator,
error=(field, (field, 'itemsrules'), errors.ITEMSRULES, itemsrules),
child_errors=[
((field, 0), (field, 'itemsrules', 'schema'), errors.SCHEMA, mapping_schema)
],
)
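    # A note on the nesting checked below: validator.errors[field][-1] is a
    # dict mapping item index -> that item's error list, whose last element
    # is the per-key message mapping (here: the 'price' type error of item 0).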
assert field in validator.errors
assert 0 in validator.errors[field][-1]
assert 'price' in validator.errors[field][-1][0][-1]
exp_msg = errors.BasicErrorHandler.messages[errors.TYPE.code].format(
constraint=('integer',)
)
assert exp_msg in validator.errors[field][-1][0][-1]['price']
#
assert_fail(
document={field: ["not a dict"]},
error=(field, (field, 'itemsrules'), errors.ITEMSRULES, itemsrules),
child_errors=[
((field, 0), (field, 'itemsrules', 'type'), errors.TYPE, ('dict',), ())
],
)
|
import inspect
import os
import shutil
import unittest
import mock
from kalliope import SettingLoader
from kalliope.core.ConfigurationManager import SettingEditor
from kalliope.core.Models.settings.Player import Player
from kalliope.core.Models.settings.Stt import Stt
from kalliope.core.Models.settings.Trigger import Trigger
from kalliope.core.Models.settings.Tts import Tts
class TestSettingEditor(unittest.TestCase):
"""
    Test class for the SettingEditor class and its methods.
"""
def setUp(self):
# get current script directory path. We are in /an/unknown/path/kalliope/core/tests
cur_script_directory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# get parent dir. Now we are in /an/unknown/path/kalliope
root_dir = os.path.normpath(cur_script_directory + os.sep + os.pardir)
self.settings_file_to_test = root_dir + os.sep + "Tests/settings/settings_test.yml"
        # Init the folders, otherwise it raises an exception
os.makedirs("/tmp/kalliope/tests/kalliope_resources_dir/neurons")
os.makedirs("/tmp/kalliope/tests/kalliope_resources_dir/stt")
os.makedirs("/tmp/kalliope/tests/kalliope_resources_dir/tts")
os.makedirs("/tmp/kalliope/tests/kalliope_resources_dir/trigger")
self.sl = SettingLoader(file_path=self.settings_file_to_test)
def tearDown(self):
# Cleanup
shutil.rmtree('/tmp/kalliope/tests/kalliope_resources_dir')
def test_set_mute_status(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_mute_status(mute=True)
self.assertTrue(self.sl.settings.options.mute)
def test_set_deaf_status(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_deaf_status(mock.Mock(), deaf=False)
self.assertFalse(self.sl.settings.options.deaf)
def test_set_recognizer_multiplier(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_recognizer_multiplier(600.0)
self.assertEqual(600.0, self.sl.settings.options.recognizer_multiplier)
def test_set_recognizer_energy_ratio(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_recognizer_energy_ratio(600.0)
self.assertEqual(600.0, self.sl.settings.options.recognizer_energy_ratio)
def test_set_recognizer_recording_timeout(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_recognizer_recording_timeout(600.0)
self.assertEqual(600.0, self.sl.settings.options.recognizer_recording_timeout)
def test_set_recognizer_recording_timeout_with_silence(self):
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_recognizer_recording_timeout_with_silence(600.0)
self.assertEqual(600.0, self.sl.settings.options.recognizer_recording_timeout_with_silence)
def test_set_default_player(self):
default_name = "NamePlayer"
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_default_player(default_name)
self.assertEqual("mplayer", self.sl.settings.default_player_name) # not existing in the list, not updated !
default_name = "pyalsaaudio"
SettingEditor.set_default_player(default_name)
self.assertEqual(default_name, self.sl.settings.default_player_name) # Updated
def test_set_players(self):
new_player = Player(name="totoplayer", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_players(new_player)
self.assertIn(new_player, self.sl.settings.players)
def test_set_default_trigger(self):
default_name = "NameTrigger"
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_default_trigger(default_name)
self.assertEqual("snowboy",
self.sl.settings.default_trigger_name) # not existing in the list, not updated !
default_name = "snowboy"
SettingEditor.set_default_trigger(default_name)
self.assertEqual(default_name, self.sl.settings.default_trigger_name)
def test_set_triggers(self):
new_trigger = Trigger(name="tototrigger", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_trigger(new_trigger)
self.assertIn(new_trigger, self.sl.settings.triggers)
def test_set_default_stt(self):
default_name = "NameStt"
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_default_stt(default_name)
self.assertEqual("google", self.sl.settings.default_stt_name) # not updated because not in the list
default_name = "google"
SettingEditor.set_default_stt(default_name)
self.assertEqual(default_name, self.sl.settings.default_stt_name) # updated
def test_set_stts(self):
new_stt = Stt(name="totoStt", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_stts(new_stt)
self.assertIn(new_stt, self.sl.settings.stts)
def test_set_default_tts(self):
default_name = "NameTts"
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_default_tts(default_name)
self.assertEqual("pico2wave", self.sl.settings.default_tts_name)
default_name = "googletts"
SettingEditor.set_default_tts(default_name)
self.assertEqual(default_name, self.sl.settings.default_tts_name)
def test_set_ttss(self):
new_tts = Tts(name="totoTss", parameters={})
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_ttss(new_tts)
self.assertIn(new_tts, self.sl.settings.ttss)
def test_set_hooks(self):
default_hooks = {"on_deaf": "randomSynapse"}
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_hooks(default_hooks)
self.assertEqual("randomSynapse", self.sl.settings.hooks["on_deaf"])
# self.assertTrue(set(default_hooks.items()).issubset(set(self.sl.settings.hooks))) # Not working for non hashable values
    def test_set_variables(self):
default_variables = {"coucou": "hello"}
with mock.patch("kalliope.core.ConfigurationManager.SettingLoader") as mock_setting_loader:
mock_setting_loader.return_value(self.sl)
SettingEditor.set_variables(default_variables)
self.assertEqual("hello", self.sl.settings.variables["coucou"])
|
import json
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from homeassistant.components.zwave import DATA_NETWORK, const
from homeassistant.const import HTTP_NOT_FOUND
from tests.async_mock import MagicMock, patch
from tests.mock.zwave import MockEntityValues, MockNode, MockValue
VIEW_NAME = "api:config:zwave:device_config"
@pytest.fixture
def client(loop, hass, hass_client):
"""Client to communicate with Z-Wave config views."""
with patch.object(config, "SECTIONS", ["zwave"]):
loop.run_until_complete(async_setup_component(hass, "config", {}))
return loop.run_until_complete(hass_client())
async def test_get_device_config(client):
"""Test getting device config."""
def mock_read(path):
"""Mock reading data."""
return {"hello.beer": {"free": "beer"}, "other.entity": {"do": "something"}}
with patch("homeassistant.components.config._read", mock_read):
resp = await client.get("/api/config/zwave/device_config/hello.beer")
assert resp.status == 200
result = await resp.json()
assert result == {"free": "beer"}
async def test_update_device_config(client):
"""Test updating device config."""
orig_data = {
"hello.beer": {"ignored": True},
"other.entity": {"polling_intensity": 2},
}
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
):
resp = await client.post(
"/api/config/zwave/device_config/hello.beer",
data=json.dumps({"polling_intensity": 2}),
)
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
orig_data["hello.beer"]["polling_intensity"] = 2
assert written[0] == orig_data
async def test_update_device_config_invalid_key(client):
"""Test updating device config."""
resp = await client.post(
"/api/config/zwave/device_config/invalid_entity",
data=json.dumps({"polling_intensity": 2}),
)
assert resp.status == 400
async def test_update_device_config_invalid_data(client):
"""Test updating device config."""
resp = await client.post(
"/api/config/zwave/device_config/hello.beer",
data=json.dumps({"invalid_option": 2}),
)
assert resp.status == 400
async def test_update_device_config_invalid_json(client):
"""Test updating device config."""
resp = await client.post(
"/api/config/zwave/device_config/hello.beer", data="not json"
)
assert resp.status == 400
async def test_get_values(hass, client):
"""Test getting values on node."""
node = MockNode(node_id=1)
value = MockValue(
value_id=123456,
node=node,
label="Test Label",
instance=1,
index=2,
poll_intensity=4,
)
values = MockEntityValues(primary=value)
node2 = MockNode(node_id=2)
value2 = MockValue(value_id=234567, node=node2, label="Test Label 2")
values2 = MockEntityValues(primary=value2)
hass.data[const.DATA_ENTITY_VALUES] = [values, values2]
resp = await client.get("/api/zwave/values/1")
assert resp.status == 200
result = await resp.json()
assert result == {
"123456": {
"label": "Test Label",
"instance": 1,
"index": 2,
"poll_intensity": 4,
}
}
async def test_get_groups(hass, client):
"""Test getting groupdata on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
node.groups.associations = "assoc"
node.groups.associations_instances = "inst"
node.groups.label = "the label"
node.groups.max_associations = "max"
node.groups = {1: node.groups}
network.nodes = {2: node}
resp = await client.get("/api/zwave/groups/2")
assert resp.status == 200
result = await resp.json()
assert result == {
"1": {
"association_instances": "inst",
"associations": "assoc",
"label": "the label",
"max_associations": "max",
}
}
async def test_get_groups_nogroups(hass, client):
"""Test getting groupdata on node with no groups."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
network.nodes = {2: node}
resp = await client.get("/api/zwave/groups/2")
assert resp.status == 200
result = await resp.json()
assert result == {}
async def test_get_groups_nonode(hass, client):
"""Test getting groupdata on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = await client.get("/api/zwave/groups/2")
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert result == {"message": "Node not found"}
async def test_get_config(hass, client):
"""Test getting config on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
value = MockValue(index=12, command_class=const.COMMAND_CLASS_CONFIGURATION)
value.label = "label"
value.help = "help"
value.type = "type"
value.data = "data"
value.data_items = ["item1", "item2"]
value.max = "max"
value.min = "min"
node.values = {12: value}
network.nodes = {2: node}
node.get_values.return_value = node.values
resp = await client.get("/api/zwave/config/2")
assert resp.status == 200
result = await resp.json()
assert result == {
"12": {
"data": "data",
"data_items": ["item1", "item2"],
"help": "help",
"label": "label",
"max": "max",
"min": "min",
"type": "type",
}
}
async def test_get_config_noconfig_node(hass, client):
"""Test getting config on node without config."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
network.nodes = {2: node}
node.get_values.return_value = node.values
resp = await client.get("/api/zwave/config/2")
assert resp.status == 200
result = await resp.json()
assert result == {}
async def test_get_config_nonode(hass, client):
"""Test getting config on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = await client.get("/api/zwave/config/2")
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert result == {"message": "Node not found"}
async def test_get_usercodes_nonode(hass, client):
"""Test getting usercodes on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = await client.get("/api/zwave/usercodes/2")
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert result == {"message": "Node not found"}
async def test_get_usercodes(hass, client):
"""Test getting usercodes on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_USER_CODE])
value = MockValue(index=0, command_class=const.COMMAND_CLASS_USER_CODE)
value.genre = const.GENRE_USER
value.label = "label"
value.data = "1234"
node.values = {0: value}
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = await client.get("/api/zwave/usercodes/18")
assert resp.status == 200
result = await resp.json()
assert result == {"0": {"code": "1234", "label": "label", "length": 4}}
async def test_get_usercode_nousercode_node(hass, client):
"""Test getting usercodes on node without usercodes."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18)
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = await client.get("/api/zwave/usercodes/18")
assert resp.status == 200
result = await resp.json()
assert result == {}
async def test_get_usercodes_no_genreuser(hass, client):
"""Test getting usercodes on node missing genre user."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_USER_CODE])
value = MockValue(index=0, command_class=const.COMMAND_CLASS_USER_CODE)
value.genre = const.GENRE_SYSTEM
value.label = "label"
value.data = "1234"
node.values = {0: value}
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = await client.get("/api/zwave/usercodes/18")
assert resp.status == 200
result = await resp.json()
assert result == {}
async def test_save_config_no_network(hass, client):
"""Test saving configuration without network data."""
resp = await client.post("/api/zwave/saveconfig")
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert result == {"message": "No Z-Wave network data found"}
async def test_save_config(hass, client):
"""Test saving configuration."""
network = hass.data[DATA_NETWORK] = MagicMock()
resp = await client.post("/api/zwave/saveconfig")
assert resp.status == 200
result = await resp.json()
assert network.write_config.called
assert result == {"message": "Z-Wave configuration saved to file"}
async def test_get_protection_values(hass, client):
"""Test getting protection values on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION,
)
value.label = "Protection Test"
value.data_items = [
"Unprotected",
"Protection by Sequence",
"No Operation Possible",
]
value.data = "Unprotected"
network.nodes = {18: node}
node.value = value
node.get_protection_item.return_value = "Unprotected"
node.get_protection_items.return_value = value.data_items
node.get_protections.return_value = {value.value_id: "Object"}
resp = await client.get("/api/zwave/protection/18")
assert resp.status == 200
result = await resp.json()
assert node.get_protections.called
assert node.get_protection_item.called
assert node.get_protection_items.called
assert result == {
"value_id": "123456",
"selected": "Unprotected",
"options": ["Unprotected", "Protection by Sequence", "No Operation Possible"],
}
async def test_get_protection_values_nonexisting_node(hass, client):
"""Test getting protection values on node with wrong nodeid."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION,
)
value.label = "Protection Test"
value.data_items = [
"Unprotected",
"Protection by Sequence",
"No Operation Possible",
]
value.data = "Unprotected"
network.nodes = {17: node}
node.value = value
resp = await client.get("/api/zwave/protection/18")
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert not node.get_protections.called
assert not node.get_protection_item.called
assert not node.get_protection_items.called
assert result == {"message": "Node not found"}
async def test_get_protection_values_without_protectionclass(hass, client):
"""Test getting protection values on node without protectionclass."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18)
value = MockValue(value_id=123456, index=0, instance=1)
network.nodes = {18: node}
node.value = value
resp = await client.get("/api/zwave/protection/18")
assert resp.status == 200
result = await resp.json()
assert not node.get_protections.called
assert not node.get_protection_item.called
assert not node.get_protection_items.called
assert result == {}
async def test_set_protection_value(hass, client):
"""Test setting protection value on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION,
)
value.label = "Protection Test"
value.data_items = [
"Unprotected",
"Protection by Sequence",
"No Operation Possible",
]
value.data = "Unprotected"
network.nodes = {18: node}
node.value = value
resp = await client.post(
"/api/zwave/protection/18",
data=json.dumps({"value_id": "123456", "selection": "Protection by Sequence"}),
)
assert resp.status == 200
result = await resp.json()
assert node.set_protection.called
assert result == {"message": "Protection setting succsessfully set"}
async def test_set_protection_value_failed(hass, client):
"""Test setting protection value failed on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18, command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION,
)
value.label = "Protection Test"
value.data_items = [
"Unprotected",
"Protection by Sequence",
"No Operation Possible",
]
value.data = "Unprotected"
network.nodes = {18: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
"/api/zwave/protection/18",
data=json.dumps({"value_id": "123456", "selection": "Protecton by Sequence"}),
)
assert resp.status == 202
result = await resp.json()
assert node.set_protection.called
assert result == {"message": "Protection setting did not complete"}
async def test_set_protection_value_nonexisting_node(hass, client):
"""Test setting protection value on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=17, command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION,
)
value.label = "Protection Test"
value.data_items = [
"Unprotected",
"Protection by Sequence",
"No Operation Possible",
]
value.data = "Unprotected"
network.nodes = {17: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
"/api/zwave/protection/18",
data=json.dumps({"value_id": "123456", "selection": "Protecton by Sequence"}),
)
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert not node.set_protection.called
assert result == {"message": "Node not found"}
async def test_set_protection_value_missing_class(hass, client):
"""Test setting protection value on node without protectionclass."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=17)
value = MockValue(value_id=123456, index=0, instance=1)
network.nodes = {17: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
"/api/zwave/protection/17",
data=json.dumps({"value_id": "123456", "selection": "Protecton by Sequence"}),
)
assert resp.status == HTTP_NOT_FOUND
result = await resp.json()
assert not node.set_protection.called
assert result == {"message": "No protection commandclass on this node"}
|
import diamond.collector
import urllib2
class AuroraCollector(diamond.collector.Collector):
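    """Collects metrics from an Aurora scheduler's /vars HTTP endpoint."""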
def get_default_config_help(self):
config_help = super(AuroraCollector,
self).get_default_config_help()
config_help.update({
'host': 'Scheduler Hostname',
'port': 'Scheduler HTTP Metrics Port',
'path': 'Collector path. Defaults to "aurora"',
            'scheme': 'URL scheme to use. Defaults to "http"',
})
return config_help
def get_default_config(self):
config = super(AuroraCollector, self).get_default_config()
config.update({
'path': 'aurora',
'host': 'localhost',
'port': 8081,
'scheme': 'http'
})
return config
def collect(self):
url = "%s://%s:%s/vars" % (self.config['scheme'],
self.config['host'],
self.config['port'])
response = urllib2.urlopen(url)
for line in response.readlines():
properties = line.split()
            # Not all lines returned will have a numeric metric.
            # To account for this, we attempt to cast the 'value'
            # portion to a float; if that fails, we simply skip the line.
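            # For example, a (hypothetical) line "jvm_uptime_secs 42949"
            # would be published as 'jvm.uptime.secs' = 42949.0.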
try:
if len(properties) > 1:
subpath = properties[0].replace('/', '.').replace('_', '.')
value = float(properties[1])
self.publish(subpath, value)
except ValueError:
continue
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
from absl import flags
from perfkitbenchmarker import events
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.traces import base_collector
import six
flags.DEFINE_boolean(
'mpstat', False, 'Run mpstat (https://linux.die.net/man/1/mpstat) '
'to collect system performance metrics during benchmark run.')
flags.DEFINE_enum(
'mpstat_breakdown', 'SUM', ['SUM', 'CPU', 'ALL'],
'Level of aggregation for statistics. Accepted '
'values are "SUM", "CPU", "ALL". Defaults to SUM. See '
'https://linux.die.net/man/1/mpstat for details.')
flags.DEFINE_string(
'mpstat_cpus', 'ALL', 'Comma delimited string of CPU ids or ALL. '
'Defaults to ALL.')
flags.DEFINE_integer(
'mpstat_interval', 1,
    'The amount of time in seconds between each mpstat report. '
'Defaults to 1.')
flags.DEFINE_integer(
    'mpstat_count', 1, 'The number of reports to generate, spaced one '
    'interval apart. Defaults to 1.')
flags.DEFINE_boolean('mpstat_publish', False,
'Whether to publish mpstat statistics.')
FLAGS = flags.FLAGS
def _ParsePercentageUse(rows, metadata):
"""Parse a CPU percentage use data chunk.
Args:
rows: List of mpstat CPU percentage lines.
metadata: metadata of the sample.
Yields:
List of samples
input data:
Average: CPU %usr %nice %sys %iowait %irq %soft %steal %guest %gnice %idle
Average: all 1.82 0.11 0.84 0.05 0.00 0.31 0.00 0.00 0.00 96.88
Average: 0 1.77 0.09 0.82 0.07 0.00 2.21 0.00 0.00 0.00 95.04
Average: 1 1.85 0.12 0.83 0.06 0.00 0.65 0.00 0.00 0.00 96.49
...
"""
header_row = rows[0]
headers = [header.strip('%') for header in header_row.split()]
for row in rows[1:]:
data = row.split()
name_value_pairs = list(zip(headers, data))
cpu_id_pair = name_value_pairs[1]
for header, value in name_value_pairs[2:]:
meta = metadata.copy()
if 'all' in cpu_id_pair:
metric_name = 'mpstat_avg_' + header
cpu_id = -1
else:
metric_name = 'mpstat_' + header
cpu_id = int(cpu_id_pair[1])
meta['mpstat_cpu_id'] = cpu_id
yield sample.Sample(
metric=metric_name, value=float(value), unit='%', metadata=meta)
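# A minimal sketch of the mapping above, using the docstring's sample chunk:
# the 'Average:  all ...' row yields aggregate samples such as
#   Sample(metric='mpstat_avg_usr', value=1.82, unit='%', metadata={..., 'mpstat_cpu_id': -1})
# while per-CPU rows keep the real CPU id and drop the 'avg' marker, e.g.
#   Sample(metric='mpstat_usr', value=1.77, unit='%', metadata={..., 'mpstat_cpu_id': 0}).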
def _ParseInterruptsPerSec(rows, metadata):
"""Parse a interrput/sec data chunk.
Args:
rows: List of mpstat interrupts per second lines.
metadata: metadata of the sample.
Yields:
List of samples
input data:
Average: CPU intr/s
Average: all 3371.98
Average: 0 268.54
Average: 1 265.59
...
"""
for row in rows[1:]: # skipping first header row
data = row.split()
meta = metadata.copy()
if 'all' in data:
metric_name = 'mpstat_avg_intr'
cpu_id = -1
else:
metric_name = 'mpstat_intr'
cpu_id = int(data[1])
meta['mpstat_cpu_id'] = cpu_id
yield sample.Sample(metric=metric_name, value=float(data[2]),
unit='interrupts/sec', metadata=meta)
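# Likewise for the interrupt chunk: the 'Average:  all  3371.98' row becomes
# Sample(metric='mpstat_avg_intr', value=3371.98, unit='interrupts/sec') with
# cpu_id -1, and per-CPU rows become 'mpstat_intr' samples with their CPU id.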
def _MpstatResults(metadata, output):
"""Parses and appends mpstat results to the samples list.
Args:
metadata: metadata of the sample.
output: output of mpstat
Returns:
List of samples.
"""
samples = []
paragraphs = output.split('\n\n')
for paragraph in paragraphs:
lines = paragraph.rstrip().split('\n')
if lines and 'Average' in lines[0] and '%irq' in lines[0]:
samples += _ParsePercentageUse(lines, metadata)
elif lines and 'Average' in lines[0] and 'intr/s' in lines[0]:
samples += _ParseInterruptsPerSec(lines, metadata)
elif lines and 'Average' in lines[0]:
logging.debug('Skipping aggregated metrics: %s', lines[0])
return samples
class MpstatCollector(base_collector.BaseCollector):
"""mpstat collector.
Installs and runs mpstat on a collection of VMs.
"""
def _CollectorName(self):
return 'mpstat'
def _InstallCollector(self, vm):
vm.InstallPackages('sysstat')
def _CollectorRunCommand(self, vm, collector_file):
return ('mpstat -I {breakdown} -u -P {processor_number} {interval} {count} '
'> {output} 2>&1 &'.format(
breakdown=FLAGS.mpstat_breakdown,
processor_number=FLAGS.mpstat_cpus,
interval=self.interval,
count=FLAGS.mpstat_count,
output=collector_file))
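  # With the default flags the command above expands to, e.g.:
  #   mpstat -I SUM -u -P ALL 1 1 > <collector_file> 2>&1 &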
def Analyze(self, sender, benchmark_spec, samples):
"""Analyze mpstat file and record samples.
Args:
sender: event sender for collecting stats.
benchmark_spec: benchmark_spec of this run.
samples: samples to add stats to.
"""
def _Analyze(role, output):
"""Parse file and record samples."""
with open(
os.path.join(self.output_directory, os.path.basename(output)),
'r') as fp:
output = fp.read()
metadata = {
'event': 'mpstat',
'sender': 'run',
'role': role,
}
samples.extend(_MpstatResults(metadata, output))
vm_util.RunThreaded(
_Analyze, [((k, w), {}) for k, w in six.iteritems(self._role_mapping)])
def Register(parsed_flags):
"""Registers the mpstat collector if FLAGS.mpstat is set."""
if not parsed_flags.mpstat:
return
logging.debug('Registering mpstat collector.')
collector = MpstatCollector(interval=parsed_flags.mpstat_interval)
events.before_phase.connect(collector.Start, events.RUN_PHASE, weak=False)
events.after_phase.connect(collector.Stop, events.RUN_PHASE, weak=False)
if parsed_flags.mpstat_publish:
events.samples_created.connect(
collector.Analyze, events.RUN_PHASE, weak=False)
|
import base64
import os
import random
import string
_basedir = os.path.abspath(os.path.dirname(__file__))
# generate random secrets for unittest
def get_random_secret(length):
secret_key = ''.join(random.choice(string.ascii_uppercase) for x in range(round(length / 4)))
secret_key = secret_key + ''.join(random.choice("~!@#$%^&*()_+") for x in range(round(length / 4)))
secret_key = secret_key + ''.join(random.choice(string.ascii_lowercase) for x in range(round(length / 4)))
return secret_key + ''.join(random.choice(string.digits) for x in range(round(length / 4)))
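# Note: the requested length is split into four equal quarters (uppercase,
# punctuation, lowercase, digits), so get_random_secret(32) returns eight
# characters from each class; each quarter is rounded separately, so lengths
# that are not a multiple of 4 will not be hit exactly.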
THREADS_PER_PAGE = 8
# General
# These will need to be set to `True` if you are developing locally
CORS = False
debug = False
TESTING = True
# this is the secret key used by flask session management (utf8 encoded)
SECRET_KEY = get_random_secret(length=32).encode('utf8')
# You should consider storing these separately from your config (should be URL-safe)
LEMUR_TOKEN_SECRET = "test"
LEMUR_ENCRYPTION_KEYS = base64.urlsafe_b64encode(get_random_secret(length=32).encode('utf8'))
# List of domain regular expressions that non-admin users can issue
LEMUR_ALLOWED_DOMAINS = [
r"^[a-zA-Z0-9-]+\.example\.com$",
r"^[a-zA-Z0-9-]+\.example\.org$",
r"^example\d+\.long\.com$",
]
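# e.g. "www.example.com" and "example42.long.com" match, while nested
# subdomains such as "a.b.example.com" do not (dots are not allowed in the
# wildcarded label).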
# Mail Server
# Lemur currently only supports SES for sending email, this address
# needs to be verified
LEMUR_EMAIL = "[email protected]"
LEMUR_SECURITY_TEAM_EMAIL = ["[email protected]"]
LEMUR_HOSTNAME = "lemur.example.com"
# Logging
LOG_LEVEL = "DEBUG"
LOG_FILE = "lemur.log"
LEMUR_DEFAULT_COUNTRY = "US"
LEMUR_DEFAULT_STATE = "California"
LEMUR_DEFAULT_LOCATION = "Los Gatos"
LEMUR_DEFAULT_ORGANIZATION = "Example, Inc."
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = "Example"
LEMUR_ALLOW_WEEKEND_EXPIRATION = False
# Database
# Modify this if you are not using a local database. Do not use any development or production DBs,
# as the unit tests drop the whole schema, recreate it, and drop everything again at the end.
SQLALCHEMY_DATABASE_URI = os.getenv(
"SQLALCHEMY_DATABASE_URI", "postgresql://lemur:lemur@localhost:5432/lemur"
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# AWS
LEMUR_INSTANCE_PROFILE = "Lemur"
# Issuers
# These will be dependent on which 3rd party that Lemur is
# configured to use.
# CLOUDCA_URL = ''
# CLOUDCA_PEM_PATH = ''
# CLOUDCA_BUNDLE = ''
# number of years to issue if not specified
# CLOUDCA_DEFAULT_VALIDITY = 2
DIGICERT_URL = "mock://www.digicert.com"
DIGICERT_ORDER_TYPE = "ssl_plus"
DIGICERT_API_KEY = "api-key"
DIGICERT_ORG_ID = 111111
DIGICERT_ROOT = "ROOT"
DIGICERT_CIS_URL = "mock://www.digicert.com"
DIGICERT_CIS_PROFILE_NAMES = {"sha2-rsa-ecc-root": "ssl_plus"}
DIGICERT_CIS_API_KEY = "api-key"
DIGICERT_CIS_ROOTS = {"root": "ROOT"}
VERISIGN_URL = "http://example.com"
VERISIGN_PEM_PATH = "~/"
VERISIGN_FIRST_NAME = "Jim"
VERISIGN_LAST_NAME = "Bob"
VERSIGN_EMAIL = "[email protected]"
ACME_AWS_ACCOUNT_NUMBER = "11111111111"
ACME_PRIVATE_KEY = """
-----BEGIN RSA PRIVATE KEY-----
MIIJJwIBAAKCAgEA0+jySNCc1i73LwDZEuIdSkZgRYQ4ZQVIioVf38RUhDElxy51
4gdWZwp8/TDpQ8cVXMj6QhdRpTVLluOz71hdvBAjxXTISRCRlItzizTgBD9CLXRh
vPLIMPvAJH7JZxp9xW5oVYUcHBveQJ5tQvnP7RgPykejl7DPKm/SGKYealnoGPcP
U9ipz2xXlVlx7ZKivLbaijh2kD/QE9pC//CnP31g3QFCsxOTLAWtICz5VbvaWuTT
whqFs5cT3kKYAW/ccPcty573AX/9Y/UZ4+B3wxXY3/6GYPMcINRuu/7Srs3twlNu
udoTNdM9SztWMYUzz1SMYad9v9LLGTrv+5Tog4YsqMFxyKrBBBz8/bf1lKwyfAW+
okvVe+1bUY8iSDuDx1O0iMyHe5w8lxsoTy91ujjr1cQDyJR70TKQpeBmfNtBVnW+
D8E6Xw2yCuL9XTyBApldzQ/J1ObPd1Hv+yzhEx4VD9QOmQPn7doiapTDYfW51o1O
Mo+zuZgsclhePvzqN4/6VYXZnPE68uqx982u0W82tCorRUtzfFoO0plNRCjmV7cw
0fp0ie3VczUOH9gj4emmdQd1tVA/Esuh3XnzZ2ANwohtPytn+I3MX0Q+5k7AcRlt
AyI80x8CSiDStI6pj3BlPJgma9G8u7r3E2aqW6qXCexElTCaH2t8A7JWI80CAwEA
AQKCAgBDXLyQGwiQKXPYFDvs/cXz03VNA9/tdQV/SzCT8FQxhXIN5B4DEPQNY08i
KUctjX6j9RtgoQsKKmvx9kY/omaBntvQK/RzDXpJrx62tMM1dmpyCpn7N24d7BlD
QK6DQO+UMCmobdzmrpEzF2mCLelD5C84zRca5FCmm888mKn4gsX+EaNksu4gCr+4
sSs/KyriNHo6EALYjgB2Hx7HP1fbHd8JwhnS1TkmeFN1c/Z6o3GhDTancEjqMu9U
6vRpGIcJvflnzguVBXumJ8boInXPpQVBBybucLmTUhQ1XKbafInFCUKcf881gAXv
AVi/+yjiEm1hqZ2WucpoJc0du1NBz/MP+/MxHGQ/5eaEMIz5X2QcXzQ4xn5ym0sk
Hy0SmH3v/9by1GkK5eH/RTV/8bmtb8Qt0+auLQ6/ummFDjPw866Or4FdL3tx2gug
fONjaZqypee+EmlLG1UmMejjCblmh0bymAHnFkf7tAJsLGd8I00PQiObEqaqd03o
xiYUvrbDpCHah4gB7Uv3AgrHVTbcHsEWmXuNDooD0sSXCFMf3cA81M8vGfkypqi/
ixxZtxtdTU5oCFwI9zEjnQvdA1IZMUAmz8vLwn/fKgENek9PAV3voQr1c0ctZPvy
S/k7HgJt+2Wj7Pqb4mwPgxeYVSBEM7ygOq6Gdisyhi8DP0A2fQKCAQEA6iIrSqQM
pVDqhQsk9Cc0b4kdsG/EM66M7ND5Q2GLiPPFrR59Hm7ViG6h2DhwqSnSRigiO+TN
jIuvD/O0kbmCUZSar19iKPiJipENN+AX3MBm1cS5Oxp6jgY+3jj4KgDQPYmL49fJ
CojnmLKjrAPoUi4f/7s4O1rEAghXPrf5/9coaRPORiNi+bZK0bReJwf1GE/9CPqs
FiZrQNz+/w/1MwFisG6+g0/58fp9j9r6l8JXETjpyO5F+8W8bg8M4V7aoYt5Ec2X
+BG6Gq06Tvm2UssYa6iEVNSKF39ssBzKKALi4we/fcfwjq4bCTKMCjV0Tp3zY/FG
1VyDtMGKrlPnOwKCAQEA57Nw+qdh2wbihz1uKffcoDoW6Q3Ws0mu8ml+UvBn48Ur
41PKrvIb8lhVY7ZiF2/iRyodua9ztE4zvgGs7UqyHaSYHR+3mWeOAE2Hb/XiNVgu
JVupTXLpx3y7d9FxvrU/27KUxhJgcbVpIGRiMn5dmY2S86EYKX1ObjZKmwvFc6+n
1YWgtI2+VOKe5+0ttig6CqzL9qJLZfL6QeAy0yTp/Wz+G1c06XTL87QNeU7CXN00
rB7I4n1Xn422rZnE64MOsARVChyE2fUC9syfimoryR9yIL2xor9QdjL2tK6ziyPq
WgedY4bDjZLM5KbcHcRng0j5WCJV+pX9Hh1c4n5AlwKCAQAxjun68p56n5YEc0dv
Jp1CvpM6NW4iQmAyAEnCqXMPmgnNixaQyoUIS+KWEdxG8kM/9l7IrrWTej2j8sHV
1p5vBjV3yYjNg04ZtnpFyXlDkLYzqWBL0l7+kPPdtdFRkrqBTAwAPjyfrjrXZ3id
gHY8bub3CnnsllnG1F0jOW4BaVl0ZGzVC8h3cs6DdNo5CMYoT0YQEH88cQVixWR0
OLx9/10UW1yYDuWpAoxxVriURt6HFrTlgwntMP2hji37xkggyZTm3827BIWP//rH
nLOq8rJIl3LrQdG5B4/J904TCglcZNdzmE6i5Nd0Ku7ZelcUDPrnvLpxjxORvyXL
oJbhAoIBAD7QV9WsIQxG7oypa7828foCJYni9Yy/cg1H6jZD9HY8UuybH7yT6F2n
8uZIYIloDJksYsifNyfvd3mQbLgb4vPEVnS2z4hoGYgdfJUuvLeng0MfeWOEvroV
J6GRB1wjOP+vh0O3YawR+UEN1c1Iksl5JxijWLCOxv97+nfUFiCJw19QjcPFFY9f
rKLFmvniJ/IS7GydjQFDgPLw+/Zf8IuCy9TPrImJ32zfKDP11R1l3sy2v9EfF+0q
dxbTNB6A9i9jzUYjeyS3lqkfyjS1Gc+5lbAonQq5APA6WsWbAxO6leL4Y4PC2ir8
XE20qsHrKADgfLCXBmYb2XYbkb3ZalsCggEAfOuB9/eLMSmtney3vDdZNF8fvEad
DF+8ss8yITNQQuC0nGdXioRuvSyejOxtjHplMT5GXsgLp1vAujDQmGTv/jK+EXsU
cRe4df5/EbRiUOyx/ZBepttB1meTnsH6cGPN0JnmTMQHQvanL3jjtjrC13408ONK
1yK2S4xJjKYFLT86SjKvV6g5k49ntLYk59nviqHl8bYzAVMoEjb62Z+hERwd/2hx
omsEEjDt4qVqGvSyy+V/1EhqGPzm9ri3zapnorf69rscuXYYsMBZ8M6AtSio4ldB
LjCRNS1lR6/mV8AqUNR9Kn2NLQyJ76yDoEVLulKZqGUsC9STN4oGJLUeFw==
-----END RSA PRIVATE KEY-----
"""
ACME_ROOT = """
-----BEGIN CERTIFICATE-----
MIIFjTCCA3WgAwIBAgIRANOxciY0IzLc9AUoUSrsnGowDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTYxMDA2MTU0MzU1
WhcNMjExMDA2MTU0MzU1WjBKMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg
RW5jcnlwdDEjMCEGA1UEAxMaTGV0J3MgRW5jcnlwdCBBdXRob3JpdHkgWDMwggEi
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCc0wzwWuUuR7dyXTeDs2hjMOrX
NSYZJeG9vjXxcJIvt7hLQQWrqZ41CFjssSrEaIcLo+N15Obzp2JxunmBYB/XkZqf
89B4Z3HIaQ6Vkc/+5pnpYDxIzH7KTXcSJJ1HG1rrueweNwAcnKx7pwXqzkrrvUHl
Npi5y/1tPJZo3yMqQpAMhnRnyH+lmrhSYRQTP2XpgofL2/oOVvaGifOFP5eGr7Dc
Gu9rDZUWfcQroGWymQQ2dYBrrErzG5BJeC+ilk8qICUpBMZ0wNAxzY8xOJUWuqgz
uEPxsR/DMH+ieTETPS02+OP88jNquTkxxa/EjQ0dZBYzqvqEKbbUC8DYfcOTAgMB
AAGjggFnMIIBYzAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADBU
BgNVHSAETTBLMAgGBmeBDAECATA/BgsrBgEEAYLfEwEBATAwMC4GCCsGAQUFBwIB
FiJodHRwOi8vY3BzLnJvb3QteDEubGV0c2VuY3J5cHQub3JnMB0GA1UdDgQWBBSo
SmpjBH3duubRObemRWXv86jsoTAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8vY3Js
LnJvb3QteDEubGV0c2VuY3J5cHQub3JnMHIGCCsGAQUFBwEBBGYwZDAwBggrBgEF
BQcwAYYkaHR0cDovL29jc3Aucm9vdC14MS5sZXRzZW5jcnlwdC5vcmcvMDAGCCsG
AQUFBzAChiRodHRwOi8vY2VydC5yb290LXgxLmxldHNlbmNyeXB0Lm9yZy8wHwYD
VR0jBBgwFoAUebRZ5nu25eQBc4AIiMgaWPbpm24wDQYJKoZIhvcNAQELBQADggIB
ABnPdSA0LTqmRf/Q1eaM2jLonG4bQdEnqOJQ8nCqxOeTRrToEKtwT++36gTSlBGx
A/5dut82jJQ2jxN8RI8L9QFXrWi4xXnA2EqA10yjHiR6H9cj6MFiOnb5In1eWsRM
UM2v3e9tNsCAgBukPHAg1lQh07rvFKm/Bz9BCjaxorALINUfZ9DD64j2igLIxle2
DPxW8dI/F2loHMjXZjqG8RkqZUdoxtID5+90FgsGIfkMpqgRS05f4zPbCEHqCXl1
eO5HyELTgcVlLXXQDgAWnRzut1hFJeczY1tjQQno6f6s+nMydLN26WuU4s3UYvOu
OsUxRlJu7TSRHqDC3lSE5XggVkzdaPkuKGQbGpny+01/47hfXXNB7HntWNZ6N2Vw
p7G6OfY+YQrZwIaQmhrIqJZuigsrbe3W+gdn5ykE9+Ky0VgVUsfxo52mwFYs1JKY
2PGDuWx8M6DlS6qQkvHaRUo0FMd8TsSlbF0/v965qGFKhSDeQoMpYnwcmQilRh/0
ayLThlHLN81gSkJjVrPI0Y8xCVPB4twb1PFUd2fPM3sA1tJ83sZ5v8vgFv2yofKR
PB0t6JzUA81mSqM3kxl5e+IZwhYAyO0OTg3/fs8HqGTNKd9BqoUwSRBzp06JMg5b
rUCGwbCUDI0mxadJ3Bz4WxR6fyNpBK2yAinWEsikxqEt
-----END CERTIFICATE-----
"""
ACME_URL = "https://acme-v01.api.letsencrypt.org"
ACME_EMAIL = "[email protected]"
ACME_TEL = "4088675309"
ACME_DIRECTORY_URL = "https://acme-v01.api.letsencrypt.org"
ACME_DISABLE_AUTORESOLVE = True
LDAP_AUTH = True
LDAP_BIND_URI = "ldap://localhost"
LDAP_BASE_DN = "dc=example,dc=com"
LDAP_EMAIL_DOMAIN = "example.com"
LDAP_REQUIRED_GROUP = "Lemur Access"
LDAP_DEFAULT_ROLE = "role1"
ALLOW_CERT_DELETION = True
ENTRUST_API_CERT = "api-cert"
ENTRUST_API_KEY = get_random_secret(32)
ENTRUST_API_USER = "user"
ENTRUST_API_PASS = get_random_secret(32)
ENTRUST_URL = "https://api.entrust.net/enterprise/v2"
ENTRUST_ROOT = """
-----BEGIN CERTIFICATE-----
MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
-----END CERTIFICATE-----
"""
ENTRUST_NAME = "lemur"
ENTRUST_EMAIL = "[email protected]"
ENTRUST_PHONE = "123456"
ENTRUST_ISSUING = ""
ENTRUST_PRODUCT_ENTRUST = "ADVANTAGE_SSL"
|
import unittest
from datetime import datetime
from mock import MagicMock
from trashcli.trash import ParseTrashInfo
class Test_describe_ParseTrashInfo2(unittest.TestCase):
def test_it_should_parse_date(self):
out = MagicMock()
parse = ParseTrashInfo(on_deletion_date = out)
parse('[Trash Info]\n'
'Path=foo\n'
'DeletionDate=1970-01-01T00:00:00\n')
out.assert_called_with(datetime(1970,1,1,0,0,0))
def test_it_should_parse_path(self):
out = MagicMock()
self.parse = ParseTrashInfo(on_path = out)
self.parse( '[Trash Info]\n'
'Path=foo\n'
'DeletionDate=1970-01-01T00:00:00\n')
out.assert_called_with('foo')
from trashcli.trash import parse_deletion_date
from trashcli.trash import parse_path
def test_how_to_parse_date_from_trashinfo():
from datetime import datetime
assert datetime(2000,12,31,23,59,58) == parse_deletion_date('DeletionDate=2000-12-31T23:59:58')
assert datetime(2000,12,31,23,59,58) == parse_deletion_date('DeletionDate=2000-12-31T23:59:58\n')
assert datetime(2000,12,31,23,59,58) == parse_deletion_date('[Trash Info]\nDeletionDate=2000-12-31T23:59:58')
from trashcli.trash import maybe_parse_deletion_date
UNKNOWN_DATE='????-??-?? ??:??:??'
class Test_describe_maybe_parse_deletion_date(unittest.TestCase):
def test_on_trashinfo_without_date_parse_to_unknown_date(self):
assert (UNKNOWN_DATE ==
maybe_parse_deletion_date(a_trashinfo_without_deletion_date()))
def test_on_trashinfo_with_date_parse_to_date(self):
from datetime import datetime
example_date_as_string='2001-01-01T00:00:00'
same_date_as_datetime=datetime(2001,1,1)
assert (same_date_as_datetime ==
maybe_parse_deletion_date(make_trashinfo(example_date_as_string)))
def test_on_trashinfo_with_invalid_date_parse_to_unknown_date(self):
invalid_date='A long time ago'
assert (UNKNOWN_DATE ==
maybe_parse_deletion_date(make_trashinfo(invalid_date)))
def test_how_to_parse_original_path():
assert 'foo.txt' == parse_path('Path=foo.txt')
assert '/path/to/be/escaped' == parse_path('Path=%2Fpath%2Fto%2Fbe%2Fescaped')
from trashcli.restore import TrashInfoParser
from trashcli.trash import ParseError
class TestParsing(unittest.TestCase):
def test_1(self):
parser = TrashInfoParser("[Trash Info]\n"
"Path=/foo.txt\n", volume_path = '/')
assert '/foo.txt' == parser.original_location()
class TestTrashInfoParser_with_empty_trashinfo(unittest.TestCase):
def setUp(self):
self.parser = TrashInfoParser(contents=an_empty_trashinfo(),
volume_path='/')
def test_it_raises_error_on_parsing_original_location(self):
with self.assertRaises(ParseError):
self.parser.original_location()
def a_trashinfo_without_deletion_date():
return ("[Trash Info]\n"
"Path=foo.txt\n")
def make_trashinfo(date):
return ("[Trash Info]\n"
"Path=foo.txt\n"
"DeletionDate=%s" % date)
def an_empty_trashinfo():
return ''
|
from PyQt5.QtCore import QBuffer, QIODevice, QUrl
from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler,
                                   QWebEngineUrlRequestJob)
try:
    from PyQt5.QtWebEngineCore import QWebEngineUrlScheme
except ImportError:
    # QWebEngineUrlScheme was added in Qt 5.12; all uses below guard against None.
    QWebEngineUrlScheme = None
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, qtutils
class QuteSchemeHandler(QWebEngineUrlSchemeHandler):
"""Handle qute://* requests on QtWebEngine."""
def install(self, profile):
"""Install the handler for qute:// URLs on the given profile."""
if QWebEngineUrlScheme is not None:
assert QWebEngineUrlScheme.schemeByName(b'qute') is not None
profile.installUrlSchemeHandler(b'qute', self)
def _check_initiator(self, job):
"""Check whether the initiator of the job should be allowed.
Only the browser itself or qute:// pages should access any of those
URLs. The request interceptor further locks down qute://settings/set.
Args:
job: QWebEngineUrlRequestJob
Return:
True if the initiator is allowed, False if it was blocked.
"""
initiator = job.initiator()
request_url = job.requestUrl()
# https://codereview.qt-project.org/#/c/234849/
is_opaque = initiator == QUrl('null')
target = request_url.scheme(), request_url.host()
if target == ('qute', 'testdata') and is_opaque:
# Allow requests to qute://testdata, as this is needed for all tests to work
# properly. No qute://testdata handler is installed outside of tests.
return True
if initiator.isValid() and initiator.scheme() != 'qute':
log.network.warning("Blocking malicious request from {} to {}"
.format(initiator.toDisplayString(),
request_url.toDisplayString()))
job.fail(QWebEngineUrlRequestJob.RequestDenied)
return False
return True
def requestStarted(self, job):
"""Handle a request for a qute: scheme.
This method must be reimplemented by all custom URL scheme handlers.
The request is asynchronous and does not need to be handled right away.
Args:
job: QWebEngineUrlRequestJob
"""
url = job.requestUrl()
if not self._check_initiator(job):
return
if job.requestMethod() != b'GET':
job.fail(QWebEngineUrlRequestJob.RequestDenied)
return
assert url.scheme() == 'qute'
log.network.debug("Got request for {}".format(url.toDisplayString()))
try:
mimetype, data = qutescheme.data_for_url(url)
except qutescheme.Error as e:
errors = {
qutescheme.NotFoundError:
QWebEngineUrlRequestJob.UrlNotFound,
qutescheme.UrlInvalidError:
QWebEngineUrlRequestJob.UrlInvalid,
qutescheme.RequestDeniedError:
QWebEngineUrlRequestJob.RequestDenied,
qutescheme.SchemeOSError:
QWebEngineUrlRequestJob.UrlNotFound,
qutescheme.Error:
QWebEngineUrlRequestJob.RequestFailed,
}
exctype = type(e)
log.network.error("{} while handling qute://* URL".format(
exctype.__name__))
job.fail(errors[exctype])
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
job.redirect(e.url)
else:
log.network.debug("Returning {} data".format(mimetype))
# We can't just use the QBuffer constructor taking a QByteArray,
# because that somehow segfaults...
# https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
buf = QBuffer(parent=self)
buf.open(QIODevice.WriteOnly)
buf.write(data)
buf.seek(0)
buf.close()
job.reply(mimetype.encode('ascii'), buf)
def init():
"""Register the qute:// scheme.
Note this needs to be called early, before constructing any QtWebEngine
classes.
"""
if QWebEngineUrlScheme is not None:
assert not QWebEngineUrlScheme.schemeByName(b'qute').name()
scheme = QWebEngineUrlScheme(b'qute')
scheme.setFlags(
QWebEngineUrlScheme.LocalScheme | # type: ignore[arg-type]
QWebEngineUrlScheme.LocalAccessAllowed)
QWebEngineUrlScheme.registerScheme(scheme)
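# Typical wiring (a sketch; `app` and `profile` are illustrative): call init()
# once before any QtWebEngine classes are constructed, then install the
# handler on each profile that should serve qute:// pages:
#   handler = QuteSchemeHandler(parent=app)
#   handler.install(profile)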
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.metrics import eval_task
import numpy as np
import scipy.spatial
class FractalDimensionTask(eval_task.EvalTask):
"""Fractal dimension metric."""
_LABEL = "fractal_dimension"
def run_after_session(self, options, eval_data_fake, eval_data_real=None):
print(eval_data_fake)
score = compute_fractal_dimension(eval_data_fake.images)
return {self._LABEL: score}
def compute_fractal_dimension(fake_images,
num_fd_seeds=100,
n_bins=1000,
scale=0.1):
"""Compute Fractal Dimension of fake_images.
Args:
fake_images: an np array of datapoints, the dimensionality and scaling of
images can be arbitrary
num_fd_seeds: number of random centers from which fractal dimension
computation is performed
n_bins: number of bins to split the range of distance values into
scale: the scale of the y interval in the log-log plot for which we apply a
linear regression fit
Returns:
fractal dimension of the dataset.
"""
assert len(fake_images.shape) >= 2
assert fake_images.shape[0] >= num_fd_seeds
num_images = fake_images.shape[0]
  # Flatten each image into a 1-D feature vector so that scipy's cdist can
  # operate on a 2-D array of shape (num_images, num_features).
fake_images = np.reshape(fake_images, (num_images, -1))
fake_images_subset = fake_images[np.random.randint(
num_images, size=num_fd_seeds)]
distances = scipy.spatial.distance.cdist(fake_images,
fake_images_subset).flatten()
min_distance = np.min(distances[np.nonzero(distances)])
max_distance = np.max(distances)
buckets = min_distance * (
(max_distance / min_distance)**np.linspace(0, 1, n_bins))
# Create a table where first column corresponds to distances r
# and second column corresponds to number of points N(r) that lie
# within distance r from the random seeds
fd_result = np.zeros((n_bins - 1, 2))
fd_result[:, 0] = buckets[1:]
fd_result[:, 1] = np.sum(np.less.outer(distances, buckets[1:]), axis=0)
# We compute the slope of the log-log plot at the middle y value
# which is stored in y_val; the linear regression fit is computed on
# the part of the plot that corresponds to an interval around y_val
# whose size is 2*scale*(total width of the y axis)
max_y = np.log(num_images * num_fd_seeds)
min_y = np.log(num_fd_seeds)
x = np.log(fd_result[:, 0])
y = np.log(fd_result[:, 1])
y_width = max_y - min_y
y_val = min_y + 0.5 * y_width
start = np.argmax(y > y_val - scale * y_width)
end = np.argmax(y > y_val + scale * y_width)
slope = np.linalg.lstsq(
a=np.vstack([x[start:end], np.ones(end - start)]).transpose(),
b=y[start:end].reshape(end - start, 1))[0][0][0]
return slope
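# Usage sketch (illustrative, not part of the original module): estimate the
# fractal dimension of a batch of random 8x8 grayscale "images"; the shapes
# and parameter values below are arbitrary.
if __name__ == "__main__":
  rng = np.random.RandomState(0)
  fake_batch = rng.uniform(size=(256, 8, 8, 1))
  print(compute_fractal_dimension(fake_batch, num_fd_seeds=50, n_bins=500))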
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from jcollectd import JCollectdCollector, sanitize_word
###############################################################################
class TestJCollectdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('JCollectdCollector', {
})
self.collector = JCollectdCollector(config, None)
def test_import(self):
self.assertTrue(JCollectdCollector)
def test_sanitize(self):
self.assertEqual(sanitize_word('bla'), 'bla')
self.assertEqual(sanitize_word('bla:'), 'bla')
self.assertEqual(sanitize_word('foo:bar'), 'foo_bar')
self.assertEqual(sanitize_word('foo:!bar'), 'foo_bar')
self.assertEqual(sanitize_word('"ou812"'), 'ou812')
self.assertEqual(sanitize_word('Aap! N@@t mi_es'), 'Aap_N_t_mi_es')
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from datetime import timedelta
import functools as ft
import logging
from typing import Optional
import voluptuous as vol
from homeassistant.const import (
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
DOMAIN = "fan"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Bitfield of features supported by the fan entity
SUPPORT_SET_SPEED = 1
SUPPORT_OSCILLATE = 2
SUPPORT_DIRECTION = 4
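# The flags combine bitwise; e.g. a fan supporting speed and oscillation
# reports supported_features == SUPPORT_SET_SPEED | SUPPORT_OSCILLATE (== 3).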
SERVICE_SET_SPEED = "set_speed"
SERVICE_OSCILLATE = "oscillate"
SERVICE_SET_DIRECTION = "set_direction"
SPEED_OFF = "off"
SPEED_LOW = "low"
SPEED_MEDIUM = "medium"
SPEED_HIGH = "high"
DIRECTION_FORWARD = "forward"
DIRECTION_REVERSE = "reverse"
ATTR_SPEED = "speed"
ATTR_SPEED_LIST = "speed_list"
ATTR_OSCILLATING = "oscillating"
ATTR_DIRECTION = "direction"
@bind_hass
def is_on(hass, entity_id: str) -> bool:
"""Return if the fans are on based on the statemachine."""
state = hass.states.get(entity_id)
if ATTR_SPEED in state.attributes:
return state.attributes[ATTR_SPEED] not in [SPEED_OFF, None]
return state.state == STATE_ON
async def async_setup(hass, config: dict):
"""Expose fan control via statemachine and services."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(
SERVICE_TURN_ON, {vol.Optional(ATTR_SPEED): cv.string}, "async_turn_on"
)
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
component.async_register_entity_service(
SERVICE_SET_SPEED,
{vol.Required(ATTR_SPEED): cv.string},
"async_set_speed",
[SUPPORT_SET_SPEED],
)
component.async_register_entity_service(
SERVICE_OSCILLATE,
{vol.Required(ATTR_OSCILLATING): cv.boolean},
"async_oscillate",
[SUPPORT_OSCILLATE],
)
component.async_register_entity_service(
SERVICE_SET_DIRECTION,
{vol.Optional(ATTR_DIRECTION): cv.string},
"async_set_direction",
[SUPPORT_DIRECTION],
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class FanEntity(ToggleEntity):
"""Representation of a fan."""
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
raise NotImplementedError()
async def async_set_speed(self, speed: str):
"""Set the speed of the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
else:
await self.hass.async_add_executor_job(self.set_speed, speed)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
raise NotImplementedError()
async def async_set_direction(self, direction: str):
"""Set the direction of the fan."""
await self.hass.async_add_executor_job(self.set_direction, direction)
# pylint: disable=arguments-differ
def turn_on(self, speed: Optional[str] = None, **kwargs) -> None:
"""Turn on the fan."""
raise NotImplementedError()
# pylint: disable=arguments-differ
async def async_turn_on(self, speed: Optional[str] = None, **kwargs):
"""Turn on the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
else:
await self.hass.async_add_executor_job(
ft.partial(self.turn_on, speed, **kwargs)
)
def oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
async def async_oscillate(self, oscillating: bool):
"""Oscillate the fan."""
await self.hass.async_add_executor_job(self.oscillate, oscillating)
@property
def is_on(self):
"""Return true if the entity is on."""
return self.speed not in [SPEED_OFF, None]
@property
def speed(self) -> Optional[str]:
"""Return the current speed."""
return None
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return []
@property
def current_direction(self) -> Optional[str]:
"""Return the current direction of the fan."""
return None
@property
def oscillating(self):
"""Return whether or not the fan is currently oscillating."""
return None
@property
def capability_attributes(self):
"""Return capability attributes."""
if self.supported_features & SUPPORT_SET_SPEED:
return {ATTR_SPEED_LIST: self.speed_list}
return {}
@property
def state_attributes(self) -> dict:
"""Return optional state attributes."""
data = {}
supported_features = self.supported_features
if supported_features & SUPPORT_DIRECTION:
data[ATTR_DIRECTION] = self.current_direction
if supported_features & SUPPORT_OSCILLATE:
data[ATTR_OSCILLATING] = self.oscillating
if supported_features & SUPPORT_SET_SPEED:
data[ATTR_SPEED] = self.speed
return data
@property
def supported_features(self) -> int:
"""Flag supported features."""
return 0
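# Minimal illustrative subclass (a sketch, not part of this module; DemoFan
# and its behavior are hypothetical). It shows the members a fan platform
# typically overrides on top of FanEntity.
class DemoFan(FanEntity):
    """A fan that only tracks its speed in memory."""

    def __init__(self, name: str) -> None:
        """Initialize the demo fan as switched off."""
        self._name = name
        self._speed = SPEED_OFF

    @property
    def name(self) -> str:
        """Return the name of the fan."""
        return self._name

    @property
    def should_poll(self) -> bool:
        """No polling needed; state changes are pushed."""
        return False

    @property
    def speed(self) -> Optional[str]:
        """Return the current speed."""
        return self._speed

    @property
    def speed_list(self) -> list:
        """Return the supported speeds."""
        return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]

    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return SUPPORT_SET_SPEED

    def set_speed(self, speed: str) -> None:
        """Set the speed and push the new state."""
        self._speed = speed
        self.schedule_update_ha_state()

    def turn_on(self, speed: Optional[str] = None, **kwargs) -> None:
        """Turn on the fan, defaulting to medium speed."""
        self.set_speed(speed or SPEED_MEDIUM)

    def turn_off(self, **kwargs) -> None:
        """Turn off the fan."""
        self.set_speed(SPEED_OFF)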
|
import argparse
import errno
import os
import re
import yaml
def replace(s, values):
s = re.sub(
r"<%(.*?)%>",
lambda x: values.get(
x.group(0).replace("<%", "").replace("%>", ""), x.group(0)
),
s,
)
return re.sub(
r"\$\((.*?)\)",
lambda x: os.environ.get(
x.group(0).replace("$(", "").replace(")", ""), x.group(0)
),
s,
)
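# Example (hypothetical values): replace("<%name%>-$(HOME)", {"name": "app"})
# expands "<%name%>" from the mapping and "$(HOME)" from the environment;
# keys and variables that are not found are left verbatim.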
def render_file(src, dst, values):
basename = os.path.basename(src)
new_name = replace(basename, values)
with open(f"{dst}/{new_name}", "w") as new:
with open(f"{src}", "r") as old:
new.write(replace(old.read(), values))
def render(src, dst, values=None, exclude=frozenset()):
    values = values or {}
if os.path.isfile(src):
render_file(src, dst, values)
return
for f in os.scandir(src):
if f.name.startswith(".") or f.path in exclude:
continue
if os.path.isfile(f.path):
render_file(f.path, dst, values)
else:
new_dst = replace(f"{dst}/{f.name}", values)
try:
os.mkdir(new_dst)
except OSError as e:
                if e.errno != errno.EEXIST:
raise
render(f.path, new_dst, values, exclude)
def parse_args():
parser = argparse.ArgumentParser(
description="Replaces all <%%> in all files in src with values provided, and writes the results to dst folder. $() is reserved for environment variables. File/dir that starts with . are ignored"
)
parser.add_argument(
"-s",
"--src",
type=str,
dest="src",
required=True,
help="src can be either a valid folder of directory. Note that src directory itself is not rendered. .* files/dirs are ignored.",
)
parser.add_argument(
"-d",
"--dst",
type=str,
dest="dst",
required=True,
help="Dst needs to be a directory.",
)
parser.add_argument(
"-v",
"--values",
type=str,
dest="values",
default=None,
help="values need to be valid file if provided",
)
args = parser.parse_args()
return args
def main():
args = parse_args()
src = os.path.abspath(args.src)
dst = os.path.abspath(args.dst)
values = args.values
if values is not None:
values = os.path.abspath(values)
    # Validate src, dst and values: dst must be a directory, src can be a
    # valid file or directory, and values must be a valid file if provided.
if not os.path.exists(src):
raise Exception("src path is invalid")
if not os.path.exists(dst) or not os.path.isdir(dst):
raise Exception("dst path is invalid")
if values and (not os.path.exists(values) or not os.path.isfile(values)):
raise Exception("values path is invalid")
# Lookup for values.yaml in src folder if values is not provided
if os.path.isdir(src) and values is None and os.path.exists(f"{src}/values.yaml"):
values = f"{src}/values.yaml"
config_dict = {}
if values is not None:
with open(values) as f:
config_dict = yaml.safe_load(f)
# Replace environment variables in values.yaml with environment variables
for k, v in config_dict.items():
config_dict[k] = re.sub(
r"\$\((.*?)\)",
lambda x: os.environ.get(
x.group(0).replace("$(", "").replace(")", ""), x.group(0)
),
v,
)
render(src, dst, config_dict, {values})
if __name__ == "__main__":
main()
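# Usage sketch (file names are hypothetical):
#   python render.py --src ./templates --dst ./out --values ./values.yaml
# Every file under src is written into dst with <%key%> placeholders filled
# from values.yaml and $(VAR) placeholders filled from the environment.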
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import scipy.misc
import tensorflow as tf
def check_folder(log_dir):
if not tf.gfile.IsDirectory(log_dir):
tf.gfile.MakeDirs(log_dir)
return log_dir
def save_images(images, image_path):
with tf.gfile.Open(image_path, "wb") as f:
scipy.misc.imsave(f, images * 255.0)
def rotate_images(images, rot90_scalars=(0, 1, 2, 3)):
"""Return the input image and its 90, 180, and 270 degree rotations."""
images_rotated = [
images, # 0 degree
tf.image.flip_up_down(tf.image.transpose_image(images)), # 90 degrees
tf.image.flip_left_right(tf.image.flip_up_down(images)), # 180 degrees
tf.image.transpose_image(tf.image.flip_up_down(images)) # 270 degrees
]
results = tf.stack([images_rotated[i] for i in rot90_scalars])
results = tf.reshape(results,
[-1] + images.get_shape().as_list()[1:])
return results
def gaussian(batch_size, n_dim, mean=0., var=1.):
return np.random.normal(mean, var, (batch_size, n_dim)).astype(np.float32)
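# Usage sketch (illustrative): draw a batch of 64 latent vectors of size 128.
#   z = gaussian(64, 128)  # shape (64, 128), dtype float32
# Note: np.random.normal's second positional argument is a standard deviation,
# so the `var` parameter above is applied as a std-dev, not a variance.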
|
import unittest
import numpy as np
import PIL
import chainer
from chainer import testing
from chainercv.transforms import resize
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
@testing.parameterize(*testing.product({
'interpolation': [PIL.Image.NEAREST, PIL.Image.BILINEAR,
PIL.Image.BICUBIC, PIL.Image.LANCZOS],
'backend': ['cv2', 'PIL', None],
}))
class TestResize(unittest.TestCase):
def test_resize_color(self):
if self.backend == 'cv2' and not _cv2_available:
return
img = np.random.uniform(size=(3, 24, 32))
with chainer.using_config('cv_resize_backend', self.backend):
out = resize(img, size=(32, 64), interpolation=self.interpolation)
self.assertEqual(out.shape, (3, 32, 64))
def test_resize_grayscale(self):
if self.backend == 'cv2' and not _cv2_available:
return
img = np.random.uniform(size=(1, 24, 32))
with chainer.using_config('cv_resize_backend', self.backend):
out = resize(img, size=(32, 64), interpolation=self.interpolation)
self.assertEqual(out.shape, (1, 32, 64))
def test_zero_length_img(self):
if self.backend == 'cv2' and not _cv2_available:
return
img = np.random.uniform(size=(0, 24, 32))
with chainer.using_config('cv_resize_backend', self.backend):
out = resize(img, size=(32, 64), interpolation=self.interpolation)
self.assertEqual(out.shape, (0, 32, 64))
class TestResizeWithInvalidConfig(unittest.TestCase):
def test_invalid_backend(self):
img = np.random.uniform(size=(3, 24, 32))
with chainer.using_config('cv_resize_backend', 'PII'):
with self.assertRaises(ValueError):
resize(img, size=(32, 64))
@unittest.skipIf(_cv2_available, 'cv2 is installed')
class TestResizeRaiseErrorWithCv2(unittest.TestCase):
def test_resize_raise_error_with_cv2(self):
img = np.random.uniform(size=(3, 24, 32))
with chainer.using_config('cv_resize_backend', 'cv2'):
with self.assertRaises(ValueError):
resize(img, size=(32, 64))
testing.run_module(__name__, __file__)
|
from gi.repository import Gtk
def tree_path_as_tuple(path):
"""Get the path indices as a tuple
This helper only exists because we often want to use tree paths
as set members or dictionary keys, and this is a convenient option.
"""
return tuple(path.get_indices())
def tree_path_prev(path):
if not path or path[-1] == 0:
return None
return path[:-1] + [path[-1] - 1]
def tree_path_up(path):
if not path:
return None
return path[:-1]
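# Treating paths as plain index lists (illustrative):
#   tree_path_prev([1, 2]) -> [1, 1]    tree_path_prev([1, 0]) -> None
#   tree_path_up([1, 2])   -> [1]       tree_path_up([])       -> None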
def valid_path(model, path):
try:
model.get_iter(path)
return True
except ValueError:
return False
def refocus_deleted_path(model, path):
# Since the passed path has been deleted, either the path is now a
# valid successor, or there are no successors. If valid, return it.
# If not, and the path has a predecessor sibling (immediate or
# otherwise), then return that. If there are no siblings, traverse
# parents until we get a valid path, and return that.
if valid_path(model, path):
return path
new_path = tree_path_prev(path)
while new_path:
if valid_path(model, new_path):
return new_path
new_path = tree_path_prev(new_path)
new_path = tree_path_up(path)
while new_path:
if valid_path(model, new_path):
return new_path
new_path = tree_path_up(new_path)
class SearchableTreeStore(Gtk.TreeStore):
def inorder_search_down(self, it):
while it:
child = self.iter_children(it)
if child:
it = child
else:
next_it = self.iter_next(it)
if next_it:
it = next_it
else:
while True:
it = self.iter_parent(it)
if not it:
return
next_it = self.iter_next(it)
if next_it:
it = next_it
break
yield it
def inorder_search_up(self, it):
while it:
path = self.get_path(it)
if path[-1]:
path = path[:-1] + [path[-1] - 1]
it = self.get_iter(path)
                while True:
nc = self.iter_n_children(it)
if nc:
it = self.iter_nth_child(it, nc - 1)
else:
break
else:
up = self.iter_parent(it)
if up:
it = up
else:
return
yield it
def get_previous_next_paths(self, path, match_func):
prev_path, next_path = None, None
try:
start_iter = self.get_iter(path)
except ValueError:
# Invalid tree path
return None, None
for it in self.inorder_search_up(start_iter):
if match_func(it):
prev_path = self.get_path(it)
break
for it in self.inorder_search_down(start_iter):
if match_func(it):
next_path = self.get_path(it)
break
return prev_path, next_path
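# Usage sketch (store and predicate are hypothetical): find the nearest rows
# before and after `path` whose first column holds a truthy value.
#   prev_path, next_path = store.get_previous_next_paths(
#       path, lambda it: bool(store.get_value(it, 0)))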
|
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Lock states."""
hass.states.async_set("lock.entity_locked", "locked", {})
hass.states.async_set("lock.entity_unlocked", "unlocked", {})
lock_calls = async_mock_service(hass, "lock", "lock")
unlock_calls = async_mock_service(hass, "lock", "unlock")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("lock.entity_locked", "locked"),
State("lock.entity_unlocked", "unlocked", {}),
],
)
assert len(lock_calls) == 0
assert len(unlock_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("lock.entity_locked", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(lock_calls) == 0
assert len(unlock_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("lock.entity_locked", "unlocked"),
State("lock.entity_unlocked", "locked"),
# Should not raise
State("lock.non_existing", "on"),
],
)
assert len(lock_calls) == 1
assert lock_calls[0].domain == "lock"
assert lock_calls[0].data == {"entity_id": "lock.entity_unlocked"}
assert len(unlock_calls) == 1
assert unlock_calls[0].domain == "lock"
assert unlock_calls[0].data == {"entity_id": "lock.entity_locked"}
|
import logging
from limitlessled import Color
from limitlessled.bridge import Bridge
from limitlessled.group.dimmer import DimmerGroup
from limitlessled.group.rgbw import RgbwGroup
from limitlessled.group.rgbww import RgbwwGroup
from limitlessled.group.white import WhiteGroup
from limitlessled.pipeline import Pipeline
from limitlessled.presets import COLORLOOP
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
EFFECT_COLORLOOP,
EFFECT_WHITE,
FLASH_LONG,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_TYPE, STATE_ON
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util.color import color_hs_to_RGB, color_temperature_mired_to_kelvin
_LOGGER = logging.getLogger(__name__)
CONF_BRIDGES = "bridges"
CONF_GROUPS = "groups"
CONF_NUMBER = "number"
CONF_VERSION = "version"
CONF_FADE = "fade"
DEFAULT_LED_TYPE = "rgbw"
DEFAULT_PORT = 5987
DEFAULT_TRANSITION = 0
DEFAULT_VERSION = 6
DEFAULT_FADE = False
LED_TYPE = ["rgbw", "rgbww", "white", "bridge-led", "dimmer"]
EFFECT_NIGHT = "night"
MIN_SATURATION = 10
WHITE = [0, 0]
SUPPORT_LIMITLESSLED_WHITE = (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_TRANSITION
)
SUPPORT_LIMITLESSLED_DIMMER = SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
SUPPORT_LIMITLESSLED_RGB = (
SUPPORT_BRIGHTNESS
| SUPPORT_EFFECT
| SUPPORT_FLASH
| SUPPORT_COLOR
| SUPPORT_TRANSITION
)
SUPPORT_LIMITLESSLED_RGBWW = (
SUPPORT_BRIGHTNESS
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_FLASH
| SUPPORT_COLOR
| SUPPORT_TRANSITION
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_BRIDGES): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(
CONF_VERSION, default=DEFAULT_VERSION
): cv.positive_int,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Required(CONF_GROUPS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(
CONF_TYPE, default=DEFAULT_LED_TYPE
): vol.In(LED_TYPE),
vol.Required(CONF_NUMBER): cv.positive_int,
vol.Optional(
CONF_FADE, default=DEFAULT_FADE
): cv.boolean,
}
],
),
}
],
)
}
)
def rewrite_legacy(config):
"""Rewrite legacy configuration to new format."""
bridges = config.get(CONF_BRIDGES, [config])
new_bridges = []
for bridge_conf in bridges:
groups = []
if "groups" in bridge_conf:
groups = bridge_conf["groups"]
else:
_LOGGER.warning("Legacy configuration format detected")
for i in range(1, 5):
name_key = "group_%d_name" % i
if name_key in bridge_conf:
groups.append(
{
"number": i,
"type": bridge_conf.get(
"group_%d_type" % i, DEFAULT_LED_TYPE
),
"name": bridge_conf.get(name_key),
}
)
new_bridges.append(
{
"host": bridge_conf.get(CONF_HOST),
"version": bridge_conf.get(CONF_VERSION),
"port": bridge_conf.get(CONF_PORT),
"groups": groups,
}
)
return {"bridges": new_bridges}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the LimitlessLED lights."""
# Two legacy configuration formats are supported to maintain backwards
# compatibility.
config = rewrite_legacy(config)
# Use the expanded configuration format.
lights = []
for bridge_conf in config.get(CONF_BRIDGES):
bridge = Bridge(
bridge_conf.get(CONF_HOST),
port=bridge_conf.get(CONF_PORT, DEFAULT_PORT),
version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION),
)
for group_conf in bridge_conf.get(CONF_GROUPS):
group = bridge.add_group(
group_conf.get(CONF_NUMBER),
group_conf.get(CONF_NAME),
group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE),
)
lights.append(LimitlessLEDGroup(group, {"fade": group_conf[CONF_FADE]}))
add_entities(lights)
def state(new_state):
"""State decorator.
Specify True (turn on) or False (turn off).
"""
def decorator(function):
"""Set up the decorator function."""
def wrapper(self, **kwargs):
"""Wrap a group state change."""
pipeline = Pipeline()
transition_time = DEFAULT_TRANSITION
if self._effect == EFFECT_COLORLOOP:
self.group.stop()
self._effect = None
# Set transition time.
if ATTR_TRANSITION in kwargs:
transition_time = int(kwargs[ATTR_TRANSITION])
# Do group type-specific work.
function(self, transition_time, pipeline, **kwargs)
# Update state.
self._is_on = new_state
self.group.enqueue(pipeline)
self.schedule_update_ha_state()
return wrapper
return decorator
class LimitlessLEDGroup(LightEntity, RestoreEntity):
"""Representation of a LimitessLED group."""
def __init__(self, group, config):
"""Initialize a group."""
if isinstance(group, WhiteGroup):
self._supported = SUPPORT_LIMITLESSLED_WHITE
self._effect_list = [EFFECT_NIGHT]
elif isinstance(group, DimmerGroup):
self._supported = SUPPORT_LIMITLESSLED_DIMMER
self._effect_list = []
elif isinstance(group, RgbwGroup):
self._supported = SUPPORT_LIMITLESSLED_RGB
self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE]
elif isinstance(group, RgbwwGroup):
self._supported = SUPPORT_LIMITLESSLED_RGBWW
self._effect_list = [EFFECT_COLORLOOP, EFFECT_NIGHT, EFFECT_WHITE]
self.group = group
self.config = config
self._is_on = False
self._brightness = None
self._temperature = None
self._color = None
self._effect = None
async def async_added_to_hass(self):
"""Handle entity about to be added to hass event."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if last_state:
self._is_on = last_state.state == STATE_ON
self._brightness = last_state.attributes.get("brightness")
self._temperature = last_state.attributes.get("color_temp")
self._color = last_state.attributes.get("hs_color")
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def assumed_state(self):
"""Return True because unable to access real state of the entity."""
return True
@property
def name(self):
"""Return the name of the group."""
return self.group.name
@property
def is_on(self):
"""Return true if device is on."""
return self._is_on
@property
def brightness(self):
"""Return the brightness property."""
if self._effect == EFFECT_NIGHT:
return 1
return self._brightness
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return 154
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return 370
@property
def color_temp(self):
"""Return the temperature property."""
if self.hs_color is not None:
return None
return self._temperature
@property
def hs_color(self):
"""Return the color property."""
if self._effect == EFFECT_NIGHT:
return None
if self._color is None or self._color[1] == 0:
return None
return self._color
@property
def supported_features(self):
"""Flag supported features."""
return self._supported
@property
def effect(self):
"""Return the current effect for this light."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects for this light."""
return self._effect_list
# pylint: disable=arguments-differ
@state(False)
def turn_off(self, transition_time, pipeline, **kwargs):
"""Turn off a group."""
if self.config[CONF_FADE]:
pipeline.transition(transition_time, brightness=0.0)
pipeline.off()
# pylint: disable=arguments-differ
@state(True)
def turn_on(self, transition_time, pipeline, **kwargs):
"""Turn on (or adjust property of) a group."""
# The night effect does not need a turned on light
if kwargs.get(ATTR_EFFECT) == EFFECT_NIGHT:
if EFFECT_NIGHT in self._effect_list:
pipeline.night_light()
self._effect = EFFECT_NIGHT
return
pipeline.on()
# Set up transition.
args = {}
if self.config[CONF_FADE] and not self.is_on and self._brightness:
args["brightness"] = self.limitlessled_brightness()
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
args["brightness"] = self.limitlessled_brightness()
if ATTR_HS_COLOR in kwargs and self._supported & SUPPORT_COLOR:
self._color = kwargs[ATTR_HS_COLOR]
# White is a special case.
if self._color[1] < MIN_SATURATION:
pipeline.white()
self._color = WHITE
else:
args["color"] = self.limitlessled_color()
if ATTR_COLOR_TEMP in kwargs:
if self._supported & SUPPORT_COLOR:
pipeline.white()
self._color = WHITE
if self._supported & SUPPORT_COLOR_TEMP:
self._temperature = kwargs[ATTR_COLOR_TEMP]
args["temperature"] = self.limitlessled_temperature()
if args:
pipeline.transition(transition_time, **args)
# Flash.
if ATTR_FLASH in kwargs and self._supported & SUPPORT_FLASH:
duration = 0
if kwargs[ATTR_FLASH] == FLASH_LONG:
duration = 1
pipeline.flash(duration=duration)
# Add effects.
if ATTR_EFFECT in kwargs and self._effect_list:
if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP:
self._effect = EFFECT_COLORLOOP
pipeline.append(COLORLOOP)
if kwargs[ATTR_EFFECT] == EFFECT_WHITE:
pipeline.white()
self._color = WHITE
def limitlessled_temperature(self):
"""Convert Home Assistant color temperature units to percentage."""
max_kelvin = color_temperature_mired_to_kelvin(self.min_mireds)
min_kelvin = color_temperature_mired_to_kelvin(self.max_mireds)
width = max_kelvin - min_kelvin
kelvin = color_temperature_mired_to_kelvin(self._temperature)
temperature = (kelvin - min_kelvin) / width
return max(0, min(1, temperature))
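    # Worked example (rounded): with min_mireds=154 and max_mireds=370,
    # max_kelvin ~= 6494 K and min_kelvin ~= 2703 K, so a stored value of
    # 250 mireds maps to (4000 - 2703) / 3791 ~= 0.34 on the 0..1 scale.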
def limitlessled_brightness(self):
"""Convert Home Assistant brightness units to percentage."""
return self._brightness / 255
def limitlessled_color(self):
"""Convert Home Assistant HS list to RGB Color tuple."""
return Color(*color_hs_to_RGB(*tuple(self._color)))
|
import logging
import W800rf32 as w800
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_DEVICES, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, event as evt
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
from . import W800RF32_DEVICE
_LOGGER = logging.getLogger(__name__)
CONF_OFF_DELAY = "off_delay"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICES): {
cv.string: vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_OFF_DELAY): vol.All(
cv.time_period, cv.positive_timedelta
),
}
)
}
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Binary Sensor platform to w800rf32."""
binary_sensors = []
# device_id --> "c1 or a3" X10 device. entity (type dictionary)
# --> name, device_class etc
for device_id, entity in config[CONF_DEVICES].items():
_LOGGER.debug(
"Add %s w800rf32.binary_sensor (class %s)",
entity[CONF_NAME],
entity.get(CONF_DEVICE_CLASS),
)
device = W800rf32BinarySensor(
device_id,
entity.get(CONF_NAME),
entity.get(CONF_DEVICE_CLASS),
entity.get(CONF_OFF_DELAY),
)
binary_sensors.append(device)
add_entities(binary_sensors)
class W800rf32BinarySensor(BinarySensorEntity):
"""A representation of a w800rf32 binary sensor."""
def __init__(self, device_id, name, device_class=None, off_delay=None):
"""Initialize the w800rf32 sensor."""
self._signal = W800RF32_DEVICE.format(device_id)
self._name = name
self._device_class = device_class
self._off_delay = off_delay
self._state = False
self._delay_listener = None
@callback
def _off_delay_listener(self, now):
"""Switch device off after a delay."""
self._delay_listener = None
self.update_state(False)
@property
def name(self):
"""Return the device name."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def device_class(self):
"""Return the sensor class."""
return self._device_class
@property
def is_on(self):
"""Return true if the sensor state is True."""
return self._state
@callback
def binary_sensor_update(self, event):
"""Call for control updates from the w800rf32 gateway."""
if not isinstance(event, w800.W800rf32Event):
return
dev_id = event.device
command = event.command
_LOGGER.debug(
"BinarySensor update (Device ID: %s Command %s ...)", dev_id, command
)
# Update the w800rf32 device state
if command in ("On", "Off"):
is_on = command == "On"
self.update_state(is_on)
if self.is_on and self._off_delay is not None and self._delay_listener is None:
self._delay_listener = evt.async_track_point_in_time(
self.hass, self._off_delay_listener, dt_util.utcnow() + self._off_delay
)
def update_state(self, state):
"""Update the state of the device."""
self._state = state
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Register update callback."""
async_dispatcher_connect(self.hass, self._signal, self.binary_sensor_update)
|
from cerberus import Validator, errors
from cerberus.tests import assert_fail, assert_has_error, assert_normalized
def test_coerce():
assert_normalized(
schema={'amount': {'coerce': int}},
document={'amount': '1'},
expected={'amount': 1},
)
def test_coerce_chain():
drop_prefix = lambda x: x[2:] # noqa: E731
upper = lambda x: x.upper() # noqa: E731
assert_normalized(
schema={'foo': {'coerce': [hex, drop_prefix, upper]}},
document={'foo': 15},
expected={'foo': 'F'},
)
def test_coerce_chain_aborts(validator):
def dont_do_me(value):
raise AssertionError('The coercion chain did not abort after an error.')
validator({'foo': '0'}, schema={'foo': {'coerce': [hex, dont_do_me]}})
assert errors.COERCION_FAILED in validator._errors
def test_coerce_does_not_mutate_input_data():
validator = Validator({'amount': {'coerce': int}})
document = {'amount': '1'}
validator.validate(document)
assert validator.document is not document
def test_coerce_in_allow_unknown():
assert_normalized(
schema={'foo': {'schema': {}, 'allow_unknown': {'coerce': int}}},
document={'foo': {'bar': '0'}},
expected={'foo': {'bar': 0}},
)
def test_coerce_in_items():
schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}}
document = {'things': ['1', 2]}
expected = {'things': [1, '2']}
assert_normalized(document, expected, schema)
validator = Validator(schema)
document['things'].append(3)
assert not validator(document)
assert validator.document['things'] == document['things']
def test_coercion_of_sequence_items_with_float_values(validator):
# https://github.com/pyeve/cerberus/issues/161
assert_normalized(
schema={
'a_list': {'type': 'list', 'itemsrules': {'type': 'float', 'coerce': float}}
},
document={'a_list': [3, 4, 5]},
expected={'a_list': [3.0, 4.0, 5.0]},
validator=validator,
)
def test_coerce_in_itemsrules_with_integer_values():
assert_normalized(
schema={'things': {'type': 'list', 'itemsrules': {'coerce': int}}},
document={'things': ['1', '2', '3']},
expected={'things': [1, 2, 3]},
)
def test_coerce_in_itemsrules_fails(validator):
# https://github.com/pyeve/cerberus/issues/211
schema = {
'data': {'type': 'list', 'itemsrules': {'type': 'integer', 'coerce': int}}
}
document = {'data': ['q']}
assert validator.validated(document, schema) is None
assert (
validator.validated(document, schema, always_return_document=True) == document
) # noqa: W503
def test_coerce_in_keysrules():
# https://github.com/pyeve/cerberus/issues/155
assert_normalized(
schema={
'thing': {'type': 'dict', 'keysrules': {'coerce': int, 'type': 'integer'}}
},
document={'thing': {'5': 'foo'}},
expected={'thing': {5: 'foo'}},
)
def test_coerce_in_schema():
assert_normalized(
schema={'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}},
document={'thing': {'amount': '2'}},
expected={'thing': {'amount': 2}},
)
def test_coerce_in_schema_in_itemsrules():
assert_normalized(
schema={
'things': {
'type': 'list',
'itemsrules': {'type': 'dict', 'schema': {'amount': {'coerce': int}}},
}
},
document={'things': [{'amount': '2'}]},
expected={'things': [{'amount': 2}]},
)
def test_coerce_in_valuesrules():
# https://github.com/pyeve/cerberus/issues/155
assert_normalized(
schema={
'thing': {'type': 'dict', 'valuesrules': {'coerce': int, 'type': 'integer'}}
},
document={'thing': {'amount': '2'}},
expected={'thing': {'amount': 2}},
)
def test_coerce_catches_ValueError():
schema = {'amount': {'coerce': int}}
_errors = assert_fail({'amount': 'not_a_number'}, schema)
_errors[0].info = () # ignore exception message here
assert_has_error(
_errors, 'amount', ('amount', 'coerce'), errors.COERCION_FAILED, int
)
def test_coerce_in_listitems_catches_ValueError():
schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}}
document = {'things': ['not_a_number', 2]}
_errors = assert_fail(document, schema)
_errors[0].info = () # ignore exception message here
assert_has_error(
_errors,
('things', 0),
('things', 'items', 'coerce'),
errors.COERCION_FAILED,
int,
)
def test_coerce_catches_TypeError():
schema = {'name': {'coerce': str.lower}}
_errors = assert_fail({'name': 1234}, schema)
_errors[0].info = () # ignore exception message here
assert_has_error(
_errors, 'name', ('name', 'coerce'), errors.COERCION_FAILED, str.lower
)
def test_coerce_in_listitems_catches_TypeError():
schema = {
'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str.lower}]}
}
document = {'things': ['1', 2]}
_errors = assert_fail(document, schema)
_errors[0].info = () # ignore exception message here
assert_has_error(
_errors,
('things', 1),
('things', 'items', 'coerce'),
errors.COERCION_FAILED,
str.lower,
)
def test_custom_coerce_and_rename():
class MyNormalizer(Validator):
def __init__(self, multiplier, *args, **kwargs):
super().__init__(*args, **kwargs)
self.multiplier = multiplier
def _normalize_coerce_multiply(self, value):
return value * self.multiplier
v = MyNormalizer(2, {'foo': {'coerce': 'multiply'}})
assert v.normalized({'foo': 2})['foo'] == 4
v = MyNormalizer(3, allow_unknown={'rename_handler': 'multiply'})
assert v.normalized({3: None}) == {9: None}
|
import unittest
from kalliope.core.NeuronParameterLoader import NeuronParameterLoader
class TestNeuronParameterLoader(unittest.TestCase):
def test_get_parameters(self):
synapse_order = "this is the {{ sentence }}"
user_order = "this is the value"
expected_result = {'sentence': 'value'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result,
"Fail to retrieve 'the params' of the synapse_order from the order")
# Multiple match
synapse_order = "this is the {{ sentence }}"
user_order = "this is the value with multiple words"
expected_result = {'sentence': 'value with multiple words'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result,
"Fail to retrieve the 'multiple words params' of the synapse_order from the order")
# Multiple params
synapse_order = "this is the {{ sentence }} with multiple {{ params }}"
user_order = "this is the value with multiple words"
expected_result = {'sentence': 'value',
'params': 'words'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result,
"Fail to retrieve the 'multiple params' of the synapse_order from the order")
# Multiple params with multiple words
synapse_order = "this is the {{ sentence }} with multiple {{ params }}"
user_order = "this is the multiple values with multiple values as words"
expected_result = {'sentence': 'multiple values',
'params': 'values as words'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result)
        # params at the beginning of the sentence
synapse_order = "{{ sentence }} this is the sentence"
user_order = "hello world this is the multiple values with multiple values as words"
expected_result = {'sentence': 'hello world'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result)
# all of the sentence is a variable
synapse_order = "{{ sentence }}"
user_order = "this is the all sentence is a variable"
expected_result = {'sentence': 'this is the all sentence is a variable'}
self.assertEqual(NeuronParameterLoader.get_parameters(synapse_order=synapse_order, user_order=user_order),
expected_result)
def test_associate_order_params_to_values(self):
##
# Testing the brackets position behaviour
##
# Success
order_brain = "This is the {{ variable }}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Success
order_brain = "This is the {{variable }}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Success
order_brain = "This is the {{ variable}}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Success
order_brain = "This is the {{variable}}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Fail
order_brain = "This is the {variable}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertNotEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Fail
order_brain = "This is the { variable}}"
order_user = "This is the value"
expected_result = {'variable': 'value'}
self.assertNotEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
##
# Testing the brackets position in the sentence
##
# Success
order_brain = "{{ variable }} This is the"
order_user = "value This is the"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Success
order_brain = "This is {{ variable }} the"
order_user = " This is value the"
expected_result = {'variable': 'value'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
##
# Testing multi variables
##
# Success
order_brain = "This is {{ variable }} the {{ variable2 }}"
order_user = "This is value the value2"
expected_result = {'variable': 'value',
'variable2': 'value2'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
##
# Testing multi words in variable
##
# Success
order_brain = "This is the {{ variable }}"
order_user = "This is the value with multiple words"
expected_result = {'variable': 'value with multiple words'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Success
order_brain = "This is the {{ variable }} and {{ variable2 }}"
order_user = "This is the value with multiple words and second value multiple"
expected_result = {'variable': 'value with multiple words',
'variable2': 'second value multiple'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
##
# Specific Behaviour
##
# Upper/Lower case
order_brain = "This Is The {{ variable }}"
order_user = "ThiS is tHe VAlue"
expected_result = {'variable': 'VAlue'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Upper/Lower case between multiple variables
order_brain = "This Is The {{ variable }} And The {{ variable2 }}"
order_user = "ThiS is tHe VAlue aND tHE vAlUe2"
expected_result = {'variable': 'VAlue',
'variable2': 'vAlUe2'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Upper/Lower case between multiple variables and at the End
order_brain = "This Is The {{ variable }} And The {{ variable2 }} And Again"
order_user = "ThiS is tHe VAlue aND tHE vAlUe2 and aGAIN"
expected_result = {'variable': 'VAlue',
'variable2': 'vAlUe2'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# integers variables
order_brain = "This Is The {{ variable }} And The {{ variable2 }}"
order_user = "ThiS is tHe 1 aND tHE 2"
expected_result = {'variable': '1',
'variable2': '2'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# ##
# # More words in the order brain.
# # /!\ Not working but not needed !
# ##
#
# # more words in the middle of order but matching
# order_brain = "this is the {{ variable }} and the {{ variable2 }}"
# order_user = "this the foo and the bar" # missing "is" but matching because all words are present !
# expected_result = {'variable': 'foo',
# 'variable2': 'bar'}
# self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
# expected_result)
#
# # more words in the beginning of order but matching + bonus with mixed uppercases
# order_brain = "blaBlabla bla This Is The {{ variable }} And The {{ variable2 }}"
# order_user = "ThiS is tHe foo aND tHE bar"
# expected_result = {'variable': 'foo',
# 'variable2': 'bar'}
# self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
# expected_result)
#
# # more words in the end of order but matching + bonus with mixed uppercases
# order_brain = "This Is The bla BLa bla BLa {{ variable }} And The {{ variable2 }}"
# order_user = "ThiS is tHe foo aND tHE bar"
# expected_result = {'variable': 'foo',
# 'variable2': 'bar'}
# self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
# expected_result)
#
# # complex more words in the end of order but matching + bonus with mixed uppercases
# order_brain = "Hi theRe This Is bla BLa The bla BLa {{ variable }} And The {{ variable2 }}"
# order_user = "ThiS is tHe foo aND tHE bar"
# expected_result = {'variable': 'foo',
# 'variable2': 'bar'}
# self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
# expected_result)
#
# # complex more words everywhere in the order but matching + bonus with mixed uppercases
# order_brain = "Hi theRe This Is bla BLa The bla BLa {{ variable }} And Oops The {{ variable2 }} Oopssss"
# order_user = "ThiS is tHe foo aND tHE bar"
# expected_result = {'variable': 'foo',
# 'variable2': 'bar'}
# self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
# expected_result)
#
##
# More words in the user order brain
##
# 1 not matching word in the middle of user order but matching
order_brain = "this the {{ variable }} and the {{ variable2 }}"
order_user = "this is the foo and the bar" # adding "is" but matching because all words are present !
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# 2 not matching words in the middle of user order but matching
order_brain = "this the {{ variable }} and the {{ variable2 }}"
order_user = "this is Fake the foo and the bar"
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# 1 not matching word at the beginning and 1 not matching word in the middle of user order but matching
order_brain = "this the {{ variable }} and the {{ variable2 }}"
order_user = "Oops this is the foo and the bar"
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# 2 not matching words at the beginning and 2 not matching words in the middle of user order but matching
order_brain = "this the {{ variable }} and the {{ variable2 }}"
order_user = "Oops Oops this is BlaBla the foo and the bar"
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Adding complex not matching words in the middle of user order and between variable but matching
order_brain = "this the {{ variable }} and the {{ variable2 }}"
order_user = "Oops Oops this is BlaBla the foo and ploup ploup the bar"
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
# Adding complex not matching words in the middle of user order and between variable and at the end but matching
order_brain = "this the {{ variable }} and the {{ variable2 }} hello"
order_user = "Oops Oops this is BlaBla the foo and ploup ploup the bar hello test"
expected_result = {'variable': 'foo',
'variable2': 'bar'}
self.assertEqual(NeuronParameterLoader._associate_order_params_to_values(order_user, order_brain),
expected_result)
if __name__ == '__main__':
unittest.main()
|
from shop.conf import app_settings
from shop.models.customer import CustomerModel, VisitingCustomer
def customer(request):
"""
Add the customer to the RequestContext
"""
msg = "The request object does not contain a customer. Edit your MIDDLEWARE_CLASSES setting to insert 'shop.middlerware.CustomerMiddleware'."
assert hasattr(request, 'customer'), msg
customer = request.customer
if request.user.is_staff:
try:
customer = CustomerModel.objects.get(pk=request.session['emulate_user_id'])
except CustomerModel.DoesNotExist:
customer = VisitingCustomer()
except (AttributeError, KeyError):
pass
return {'customer': customer}
def shop_settings(request):
"""
Add configuration settings to the context to customize the shop's settings in templates
"""
from rest_auth.app_settings import LoginSerializer
return {
'site_header': app_settings.APP_LABEL.capitalize(),
'EDITCART_NG_MODEL_OPTIONS': app_settings.EDITCART_NG_MODEL_OPTIONS,
'ADD2CART_NG_MODEL_OPTIONS': app_settings.ADD2CART_NG_MODEL_OPTIONS,
'ALLOW_SHORT_SESSIONS': 'stay_logged_in' in LoginSerializer().fields,
'LINK_TO_EMPTY_CART': app_settings.LINK_TO_EMPTY_CART,
}
|
from datetime import timedelta
import logging
import pytest
from homeassistant.components.timer import (
ATTR_DURATION,
CONF_DURATION,
CONF_ICON,
CONF_NAME,
DEFAULT_DURATION,
DOMAIN,
EVENT_TIMER_CANCELLED,
EVENT_TIMER_FINISHED,
EVENT_TIMER_PAUSED,
EVENT_TIMER_RESTARTED,
EVENT_TIMER_STARTED,
SERVICE_CANCEL,
SERVICE_FINISH,
SERVICE_PAUSE,
SERVICE_START,
STATUS_ACTIVE,
STATUS_IDLE,
STATUS_PAUSED,
_format_timedelta,
)
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_ID,
ATTR_NAME,
CONF_ENTITY_ID,
EVENT_STATE_CHANGED,
SERVICE_RELOAD,
)
from homeassistant.core import Context, CoreState
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed
_LOGGER = logging.getLogger(__name__)
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
ATTR_ID: "from_storage",
ATTR_NAME: "timer from storage",
ATTR_DURATION: "0:00:00",
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
async def test_config(hass):
"""Test config."""
invalid_configs = [None, 1, {}, {"name with space": None}]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_config_options(hass):
"""Test configuration options."""
count_start = len(hass.states.async_entity_ids())
_LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())
config = {
DOMAIN: {
"test_1": {},
"test_2": {
CONF_NAME: "Hello World",
CONF_ICON: "mdi:work",
CONF_DURATION: 10,
},
"test_3": None,
}
}
assert await async_setup_component(hass, "timer", config)
await hass.async_block_till_done()
assert count_start + 3 == len(hass.states.async_entity_ids())
await hass.async_block_till_done()
state_1 = hass.states.get("timer.test_1")
state_2 = hass.states.get("timer.test_2")
state_3 = hass.states.get("timer.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is not None
assert STATUS_IDLE == state_1.state
assert ATTR_ICON not in state_1.attributes
assert ATTR_FRIENDLY_NAME not in state_1.attributes
assert STATUS_IDLE == state_2.state
assert "Hello World" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work" == state_2.attributes.get(ATTR_ICON)
assert "0:00:10" == state_2.attributes.get(ATTR_DURATION)
assert STATUS_IDLE == state_3.state
assert str(cv.time_period(DEFAULT_DURATION)) == state_3.attributes.get(
CONF_DURATION
)
async def test_methods_and_events(hass):
"""Test methods and events."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_DURATION: 10}}})
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
results = []
def fake_event_listener(event):
"""Fake event listener for trigger."""
results.append(event)
hass.bus.async_listen(EVENT_TIMER_STARTED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_RESTARTED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_PAUSED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_FINISHED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_CANCELLED, fake_event_listener)
steps = [
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_STARTED},
{"call": SERVICE_PAUSE, "state": STATUS_PAUSED, "event": EVENT_TIMER_PAUSED},
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_RESTARTED},
{"call": SERVICE_CANCEL, "state": STATUS_IDLE, "event": EVENT_TIMER_CANCELLED},
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_STARTED},
{"call": SERVICE_FINISH, "state": STATUS_IDLE, "event": EVENT_TIMER_FINISHED},
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_STARTED},
{"call": SERVICE_PAUSE, "state": STATUS_PAUSED, "event": EVENT_TIMER_PAUSED},
{"call": SERVICE_CANCEL, "state": STATUS_IDLE, "event": EVENT_TIMER_CANCELLED},
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_STARTED},
{"call": SERVICE_START, "state": STATUS_ACTIVE, "event": EVENT_TIMER_RESTARTED},
]
    expected_events = 0
for step in steps:
if step["call"] is not None:
await hass.services.async_call(
DOMAIN, step["call"], {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
if step["state"] is not None:
assert state.state == step["state"]
if step["event"] is not None:
            expected_events += 1
assert results[-1].event_type == step["event"]
            assert len(results) == expected_events
async def test_wait_till_timer_expires(hass):
"""Test for a timer to end."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_DURATION: 10}}})
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
results = []
def fake_event_listener(event):
"""Fake event listener for trigger."""
results.append(event)
hass.bus.async_listen(EVENT_TIMER_STARTED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_PAUSED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_FINISHED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_CANCELLED, fake_event_listener)
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_TIMER_STARTED
assert len(results) == 1
async_fire_time_changed(hass, utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
assert results[-1].event_type == EVENT_TIMER_FINISHED
assert len(results) == 2
async def test_no_initial_state_and_no_restore_state(hass):
"""Ensure that entity is create without initial and restore feature."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_DURATION: 10}}})
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
async def test_config_reload(hass, hass_admin_user, hass_read_only_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
ent_reg = await entity_registry.async_get_registry(hass)
_LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())
config = {
DOMAIN: {
"test_1": {},
"test_2": {
CONF_NAME: "Hello World",
CONF_ICON: "mdi:work",
CONF_DURATION: 10,
},
}
}
assert await async_setup_component(hass, "timer", config)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
await hass.async_block_till_done()
state_1 = hass.states.get("timer.test_1")
state_2 = hass.states.get("timer.test_2")
state_3 = hass.states.get("timer.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is None
assert STATUS_IDLE == state_1.state
assert ATTR_ICON not in state_1.attributes
assert ATTR_FRIENDLY_NAME not in state_1.attributes
assert STATUS_IDLE == state_2.state
assert "Hello World" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work" == state_2.attributes.get(ATTR_ICON)
assert "0:00:10" == state_2.attributes.get(ATTR_DURATION)
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: {
"test_2": {
CONF_NAME: "Hello World reloaded",
CONF_ICON: "mdi:work-reloaded",
CONF_DURATION: 20,
},
"test_3": {},
}
},
):
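        # A read-only user must not be authorized to reload the configuration.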
with pytest.raises(Unauthorized):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_read_only_user.id),
)
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("timer.test_1")
state_2 = hass.states.get("timer.test_2")
state_3 = hass.states.get("timer.test_3")
assert state_1 is None
assert state_2 is not None
assert state_3 is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is not None
assert STATUS_IDLE == state_2.state
assert "Hello World reloaded" == state_2.attributes.get(ATTR_FRIENDLY_NAME)
assert "mdi:work-reloaded" == state_2.attributes.get(ATTR_ICON)
assert "0:00:20" == state_2.attributes.get(ATTR_DURATION)
assert STATUS_IDLE == state_3.state
assert ATTR_ICON not in state_3.attributes
assert ATTR_FRIENDLY_NAME not in state_3.attributes
async def test_timer_restarted_event(hass):
"""Ensure restarted event is called after starting a paused or running timer."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_DURATION: 10}}})
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
results = []
def fake_event_listener(event):
"""Fake event listener for trigger."""
results.append(event)
hass.bus.async_listen(EVENT_TIMER_STARTED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_RESTARTED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_PAUSED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_FINISHED, fake_event_listener)
hass.bus.async_listen(EVENT_TIMER_CANCELLED, fake_event_listener)
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_TIMER_STARTED
assert len(results) == 1
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_TIMER_RESTARTED
assert len(results) == 2
await hass.services.async_call(
DOMAIN, SERVICE_PAUSE, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_PAUSED
assert results[-1].event_type == EVENT_TIMER_PAUSED
assert len(results) == 3
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_TIMER_RESTARTED
assert len(results) == 4
async def test_state_changed_when_timer_restarted(hass):
"""Ensure timer's state changes when it restarted."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_DURATION: 10}}})
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_IDLE
results = []
def fake_event_listener(event):
"""Fake event listener for trigger."""
results.append(event)
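    # Record every state-changed event; restarting should still write a new state.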
hass.bus.async_listen(EVENT_STATE_CHANGED, fake_event_listener)
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_STATE_CHANGED
assert len(results) == 1
await hass.services.async_call(
DOMAIN, SERVICE_START, {CONF_ENTITY_ID: "timer.test1"}
)
await hass.async_block_till_done()
state = hass.states.get("timer.test1")
assert state
assert state.state == STATUS_ACTIVE
assert results[-1].event_type == EVENT_STATE_CHANGED
assert len(results) == 2
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.timer_from_storage")
assert state.state == STATUS_IDLE
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "timer from storage"
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(config={DOMAIN: {"from_yaml": None}})
state = hass.states.get(f"{DOMAIN}.{DOMAIN}_from_storage")
assert state.state == STATUS_IDLE
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "timer from storage"
assert state.attributes.get(ATTR_EDITABLE)
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert not state.attributes.get(ATTR_EDITABLE)
assert state.state == STATUS_IDLE
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(config={DOMAIN: {"from_yaml": None}})
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "timer from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
timer_id = "from_storage"
timer_entity_id = f"{DOMAIN}.{DOMAIN}_{timer_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(timer_entity_id)
assert state is not None
from_reg = ent_reg.async_get_entity_id(DOMAIN, DOMAIN, timer_id)
assert from_reg == timer_entity_id
client = await hass_ws_client(hass)
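    # Delete the storage-backed timer via the websocket API.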
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{timer_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(timer_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, timer_id) is None
async def test_update(hass, hass_ws_client, storage_setup):
"""Test updating timer entity."""
assert await storage_setup()
timer_id = "from_storage"
timer_entity_id = f"{DOMAIN}.{DOMAIN}_{timer_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(timer_entity_id)
assert state.attributes[ATTR_FRIENDLY_NAME] == "timer from storage"
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, timer_id) == timer_entity_id
client = await hass_ws_client(hass)
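    # Change the stored timer's duration via the websocket API.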
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{timer_id}",
CONF_DURATION: 33,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(timer_entity_id)
assert state.attributes[ATTR_DURATION] == _format_timedelta(cv.time_period(33))
async def test_ws_create(hass, hass_ws_client, storage_setup):
"""Test create WS."""
assert await storage_setup(items=[])
timer_id = "new_timer"
timer_entity_id = f"{DOMAIN}.{timer_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(timer_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, timer_id) is None
client = await hass_ws_client(hass)
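    # Create a new timer via the websocket API.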
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/create",
CONF_NAME: "New Timer",
CONF_DURATION: 42,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(timer_entity_id)
assert state.state == STATUS_IDLE
assert state.attributes[ATTR_DURATION] == _format_timedelta(cv.time_period(42))
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, timer_id) == timer_entity_id
async def test_setup_no_config(hass, hass_admin_user):
"""Test component setup with no config."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(hass, DOMAIN, {})
with patch(
"homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start == len(hass.states.async_entity_ids())
|
from dynalite_devices_lib.cover import DynaliteTimeCoverWithTiltDevice
import pytest
from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME
from .common import (
ATTR_ARGS,
ATTR_METHOD,
ATTR_SERVICE,
create_entity_from_device,
create_mock_device,
run_service_tests,
)
@pytest.fixture
def mock_device():
"""Mock a Dynalite device."""
mock_dev = create_mock_device("cover", DynaliteTimeCoverWithTiltDevice)
mock_dev.device_class = "blind"
return mock_dev
async def test_cover_setup(hass, mock_device):
"""Test a successful setup."""
await create_entity_from_device(hass, mock_device)
entity_state = hass.states.get("cover.name")
assert entity_state.attributes[ATTR_FRIENDLY_NAME] == mock_device.name
assert (
entity_state.attributes["current_position"]
== mock_device.current_cover_position
)
assert (
entity_state.attributes["current_tilt_position"]
== mock_device.current_cover_tilt_position
)
assert entity_state.attributes[ATTR_DEVICE_CLASS] == mock_device.device_class
await run_service_tests(
hass,
mock_device,
"cover",
[
{ATTR_SERVICE: "open_cover", ATTR_METHOD: "async_open_cover"},
{ATTR_SERVICE: "close_cover", ATTR_METHOD: "async_close_cover"},
{ATTR_SERVICE: "stop_cover", ATTR_METHOD: "async_stop_cover"},
{
ATTR_SERVICE: "set_cover_position",
ATTR_METHOD: "async_set_cover_position",
ATTR_ARGS: {"position": 50},
},
{ATTR_SERVICE: "open_cover_tilt", ATTR_METHOD: "async_open_cover_tilt"},
{ATTR_SERVICE: "close_cover_tilt", ATTR_METHOD: "async_close_cover_tilt"},
{ATTR_SERVICE: "stop_cover_tilt", ATTR_METHOD: "async_stop_cover_tilt"},
{
ATTR_SERVICE: "set_cover_tilt_position",
ATTR_METHOD: "async_set_cover_tilt_position",
ATTR_ARGS: {"tilt_position": 50},
},
],
)
async def test_cover_without_tilt(hass, mock_device):
"""Test a cover with no tilt."""
mock_device.has_tilt = False
await create_entity_from_device(hass, mock_device)
await hass.services.async_call(
"cover", "open_cover_tilt", {"entity_id": "cover.name"}, blocking=True
)
await hass.async_block_till_done()
mock_device.async_open_cover_tilt.assert_not_called()
async def check_cover_position(
hass, update_func, device, closing, opening, closed, expected
):
"""Check that a given position behaves correctly."""
device.is_closing = closing
device.is_opening = opening
device.is_closed = closed
update_func(device)
await hass.async_block_till_done()
entity_state = hass.states.get("cover.name")
assert entity_state.state == expected
async def test_cover_positions(hass, mock_device):
"""Test that the state updates in the various positions."""
update_func = await create_entity_from_device(hass, mock_device)
await check_cover_position(
hass, update_func, mock_device, True, False, False, "closing"
)
await check_cover_position(
hass, update_func, mock_device, False, True, False, "opening"
)
await check_cover_position(
hass, update_func, mock_device, False, False, True, "closed"
)
await check_cover_position(
hass, update_func, mock_device, False, False, False, "open"
)
|