import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import (
CONF_ADDRESS,
CONF_DEVICE_CLASS,
CONF_NAME,
DEVICE_DEFAULT_NAME,
)
import homeassistant.helpers.config_validation as cv
from . import (
CONF_BOARD,
CONF_CHANNELS,
CONF_I2C_HATS,
CONF_INDEX,
CONF_INVERT_LOGIC,
I2C_HAT_NAMES,
I2C_HATS_MANAGER,
I2CHatsException,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_INVERT_LOGIC = False
DEFAULT_DEVICE_CLASS = None
_CHANNELS_SCHEMA = vol.Schema(
[
{
vol.Required(CONF_INDEX): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): cv.string,
}
]
)
_I2C_HATS_SCHEMA = vol.Schema(
[
{
vol.Required(CONF_BOARD): vol.In(I2C_HAT_NAMES),
vol.Required(CONF_ADDRESS): vol.Coerce(int),
vol.Required(CONF_CHANNELS): _CHANNELS_SCHEMA,
}
]
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_I2C_HATS): _I2C_HATS_SCHEMA}
)
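# The schema above corresponds to a configuration.yaml entry along these
# lines; the board name, I2C address and channel values are illustrative
# placeholders (valid board names come from I2C_HAT_NAMES), not taken from a
# real installation:
#
#   binary_sensor:
#     - platform: raspihats
#       i2c_hats:
#         - board: Di16
#           address: 0x20
#           channels:
#             - index: 0
#               name: "Door"
#               invert_logic: true
#               device_class: door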
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the raspihats binary_sensor devices."""
I2CHatBinarySensor.I2C_HATS_MANAGER = hass.data[I2C_HATS_MANAGER]
binary_sensors = []
i2c_hat_configs = config.get(CONF_I2C_HATS)
for i2c_hat_config in i2c_hat_configs:
address = i2c_hat_config[CONF_ADDRESS]
board = i2c_hat_config[CONF_BOARD]
try:
I2CHatBinarySensor.I2C_HATS_MANAGER.register_board(board, address)
for channel_config in i2c_hat_config[CONF_CHANNELS]:
binary_sensors.append(
I2CHatBinarySensor(
address,
channel_config[CONF_INDEX],
channel_config[CONF_NAME],
channel_config[CONF_INVERT_LOGIC],
channel_config[CONF_DEVICE_CLASS],
)
)
except I2CHatsException as ex:
_LOGGER.error(
"Failed to register %s I2CHat@%s %s", board, hex(address), str(ex)
)
add_entities(binary_sensors)
class I2CHatBinarySensor(BinarySensorEntity):
"""Representation of a binary sensor that uses a I2C-HAT digital input."""
I2C_HATS_MANAGER = None
def __init__(self, address, channel, name, invert_logic, device_class):
"""Initialize the raspihats sensor."""
self._address = address
self._channel = channel
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._device_class = device_class
self._state = self.I2C_HATS_MANAGER.read_di(self._address, self._channel)
def online_callback():
"""Call fired when board is online."""
self.schedule_update_ha_state()
self.I2C_HATS_MANAGER.register_online_callback(
self._address, self._channel, online_callback
)
def edge_callback(state):
"""Read digital input state."""
self._state = state
self.schedule_update_ha_state()
self.I2C_HATS_MANAGER.register_di_callback(
self._address, self._channel, edge_callback
)
@property
def device_class(self):
"""Return the class of this sensor."""
return self._device_class
@property
def name(self):
"""Return the name of this sensor."""
return self._name
@property
def should_poll(self):
"""No polling needed for this sensor."""
return False
@property
def is_on(self):
"""Return the state of this sensor."""
return self._state != self._invert_logic
|
class cursor(object):
    """Context manager that yields a database cursor and closes it on exit."""
def __init__(self, conn):
self.conn = conn
def __enter__(self):
self.cursor = self.conn.cursor()
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
self.cursor.close()
class mod_cursor(object):
    """Cursor context manager for modifying statements: commits on success,
    rolls back on any exception, then closes the cursor."""
def __init__(self, conn):
self.conn = conn
def __enter__(self):
self.cursor = self.conn.cursor()
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self.conn.commit()
else:
self.conn.rollback()
self.cursor.close()
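# Minimal usage sketch (assumes a DB-API connection object ``conn``, e.g. from
# sqlite3 or psycopg2; the table and SQL are illustrative placeholders):
#
#   with cursor(conn) as cur:
#       cur.execute("SELECT count(*) FROM items")
#
#   with mod_cursor(conn) as cur:
#       cur.execute("UPDATE items SET qty = qty - 1 WHERE id = ?", (item_id,))
#   # mod_cursor commits if the block succeeds and rolls back if it raises.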
|
import logging
from typing import List
from typing import Mapping
from typing import Optional
import service_configuration_lib
from paasta_tools.kubernetes_tools import sanitise_kubernetes_name
from paasta_tools.kubernetes_tools import sanitised_cr_name
from paasta_tools.long_running_service_tools import LongRunningServiceConfig
from paasta_tools.long_running_service_tools import LongRunningServiceConfigDict
from paasta_tools.utils import BranchDictV2
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import deep_merge_dictionaries
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import InvalidJobNameError
from paasta_tools.utils import load_service_instance_config
from paasta_tools.utils import load_v2_deployments_json
KUBERNETES_NAMESPACE = "paasta-cassandraclusters"
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
class CassandraClusterDeploymentConfigDict(LongRunningServiceConfigDict, total=False):
bounce_margin_factor: float
replicas: int
class CassandraClusterDeploymentConfig(LongRunningServiceConfig):
config_dict: CassandraClusterDeploymentConfigDict
config_filename_prefix = "cassandracluster"
def __init__(
self,
service: str,
cluster: str,
instance: str,
config_dict: CassandraClusterDeploymentConfigDict,
branch_dict: Optional[BranchDictV2],
soa_dir: str = DEFAULT_SOA_DIR,
) -> None:
super().__init__(
cluster=cluster,
instance=instance,
service=service,
soa_dir=soa_dir,
config_dict=config_dict,
branch_dict=branch_dict,
)
def get_service_name_smartstack(self) -> str:
"""
To support apollo we always register in
cassandra_<cluster>.main
"""
return "cassandra_" + self.get_instance()
def get_nerve_namespace(self) -> str:
"""
To support apollo we always register in
cassandra_<cluster>.main
"""
return "main"
def get_registrations(self) -> List[str]:
"""
To support apollo we always register in
cassandra_<cluster>.main
"""
registrations = self.config_dict.get("registrations", [])
for registration in registrations:
try:
decompose_job_id(registration)
except InvalidJobNameError:
log.error(
"Provided registration {} for service "
"{} is invalid".format(registration, self.service)
)
return registrations or [
compose_job_id(self.get_service_name_smartstack(), "main")
]
def get_kubernetes_namespace(self) -> str:
return KUBERNETES_NAMESPACE
def get_instances(self, with_limit: bool = True) -> int:
return self.config_dict.get("replicas", 1)
def get_bounce_method(self) -> str:
"""
This isn't really true since we use the StatefulSet RollingUpdate strategy
However for the paasta-api we need to map to a paasta bounce method and
crossover is the closest
"""
return "crossover"
def get_bounce_margin_factor(self) -> float:
return self.config_dict.get("bounce_margin_factor", 1.0)
def get_sanitised_service_name(self) -> str:
return sanitise_kubernetes_name(self.get_service())
def get_sanitised_instance_name(self) -> str:
return sanitise_kubernetes_name(self.get_instance())
def get_sanitised_deployment_name(self) -> str:
return self.get_sanitised_instance_name()
def validate(
self,
params: List[str] = [
"cpus",
"security",
"dependencies_reference",
"deploy_group",
],
) -> List[str]:
# Use InstanceConfig to validate shared config keys like cpus and mem
# TODO: add mem back to this list once we fix PAASTA-15582 and
# move to using the same units as flink/marathon etc.
error_msgs = super().validate(params=params)
if error_msgs:
name = self.get_instance()
return [f"{name}: {msg}" for msg in error_msgs]
else:
return []
def load_cassandracluster_instance_config(
service: str,
instance: str,
cluster: str,
load_deployments: bool = True,
soa_dir: str = DEFAULT_SOA_DIR,
) -> CassandraClusterDeploymentConfig:
"""Read a service instance's configuration for CassandraCluster.
If a branch isn't specified for a config, the 'branch' key defaults to
paasta-${cluster}.${instance}.
:param service: The service name
:param instance: The instance of the service to retrieve
:param cluster: The cluster to read the configuration for
:param load_deployments: A boolean indicating if the corresponding deployments.json for this service
should also be loaded
:param soa_dir: The SOA configuration directory to read from
    :returns: A CassandraClusterDeploymentConfig built from the merged configuration"""
general_config = service_configuration_lib.read_service_configuration(
service, soa_dir=soa_dir
)
instance_config = load_service_instance_config(
service, instance, "cassandracluster", cluster, soa_dir=soa_dir
)
general_config = deep_merge_dictionaries(
overrides=instance_config, defaults=general_config
)
branch_dict: Optional[BranchDictV2] = None
if load_deployments:
deployments_json = load_v2_deployments_json(service, soa_dir=soa_dir)
temp_instance_config = CassandraClusterDeploymentConfig(
service=service,
cluster=cluster,
instance=instance,
config_dict=general_config,
branch_dict=None,
soa_dir=soa_dir,
)
branch = temp_instance_config.get_branch()
deploy_group = temp_instance_config.get_deploy_group()
branch_dict = deployments_json.get_branch_dict(service, branch, deploy_group)
return CassandraClusterDeploymentConfig(
service=service,
cluster=cluster,
instance=instance,
config_dict=general_config,
branch_dict=branch_dict,
soa_dir=soa_dir,
)
# TODO: read this from CRD in service configs
def cr_id(service: str, instance: str) -> Mapping[str, str]:
return dict(
group="yelp.com",
version="v1alpha1",
namespace="paasta-cassandraclusters",
plural="cassandraclusters",
name=sanitised_cr_name(service, instance),
)
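# Usage sketch (service/instance/cluster names are placeholders): loading a
# config merges the service's general configuration with the instance entry
# from its cassandracluster config file, and cr_id() yields the coordinates
# used to address the corresponding custom resource in Kubernetes.
#
#   config = load_cassandracluster_instance_config(
#       service="example-service", instance="main", cluster="example-cluster",
#   )
#   config.get_instances()            # "replicas" from the config, default 1
#   cr_id("example-service", "main")  # group/version/namespace/plural/name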
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from diamond.collector import Collector
from cassandra_jolokia import CassandraJolokiaCollector
##########################################################################
class TestCassandraJolokiaCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('CassandraJolokiaCollector', {})
self.collector = CassandraJolokiaCollector(config, None)
# Used for all the tests so the expected numbers are all the same.
def fixture_a(self):
values = [0] * 92
values[30:56] = [3, 3, 1, 1, 8, 5, 6, 1, 6, 5, 3, 8, 9, 10, 7, 8, 7, 5,
5, 5, 3, 3, 2, 2, 2]
return values
def empty_fixture_values(self):
return [0] * 91
def expected_fixture_a_p(self, percentile_key):
return {
'p25': 192.0,
'p50': 398.0,
'p75': 824.0,
'p95': 2050.0,
'p99': 2952.0
}[percentile_key]
def test_import(self):
self.assertTrue(CassandraJolokiaCollector)
def test_should_compute_percentiles_accurately(self):
ninety_offsets = self.collector.create_offsets(90)
percentile_value = self.collector.compute_percentile(
ninety_offsets, self.fixture_a(), 50)
self.assertEqual(percentile_value, 398.0)
def test_should_compute_percentiles_accurately_when_empty(self):
ninety_offsets = self.collector.create_offsets(90)
self.assertEqual(self.collector.compute_percentile(
ninety_offsets, self.empty_fixture_values(), 50), 0.0)
self.assertEqual(self.collector.compute_percentile(
ninety_offsets, self.empty_fixture_values(), 95), 0.0)
self.assertEqual(self.collector.compute_percentile(
ninety_offsets, self.empty_fixture_values(), 99), 0.0)
@patch.object(Collector, 'publish')
def test_should_not_collect_non_histogram_attributes(self, publish_mock):
self.collector.interpret_bean_with_list(
'RecentReadLatencyMicros', self.fixture_a())
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_should_collect_metrics_histogram_attributes(self, publish_mock):
self.collector.interpret_bean_with_list(
'RecentReadLatencyHistogramMicros', self.fixture_a())
self.assertPublishedMany(publish_mock, {
'RecentReadLatencyHistogramMicros.p50':
self.expected_fixture_a_p('p50'),
'RecentReadLatencyHistogramMicros.p95':
self.expected_fixture_a_p('p95'),
'RecentReadLatencyHistogramMicros.p99':
self.expected_fixture_a_p('p99')
})
@patch.object(Collector, 'publish')
# db:columnfamily=HintsColumnFamily,keyspace=system,type=ColumnFamilies:
def test_should_escape_histogram_attributes(self, publish_mock):
test_bean = ','.join([
'db:columnfamily=HintsColumnFamily',
'keyspace=system',
'type=ColumnFamilies:RecentReadLatencyHistogramMicros'
])
self.collector.interpret_bean_with_list(test_bean, self.fixture_a())
expected_base = '.'.join([
'db.columnfamily_HintsColumnFamily',
'keyspace_system',
'type_ColumnFamilies',
'RecentReadLatencyHistogramMicros'
])
self.assertPublishedMany(publish_mock, {
'.'.join([expected_base, 'p50']): self.expected_fixture_a_p('p50'),
'.'.join([expected_base, 'p95']): self.expected_fixture_a_p('p95'),
'.'.join([expected_base, 'p99']): self.expected_fixture_a_p('p99')
})
@patch.object(Collector, 'publish')
def test_should_respect_percentiles_config(self, publish_mock):
self.collector.update_config({
'percentiles': ['25', '75']
})
self.collector.interpret_bean_with_list(
'RecentReadLatencyHistogramMicros', self.fixture_a())
self.assertPublishedMany(publish_mock, {
'RecentReadLatencyHistogramMicros.p25':
self.expected_fixture_a_p('p25'),
'RecentReadLatencyHistogramMicros.p75':
self.expected_fixture_a_p('p75'),
})
@patch.object(Collector, 'publish')
def test_should_respect_histogram_regex_config(self, publish_mock):
self.collector.update_config({
'histogram_regex': '^WackyMetric'
})
self.collector.interpret_bean_with_list(
'WackyMetricSeventeen', self.fixture_a())
self.assertPublishedMany(publish_mock, {
'WackyMetricSeventeen.p50':
self.expected_fixture_a_p('p50'),
'WackyMetricSeventeen.p95':
self.expected_fixture_a_p('p95'),
'WackyMetricSeventeen.p99':
self.expected_fixture_a_p('p99')
})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from datetime import timedelta
import logging
from swisshydrodata import SwissHydroData
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by the Swiss Federal Office for the Environment FOEN"
ATTR_DELTA_24H = "delta-24h"
ATTR_MAX_1H = "max-1h"
ATTR_MAX_24H = "max-24h"
ATTR_MEAN_1H = "mean-1h"
ATTR_MEAN_24H = "mean-24h"
ATTR_MIN_1H = "min-1h"
ATTR_MIN_24H = "min-24h"
ATTR_PREVIOUS_24H = "previous-24h"
ATTR_STATION = "station"
ATTR_STATION_UPDATE = "station_update"
ATTR_WATER_BODY = "water_body"
ATTR_WATER_BODY_TYPE = "water_body_type"
CONF_STATION = "station"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SENSOR_DISCHARGE = "discharge"
SENSOR_LEVEL = "level"
SENSOR_TEMPERATURE = "temperature"
CONDITIONS = {
SENSOR_DISCHARGE: "mdi:waves",
SENSOR_LEVEL: "mdi:zodiac-aquarius",
SENSOR_TEMPERATURE: "mdi:oil-temperature",
}
CONDITION_DETAILS = [
ATTR_DELTA_24H,
ATTR_MAX_1H,
ATTR_MAX_24H,
ATTR_MEAN_1H,
ATTR_MEAN_24H,
ATTR_MIN_1H,
ATTR_MIN_24H,
ATTR_PREVIOUS_24H,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION): vol.Coerce(int),
vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_TEMPERATURE]): vol.All(
cv.ensure_list, [vol.In(CONDITIONS)]
),
}
)
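# The schema above corresponds to a configuration.yaml entry along these
# lines; the station id is a placeholder for a numeric FOEN station number
# and the platform key is assumed to match this integration's domain:
#
#   sensor:
#     - platform: swiss_hydrological_data
#       station: 2143
#       monitored_conditions:
#         - temperature
#         - level
#         - discharge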
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Swiss hydrological sensor."""
station = config.get(CONF_STATION)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
hydro_data = HydrologicalData(station)
hydro_data.update()
if hydro_data.data is None:
_LOGGER.error("The station doesn't exists: %s", station)
return
entities = []
for condition in monitored_conditions:
entities.append(SwissHydrologicalDataSensor(hydro_data, station, condition))
add_entities(entities, True)
class SwissHydrologicalDataSensor(Entity):
"""Implementation of a Swiss hydrological sensor."""
def __init__(self, hydro_data, station, condition):
"""Initialize the Swiss hydrological sensor."""
self.hydro_data = hydro_data
self._condition = condition
self._data = self._state = self._unit_of_measurement = None
self._icon = CONDITIONS[condition]
self._station = station
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._data["water-body-name"], self._condition)
@property
def unique_id(self) -> str:
"""Return a unique, friendly identifier for this entity."""
return f"{self._station}_{self._condition}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self._state is not None:
return self.hydro_data.data["parameters"][self._condition]["unit"]
return None
@property
def state(self):
"""Return the state of the sensor."""
if isinstance(self._state, (int, float)):
return round(self._state, 2)
return None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attrs = {}
if not self._data:
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
return attrs
attrs[ATTR_WATER_BODY_TYPE] = self._data["water-body-type"]
attrs[ATTR_STATION] = self._data["name"]
attrs[ATTR_STATION_UPDATE] = self._data["parameters"][self._condition][
"datetime"
]
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
for entry in CONDITION_DETAILS:
attrs[entry.replace("-", "_")] = self._data["parameters"][self._condition][
entry
]
return attrs
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
def update(self):
"""Get the latest data and update the state."""
self.hydro_data.update()
self._data = self.hydro_data.data
if self._data is None:
self._state = None
else:
self._state = self._data["parameters"][self._condition]["value"]
class HydrologicalData:
"""The Class for handling the data retrieval."""
def __init__(self, station):
"""Initialize the data object."""
self.station = station
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data."""
shd = SwissHydroData()
self.data = shd.get_station(self.station)
|
import logging
import collections
from .common import *
logger = logging.getLogger(__name__)
class _Usage(object):
dict_ = {}
def __init__(self, dict_):
self.dict_ = dict_
@staticmethod
def format_line(type_, count, size):
return '{0:10} {1:7}, {2:>6} {3:3}\n'.format(type_ + ':', count, *size)
def __str__(self):
str_ = ''
try:
sum_count = 0
sum_bytes = 0
for key in self.dict_.keys():
if not isinstance(self.dict_[key], dict):
continue
sum_count += self.dict_[key]['total']['count']
sum_bytes += self.dict_[key]['total']['bytes']
types = collections.OrderedDict([('Documents', 'doc'),
('Other', 'other'),
('Photos', 'photo'),
('Videos', 'video')])
total_count = 0
total_bytes = 0
for desc in types:
t = types[desc]
type_usage = self.dict_[t]['total']
type_count = type_usage['count']
type_bytes = type_usage['bytes']
total_count += type_count
total_bytes += type_bytes
str_ += _Usage.format_line(desc, type_count, _Usage.file_size_pair(type_bytes))
str_ += _Usage.format_line('Total', total_count, _Usage.file_size_pair(total_bytes))
except KeyError:
logger.warning('Invalid usage JSON string.')
return str_
@staticmethod
    def file_size_pair(num: int, suffix='B') -> tuple:
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return '%3.1f' % num, '%s%s' % (unit, suffix)
num /= 1024.0
return '%.1f' % num, '%s%s' % ('Yi', suffix)
class AccountMixin(object):
def get_account_info(self) -> dict:
"""Gets account status [ACTIVE, ...?] and terms of use version."""
r = self.BOReq.get(self.metadata_url + 'account/info')
return r.json()
    def get_account_usage(self) -> _Usage:
r = self.BOReq.get(self.metadata_url + 'account/usage')
if r.status_code not in OK_CODES:
raise RequestError(r.status_code, r.text)
return _Usage(r.json())
def get_quota(self) -> dict:
r = self.BOReq.get(self.metadata_url + 'account/quota')
if r.status_code not in OK_CODES:
raise RequestError(r.status_code, r.text)
return r.json()
def fs_sizes(self) -> tuple:
""":returns tuple: total and free space"""
q = self.get_quota()
return q.get('quota', 0), q.get('available', 0)
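# Usage sketch: AccountMixin is meant to be mixed into a client class that
# provides ``BOReq`` and ``metadata_url`` (the attributes used above).
# Assuming such a client instance ``acd``:
#
#   total, free = acd.fs_sizes()        # quota and available bytes
#   print(acd.get_account_usage())      # _Usage.__str__ renders a summary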
|
from unittest.mock import Mock, PropertyMock, patch
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.spc import DATA_API
from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED
from tests.common import mock_coro
async def test_valid_device_config(hass, monkeypatch):
"""Test valid device config."""
config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}}
with patch(
"homeassistant.components.spc.SpcWebGateway.async_load_parameters",
return_value=mock_coro(True),
):
assert await async_setup_component(hass, "spc", config) is True
async def test_invalid_device_config(hass, monkeypatch):
"""Test valid device config."""
config = {"spc": {"api_url": "http://localhost/"}}
with patch(
"homeassistant.components.spc.SpcWebGateway.async_load_parameters",
return_value=mock_coro(True),
):
assert await async_setup_component(hass, "spc", config) is False
async def test_update_alarm_device(hass):
"""Test that alarm panel state changes on incoming websocket data."""
import pyspcwebgw
from pyspcwebgw.const import AreaMode
config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}}
area_mock = Mock(
spec=pyspcwebgw.area.Area,
id="1",
mode=AreaMode.FULL_SET,
last_changed_by="Sven",
)
area_mock.name = "House"
area_mock.verified_alarm = False
with patch(
"homeassistant.components.spc.SpcWebGateway.areas", new_callable=PropertyMock
) as mock_areas:
mock_areas.return_value = {"1": area_mock}
with patch(
"homeassistant.components.spc.SpcWebGateway.async_load_parameters",
return_value=mock_coro(True),
):
assert await async_setup_component(hass, "spc", config) is True
await hass.async_block_till_done()
entity_id = "alarm_control_panel.house"
assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY
assert hass.states.get(entity_id).attributes["changed_by"] == "Sven"
area_mock.mode = AreaMode.UNSET
area_mock.last_changed_by = "Anna"
await hass.data[DATA_API]._async_callback(area_mock)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED
assert hass.states.get(entity_id).attributes["changed_by"] == "Anna"
|
from yandextank.common.util import get_test_path
from yandextank.plugins.Telegraf.config import ConfigManager, AgentConfig
import os
from configparser import RawConfigParser
class TestConfigManager(object):
def test_rawxml_parse(self):
""" raw xml read from string """
manager = ConfigManager()
config = """
<Monitoring>
<Host>
<CPU feature="passed"/>
</Host>
</Monitoring>
"""
etree = manager.parse_xml(config)
host = etree.findall('Host')[0]
assert (host[0].tag == 'CPU')
def test_xml_old_parse(self):
""" old-style monitoring xml parse """
manager = ConfigManager()
configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/old_mon.xml'), 'sometargethint')
assert (
configs[0]['host'] == 'somehost.yandex.tld'
and configs[0]['host_config']['CPU']['name'] == '[inputs.cpu]')
def test_xml_telegraf_parse(self):
""" telegraf-style monitoring xml parse """
manager = ConfigManager()
configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/telegraf_mon.xml'),
'sometargethint')
assert (
configs[0]['host'] == 'somehost.yandex.tld'
and configs[0]['host_config']['CPU']['name'] == '[inputs.cpu]')
def test_target_hint(self):
""" test target hint (special address=[target] option) """
manager = ConfigManager()
configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/target_hint.xml'),
'somehost.yandex.tld')
assert (configs[0]['host'] == 'somehost.yandex.tld')
class TestAgentConfig(object):
def test_create_startup_configs(self):
""" test agent config creates startup config """
manager = ConfigManager()
telegraf_configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/telegraf_mon.xml'),
'sometargethint')
agent_config = AgentConfig(telegraf_configs[0], False)
startup = agent_config.create_startup_config()
cfg_parser = RawConfigParser(strict=False)
cfg_parser.read(startup)
assert cfg_parser.has_section('startup')
def test_create_collector_configs(self):
""" test agent config creates collector config """
manager = ConfigManager()
telegraf_configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/telegraf_mon.xml'),
'sometargethint')
agent_config = AgentConfig(telegraf_configs[0], False)
remote_workdir = '/path/to/workdir/temp'
collector_config = agent_config.create_collector_config(remote_workdir)
cfg_parser = RawConfigParser(strict=False)
cfg_parser.read(collector_config)
assert (
cfg_parser.has_section('agent')
and cfg_parser.get('agent', 'interval') == "'1s'"
and cfg_parser.has_section('[outputs.file')
and cfg_parser.get('[outputs.file', 'files')
== "['{rmt}/monitoring.rawdata']".format(rmt=remote_workdir))
def test_create_custom_exec_script(self):
""" test agent config creates custom_exec config """
manager = ConfigManager()
telegraf_configs = manager.getconfig(
os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/telegraf_mon.xml'),
'sometargethint')
agent_config = AgentConfig(telegraf_configs[0], False)
custom_exec_config = agent_config.create_custom_exec_script()
with open(custom_exec_config, 'r') as custom_fname:
data = custom_fname.read()
assert (data.find("-0) curl -s 'http://localhost:6100/stat'") != -1)
|
import logging
from pyhap.const import CATEGORY_FAN
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_SPEED,
ATTR_SPEED_LIST,
DIRECTION_FORWARD,
DIRECTION_REVERSE,
DOMAIN,
SERVICE_OSCILLATE,
SERVICE_SET_DIRECTION,
SERVICE_SET_SPEED,
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_ACTIVE,
CHAR_ROTATION_DIRECTION,
CHAR_ROTATION_SPEED,
CHAR_SWING_MODE,
SERV_FANV2,
)
from .util import HomeKitSpeedMapping
_LOGGER = logging.getLogger(__name__)
@TYPES.register("Fan")
class Fan(HomeAccessory):
"""Generate a Fan accessory for a fan entity.
Currently supports: state, speed, oscillate, direction.
"""
def __init__(self, *args):
"""Initialize a new Light accessory object."""
super().__init__(*args, category=CATEGORY_FAN)
chars = []
state = self.hass.states.get(self.entity_id)
features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if features & SUPPORT_DIRECTION:
chars.append(CHAR_ROTATION_DIRECTION)
if features & SUPPORT_OSCILLATE:
chars.append(CHAR_SWING_MODE)
if features & SUPPORT_SET_SPEED:
speed_list = self.hass.states.get(self.entity_id).attributes.get(
ATTR_SPEED_LIST
)
self.speed_mapping = HomeKitSpeedMapping(speed_list)
chars.append(CHAR_ROTATION_SPEED)
serv_fan = self.add_preload_service(SERV_FANV2, chars)
self.char_active = serv_fan.configure_char(CHAR_ACTIVE, value=0)
self.char_direction = None
self.char_speed = None
self.char_swing = None
if CHAR_ROTATION_DIRECTION in chars:
self.char_direction = serv_fan.configure_char(
CHAR_ROTATION_DIRECTION, value=0
)
if CHAR_ROTATION_SPEED in chars:
# Initial value is set to 100 because 0 is a special value (off). 100 is
# an arbitrary non-zero value. It is updated immediately by async_update_state
# to set to the correct initial value.
self.char_speed = serv_fan.configure_char(CHAR_ROTATION_SPEED, value=100)
if CHAR_SWING_MODE in chars:
self.char_swing = serv_fan.configure_char(CHAR_SWING_MODE, value=0)
self.async_update_state(state)
serv_fan.setter_callback = self._set_chars
def _set_chars(self, char_values):
_LOGGER.debug("Fan _set_chars: %s", char_values)
if CHAR_ACTIVE in char_values:
if char_values[CHAR_ACTIVE]:
                # If the device supports set speed, we do not want
                # to turn on first, as that would take the fan to
                # 100% before applying the desired speed.
#
# Setting the speed will take care of turning
# on the fan if SUPPORT_SET_SPEED is set.
if not self.char_speed or CHAR_ROTATION_SPEED not in char_values:
self.set_state(1)
else:
                # It's off; nothing more to do, as setting the
# other chars will likely turn it back on which
# is what we want to avoid
self.set_state(0)
return
if CHAR_SWING_MODE in char_values:
self.set_oscillating(char_values[CHAR_SWING_MODE])
if CHAR_ROTATION_DIRECTION in char_values:
self.set_direction(char_values[CHAR_ROTATION_DIRECTION])
# We always do this LAST to ensure they
# get the speed they asked for
if CHAR_ROTATION_SPEED in char_values:
self.set_speed(char_values[CHAR_ROTATION_SPEED])
def set_state(self, value):
"""Set state if call came from HomeKit."""
_LOGGER.debug("%s: Set state to %d", self.entity_id, value)
service = SERVICE_TURN_ON if value == 1 else SERVICE_TURN_OFF
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
def set_direction(self, value):
"""Set state if call came from HomeKit."""
_LOGGER.debug("%s: Set direction to %d", self.entity_id, value)
direction = DIRECTION_REVERSE if value == 1 else DIRECTION_FORWARD
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_DIRECTION: direction}
self.call_service(DOMAIN, SERVICE_SET_DIRECTION, params, direction)
def set_oscillating(self, value):
"""Set state if call came from HomeKit."""
_LOGGER.debug("%s: Set oscillating to %d", self.entity_id, value)
oscillating = value == 1
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_OSCILLATING: oscillating}
self.call_service(DOMAIN, SERVICE_OSCILLATE, params, oscillating)
def set_speed(self, value):
"""Set state if call came from HomeKit."""
_LOGGER.debug("%s: Set speed to %d", self.entity_id, value)
speed = self.speed_mapping.speed_to_states(value)
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_SPEED: speed}
self.call_service(DOMAIN, SERVICE_SET_SPEED, params, speed)
@callback
def async_update_state(self, new_state):
"""Update fan after state change."""
# Handle State
state = new_state.state
if state in (STATE_ON, STATE_OFF):
self._state = 1 if state == STATE_ON else 0
if self.char_active.value != self._state:
self.char_active.set_value(self._state)
# Handle Direction
if self.char_direction is not None:
direction = new_state.attributes.get(ATTR_DIRECTION)
if direction in (DIRECTION_FORWARD, DIRECTION_REVERSE):
hk_direction = 1 if direction == DIRECTION_REVERSE else 0
if self.char_direction.value != hk_direction:
self.char_direction.set_value(hk_direction)
# Handle Speed
if self.char_speed is not None and state != STATE_OFF:
# We do not change the homekit speed when turning off
# as it will clear the restore state
speed = new_state.attributes.get(ATTR_SPEED)
hk_speed_value = self.speed_mapping.speed_to_homekit(speed)
if hk_speed_value is not None and self.char_speed.value != hk_speed_value:
# If the homeassistant component reports its speed as the first entry
# in its speed list but is not off, the hk_speed_value is 0. But 0
# is a special value in homekit. When you turn on a homekit accessory
# it will try to restore the last rotation speed state which will be
# the last value saved by char_speed.set_value. But if it is set to
# 0, HomeKit will update the rotation speed to 100 as it thinks 0 is
# off.
#
# Therefore, if the hk_speed_value is 0 and the device is still on,
# the rotation speed is mapped to 1 otherwise the update is ignored
# in order to avoid this incorrect behavior.
if hk_speed_value == 0 and state == STATE_ON:
hk_speed_value = 1
if self.char_speed.value != hk_speed_value:
self.char_speed.set_value(hk_speed_value)
# Handle Oscillating
if self.char_swing is not None:
oscillating = new_state.attributes.get(ATTR_OSCILLATING)
if isinstance(oscillating, bool):
hk_oscillating = 1 if oscillating else 0
if self.char_swing.value != hk_oscillating:
self.char_swing.set_value(hk_oscillating)
|
from inspect import isgenerator
from collections import namedtuple
import numpy as np
from scipy import linalg, sparse
from ..source_estimate import SourceEstimate
from ..epochs import BaseEpochs
from ..evoked import Evoked, EvokedArray
from ..utils import logger, _reject_data_segments, warn, fill_doc
from ..io.pick import pick_types, pick_info, _picks_to_idx
def linear_regression(inst, design_matrix, names=None):
"""Fit Ordinary Least Squares regression (OLS).
Parameters
----------
inst : instance of Epochs | iterable of SourceEstimate
The data to be regressed. Contains all the trials, sensors, and time
points for the regression. For Source Estimates, accepts either a list
or a generator object.
design_matrix : ndarray, shape (n_observations, n_regressors)
The regressors to be used. Must be a 2d array with as many rows as
the first dimension of the data. The first column of this matrix will
typically consist of ones (intercept column).
names : array-like | None
        Optional parameter to name the regressors. If provided, its length
        must match the number of columns in ``design_matrix`` (including the
        intercept, if present). Otherwise the default names are
        x0, x1, x2...xn for n regressors.
Returns
-------
results : dict of namedtuple
For each regressor (key) a namedtuple is provided with the
following attributes:
beta : regression coefficients
stderr : standard error of regression coefficients
t_val : t statistics (beta / stderr)
p_val : two-sided p-value of t statistic under the t distribution
mlog10_p_val : -log10 transformed p-value.
The tuple members are numpy arrays. The shape of each numpy array is
the shape of the data minus the first dimension; e.g., if the shape of
the original data was (n_observations, n_channels, n_timepoints),
then the shape of each of the arrays will be
(n_channels, n_timepoints).
"""
if names is None:
names = ['x%i' % i for i in range(design_matrix.shape[1])]
if isinstance(inst, BaseEpochs):
picks = pick_types(inst.info, meg=True, eeg=True, ref_meg=True,
stim=False, eog=False, ecg=False,
emg=False, exclude=['bads'])
if [inst.ch_names[p] for p in picks] != inst.ch_names:
warn('Fitting linear model to non-data or bad channels. '
'Check picking')
msg = 'Fitting linear model to epochs'
data = inst.get_data()
out = EvokedArray(np.zeros(data.shape[1:]), inst.info, inst.tmin)
elif isgenerator(inst):
msg = 'Fitting linear model to source estimates (generator input)'
out = next(inst)
data = np.array([out.data] + [i.data for i in inst])
elif isinstance(inst, list) and isinstance(inst[0], SourceEstimate):
msg = 'Fitting linear model to source estimates (list input)'
out = inst[0]
data = np.array([i.data for i in inst])
else:
raise ValueError('Input must be epochs or iterable of source '
'estimates')
logger.info(msg + ', (%s targets, %s regressors)' %
(np.product(data.shape[1:]), len(names)))
lm_params = _fit_lm(data, design_matrix, names)
lm = namedtuple('lm', 'beta stderr t_val p_val mlog10_p_val')
lm_fits = {}
for name in names:
parameters = [p[name] for p in lm_params]
for ii, value in enumerate(parameters):
out_ = out.copy()
if not isinstance(out_, (SourceEstimate, Evoked)):
raise RuntimeError('Invalid container.')
out_._data[:] = value
parameters[ii] = out_
lm_fits[name] = lm(*parameters)
logger.info('Done')
return lm_fits
def _fit_lm(data, design_matrix, names):
"""Aux function."""
from scipy import stats
n_samples = len(data)
n_features = np.product(data.shape[1:])
if design_matrix.ndim != 2:
raise ValueError('Design matrix must be a 2d array')
n_rows, n_predictors = design_matrix.shape
if n_samples != n_rows:
raise ValueError('Number of rows in design matrix must be equal '
'to number of observations')
if n_predictors != len(names):
raise ValueError('Number of regressor names must be equal to '
                         'number of columns in the design matrix')
y = np.reshape(data, (n_samples, n_features))
betas, resid_sum_squares, _, _ = linalg.lstsq(a=design_matrix, b=y)
df = n_rows - n_predictors
sqrt_noise_var = np.sqrt(resid_sum_squares / df).reshape(data.shape[1:])
design_invcov = linalg.inv(np.dot(design_matrix.T, design_matrix))
unscaled_stderrs = np.sqrt(np.diag(design_invcov))
tiny = np.finfo(np.float64).tiny
beta, stderr, t_val, p_val, mlog10_p_val = (dict() for _ in range(5))
for x, unscaled_stderr, predictor in zip(betas, unscaled_stderrs, names):
beta[predictor] = x.reshape(data.shape[1:])
stderr[predictor] = sqrt_noise_var * unscaled_stderr
p_val[predictor] = np.empty_like(stderr[predictor])
t_val[predictor] = np.empty_like(stderr[predictor])
stderr_pos = (stderr[predictor] > 0)
beta_pos = (beta[predictor] > 0)
t_val[predictor][stderr_pos] = (beta[predictor][stderr_pos] /
stderr[predictor][stderr_pos])
cdf = stats.t.cdf(np.abs(t_val[predictor][stderr_pos]), df)
p_val[predictor][stderr_pos] = np.clip((1. - cdf) * 2., tiny, 1.)
# degenerate cases
mask = (~stderr_pos & beta_pos)
t_val[predictor][mask] = np.inf * np.sign(beta[predictor][mask])
p_val[predictor][mask] = tiny
# could do NaN here, but hopefully this is safe enough
mask = (~stderr_pos & ~beta_pos)
t_val[predictor][mask] = 0
p_val[predictor][mask] = 1.
mlog10_p_val[predictor] = -np.log10(p_val[predictor])
return beta, stderr, t_val, p_val, mlog10_p_val
@fill_doc
def linear_regression_raw(raw, events, event_id=None, tmin=-.1, tmax=1,
covariates=None, reject=None, flat=None, tstep=1.,
decim=1, picks=None, solver='cholesky'):
"""Estimate regression-based evoked potentials/fields by linear modeling.
This models the full M/EEG time course, including correction for
overlapping potentials and allowing for continuous/scalar predictors.
Internally, this constructs a predictor matrix X of size
n_samples * (n_conds * window length), solving the linear system
``Y = bX`` and returning ``b`` as evoked-like time series split by
condition. See [1]_.
Parameters
----------
raw : instance of Raw
A raw object. Note: be very careful about data that is not
downsampled, as the resulting matrices can be enormous and easily
overload your computer. Typically, 100 Hz sampling rate is
appropriate - or using the decim keyword (see below).
events : ndarray of int, shape (n_events, 3)
An array where the first column corresponds to samples in raw
and the last to integer codes in event_id.
event_id : dict | None
As in Epochs; a dictionary where the values may be integers or
iterables of integers, corresponding to the 3rd column of
events, and the keys are condition names.
If None, uses all events in the events array.
tmin : float | dict
If float, gives the lower limit (in seconds) for the time window for
which all event types' effects are estimated. If a dict, can be used to
specify time windows for specific event types: keys correspond to keys
in event_id and/or covariates; for missing values, the default (-.1) is
used.
tmax : float | dict
If float, gives the upper limit (in seconds) for the time window for
which all event types' effects are estimated. If a dict, can be used to
specify time windows for specific event types: keys correspond to keys
in event_id and/or covariates; for missing values, the default (1.) is
used.
covariates : dict-like | None
If dict-like (e.g., a pandas DataFrame), values have to be array-like
and of the same length as the rows in ``events``. Keys correspond
to additional event types/conditions to be estimated and are matched
with the time points given by the first column of ``events``. If
None, only binary events (from event_id) are used.
reject : None | dict
For cleaning raw data before the regression is performed: set up
rejection parameters based on peak-to-peak amplitude in continuously
selected subepochs. If None, no rejection is done.
If dict, keys are types ('grad' | 'mag' | 'eeg' | 'eog' | 'ecg')
and values are the maximal peak-to-peak values to select rejected
epochs, e.g.::
            reject = dict(grad=4000e-12,  # T / m (gradiometers)
                          mag=4e-11,      # T (magnetometers)
                          eeg=40e-5,      # V (EEG channels)
                          eog=250e-5)     # V (EOG channels)
flat : None | dict
For cleaning raw data before the regression is performed: set up
rejection parameters based on flatness of the signal. If None, no
rejection is done. If a dict, keys are ('grad' | 'mag' |
'eeg' | 'eog' | 'ecg') and values are minimal peak-to-peak values to
select rejected epochs.
tstep : float
Length of windows for peak-to-peak detection for raw data cleaning.
decim : int
Decimate by choosing only a subsample of data points. Highly
recommended for data recorded at high sampling frequencies, as
otherwise huge intermediate matrices have to be created and inverted.
%(picks_good_data)s
solver : str | callable
Either a function which takes as its inputs the sparse predictor
matrix X and the observation matrix Y, and returns the coefficient
matrix b; or a string.
X is of shape (n_times, n_predictors * time_window_length).
y is of shape (n_channels, n_times).
If str, must be ``'cholesky'``, in which case the solver used is
``linalg.solve(dot(X.T, X), dot(X.T, y))``.
Returns
-------
evokeds : dict
A dict where the keys correspond to conditions and the values are
Evoked objects with the ER[F/P]s. These can be used exactly like any
other Evoked object, including e.g. plotting or statistics.
References
----------
.. [1] Smith, N. J., & Kutas, M. (2015). Regression-based estimation of ERP
waveforms: II. Non-linear effects, overlap correction, and practical
considerations. Psychophysiology, 52(2), 169-189.
"""
if isinstance(solver, str):
if solver not in {"cholesky"}:
raise ValueError("No such solver: {}".format(solver))
if solver == 'cholesky':
def solver(X, y):
a = (X.T * X).toarray() # dot product of sparse matrices
return linalg.solve(a, X.T * y, sym_pos=True,
overwrite_a=True, overwrite_b=True).T
elif callable(solver):
pass
else:
raise TypeError("The solver must be a str or a callable.")
# build data
data, info, events = _prepare_rerp_data(raw, events, picks=picks,
decim=decim)
if event_id is None:
event_id = {str(v): v for v in set(events[:, 2])}
# build predictors
X, conds, cond_length, tmin_s, tmax_s = _prepare_rerp_preds(
n_samples=data.shape[1], sfreq=info["sfreq"], events=events,
event_id=event_id, tmin=tmin, tmax=tmax, covariates=covariates)
# remove "empty" and contaminated data points
X, data = _clean_rerp_input(X, data, reject, flat, decim, info, tstep)
# solve linear system
coefs = solver(X, data.T)
if coefs.shape[0] != data.shape[0]:
raise ValueError("solver output has unexcepted shape. Supply a "
"function that returns coefficients in the form "
"(n_targets, n_features), where targets == channels.")
# construct Evoked objects to be returned from output
evokeds = _make_evokeds(coefs, conds, cond_length, tmin_s, tmax_s, info)
return evokeds
def _prepare_rerp_data(raw, events, picks=None, decim=1):
"""Prepare events and data, primarily for `linear_regression_raw`."""
picks = _picks_to_idx(raw.info, picks)
info = pick_info(raw.info, picks)
decim = int(decim)
info["sfreq"] /= decim
data, times = raw[:]
data = data[picks, ::decim]
if len(set(events[:, 0])) < len(events[:, 0]):
raise ValueError("`events` contains duplicate time points. Make "
"sure all entries in the first column of `events` "
"are unique.")
events = events.copy()
events[:, 0] -= raw.first_samp
events[:, 0] //= decim
if len(set(events[:, 0])) < len(events[:, 0]):
raise ValueError("After decimating, `events` contains duplicate time "
"points. This means some events are too closely "
"spaced for the requested decimation factor. Choose "
"different events, drop close events, or choose a "
"different decimation factor.")
return data, info, events
def _prepare_rerp_preds(n_samples, sfreq, events, event_id=None, tmin=-.1,
tmax=1, covariates=None):
"""Build predictor matrix and metadata (e.g. condition time windows)."""
conds = list(event_id)
if covariates is not None:
conds += list(covariates)
# time windows (per event type) are converted to sample points from times
# int(round()) to be safe and match Epochs constructor behavior
if isinstance(tmin, (float, int)):
tmin_s = {cond: int(round(tmin * sfreq)) for cond in conds}
else:
tmin_s = {cond: int(round(tmin.get(cond, -.1) * sfreq))
for cond in conds}
if isinstance(tmax, (float, int)):
tmax_s = {
cond: int(round((tmax * sfreq)) + 1) for cond in conds}
else:
tmax_s = {cond: int(round(tmax.get(cond, 1.) * sfreq)) + 1
for cond in conds}
# Construct predictor matrix
# We do this by creating one array per event type, shape (lags, samples)
# (where lags depends on tmin/tmax and can be different for different
# event types). Columns correspond to predictors, predictors correspond to
# time lags. Thus, each array is mostly sparse, with one diagonal of 1s
# per event (for binary predictors).
cond_length = dict()
xs = []
for cond in conds:
tmin_, tmax_ = tmin_s[cond], tmax_s[cond]
n_lags = int(tmax_ - tmin_) # width of matrix
if cond in event_id: # for binary predictors
ids = ([event_id[cond]]
if isinstance(event_id[cond], int)
else event_id[cond])
onsets = -(events[np.in1d(events[:, 2], ids), 0] + tmin_)
values = np.ones((len(onsets), n_lags))
else: # for predictors from covariates, e.g. continuous ones
covs = covariates[cond]
if len(covs) != len(events):
error = ("Condition {0} from ``covariates`` is "
"not the same length as ``events``").format(cond)
raise ValueError(error)
onsets = -(events[np.where(covs != 0), 0] + tmin_)[0]
v = np.asarray(covs)[np.nonzero(covs)].astype(float)
values = np.ones((len(onsets), n_lags)) * v[:, np.newaxis]
cond_length[cond] = len(onsets)
xs.append(sparse.dia_matrix((values, onsets),
shape=(n_samples, n_lags)))
return sparse.hstack(xs), conds, cond_length, tmin_s, tmax_s
def _clean_rerp_input(X, data, reject, flat, decim, info, tstep):
"""Remove empty and contaminated points from data & predictor matrices."""
# find only those positions where at least one predictor isn't 0
has_val = np.unique(X.nonzero()[0])
# reject positions based on extreme steps in the data
if reject is not None:
_, inds = _reject_data_segments(data, reject, flat, decim=None,
info=info, tstep=tstep)
for t0, t1 in inds:
has_val = np.setdiff1d(has_val, range(t0, t1))
return X.tocsr()[has_val], data[:, has_val]
def _make_evokeds(coefs, conds, cond_length, tmin_s, tmax_s, info):
"""Create a dictionary of Evoked objects.
These will be created from a coefs matrix and condition durations.
"""
evokeds = dict()
cumul = 0
for cond in conds:
tmin_, tmax_ = tmin_s[cond], tmax_s[cond]
evokeds[cond] = EvokedArray(
coefs[:, cumul:cumul + tmax_ - tmin_], info=info, comment=cond,
tmin=tmin_ / float(info["sfreq"]), nave=cond_length[cond],
kind='average') # nave and kind are technically incorrect
cumul += tmax_ - tmin_
return evokeds
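# Usage sketch for linear_regression() (the ``epochs`` object and its event
# coding are assumed to exist; the regressor names are illustrative):
#
#   design = np.column_stack([np.ones(len(epochs)),        # intercept
#                             epochs.events[:, 2] == 2])   # binary condition
#   res = linear_regression(epochs, design,
#                           names=['intercept', 'condition'])
#   res['condition'].beta    # an Evoked with per-channel/time betas
#   res['condition'].p_val   # same shape, two-sided p-values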
|
import subprocess
from flask import current_app
from lemur.certificates.service import (
csr_created,
csr_imported,
certificate_issued,
certificate_imported,
)
def csr_dump_handler(sender, csr, **kwargs):
try:
subprocess.run(
["openssl", "req", "-text", "-noout", "-reqopt", "no_sigdump,no_pubkey"],
input=csr.encode("utf8"),
)
except Exception as err:
current_app.logger.warning("Error inspecting CSR: %s", err)
def cert_dump_handler(sender, certificate, **kwargs):
try:
subprocess.run(
["openssl", "x509", "-text", "-noout", "-certopt", "no_sigdump,no_pubkey"],
input=certificate.body.encode("utf8"),
)
except Exception as err:
current_app.logger.warning("Error inspecting certificate: %s", err)
def activate_debug_dump():
csr_created.connect(csr_dump_handler)
csr_imported.connect(csr_dump_handler)
certificate_issued.connect(cert_dump_handler)
certificate_imported.connect(cert_dump_handler)
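# Usage note: calling activate_debug_dump() (for example from the application
# factory when a debug flag is enabled) connects the handlers above to Lemur's
# certificate signals, so every created/imported CSR and issued/imported
# certificate is pretty-printed to the process output via openssl.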
|
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import STATE_OFF, STATE_ON
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
ATTR_RSSI_LEVEL = "rssi_level"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Tahoma switches."""
if discovery_info is None:
return
controller = hass.data[TAHOMA_DOMAIN]["controller"]
devices = []
for switch in hass.data[TAHOMA_DOMAIN]["devices"]["switch"]:
devices.append(TahomaSwitch(switch, controller))
add_entities(devices, True)
class TahomaSwitch(TahomaDevice, SwitchEntity):
"""Representation a Tahoma Switch."""
def __init__(self, tahoma_device, controller):
"""Initialize the switch."""
super().__init__(tahoma_device, controller)
self._state = STATE_OFF
self._skip_update = False
self._available = False
def update(self):
"""Update method."""
# Postpone the immediate state check for changes that take time.
if self._skip_update:
self._skip_update = False
return
self.controller.get_states([self.tahoma_device])
if self.tahoma_device.type == "io:OnOffLightIOComponent":
if self.tahoma_device.active_states.get("core:OnOffState") == "on":
self._state = STATE_ON
else:
self._state = STATE_OFF
if self.tahoma_device.type == "zwave:OnOffLightZWaveComponent":
if self.tahoma_device.active_states.get("core:OnOffState") == "on":
self._state = STATE_ON
else:
self._state = STATE_OFF
        # An RTS power socket doesn't have a feedback channel,
        # so we must assume the socket is available.
if self.tahoma_device.type == "rts:OnOffRTSComponent":
self._available = True
elif self.tahoma_device.type == "zwave:OnOffLightZWaveComponent":
self._available = True
else:
self._available = bool(
self.tahoma_device.active_states.get("core:StatusState") == "available"
)
_LOGGER.debug("Update %s, state: %s", self._name, self._state)
@property
def device_class(self):
"""Return the class of the device."""
if self.tahoma_device.type == "rts:GarageDoor4TRTSComponent":
return "garage"
return None
def turn_on(self, **kwargs):
"""Send the on command."""
_LOGGER.debug("Turn on: %s", self._name)
if self.tahoma_device.type == "rts:GarageDoor4TRTSComponent":
self.toggle()
else:
self.apply_action("on")
self._skip_update = True
self._state = STATE_ON
def turn_off(self, **kwargs):
"""Send the off command."""
_LOGGER.debug("Turn off: %s", self._name)
if self.tahoma_device.type == "rts:GarageDoor4TRTSComponent":
return
self.apply_action("off")
self._skip_update = True
self._state = STATE_OFF
def toggle(self, **kwargs):
"""Click the switch."""
self.apply_action("cycle")
@property
def is_on(self):
"""Get whether the switch is in on state."""
if self.tahoma_device.type == "rts:GarageDoor4TRTSComponent":
return False
return bool(self._state == STATE_ON)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attr = {}
super_attr = super().device_state_attributes
if super_attr is not None:
attr.update(super_attr)
if "core:RSSILevelState" in self.tahoma_device.active_states:
attr[ATTR_RSSI_LEVEL] = self.tahoma_device.active_states[
"core:RSSILevelState"
]
return attr
@property
def available(self):
"""Return True if entity is available."""
return self._available
|
from aiohttp import web
import voluptuous as vol
from homeassistant.components.device_tracker import (
ATTR_BATTERY,
DOMAIN as DEVICE_TRACKER,
)
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_WEBHOOK_ID,
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
)
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
ATTR_ACCURACY,
ATTR_ACTIVITY,
ATTR_ALTITUDE,
ATTR_DEVICE,
ATTR_DIRECTION,
ATTR_PROVIDER,
ATTR_SPEED,
DOMAIN,
)
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
DEFAULT_ACCURACY = 200
DEFAULT_BATTERY = -1
def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "")
WEBHOOK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE): _id,
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Optional(ATTR_ACCURACY, default=DEFAULT_ACCURACY): vol.Coerce(float),
vol.Optional(ATTR_ACTIVITY): cv.string,
vol.Optional(ATTR_ALTITUDE): vol.Coerce(float),
vol.Optional(ATTR_BATTERY, default=DEFAULT_BATTERY): vol.Coerce(float),
vol.Optional(ATTR_DIRECTION): vol.Coerce(float),
vol.Optional(ATTR_PROVIDER): cv.string,
vol.Optional(ATTR_SPEED): vol.Coerce(float),
}
)
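# Illustrative form-encoded payload GPSLogger would POST to the webhook,
# matching WEBHOOK_SCHEMA above (all values are placeholders):
#
#   device=example-device&latitude=47.3769&longitude=8.5417
#   &accuracy=12.0&battery=85&speed=1.2&direction=270.0
#   &altitude=408.0&provider=gps&activity=still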
async def async_setup(hass, hass_config):
"""Set up the GPSLogger component."""
hass.data[DOMAIN] = {"devices": set(), "unsub_device_tracker": {}}
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook with GPSLogger request."""
try:
data = WEBHOOK_SCHEMA(dict(await request.post()))
except vol.MultipleInvalid as error:
return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)
attrs = {
ATTR_SPEED: data.get(ATTR_SPEED),
ATTR_DIRECTION: data.get(ATTR_DIRECTION),
ATTR_ALTITUDE: data.get(ATTR_ALTITUDE),
ATTR_PROVIDER: data.get(ATTR_PROVIDER),
ATTR_ACTIVITY: data.get(ATTR_ACTIVITY),
}
device = data[ATTR_DEVICE]
async_dispatcher_send(
hass,
TRACKER_UPDATE,
device,
(data[ATTR_LATITUDE], data[ATTR_LONGITUDE]),
data[ATTR_BATTERY],
data[ATTR_ACCURACY],
attrs,
)
return web.Response(text=f"Setting location for {device}", status=HTTP_OK)
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, "GPSLogger", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)()
await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
return True
async_remove_entry = config_entry_flow.webhook_async_remove_entry
|
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.utils.module_loading import import_string
from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from cms.plugin_pool import plugin_pool
from shop import messages
from shop.conf import app_settings
from shop.exceptions import ProductNotAvailable
from shop.models.cart import CartModel
from shop.serializers.checkout import CheckoutSerializer
from shop.serializers.cart import CartSerializer
from shop.modifiers.pool import cart_modifiers_pool
class CheckoutViewSet(GenericViewSet):
"""
View for our REST endpoint to communicate with the various forms used during the checkout.
"""
serializer_label = 'checkout'
serializer_class = CheckoutSerializer
cart_serializer_class = CartSerializer
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.dialog_forms = set([import_string(fc) for fc in app_settings.SHOP_DIALOG_FORMS])
try:
from shop.cascade.plugin_base import DialogFormPluginBase
except ImproperlyConfigured:
# cmsplugins_cascade has not been installed
pass
else:
# gather form classes from Cascade plugins for our checkout views
for p in plugin_pool.get_all_plugins():
if issubclass(p, DialogFormPluginBase):
if hasattr(p, 'form_classes'):
self.dialog_forms.update([import_string(fc) for fc in p.form_classes])
if hasattr(p, 'form_class'):
self.dialog_forms.add(import_string(p.form_class))
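    # Illustrative settings sketch: SHOP_DIALOG_FORMS is a list of dotted
    # paths to dialog form classes; the module paths below are placeholders.
    #
    #   SHOP_DIALOG_FORMS = [
    #       'myshop.forms.CustomerForm',
    #       'myshop.forms.ShippingAddressForm',
    #   ]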
@action(methods=['put'], detail=False, url_path='upload')
def upload(self, request):
"""
Use this REST endpoint to upload the payload of all forms used to setup the checkout
dialogs. This method takes care to dispatch the uploaded payload to each corresponding
form.
"""
# sort posted form data by plugin order
cart = CartModel.objects.get_from_request(request)
dialog_data = []
for form_class in self.dialog_forms:
if form_class.scope_prefix in request.data:
if 'plugin_order' in request.data[form_class.scope_prefix]:
dialog_data.append((form_class, request.data[form_class.scope_prefix]))
else:
for data in request.data[form_class.scope_prefix].values():
dialog_data.append((form_class, data))
dialog_data = sorted(dialog_data, key=lambda tpl: int(tpl[1]['plugin_order']))
# save data, get text representation and collect potential errors
errors, response_data, set_is_valid = {}, {}, True
with transaction.atomic():
for form_class, data in dialog_data:
form = form_class.form_factory(request, data, cart)
if form.is_valid():
# empty error dict forces revalidation by the client side validation
errors[form_class.form_name] = {}
else:
errors[form_class.form_name] = form.errors
set_is_valid = False
# by updating the response data, we can override the form's content
update_data = form.get_response_data()
if isinstance(update_data, dict):
response_data[form_class.form_name] = update_data
# persist changes in cart
if set_is_valid:
cart.save()
# add possible form errors for giving feedback to the customer
if set_is_valid:
return Response(response_data)
else:
return Response(errors, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
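    # Illustrative shape of the payload handled by upload() above (scope
    # prefixes and field names are placeholders): each top-level key is a
    # form's scope_prefix and carries a 'plugin_order' used for sorting, e.g.
    #
    #   {"customer": {"plugin_order": "1", "salutation": "mrs", ...},
    #    "shipping_address": {"plugin_order": "2", "city": "...", ...}}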
@action(methods=['get'], detail=False, url_path='digest')
def digest(self, request):
"""
Returns the summaries of the cart and various checkout forms to be rendered in non-editable fields.
"""
cart = CartModel.objects.get_from_request(request)
cart.update(request)
context = self.get_serializer_context()
checkout_serializer = self.serializer_class(cart, context=context, label=self.serializer_label)
cart_serializer = self.cart_serializer_class(cart, context=context, label='cart')
response_data = {
'checkout_digest': checkout_serializer.data,
'cart_summary': cart_serializer.data,
}
return Response(data=response_data)
@action(methods=['post'], detail=False, url_path='purchase')
def purchase(self, request):
"""
This is the final step on converting a cart into an order object. It normally is used in
combination with the plugin :class:`shop.cascade.checkout.ProceedButtonPlugin` to render
a button labeled "Purchase Now".
"""
cart = CartModel.objects.get_from_request(request)
try:
cart.update(request, raise_exception=True)
except ProductNotAvailable as exc:
message = _("The product '{product_name}' ({product_code}) suddenly became unavailable, "\
"presumably because someone else has been faster purchasing it.\n Please "\
"recheck the cart or add an alternative product and proceed with the checkout.").\
format(product_name=exc.product.product_name, product_code=exc.product.product_code)
messages.error(request, message, title=_("Product Disappeared"), delay=10)
message = _("The product '{product_name}' ({product_code}) suddenly became unavailable.").\
format(product_name=exc.product.product_name, product_code=exc.product.product_code)
response_data = {'purchasing_error_message': message}
return Response(data=response_data, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
cart.save()
response_data = {}
try:
# Iterate over the registered modifiers, and search for the active payment service provider
for modifier in cart_modifiers_pool.get_payment_modifiers():
if modifier.is_active(cart.extra.get('payment_modifier')):
expression = modifier.payment_provider.get_payment_request(cart, request)
response_data.update(expression=expression)
break
except ValidationError as err:
message = _("Please select a valid payment method.")
messages.warning(request, message, title=_("Choose Payment Method"), delay=5)
response_data = {'purchasing_error_message': '. '.join(err.detail)}
return Response(data=response_data, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
return Response(data=response_data)
|
from typing import Optional
from homeassistant.components.sensor import DEVICE_CLASSES
from homeassistant.const import (
LENGTH_KILOMETERS,
LENGTH_MILES,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.helpers.entity import Entity
from homeassistant.util.distance import convert
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
coordinator = hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"]
entities = []
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"]["sensor"]:
if device.type == "temperature sensor":
entities.append(TeslaSensor(device, coordinator, "inside"))
entities.append(TeslaSensor(device, coordinator, "outside"))
else:
entities.append(TeslaSensor(device, coordinator))
async_add_entities(entities, True)
class TeslaSensor(TeslaDevice, Entity):
"""Representation of Tesla sensors."""
def __init__(self, tesla_device, coordinator, sensor_type=None):
"""Initialize of the sensor."""
super().__init__(tesla_device, coordinator)
self.type = sensor_type
if self.type:
self._name = f"{super().name} ({self.type})"
self._unique_id = f"{super().unique_id}_{self.type}"
@property
def state(self) -> Optional[float]:
"""Return the state of the sensor."""
if self.tesla_device.type == "temperature sensor":
if self.type == "outside":
return self.tesla_device.get_outside_temp()
return self.tesla_device.get_inside_temp()
if self.tesla_device.type in ["range sensor", "mileage sensor"]:
units = self.tesla_device.measurement
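            # Convert to kilometers unless miles are the configured unit
            # (the raw value is assumed to be reported in miles).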
if units == "LENGTH_MILES":
return self.tesla_device.get_value()
return round(
convert(self.tesla_device.get_value(), LENGTH_MILES, LENGTH_KILOMETERS),
2,
)
if self.tesla_device.type == "charging rate sensor":
return self.tesla_device.charging_rate
return self.tesla_device.get_value()
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return the unit_of_measurement of the device."""
units = self.tesla_device.measurement
if units == "F":
return TEMP_FAHRENHEIT
if units == "C":
return TEMP_CELSIUS
if units == "LENGTH_MILES":
return LENGTH_MILES
if units == "LENGTH_KILOMETERS":
return LENGTH_KILOMETERS
return units
@property
def device_class(self) -> Optional[str]:
"""Return the device_class of the device."""
return (
self.tesla_device.device_class
if self.tesla_device.device_class in DEVICE_CLASSES
else None
)
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = self._attributes.copy()
if self.tesla_device.type == "charging rate sensor":
attr.update(
{
"time_left": self.tesla_device.time_left,
"added_range": self.tesla_device.added_range,
"charge_energy_added": self.tesla_device.charge_energy_added,
"charge_current_request": self.tesla_device.charge_current_request,
"charger_actual_current": self.tesla_device.charger_actual_current,
"charger_voltage": self.tesla_device.charger_voltage,
}
)
return attr
|
import numpy as np
from hypertools.tools.describe import describe
from hypertools.plot.plot import plot
data = np.random.multivariate_normal(np.zeros(10), np.eye(10), size=100)
def test_describe_data_is_dict():
result = describe(data, reduce='PCA', show=False)
assert type(result) is dict
def test_describe_geo():
geo = plot(data, show=False)
result = describe(geo, reduce='PCA', show=False)
assert type(result) is dict
|
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .bridge import DeviceHelper
from .const import DOMAIN
async def _async_has_devices(hass) -> bool:
"""Return if there are devices that can be discovered."""
devices = await DeviceHelper.find_devices()
return len(devices) > 0
config_entry_flow.register_discovery_flow(
DOMAIN, "Gree Climate", _async_has_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
|
import pytest
from homeassistant.components import media_player
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_VOLUME_MUTED,
SERVICE_SELECT_SOURCE,
)
from homeassistant.components.webostv.const import (
ATTR_BUTTON,
ATTR_COMMAND,
ATTR_PAYLOAD,
DOMAIN,
SERVICE_BUTTON,
SERVICE_COMMAND,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
SERVICE_VOLUME_MUTE,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
NAME = "fake"
ENTITY_ID = f"{media_player.DOMAIN}.{NAME}"
@pytest.fixture(name="client")
def client_fixture():
"""Patch of client library for tests."""
with patch(
"homeassistant.components.webostv.WebOsClient", autospec=True
) as mock_client_class:
client = mock_client_class.return_value
client.software_info = {"device_id": "a1:b1:c1:d1:e1:f1"}
client.client_key = "0123456789"
yield client
async def setup_webostv(hass):
"""Initialize webostv and media_player for tests."""
assert await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_HOST: "fake", CONF_NAME: NAME}},
)
await hass.async_block_till_done()
async def test_mute(hass, client):
"""Test simple service call."""
await setup_webostv(hass)
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_MEDIA_VOLUME_MUTED: True,
}
await hass.services.async_call(media_player.DOMAIN, SERVICE_VOLUME_MUTE, data)
await hass.async_block_till_done()
client.set_mute.assert_called_once()
async def test_select_source_with_empty_source_list(hass, client):
"""Ensure we don't call client methods when we don't have sources."""
await setup_webostv(hass)
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_INPUT_SOURCE: "nonexistent",
}
await hass.services.async_call(media_player.DOMAIN, SERVICE_SELECT_SOURCE, data)
await hass.async_block_till_done()
client.launch_app.assert_not_called()
client.set_input.assert_not_called()
async def test_button(hass, client):
"""Test generic button functionality."""
await setup_webostv(hass)
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_BUTTON: "test",
}
await hass.services.async_call(DOMAIN, SERVICE_BUTTON, data)
await hass.async_block_till_done()
client.button.assert_called_once()
client.button.assert_called_with("test")
async def test_command(hass, client):
"""Test generic command functionality."""
await setup_webostv(hass)
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_COMMAND: "test",
}
await hass.services.async_call(DOMAIN, SERVICE_COMMAND, data)
await hass.async_block_till_done()
client.request.assert_called_with("test", payload=None)
async def test_command_with_optional_arg(hass, client):
"""Test generic command functionality."""
await setup_webostv(hass)
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_COMMAND: "test",
ATTR_PAYLOAD: {"target": "https://www.google.com"},
}
await hass.services.async_call(DOMAIN, SERVICE_COMMAND, data)
await hass.async_block_till_done()
client.request.assert_called_with(
"test", payload={"target": "https://www.google.com"}
)
|
import pytest
from qutebrowser.mainwindow.statusbar.percentage import Percentage
@pytest.fixture
def percentage(qtbot):
"""Fixture providing a Percentage widget."""
widget = Percentage()
# Force immediate update of percentage widget
widget._set_text.set_delay(-1)
qtbot.add_widget(widget)
return widget
@pytest.mark.parametrize('y, raw, expected', [
(0, False, '[top]'),
(100, False, '[bot]'),
(75, False, '[75%]'),
(25, False, '[25%]'),
(5, False, '[05%]'),
(None, False, '[???]'),
(0, True, '[top]'),
(100, True, '[bot]'),
(75, True, '[75]'),
(25, True, '[25]'),
(5, True, '[05]'),
(None, True, '[???]'),
])
def test_percentage_text(percentage, y, raw, expected):
"""Test text displayed by the widget based on the y position of a page.
Args:
        y: y position of the page as an int in the range [0, 100].
           parametrized.
        raw: whether the widget displays the raw y position instead of a
           percentage. parametrized.
        expected: expected text given y position. parametrized.
"""
if raw:
percentage.set_raw()
percentage.set_perc(x=None, y=y)
assert percentage.text() == expected
def test_tab_change(percentage, fake_web_tab):
"""Make sure the percentage gets changed correctly when switching tabs."""
percentage.set_perc(x=None, y=10)
tab = fake_web_tab(scroll_pos_perc=(0, 20))
percentage.on_tab_changed(tab)
assert percentage.text() == '[20%]'
|
from pyrainbird import AvailableStations, RainbirdController
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import ATTR_ENTITY_ID, CONF_FRIENDLY_NAME, CONF_TRIGGER_TIME
from homeassistant.helpers import config_validation as cv
from . import CONF_ZONES, DATA_RAINBIRD, DOMAIN, RAINBIRD_CONTROLLER
ATTR_DURATION = "duration"
SERVICE_START_IRRIGATION = "start_irrigation"
SERVICE_SCHEMA_IRRIGATION = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_DURATION): cv.positive_float,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Rain Bird switches over a Rain Bird controller."""
if discovery_info is None:
return
controller: RainbirdController = hass.data[DATA_RAINBIRD][
discovery_info[RAINBIRD_CONTROLLER]
]
available_stations: AvailableStations = controller.get_available_stations()
if not (available_stations and available_stations.stations):
return
devices = []
for zone in range(1, available_stations.stations.count + 1):
if available_stations.stations.active(zone):
zone_config = discovery_info.get(CONF_ZONES, {}).get(zone, {})
time = zone_config.get(CONF_TRIGGER_TIME, discovery_info[CONF_TRIGGER_TIME])
name = zone_config.get(CONF_FRIENDLY_NAME)
devices.append(
RainBirdSwitch(
controller,
zone,
time,
name if name else f"Sprinkler {zone}",
)
)
add_entities(devices, True)
def start_irrigation(service):
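        """Handle the start_irrigation service call for the matching entity."""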
entity_id = service.data[ATTR_ENTITY_ID]
duration = service.data[ATTR_DURATION]
for device in devices:
if device.entity_id == entity_id:
device.turn_on(duration=duration)
hass.services.register(
DOMAIN,
SERVICE_START_IRRIGATION,
start_irrigation,
schema=SERVICE_SCHEMA_IRRIGATION,
)
class RainBirdSwitch(SwitchEntity):
"""Representation of a Rain Bird switch."""
def __init__(self, controller: RainbirdController, zone, time, name):
"""Initialize a Rain Bird Switch Device."""
self._rainbird = controller
self._zone = zone
self._name = name
self._state = None
self._duration = time
self._attributes = {ATTR_DURATION: self._duration, "zone": self._zone}
@property
def device_state_attributes(self):
"""Return state attributes."""
return self._attributes
@property
def name(self):
"""Get the name of the switch."""
return self._name
def turn_on(self, **kwargs):
"""Turn the switch on."""
if self._rainbird.irrigate_zone(
int(self._zone),
int(kwargs[ATTR_DURATION] if ATTR_DURATION in kwargs else self._duration),
):
self._state = True
def turn_off(self, **kwargs):
"""Turn the switch off."""
if self._rainbird.stop_irrigation():
self._state = False
def update(self):
"""Update switch status."""
self._state = self._rainbird.get_zone_state(self._zone)
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
|
import os
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import mne
from mne.io.kit.tests import data_dir as kit_data_dir
from mne.io import read_raw_fif
from mne.utils import (requires_mayavi, run_tests_if_main, traits_test,
modified_env)
mrk_pre_path = os.path.join(kit_data_dir, 'test_mrk_pre.sqd')
mrk_post_path = os.path.join(kit_data_dir, 'test_mrk_post.sqd')
sqd_path = os.path.join(kit_data_dir, 'test.sqd')
hsp_path = os.path.join(kit_data_dir, 'test_hsp.txt')
fid_path = os.path.join(kit_data_dir, 'test_elp.txt')
fif_path = os.path.join(kit_data_dir, 'test_bin_raw.fif')
@requires_mayavi
@traits_test
def test_kit2fiff_model(tmpdir):
"""Test Kit2Fiff model."""
from mne.gui._kit2fiff_gui import Kit2FiffModel
tempdir = str(tmpdir)
tgt_fname = os.path.join(tempdir, 'test-raw.fif')
model = Kit2FiffModel()
assert not model.can_save
assert model.misc_chs_desc == "No SQD file selected..."
assert model.stim_chs_comment == ""
model.markers.mrk1.file = mrk_pre_path
model.markers.mrk2.file = mrk_post_path
model.sqd_file = sqd_path
assert model.misc_chs_desc == "160:192"
model.hsp_file = hsp_path
assert not model.can_save
model.fid_file = fid_path
assert model.can_save
# events
model.stim_slope = '+'
assert model.get_event_info() == {1: 2}
model.stim_slope = '-'
assert model.get_event_info() == {254: 2, 255: 2}
# stim channels
model.stim_chs = "181:184, 186"
assert_array_equal(model.stim_chs_array, [181, 182, 183, 186])
assert model.stim_chs_ok
assert model.get_event_info() == {}
model.stim_chs = "181:184, bad"
assert not model.stim_chs_ok
assert not model.can_save
model.stim_chs = ""
assert model.can_save
# export raw
raw_out = model.get_raw()
raw_out.save(tgt_fname)
raw = read_raw_fif(tgt_fname)
# Compare exported raw with the original binary conversion
raw_bin = read_raw_fif(fif_path)
trans_bin = raw.info['dev_head_t']['trans']
want_keys = list(raw_bin.info.keys())
assert sorted(want_keys) == sorted(list(raw.info.keys()))
trans_transform = raw_bin.info['dev_head_t']['trans']
assert_allclose(trans_transform, trans_bin, 0.1)
# Averaging markers
model.markers.mrk3.method = "Average"
trans_avg = model.dev_head_trans
assert not np.all(trans_avg == trans_transform)
assert_allclose(trans_avg, trans_bin, 0.1)
# Test exclusion of one marker
model.markers.mrk3.method = "Transform"
model.use_mrk = [1, 2, 3, 4]
assert not np.all(model.dev_head_trans == trans_transform)
assert not np.all(model.dev_head_trans == trans_avg)
assert not np.all(model.dev_head_trans == np.eye(4))
# test setting stim channels
model.stim_slope = '+'
events_bin = mne.find_events(raw_bin, stim_channel='STI 014')
model.stim_coding = '<'
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_bin)
events_rev = events_bin.copy()
events_rev[:, 2] = 1
model.stim_coding = '>'
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_rev)
model.stim_coding = 'channel'
model.stim_chs = "160:161"
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_bin + [0, 0, 32])
# test reset
model.clear_all()
assert model.use_mrk == [0, 1, 2, 3, 4]
assert model.sqd_file == ""
@requires_mayavi
@traits_test
def test_kit2fiff_gui(check_gui_ci, tmpdir):
"""Test Kit2Fiff GUI."""
home_dir = str(tmpdir)
with modified_env(_MNE_GUI_TESTING_MODE='true',
_MNE_FAKE_HOME_DIR=home_dir):
from pyface.api import GUI
gui = GUI()
gui.process_events()
ui, frame = mne.gui.kit2fiff()
assert not frame.model.can_save
assert frame.model.stim_threshold == 1.
frame.model.stim_threshold = 10.
frame.model.stim_chs = 'save this!'
frame.save_config(home_dir)
ui.dispose()
gui.process_events()
# test setting persistence
ui, frame = mne.gui.kit2fiff()
assert frame.model.stim_threshold == 10.
assert frame.model.stim_chs == 'save this!'
# set and reset marker file
points = [[-0.084612, 0.021582, -0.056144],
[0.080425, 0.021995, -0.061171],
[-0.000787, 0.105530, 0.014168],
[-0.047943, 0.091835, 0.010240],
[0.042976, 0.094380, 0.010807]]
assert_array_equal(frame.marker_panel.mrk1_obj.points, 0)
assert_array_equal(frame.marker_panel.mrk3_obj.points, 0)
frame.model.markers.mrk1.file = mrk_pre_path
assert_allclose(frame.marker_panel.mrk1_obj.points, points, atol=1e-6)
assert_allclose(frame.marker_panel.mrk3_obj.points, points, atol=1e-6)
frame.marker_panel.mrk1_obj.label = True
frame.marker_panel.mrk1_obj.label = False
frame.kit2fiff_panel.clear_all = True
assert_array_equal(frame.marker_panel.mrk1_obj.points, 0)
assert_array_equal(frame.marker_panel.mrk3_obj.points, 0)
ui.dispose()
gui.process_events()
run_tests_if_main()
|
import logging
import os.path
logger = logging.getLogger(__name__)
_COMPRESSOR_REGISTRY = {}
def get_supported_extensions():
"""Return the list of file extensions for which we have registered compressors."""
return sorted(_COMPRESSOR_REGISTRY.keys())
def register_compressor(ext, callback):
"""Register a callback for transparently decompressing files with a specific extension.
Parameters
----------
ext: str
The extension. Must include the leading period, e.g. ``.gz``.
callback: callable
        The callback. It must accept two positional arguments, file_obj and mode.
This function will be called when ``smart_open`` is opening a file with
the specified extension.
Examples
--------
Instruct smart_open to use the `lzma` module whenever opening a file
with a .xz extension (see README.rst for the complete example showing I/O):
>>> def _handle_xz(file_obj, mode):
... import lzma
... return lzma.LZMAFile(filename=file_obj, mode=mode, format=lzma.FORMAT_XZ)
>>>
>>> register_compressor('.xz', _handle_xz)
"""
if not (ext and ext[0] == '.'):
raise ValueError('ext must be a string starting with ., not %r' % ext)
if ext in _COMPRESSOR_REGISTRY:
logger.warning('overriding existing compression handler for %r', ext)
_COMPRESSOR_REGISTRY[ext] = callback
def _handle_bz2(file_obj, mode):
from bz2 import BZ2File
return BZ2File(file_obj, mode)
def _handle_gzip(file_obj, mode):
import gzip
return gzip.GzipFile(fileobj=file_obj, mode=mode)
def compression_wrapper(file_obj, mode, filename=None):
"""
This function will wrap the file_obj with an appropriate
[de]compression mechanism based on the extension of the filename.
file_obj must either be a filehandle object, or a class which behaves
like one. It must have a .name attribute unless ``filename`` is given.
    If the filename extension isn't recognized, the original file_obj is
    returned unchanged.
"""
try:
if filename is None:
filename = file_obj.name
_, ext = os.path.splitext(filename)
except (AttributeError, TypeError):
logger.warning(
'unable to transparently decompress %r because it '
'seems to lack a string-like .name', file_obj
)
return file_obj
if ext in _COMPRESSOR_REGISTRY and mode.endswith('+'):
raise ValueError('transparent (de)compression unsupported for mode %r' % mode)
try:
callback = _COMPRESSOR_REGISTRY[ext]
except KeyError:
return file_obj
else:
return callback(file_obj, mode)
#
# NB. avoid using lambda here to make stack traces more readable.
#
register_compressor('.bz2', _handle_bz2)
register_compressor('.gz', _handle_gzip)
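#
# Minimal usage sketch (hypothetical file name; assumes "example.txt.gz" exists locally):
#
#   with open("example.txt.gz", "rb") as raw:
#       wrapped = compression_wrapper(raw, "rb")
#       data = wrapped.read()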
|
import glob
from pkg_resources import yield_lines
from setuptools import find_packages
from setuptools import setup
from paasta_tools import __version__
def get_install_requires():
with open("requirements-minimal.txt", "r") as f:
minimal_reqs = list(yield_lines(f.read()))
return minimal_reqs
setup(
name="paasta-tools",
version=__version__,
provides=["paasta_tools"],
author="Compute Infrastructure @ Yelp",
author_email="[email protected]",
description="Tools for Yelps SOA infrastructure",
packages=find_packages(exclude=("tests*", "scripts*")),
include_package_data=True,
install_requires=get_install_requires(),
scripts=[
"paasta_tools/am_i_mesos_leader.py",
"paasta_tools/apply_external_resources.py",
"paasta_tools/autoscale_all_services.py",
"paasta_tools/check_flink_services_health.py",
"paasta_tools/check_cassandracluster_services_replication.py",
"paasta_tools/check_marathon_services_replication.py",
"paasta_tools/check_kubernetes_api.py",
"paasta_tools/check_kubernetes_services_replication.py",
"paasta_tools/check_oom_events.py",
"paasta_tools/check_spark_jobs.py",
"paasta_tools/cleanup_marathon_jobs.py",
"paasta_tools/cleanup_kubernetes_cr.py",
"paasta_tools/cleanup_kubernetes_crd.py",
"paasta_tools/cleanup_kubernetes_jobs.py",
"paasta_tools/delete_kubernetes_deployments.py",
"paasta_tools/deploy_marathon_services",
"paasta_tools/paasta_deploy_tron_jobs",
"paasta_tools/generate_all_deployments",
"paasta_tools/generate_deployments_for_service.py",
"paasta_tools/generate_services_file.py",
"paasta_tools/generate_services_yaml.py",
"paasta_tools/get_mesos_leader.py",
"paasta_tools/kubernetes/bin/paasta_secrets_sync.py",
"paasta_tools/kubernetes/bin/paasta_cleanup_stale_nodes.py",
"paasta_tools/kubernetes/bin/kubernetes_remove_evicted_pods.py",
"paasta_tools/list_marathon_service_instances.py",
"paasta_tools/log_task_lifecycle_events.py",
"paasta_tools/marathon_dashboard.py",
"paasta_tools/monitoring/check_capacity.py",
"paasta_tools/monitoring/check_marathon_has_apps.py",
"paasta_tools/monitoring/check_mesos_active_frameworks.py",
"paasta_tools/monitoring/check_mesos_duplicate_frameworks.py",
"paasta_tools/monitoring/check_mesos_quorum.py",
"paasta_tools/monitoring/check_mesos_outdated_tasks.py",
"paasta_tools/monitoring/kill_orphaned_docker_containers.py",
"paasta_tools/cli/paasta_tabcomplete.sh",
"paasta_tools/paasta_cluster_boost.py",
"paasta_tools/paasta_execute_docker_command.py",
"paasta_tools/paasta_maintenance.py",
"paasta_tools/paasta_metastatus.py",
"paasta_tools/paasta_remote_run.py",
"paasta_tools/setup_kubernetes_job.py",
"paasta_tools/setup_kubernetes_crd.py",
"paasta_tools/setup_kubernetes_cr.py",
"paasta_tools/setup_marathon_job.py",
"paasta_tools/synapse_srv_namespaces_fact.py",
]
+ glob.glob("paasta_tools/contrib/*.sh")
+ glob.glob("paasta_tools/contrib/[!_]*.py"),
entry_points={
"console_scripts": [
"paasta=paasta_tools.cli.cli:main",
"paasta-api=paasta_tools.api.api:main",
"paasta-deployd=paasta_tools.deployd.master:main",
"paasta-fsm=paasta_tools.cli.fsm_cmd:main",
"paasta_cleanup_tron_namespaces=paasta_tools.cleanup_tron_namespaces:main",
"paasta_list_kubernetes_service_instances=paasta_tools.list_kubernetes_service_instances:main",
"paasta_list_tron_namespaces=paasta_tools.list_tron_namespaces:main",
"paasta_setup_tron_namespace=paasta_tools.setup_tron_namespace:main",
"paasta_cleanup_maintenance=paasta_tools.cleanup_maintenance:main",
"paasta_docker_wrapper=paasta_tools.docker_wrapper:main",
"paasta_firewall_update=paasta_tools.firewall_update:main",
"paasta_firewall_logging=paasta_tools.firewall_logging:main",
"paasta_oom_logger=paasta_tools.oom_logger:main",
"paasta_broadcast_log=paasta_tools.broadcast_log_to_services:main",
"paasta_dump_locally_running_services=paasta_tools.dump_locally_running_services:main",
],
"paste.app_factory": ["paasta-api-config=paasta_tools.api.api:make_app"],
},
)
|
import logging
from pymystrom.bulb import MyStromBulb
from pymystrom.exceptions import MyStromConnectionError
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_EFFECT,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_EFFECT,
SUPPORT_FLASH,
LightEntity,
)
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "myStrom bulb"
SUPPORT_MYSTROM = SUPPORT_BRIGHTNESS | SUPPORT_EFFECT | SUPPORT_FLASH | SUPPORT_COLOR
EFFECT_RAINBOW = "rainbow"
EFFECT_SUNRISE = "sunrise"
MYSTROM_EFFECT_LIST = [EFFECT_RAINBOW, EFFECT_SUNRISE]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the myStrom light integration."""
host = config.get(CONF_HOST)
mac = config.get(CONF_MAC)
name = config.get(CONF_NAME)
bulb = MyStromBulb(host, mac)
try:
await bulb.get_state()
if bulb.bulb_type != "rgblamp":
_LOGGER.error("Device %s (%s) is not a myStrom bulb", host, mac)
return
except MyStromConnectionError as err:
_LOGGER.warning("No route to myStrom bulb: %s", host)
raise PlatformNotReady() from err
async_add_entities([MyStromLight(bulb, name, mac)], True)
class MyStromLight(LightEntity):
"""Representation of the myStrom WiFi bulb."""
def __init__(self, bulb, name, mac):
"""Initialize the light."""
self._bulb = bulb
self._name = name
self._state = None
self._available = False
self._brightness = 0
self._color_h = 0
self._color_s = 0
self._mac = mac
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._mac
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_MYSTROM
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def hs_color(self):
"""Return the color of the light."""
return self._color_h, self._color_s
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def effect_list(self):
"""Return the list of supported effects."""
return MYSTROM_EFFECT_LIST
@property
def is_on(self):
"""Return true if light is on."""
return self._state
async def async_turn_on(self, **kwargs):
"""Turn on the light."""
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
effect = kwargs.get(ATTR_EFFECT)
if ATTR_HS_COLOR in kwargs:
color_h, color_s = kwargs[ATTR_HS_COLOR]
elif ATTR_BRIGHTNESS in kwargs:
# Brightness update, keep color
color_h, color_s = self._color_h, self._color_s
else:
color_h, color_s = 0, 0 # Back to white
try:
if not self.is_on:
await self._bulb.set_on()
if brightness is not None:
await self._bulb.set_color_hsv(
int(color_h), int(color_s), round(brightness * 100 / 255)
)
if effect == EFFECT_SUNRISE:
await self._bulb.set_sunrise(30)
if effect == EFFECT_RAINBOW:
await self._bulb.set_rainbow(30)
except MyStromConnectionError:
_LOGGER.warning("No route to myStrom bulb")
async def async_turn_off(self, **kwargs):
"""Turn off the bulb."""
try:
await self._bulb.set_off()
except MyStromConnectionError:
_LOGGER.warning("myStrom bulb not online")
async def async_update(self):
"""Fetch new state data for this light."""
try:
await self._bulb.get_state()
self._state = self._bulb.state
colors = self._bulb.color
try:
color_h, color_s, color_v = colors.split(";")
except ValueError:
color_s, color_v = colors.split(";")
color_h = 0
self._color_h = int(color_h)
self._color_s = int(color_s)
self._brightness = int(color_v) * 255 / 100
self._available = True
except MyStromConnectionError:
_LOGGER.warning("No route to myStrom bulb")
self._available = False
|
import urwid
class MenuButton(urwid.Button):
def __init__(self, caption, callback):
super(MenuButton, self).__init__("")
urwid.connect_signal(self, 'click', callback)
self._w = urwid.AttrMap(urwid.SelectableIcon(
[u' \N{BULLET} ', caption], 2), None, 'selected')
class SubMenu(urwid.WidgetWrap):
def __init__(self, caption, choices):
super(SubMenu, self).__init__(MenuButton(
[caption, u"\N{HORIZONTAL ELLIPSIS}"], self.open_menu))
line = urwid.Divider(u'\N{LOWER ONE QUARTER BLOCK}')
listbox = urwid.ListBox(urwid.SimpleFocusListWalker([
urwid.AttrMap(urwid.Text([u"\n ", caption]), 'heading'),
urwid.AttrMap(line, 'line'),
urwid.Divider()] + choices + [urwid.Divider()]))
self.menu = urwid.AttrMap(listbox, 'options')
def open_menu(self, button):
top.open_box(self.menu)
class Choice(urwid.WidgetWrap):
def __init__(self, caption):
super(Choice, self).__init__(
MenuButton(caption, self.item_chosen))
self.caption = caption
def item_chosen(self, button):
response = urwid.Text([u' You chose ', self.caption, u'\n'])
done = MenuButton(u'Ok', exit_program)
response_box = urwid.Filler(urwid.Pile([response, done]))
top.open_box(urwid.AttrMap(response_box, 'options'))
def exit_program(key):
raise urwid.ExitMainLoop()
menu_top = SubMenu(u'Main Menu', [
SubMenu(u'Applications', [
SubMenu(u'Accessories', [
Choice(u'Text Editor'),
Choice(u'Terminal'),
]),
]),
SubMenu(u'System', [
SubMenu(u'Preferences', [
Choice(u'Appearance'),
]),
Choice(u'Lock Screen'),
]),
])
palette = [
(None, 'light gray', 'black'),
('heading', 'black', 'light gray'),
('line', 'black', 'light gray'),
('options', 'dark gray', 'black'),
('focus heading', 'white', 'dark red'),
('focus line', 'black', 'dark red'),
('focus options', 'black', 'light gray'),
('selected', 'white', 'dark blue')]
focus_map = {
'heading': 'focus heading',
'options': 'focus options',
'line': 'focus line'}
class HorizontalBoxes(urwid.Columns):
def __init__(self):
super(HorizontalBoxes, self).__init__([], dividechars=1)
def open_box(self, box):
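        # Close every box to the right of the focused column, then append the new
        # box as a fixed-width (24 column) pane and move the focus onto it.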
if self.contents:
del self.contents[self.focus_position + 1:]
self.contents.append((urwid.AttrMap(box, 'options', focus_map),
self.options('given', 24)))
self.focus_position = len(self.contents) - 1
top = HorizontalBoxes()
top.open_box(menu_top.menu)
urwid.MainLoop(urwid.Filler(top, 'middle', 10), palette).run()
|
import uuid
import numpy as np
class Order(object):
"""
Represents sending an order from a trading algo entity
to a brokerage to execute.
A commission can be added here to override the commission
model, if known. An order_id can be added if required,
otherwise it will be randomly assigned.
Parameters
----------
dt : `pd.Timestamp`
The date-time that the order was created.
asset : `Asset`
The asset to transact with the order.
quantity : `int`
The quantity of the asset to transact.
A negative quantity means a short.
commission : `float`, optional
If commission is known it can be added.
order_id : `str`, optional
The order ID of the order, if known.
"""
def __init__(
self,
dt,
asset,
quantity,
commission=0.0,
order_id=None
):
self.created_dt = dt
self.cur_dt = dt
self.asset = asset
self.quantity = quantity
self.commission = commission
self.direction = np.copysign(1, self.quantity)
self.order_id = self._set_or_generate_order_id(order_id)
def _order_attribs_equal(self, other):
"""
        Checks whether all attributes of the Order are equal
with the exception of the order ID.
Used primarily for testing that orders are generated correctly.
Parameters
----------
other : `Order`
The order to compare attribute equality to.
Returns
-------
`Boolean`
Whether the non-order ID attributes are equal.
"""
if self.created_dt != other.created_dt:
return False
if self.cur_dt != other.cur_dt:
return False
if self.asset != other.asset:
return False
if self.quantity != other.quantity:
return False
if self.commission != other.commission:
return False
if self.direction != other.direction:
return False
return True
def __repr__(self):
"""
Output a string representation of the object
Returns
-------
`str`
String representation of the Order instance.
"""
return (
"Order(dt='%s', asset='%s', quantity=%s, "
"commission=%s, direction=%s, order_id=%s)" % (
self.created_dt, self.asset, self.quantity,
self.commission, self.direction, self.order_id
)
)
def _set_or_generate_order_id(self, order_id=None):
"""
Sets or generates a unique order ID for the order, using a UUID.
Parameters
----------
order_id : `str`, optional
An optional order ID override.
Returns
-------
`str`
The order ID string for the Order.
"""
if order_id is None:
return uuid.uuid4().hex
else:
return order_id
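# Example usage (illustrative only; ``asset`` stands in for any Asset instance and
# pandas is assumed to be available for the timestamp):
#
#   import pandas as pd
#   order = Order(pd.Timestamp("2021-01-04 14:30:00"), asset, quantity=-100)
#   order.direction  # -1.0, i.e. a short order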
|
import numpy as np
from elephas.mllib.adapter import *
from pyspark.mllib.linalg import Matrices, Vectors
def test_to_matrix():
x = np.ones((4, 2))
mat = to_matrix(x)
assert mat.numRows == 4
assert mat.numCols == 2
def test_from_matrix():
mat = Matrices.dense(1, 2, [13, 37])
x = from_matrix(mat)
assert x.shape == (1, 2)
def test_to_vector():
x = np.ones((3,))
vector = to_vector(x)
assert len(vector) == 3
def test_from_vector():
vector = Vectors.dense([4, 2])
x = from_vector(vector)
assert x.shape == (2,)
|
import difflib
import importlib
import os
from pathlib import Path
import pkgutil
import re
import sys
from homeassistant.util.yaml.loader import load_yaml
from script.hassfest.model import Integration
COMMENT_REQUIREMENTS = (
"Adafruit_BBIO",
"Adafruit-DHT",
"avea", # depends on bluepy
"avion",
"beacontools",
"beewi_smartclim", # depends on bluepy
"blinkt",
"bluepy",
"bme680",
"credstash",
"decora",
"decora_wifi",
"env_canada",
"envirophat",
"evdev",
"face_recognition",
"i2csense",
"nuimo",
"opencv-python-headless",
"py_noaa",
"pybluez",
"pycups",
"PySwitchbot",
"pySwitchmate",
"python-eq3bt",
"python-gammu",
"python-lirc",
"pyuserinput",
"raspihats",
"rpi-rf",
"RPi.GPIO",
"smbus-cffi",
"tensorflow",
"tf-models-official",
"VL53L1X2",
)
IGNORE_PIN = ("colorlog>2.1,<3", "keyring>=9.3,<10.0", "urllib3")
URL_PIN = (
"https://developers.home-assistant.io/docs/"
"creating_platform_code_review.html#1-requirements"
)
CONSTRAINT_PATH = os.path.join(
os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
)
CONSTRAINT_BASE = """
pycryptodome>=3.6.6
# Constrain urllib3 to ensure we deal with CVE-2019-11236 & CVE-2019-11324
urllib3>=1.24.3
# Constrain httplib2 to protect against CVE-2020-11078
httplib2>=0.18.0
# gRPC 1.32+ currently causes issues on ARMv7, see:
# https://github.com/home-assistant/core/issues/40148
grpcio==1.31.0
# This is an old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# To remove reliance on typing
btlewrap>=0.0.10
# This overrides a built-in Python package
enum34==1000000000.0.0
typing==1000000000.0.0
uuid==1000000000.0.0
"""
IGNORE_PRE_COMMIT_HOOK_ID = (
"check-executables-have-shebangs",
"check-json",
"no-commit-to-branch",
"prettier",
)
def has_tests(module: str):
"""Test if a module has tests.
Module format: homeassistant.components.hue
Test if exists: tests/components/hue
"""
path = Path(module.replace(".", "/").replace("homeassistant", "tests"))
if not path.exists():
return False
if not path.is_dir():
return True
# Dev environments might have stale directories around
# from removed tests. Check for that.
content = [f.name for f in path.glob("*")]
# Directories need to contain more than `__pycache__`
# to exist in Git and so be seen by CI.
return content != ["__pycache__"]
def explore_module(package, explore_children):
"""Explore the modules."""
module = importlib.import_module(package)
found = []
if not hasattr(module, "__path__"):
return found
for _, name, _ in pkgutil.iter_modules(module.__path__, f"{package}."):
found.append(name)
if explore_children:
found.extend(explore_module(name, False))
return found
def core_requirements():
"""Gather core requirements out of setup.py."""
reqs_raw = re.search(
r"REQUIRES = \[(.*?)\]", Path("setup.py").read_text(), re.S
).group(1)
return [x[1] for x in re.findall(r"(['\"])(.*?)\1", reqs_raw)]
def gather_recursive_requirements(domain, seen=None):
"""Recursively gather requirements from a module."""
if seen is None:
seen = set()
seen.add(domain)
integration = Integration(Path(f"homeassistant/components/{domain}"))
integration.load_manifest()
reqs = set(integration.requirements)
for dep_domain in integration.dependencies:
reqs.update(gather_recursive_requirements(dep_domain, seen))
return reqs
def comment_requirement(req):
"""Comment out requirement. Some don't install on all systems."""
return any(ign.lower() in req.lower() for ign in COMMENT_REQUIREMENTS)
def gather_modules():
"""Collect the information."""
reqs = {}
errors = []
gather_requirements_from_manifests(errors, reqs)
gather_requirements_from_modules(errors, reqs)
for key in reqs:
reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))
if errors:
print("******* ERROR")
print("Errors while importing: ", ", ".join(errors))
return None
return reqs
def gather_requirements_from_manifests(errors, reqs):
"""Gather all of the requirements from manifests."""
integrations = Integration.load_dir(Path("homeassistant/components"))
for domain in sorted(integrations):
integration = integrations[domain]
if not integration.manifest:
errors.append(f"The manifest for integration {domain} is invalid.")
continue
if integration.disabled:
continue
process_requirements(
errors, integration.requirements, f"homeassistant.components.{domain}", reqs
)
def gather_requirements_from_modules(errors, reqs):
"""Collect the requirements from the modules directly."""
for package in sorted(
explore_module("homeassistant.scripts", True)
+ explore_module("homeassistant.auth", True)
):
try:
module = importlib.import_module(package)
except ImportError as err:
print(f"{package.replace('.', '/')}.py: {err}")
errors.append(package)
continue
if getattr(module, "REQUIREMENTS", None):
process_requirements(errors, module.REQUIREMENTS, package, reqs)
def process_requirements(errors, module_requirements, package, reqs):
"""Process all of the requirements."""
for req in module_requirements:
if "://" in req:
errors.append(f"{package}[Only pypi dependencies are allowed: {req}]")
if req.partition("==")[1] == "" and req not in IGNORE_PIN:
errors.append(f"{package}[Please pin requirement {req}, see {URL_PIN}]")
reqs.setdefault(req, []).append(package)
def generate_requirements_list(reqs):
"""Generate a pip file based on requirements."""
output = []
for pkg, requirements in sorted(reqs.items(), key=lambda item: item[0]):
for req in sorted(requirements):
output.append(f"\n# {req}")
if comment_requirement(pkg):
output.append(f"\n# {pkg}\n")
else:
output.append(f"\n{pkg}\n")
return "".join(output)
def requirements_output(reqs):
"""Generate output for requirements."""
output = [
"-c homeassistant/package_constraints.txt\n",
"\n",
"# Home Assistant Core\n",
]
output.append("\n".join(core_requirements()))
output.append("\n")
return "".join(output)
def requirements_all_output(reqs):
"""Generate output for requirements_all."""
output = [
"# Home Assistant Core, full dependency set\n",
"-r requirements.txt\n",
]
output.append(generate_requirements_list(reqs))
return "".join(output)
def requirements_test_all_output(reqs):
"""Generate output for test_requirements."""
output = [
"# Home Assistant tests, full dependency set\n",
f"# Automatically generated by {Path(__file__).name}, do not edit\n",
"\n",
"-r requirements_test.txt\n",
]
filtered = {
requirement: modules
for requirement, modules in reqs.items()
if any(
# Always install requirements that are not part of integrations
not mdl.startswith("homeassistant.components.") or
# Install tests for integrations that have tests
has_tests(mdl)
for mdl in modules
)
}
output.append(generate_requirements_list(filtered))
return "".join(output)
def requirements_pre_commit_output():
"""Generate output for pre-commit dependencies."""
source = ".pre-commit-config.yaml"
pre_commit_conf = load_yaml(source)
reqs = []
for repo in (x for x in pre_commit_conf["repos"] if x.get("rev")):
for hook in repo["hooks"]:
if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
reqs.append(f"{hook['id']}=={repo['rev'].lstrip('v')}")
reqs.extend(x for x in hook.get("additional_dependencies", ()))
output = [
f"# Automatically generated "
f"from {source} by {Path(__file__).name}, do not edit",
"",
]
output.extend(sorted(reqs))
return "\n".join(output) + "\n"
def gather_constraints():
"""Construct output for constraint file."""
return (
"\n".join(
sorted(
{
*core_requirements(),
*gather_recursive_requirements("default_config"),
*gather_recursive_requirements("mqtt"),
}
)
+ [""]
)
+ CONSTRAINT_BASE
)
def diff_file(filename, content):
"""Diff a file."""
return list(
difflib.context_diff(
[f"{line}\n" for line in Path(filename).read_text().split("\n")],
[f"{line}\n" for line in content.split("\n")],
filename,
"generated",
)
)
def main(validate):
"""Run the script."""
if not os.path.isfile("requirements_all.txt"):
print("Run this from HA root dir")
return 1
data = gather_modules()
if data is None:
return 1
reqs_file = requirements_output(data)
reqs_all_file = requirements_all_output(data)
reqs_test_all_file = requirements_test_all_output(data)
reqs_pre_commit_file = requirements_pre_commit_output()
constraints = gather_constraints()
files = (
("requirements.txt", reqs_file),
("requirements_all.txt", reqs_all_file),
("requirements_test_pre_commit.txt", reqs_pre_commit_file),
("requirements_test_all.txt", reqs_test_all_file),
("homeassistant/package_constraints.txt", constraints),
)
if validate:
errors = []
for filename, content in files:
diff = diff_file(filename, content)
if diff:
errors.append("".join(diff))
if errors:
print("ERROR - FOUND THE FOLLOWING DIFFERENCES")
print()
print()
print("\n\n".join(errors))
print()
print("Please run python3 -m script.gen_requirements_all")
return 1
return 0
for filename, content in files:
Path(filename).write_text(content)
return 0
if __name__ == "__main__":
_VAL = sys.argv[-1] == "validate"
sys.exit(main(_VAL))
|
import logging
from homeassistant.components.cover import (
DEVICE_CLASS_WINDOW,
ENTITY_ID_FORMAT,
CoverEntity,
)
from homeassistant.util import slugify
from . import CONF_DEFAULT_REVERSE, DATA_SOMFY_MYLINK
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Discover and configure Somfy covers."""
if discovery_info is None:
return
somfy_mylink = hass.data[DATA_SOMFY_MYLINK]
cover_list = []
try:
mylink_status = await somfy_mylink.status_info()
except TimeoutError:
_LOGGER.error(
"Unable to connect to the Somfy MyLink device, "
"please check your settings"
)
return
for cover in mylink_status["result"]:
entity_id = ENTITY_ID_FORMAT.format(slugify(cover["name"]))
entity_config = discovery_info.get(entity_id, {})
default_reverse = discovery_info[CONF_DEFAULT_REVERSE]
cover_config = {}
cover_config["target_id"] = cover["targetID"]
cover_config["name"] = cover["name"]
cover_config["reverse"] = entity_config.get("reverse", default_reverse)
cover_list.append(SomfyShade(somfy_mylink, **cover_config))
_LOGGER.info(
"Adding Somfy Cover: %s with targetID %s",
cover_config["name"],
cover_config["target_id"],
)
async_add_entities(cover_list)
class SomfyShade(CoverEntity):
"""Object for controlling a Somfy cover."""
def __init__(
self,
somfy_mylink,
target_id,
name="SomfyShade",
reverse=False,
device_class=DEVICE_CLASS_WINDOW,
):
"""Initialize the cover."""
self.somfy_mylink = somfy_mylink
self._target_id = target_id
self._name = name
self._reverse = reverse
self._device_class = device_class
@property
def unique_id(self):
"""Return the unique ID of this cover."""
return self._target_id
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def is_closed(self):
"""Return if the cover is closed."""
return None
@property
def assumed_state(self):
"""Let HA know the integration is assumed state."""
return True
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return self._device_class
async def async_open_cover(self, **kwargs):
"""Wrap Homeassistant calls to open the cover."""
if not self._reverse:
await self.somfy_mylink.move_up(self._target_id)
else:
await self.somfy_mylink.move_down(self._target_id)
async def async_close_cover(self, **kwargs):
"""Wrap Homeassistant calls to close the cover."""
if not self._reverse:
await self.somfy_mylink.move_down(self._target_id)
else:
await self.somfy_mylink.move_up(self._target_id)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self.somfy_mylink.move_stop(self._target_id)
|
import pytest
from homeassistant.auth.providers import homeassistant as prov_ha
from homeassistant.components.config import auth_provider_homeassistant as auth_ha
from tests.common import CLIENT_ID, MockUser, register_auth_provider
@pytest.fixture(autouse=True)
def setup_config(hass):
"""Fixture that sets up the auth provider homeassistant module."""
hass.loop.run_until_complete(
register_auth_provider(hass, {"type": "homeassistant"})
)
hass.loop.run_until_complete(auth_ha.async_setup(hass))
@pytest.fixture
async def auth_provider(hass):
"""Hass auth provider."""
provider = hass.auth.auth_providers[0]
await provider.async_initialize()
return provider
@pytest.fixture
async def owner_access_token(hass, hass_owner_user):
"""Access token for owner user."""
refresh_token = await hass.auth.async_create_refresh_token(
hass_owner_user, CLIENT_ID
)
return hass.auth.async_create_access_token(refresh_token)
@pytest.fixture
async def test_user_credential(hass, auth_provider):
"""Add a test user."""
await hass.async_add_executor_job(
auth_provider.data.add_auth, "test-user", "test-pass"
)
return await auth_provider.async_get_or_create_credentials(
{"username": "test-user"}
)
async def test_create_auth_system_generated_user(hass, hass_ws_client):
"""Test we can't add auth to system generated users."""
system_user = MockUser(system_generated=True).add_to_hass(hass)
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/create",
"user_id": system_user.id,
"username": "test-user",
"password": "test-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "system_generated"
async def test_create_auth_user_already_credentials():
"""Test we can't create auth for user with pre-existing credentials."""
# assert False
async def test_create_auth_unknown_user(hass_ws_client, hass):
"""Test create pointing at unknown user."""
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/create",
"user_id": "test-id",
"username": "test-user",
"password": "test-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "not_found"
async def test_create_auth_requires_admin(
hass, hass_ws_client, hass_read_only_access_token
):
"""Test create requires admin to call API."""
client = await hass_ws_client(hass, hass_read_only_access_token)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/create",
"user_id": "test-id",
"username": "test-user",
"password": "test-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "unauthorized"
async def test_create_auth(hass, hass_ws_client, hass_storage):
"""Test create auth command works."""
client = await hass_ws_client(hass)
user = MockUser().add_to_hass(hass)
assert len(user.credentials) == 0
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/create",
"user_id": user.id,
"username": "test-user",
"password": "test-pass",
}
)
result = await client.receive_json()
assert result["success"], result
assert len(user.credentials) == 1
creds = user.credentials[0]
assert creds.auth_provider_type == "homeassistant"
assert creds.auth_provider_id is None
assert creds.data == {"username": "test-user"}
assert prov_ha.STORAGE_KEY in hass_storage
entry = hass_storage[prov_ha.STORAGE_KEY]["data"]["users"][0]
assert entry["username"] == "test-user"
async def test_create_auth_duplicate_username(hass, hass_ws_client, hass_storage):
"""Test we can't create auth with a duplicate username."""
client = await hass_ws_client(hass)
user = MockUser().add_to_hass(hass)
hass_storage[prov_ha.STORAGE_KEY] = {
"version": 1,
"data": {"users": [{"username": "test-user"}]},
}
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/create",
"user_id": user.id,
"username": "test-user",
"password": "test-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "username_exists"
async def test_delete_removes_just_auth(hass_ws_client, hass, hass_storage):
"""Test deleting an auth without being connected to a user."""
client = await hass_ws_client(hass)
hass_storage[prov_ha.STORAGE_KEY] = {
"version": 1,
"data": {"users": [{"username": "test-user"}]},
}
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/delete",
"username": "test-user",
}
)
result = await client.receive_json()
assert result["success"], result
assert len(hass_storage[prov_ha.STORAGE_KEY]["data"]["users"]) == 0
async def test_delete_removes_credential(hass, hass_ws_client, hass_storage):
"""Test deleting auth that is connected to a user."""
client = await hass_ws_client(hass)
user = MockUser().add_to_hass(hass)
hass_storage[prov_ha.STORAGE_KEY] = {
"version": 1,
"data": {"users": [{"username": "test-user"}]},
}
user.credentials.append(
await hass.auth.auth_providers[0].async_get_or_create_credentials(
{"username": "test-user"}
)
)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/delete",
"username": "test-user",
}
)
result = await client.receive_json()
assert result["success"], result
assert len(hass_storage[prov_ha.STORAGE_KEY]["data"]["users"]) == 0
async def test_delete_requires_admin(hass, hass_ws_client, hass_read_only_access_token):
"""Test delete requires admin."""
client = await hass_ws_client(hass, hass_read_only_access_token)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/delete",
"username": "test-user",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "unauthorized"
async def test_delete_unknown_auth(hass, hass_ws_client):
"""Test trying to delete an unknown auth username."""
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 5,
"type": "config/auth_provider/homeassistant/delete",
"username": "test-user",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "auth_not_found"
async def test_change_password(
hass, hass_ws_client, hass_admin_user, auth_provider, test_user_credential
):
"""Test that change password succeeds with valid password."""
await hass.auth.async_link_user(hass_admin_user, test_user_credential)
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/change_password",
"current_password": "test-pass",
"new_password": "new-pass",
}
)
result = await client.receive_json()
assert result["success"], result
await auth_provider.async_validate_login("test-user", "new-pass")
async def test_change_password_wrong_pw(
hass, hass_ws_client, hass_admin_user, auth_provider, test_user_credential
):
"""Test that change password fails with invalid password."""
await hass.auth.async_link_user(hass_admin_user, test_user_credential)
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/change_password",
"current_password": "wrong-pass",
"new_password": "new-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "invalid_password"
with pytest.raises(prov_ha.InvalidAuth):
await auth_provider.async_validate_login("test-user", "new-pass")
async def test_change_password_no_creds(hass, hass_ws_client):
"""Test that change password fails with no credentials."""
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/change_password",
"current_password": "test-pass",
"new_password": "new-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "credentials_not_found"
async def test_admin_change_password_not_owner(
hass, hass_ws_client, auth_provider, test_user_credential
):
"""Test that change password fails when not owner."""
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/admin_change_password",
"user_id": "test-user",
"password": "new-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "unauthorized"
# Validate old login still works
await auth_provider.async_validate_login("test-user", "test-pass")
async def test_admin_change_password_no_user(hass, hass_ws_client, owner_access_token):
"""Test that change password fails with unknown user."""
client = await hass_ws_client(hass, owner_access_token)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/admin_change_password",
"user_id": "non-existing",
"password": "new-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "user_not_found"
async def test_admin_change_password_no_cred(
hass, hass_ws_client, owner_access_token, hass_admin_user
):
"""Test that change password fails with unknown credential."""
client = await hass_ws_client(hass, owner_access_token)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/admin_change_password",
"user_id": hass_admin_user.id,
"password": "new-pass",
}
)
result = await client.receive_json()
assert not result["success"], result
assert result["error"]["code"] == "credentials_not_found"
async def test_admin_change_password(
hass,
hass_ws_client,
owner_access_token,
auth_provider,
test_user_credential,
hass_admin_user,
):
"""Test that owners can change any password."""
await hass.auth.async_link_user(hass_admin_user, test_user_credential)
client = await hass_ws_client(hass, owner_access_token)
await client.send_json(
{
"id": 6,
"type": "config/auth_provider/homeassistant/admin_change_password",
"user_id": hass_admin_user.id,
"password": "new-pass",
}
)
result = await client.receive_json()
assert result["success"], result
await auth_provider.async_validate_login("test-user", "new-pass")
|
from meld.matchers import diffutil
from meld.matchers.myers import MyersSequenceMatcher
LO, HI = 1, 2
class AutoMergeDiffer(diffutil.Differ):
_matcher = MyersSequenceMatcher
# _matcher = PatienceSequenceMatcher
def __init__(self):
super().__init__()
self.auto_merge = False
self.unresolved = []
def _auto_merge(self, using, texts):
for out0, out1 in super()._auto_merge(using, texts):
if self.auto_merge and out0[0] == 'conflict':
# we will try to resolve more complex conflicts automatically
# here... if possible
l0, h0, l1, h1, l2, h2 = (
out0[3], out0[4], out0[1], out0[2], out1[3], out1[4])
len0 = h0 - l0
len1 = h1 - l1
len2 = h2 - l2
if (len0 > 0 and len2 > 0) and (
len0 == len1 or len2 == len1 or len1 == 0):
matcher = self._matcher(
None, texts[0][l0:h0], texts[2][l2:h2])
for chunk in matcher.get_opcodes():
s1 = l1
e1 = l1
if len0 == len1:
s1 += chunk[1]
e1 += chunk[2]
elif len2 == len1:
s1 += chunk[3]
e1 += chunk[4]
out0_bounds = (s1, e1, l0 + chunk[1], l0 + chunk[2])
out1_bounds = (s1, e1, l2 + chunk[3], l2 + chunk[4])
if chunk[0] == 'equal':
out0 = ('replace',) + out0_bounds
out1 = ('replace',) + out1_bounds
yield out0, out1
else:
out0 = ('conflict',) + out0_bounds
out1 = ('conflict',) + out1_bounds
yield out0, out1
return
# elif len0 > 0 and len2 > 0:
# # this logic will resolve more conflicts automatically,
# # but unresolved conflicts may sometimes look confusing
# # as the line numbers in ancestor file will be
# # interpolated and may not reflect the actual changes
# matcher = self._matcher(
# None, texts[0][l0:h0], texts[2][l2:h2])
# if len0 > len2:
# maxindex = 1
# maxlen = len0
# else:
# maxindex = 3
# maxlen = len2
# for chunk in matcher.get_opcodes():
# new_start = l1 + len1 * chunk[maxindex] / maxlen
# new_end = l1 + len1 * chunk[maxindex + 1] / maxlen
# out0_bounds = (
# new_start, new_end, l0 + chunk[1], l0 + chunk[2])
# out1_bounds = (
# new_start, new_end, l2 + chunk[3], l2 + chunk[4])
# if chunk[0] == 'equal':
# out0 = ('replace',) + out0_bounds
# out1 = ('replace',) + out1_bounds
# yield out0, out1
# else:
# out0 = ('conflict',) + out0_bounds
# out1 = ('conflict',) + out1_bounds
# yield out0, out1
# return
else:
# some tricks to resolve even more conflicts automatically
# unfortunately the resulting chunks cannot be used to
# highlight changes but hey, they are good enough to merge
# the resulting file :)
chunktype = using[0][0][0]
for chunkarr in using:
for chunk in chunkarr:
if chunk[0] != chunktype:
chunktype = None
break
if not chunktype:
break
if chunktype == 'delete':
# delete + delete -> split into delete/conflict
seq0 = seq1 = None
while 1:
if seq0 is None:
try:
seq0 = using[0].pop(0)
i0 = seq0[1]
end0 = seq0[4]
except IndexError:
break
if seq1 is None:
try:
seq1 = using[1].pop(0)
i1 = seq1[1]
end1 = seq1[4]
except IndexError:
break
highstart = max(i0, i1)
if i0 != i1:
out0 = (
'conflict', i0 - highstart + i1, highstart,
seq0[3] - highstart + i1, seq0[3]
)
out1 = (
'conflict', i1 - highstart + i0, highstart,
seq1[3] - highstart + i0, seq1[3]
)
yield out0, out1
lowend = min(seq0[2], seq1[2])
if highstart != lowend:
out0 = (
'delete', highstart, lowend,
seq0[3], seq0[4]
)
out1 = (
'delete', highstart, lowend,
seq1[3], seq1[4]
)
yield out0, out1
i0 = i1 = lowend
if lowend == seq0[2]:
seq0 = None
if lowend == seq1[2]:
seq1 = None
if seq0:
out0 = (
'conflict', i0, seq0[2],
seq0[3], seq0[4]
)
out1 = (
'conflict', i0, seq0[2],
end1, end1 + seq0[2] - i0
)
yield out0, out1
elif seq1:
out0 = (
'conflict', i1, seq1[2],
end0, end0 + seq1[2] - i1
)
out1 = (
'conflict', i1,
seq1[2], seq1[3], seq1[4]
)
yield out0, out1
return
yield out0, out1
def change_sequence(self, sequence, startidx, sizechange, texts):
if sequence == 1:
lo = 0
for c in self.unresolved:
if startidx <= c:
break
lo += 1
if lo < len(self.unresolved):
hi = lo
if sizechange < 0:
for c in self.unresolved[lo:]:
if startidx - sizechange <= c:
break
hi += 1
elif sizechange == 0 and startidx == self.unresolved[lo]:
hi += 1
if hi < len(self.unresolved):
self.unresolved[hi:] = [
c + sizechange for c in self.unresolved[hi:]
]
self.unresolved[lo:hi] = []
return super().change_sequence(sequence, startidx, sizechange, texts)
def get_unresolved_count(self):
return len(self.unresolved)
class Merger(diffutil.Differ):
    def __init__(self):
self.differ = AutoMergeDiffer()
self.differ.auto_merge = True
self.differ.unresolved = []
self.texts = []
def initialize(self, sequences, texts):
step = self.differ.set_sequences_iter(sequences)
while next(step) is None:
yield None
self.texts = texts
yield 1
def _apply_change(self, text, change, mergedtext):
if change[0] == 'insert':
for i in range(change[LO + 2], change[HI + 2]):
mergedtext.append(text[i])
return 0
elif change[0] == 'replace':
for i in range(change[LO + 2], change[HI + 2]):
mergedtext.append(text[i])
return change[HI] - change[LO]
else:
return change[HI] - change[LO]
def merge_3_files(self, mark_conflicts=True):
self.unresolved = []
lastline = 0
mergedline = 0
mergedtext = []
for change in self.differ.all_changes():
yield None
low_mark = lastline
if change[0] is not None:
low_mark = change[0][LO]
if change[1] is not None:
if change[1][LO] > low_mark:
low_mark = change[1][LO]
for i in range(lastline, low_mark, 1):
mergedtext.append(self.texts[1][i])
mergedline += low_mark - lastline
lastline = low_mark
if (change[0] is not None and change[1] is not None and
change[0][0] == 'conflict'):
high_mark = max(change[0][HI], change[1][HI])
if mark_conflicts:
if low_mark < high_mark:
for i in range(low_mark, high_mark):
mergedtext.append("(??)" + self.texts[1][i])
self.unresolved.append(mergedline)
mergedline += 1
else:
mergedtext.append("(??)")
self.unresolved.append(mergedline)
mergedline += 1
lastline = high_mark
elif change[0] is not None:
lastline += self._apply_change(
self.texts[0], change[0], mergedtext)
mergedline += change[0][HI + 2] - change[0][LO + 2]
else:
lastline += self._apply_change(
self.texts[2], change[1], mergedtext)
mergedline += change[1][HI + 2] - change[1][LO + 2]
baselen = len(self.texts[1])
for i in range(lastline, baselen, 1):
mergedtext.append(self.texts[1][i])
# FIXME: We need to obtain the original line endings from the lines
# that were merged and use those here instead of assuming '\n'.
yield "\n".join(mergedtext)
def merge_2_files(self, fromindex, toindex):
self.unresolved = []
lastline = 0
mergedtext = []
for change in self.differ.pair_changes(toindex, fromindex):
yield None
if change[0] == 'conflict':
low_mark = change[HI]
else:
low_mark = change[LO]
for i in range(lastline, low_mark):
mergedtext.append(self.texts[toindex][i])
lastline = low_mark
if change[0] != 'conflict':
lastline += self._apply_change(
self.texts[fromindex], change, mergedtext)
baselen = len(self.texts[toindex])
for i in range(lastline, baselen):
mergedtext.append(self.texts[toindex][i])
# FIXME: We need to obtain the original line endings from the lines
# that were merged and use those here instead of assuming '\n'.
yield "\n".join(mergedtext)
|
import json
import time
import asyncio
import socket
import mimetypes
import traceback
import threading
from urllib.parse import urlparse
# from concurrent.futures import ThreadPoolExecutor
import tornado
from tornado import gen, netutil
from tornado.web import Application, RequestHandler
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, WebSocketClosedError
from tornado.httpserver import HTTPServer
from tornado.platform.asyncio import AsyncIOMainLoop
from ._app import manager
from ._session import get_page
from ._server import AbstractServer
from ._assetstore import assets
from ._clientcore import serializer
from . import logger
from .. import config
if tornado.version_info < (4, ):
raise RuntimeError('Flexx requires Tornado v4.0 or higher.')
# todo: generalize -> Make Tornado more of an implementation detail.
# So we can use e.g. https://github.com/aaugustin/websockets
# todo: threading, or even multi-process
#executor = ThreadPoolExecutor(4)
IMPORT_TIME = time.time()
def is_main_thread():
""" Get whether this is the main thread. """
return isinstance(threading.current_thread(), threading._MainThread)
class TornadoServer(AbstractServer):
""" Flexx Server implemented in Tornado.
"""
def __init__(self, *args, **kwargs):
self._app = None
self._server = None
super().__init__(*args, **kwargs)
def _open(self, host, port, **kwargs):
# Note: does not get called if host is False. That way we can
# run Flexx in e.g. JLab's application.
# Hook Tornado up with asyncio. Flexx' BaseServer makes sure
# that the correct asyncio event loop is current (for this thread).
# http://www.tornadoweb.org/en/stable/asyncio.html
        # todo: Since Tornado v5.0 asyncio is automatically used, deprecating AsyncIOMainLoop
self._io_loop = AsyncIOMainLoop()
        # I am sorry for this hack, but Tornado won't work otherwise :(
# I wonder how long it will take before this will bite me back. I guess
# we will be alright as long as there is no other Tornado stuff going on.
IOLoop._current.instance = None
self._io_loop.make_current()
        # handle ssl, whether from configuration or given args
if config.ssl_certfile:
if 'ssl_options' not in kwargs:
kwargs['ssl_options'] = {}
if 'certfile' not in kwargs['ssl_options']:
kwargs['ssl_options']['certfile'] = config.ssl_certfile
if config.ssl_keyfile:
if 'ssl_options' not in kwargs:
kwargs['ssl_options'] = {}
if 'keyfile' not in kwargs['ssl_options']:
kwargs['ssl_options']['keyfile'] = config.ssl_keyfile
if config.tornado_debug:
app_kwargs = dict(debug=True)
else:
app_kwargs = dict()
# Create tornado application
self._app = Application([(r"/flexx/ws/(.*)", WSHandler),
(r"/flexx/(.*)", MainHandler),
(r"/(.*)", AppHandler), ], **app_kwargs)
self._app._io_loop = self._io_loop
# Create tornado server, bound to our own ioloop
if tornado.version_info < (5, ):
kwargs['io_loop'] = self._io_loop
self._server = HTTPServer(self._app, **kwargs)
# Start server (find free port number if port not given)
if port:
# Turn port into int, use hashed port number if a string was given
try:
port = int(port)
except ValueError:
port = port_hash(port)
self._server.listen(port, host)
else:
# Try N ports in a repeatable range (easier, browser history, etc.)
            preferred_port = port_hash('Flexx')
            for i in range(8):
                port = preferred_port + i
try:
self._server.listen(port, host)
break
except (OSError, IOError):
pass # address already in use
else:
# Ok, let Tornado figure out a port
[sock] = netutil.bind_sockets(None, host, family=socket.AF_INET)
self._server.add_sockets([sock])
port = sock.getsockname()[1]
        # Notify the address, so it's easy to e.g. copy and paste into the browser
self._serving = self._app._flexx_serving = host, port
proto = 'http'
if 'ssl_options' in kwargs:
proto = 'https'
# This string 'Serving apps at' is our 'ready' signal and is tested for.
logger.info('Serving apps at %s://%s:%i/' % (proto, host, port))
def _close(self):
self._server.stop()
@property
def app(self):
""" The Tornado Application object being used."""
return self._app
@property
def server(self):
""" The Tornado HttpServer object being used."""
return self._server
@property
def protocol(self):
""" Get a string representing served protocol."""
if self._server.ssl_options is not None:
return 'https'
return 'http'
def port_hash(name):
""" Given a string, returns a port number between 49152 and 65535
    This range (of 2**14 possibilities) is the range for dynamic and/or
private ports (ephemeral ports) specified by iana.org. The algorithm
is deterministic.
"""
fac = 0xd2d84a61
val = 0
for c in name:
val += (val >> 3) + (ord(c) * fac)
val += (val >> 3) + (len(name) * fac)
return 49152 + (val % 2**14)
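# Illustrative note (not part of the original module): port_hash() is
# deterministic, so the same name always maps to the same port in the
# 49152-65535 range.  That is what makes the preferred-port lookup above
# repeatable, e.g.:
#
#     assert port_hash('myapp') == port_hash('myapp')
#     assert 49152 <= port_hash('myapp') <= 65535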
class FlexxHandler(RequestHandler):
""" Base class for Flexx' Tornado request handlers.
"""
def initialize(self, **kwargs):
# kwargs == dict set as third arg in url spec
pass
def write_error(self, status_code, **kwargs):
if status_code == 404: # does not work?
self.write('flexx.ui wants you to connect to root (404)')
else:
if config.browser_stacktrace:
msg = 'Flexx.ui encountered an error: <br /><br />'
try: # try providing a useful message; tough luck if this fails
type, value, tb = kwargs['exc_info']
tb_str = ''.join(traceback.format_tb(tb))
msg += '<pre>%s\n%s</pre>' % (tb_str, str(value))
except Exception:
pass
self.write(msg)
super().write_error(status_code, **kwargs)
def on_finish(self):
pass
class AppHandler(FlexxHandler):
""" Handler for http requests to get apps.
"""
@gen.coroutine
def get(self, full_path):
logger.debug('Incoming request at %r' % full_path)
ok_app_names = '__main__', '__default__', '__index__'
parts = [p for p in full_path.split('/') if p]
# Try getting regular app name
        # Note: an invalid part[0] can mean it's a path relative to the main app
app_name = None
path = '/'.join(parts)
if parts:
if path.lower() == 'flexx': # reserved, redirect to other handler
return self.redirect('/flexx/')
if parts[0] in ok_app_names or manager.has_app_name(parts[0]):
app_name = parts[0]
path = '/'.join(parts[1:])
# If it does not look like an app, it might be that the request is for
        # the main app. The main app can have sub-paths, but let's try to filter
# out cases that might make Flexx unnecessarily instantiate an app.
# In particular "favicon.ico" that browsers request by default (#385).
if app_name is None:
if len(parts) == 1 and '.' in full_path:
return self.redirect('/flexx/data/' + full_path)
# If we did not return ... assume this is the default app
app_name = '__main__'
# Try harder to produce an app
if app_name == '__main__':
app_name = manager.has_app_name('__main__')
elif '/' not in full_path:
return self.redirect('/%s/' % app_name) # ensure slash behind name
# Maybe the user wants an index? Otherwise error.
if not app_name:
if not parts:
app_name = '__index__'
else:
name = parts[0] if parts else '__main__'
return self.write('No app "%s" is currently hosted.' % name)
# We now have:
# * app_name: name of the app, must be a valid identifier, names
# with underscores are reserved for special things like assets,
# commands, etc.
# * path: part (possibly with slashes) after app_name
if app_name == '__index__':
self._get_index(app_name, path) # Index page
else:
self._get_app(app_name, path) # An actual app!
def _get_index(self, app_name, path):
if path:
return self.redirect('/flexx/__index__')
all_apps = ['<li><a href="%s/">%s</a></li>' % (name, name) for name in
manager.get_app_names()]
the_list = '<ul>%s</ul>' % ''.join(all_apps) if all_apps else 'no apps'
self.write('Index of available apps: ' + the_list)
def _get_app(self, app_name, path):
# Allow serving data/assets relative to app so that data can use
# relative paths just like exported apps.
if path.startswith(('flexx/data/', 'flexx/assets/')):
return self.redirect('/' + path)
# Get case-corrected app name if the app is known
correct_app_name = manager.has_app_name(app_name)
# Error or redirect if app name is not right
if not correct_app_name:
return self.write('No app "%s" is currently hosted.' % app_name)
if correct_app_name != app_name:
return self.redirect('/%s/%s' % (correct_app_name, path))
# Should we bind this app instance to a pre-created session?
session_id = self.get_argument('session_id', '')
if session_id:
# If session_id matches a pending app, use that session
session = manager.get_session_by_id(session_id)
if session and session.status == session.STATUS.PENDING:
self.write(get_page(session).encode())
else:
self.redirect('/%s/' % app_name) # redirect for normal serve
else:
# Create session - websocket will connect to it via session_id
session = manager.create_session(app_name, request=self.request)
self.write(get_page(session).encode())
class MainHandler(RequestHandler):
""" Handler for assets, commands, etc. Basically, everything for
    which the path is clear.
"""
def _guess_mime_type(self, fname):
""" Set the mimetype if we can guess it from the filename.
"""
guess = mimetypes.guess_type(fname)[0]
if guess:
self.set_header("Content-Type", guess)
@gen.coroutine
def get(self, full_path):
logger.debug('Incoming request at %s' % full_path)
# Analyze path to derive components
        # Note: an invalid app name can mean it's a path relative to the main app
parts = [p for p in full_path.split('/') if p]
if not parts:
return self.write('Root url for flexx: assets, assetview, data, cmd')
selector = parts[0]
path = '/'.join(parts[1:])
if selector in ('assets', 'assetview', 'data'):
self._get_asset(selector, path) # JS, CSS, or data
elif selector == 'info':
self._get_info(selector, path)
elif selector == 'cmd':
self._get_cmd(selector, path) # Execute (or ignore) command
else:
return self.write('Invalid url path "%s".' % full_path)
def _get_asset(self, selector, path):
# Get session id and filename
session_id, _, filename = path.partition('/')
session_id = '' if session_id == 'shared' else session_id
# Get asset provider: store or session
asset_provider = assets
if session_id and selector != 'data':
            return self.write('Only supports shared assets, not %r' % filename)
elif session_id:
asset_provider = manager.get_session_by_id(session_id)
# Checks
if asset_provider is None:
return self.write('Invalid session %r' % session_id)
if not filename:
return self.write('Root dir for %s/%s' % (selector, path))
if selector == 'assets':
# If colon: request for a view of an asset at a certain line
if '.js:' in filename or '.css:' in filename or filename[0] == ':':
fname, where = filename.split(':')[:2]
return self.redirect('/flexx/assetview/%s/%s#L%s' %
(session_id or 'shared', fname.replace('/:', ':'), where))
# Retrieve asset
try:
res = asset_provider.get_asset(filename)
except KeyError:
self.write('Could not load asset %r' % filename)
else:
self._guess_mime_type(filename)
self.write(res.to_string())
elif selector == 'assetview':
# Retrieve asset
try:
res = asset_provider.get_asset(filename)
except KeyError:
return self.write('Could not load asset %r' % filename)
else:
res = res.to_string()
# Build HTML page
style = ('pre {display:block; width: 100%; padding:0; margin:0;} '
'a {text-decoration: none; color: #000; background: #ddd;} '
':target {background:#ada;} ')
lines = ['<html><head><style>%s</style></head><body>' % style]
for i, line in enumerate(res.splitlines()):
                table = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;'}
                line = line.translate(table).replace('\t', '    ')
                lines.append('<pre id="L%i"><a href="#L%i">%s</a>  %s</pre>' %
                             (i+1, i+1, str(i+1).rjust(4).replace(' ', '&nbsp;'), line))
lines.append('</body></html>')
return self.write('\n'.join(lines))
elif selector == 'data':
# todo: can/do we async write in case the data is large?
# Retrieve data
res = asset_provider.get_data(filename)
if res is None:
return self.send_error(404)
else:
self._guess_mime_type(filename) # so that images show up
return self.write(res)
else:
raise RuntimeError('Invalid asset type %r' % selector)
def _get_info(self, selector, info):
""" Provide some rudimentary information about the server.
        Note that this is publicly accessible.
"""
runtime = time.time() - IMPORT_TIME
napps = len(manager.get_app_names())
nsessions = sum([len(manager.get_connections(x))
for x in manager.get_app_names()])
info = []
info.append('Runtime: %1.1f s' % runtime)
info.append('Number of apps: %i' % napps)
info.append('Number of sessions: %i' % nsessions)
info = '\n'.join(['<li>%s</li>' % i for i in info])
self.write('<ul>' + info + '</ul>')
def _get_cmd(self, selector, path):
""" Allow control of the server using http, but only from localhost!
"""
if not self.request.host.startswith('localhost:'):
self.write('403')
return
if not path:
self.write('No command given')
elif path == 'info':
info = dict(address=self.application._flexx_serving,
app_names=manager.get_app_names(),
nsessions=sum([len(manager.get_connections(x))
for x in manager.get_app_names()]),
)
self.write(json.dumps(info))
elif path == 'stop':
asyncio.get_event_loop().stop()
# loop = IOLoop.current()
# loop.add_callback(loop.stop)
self.write("Stopping event loop.")
else:
self.write('unknown command %r' % path)
class MessageCounter:
""" Simple class to count incoming messages and periodically log
the number of messages per second.
"""
def __init__(self):
self._collect_interval = 0.2 # period over which to collect messages
self._notify_interval = 3.0 # period on which to log the mps
self._window_interval = 4.0 # size of sliding window
self._mps = [(time.time(), 0)] # tuples of (time, count)
self._collect_count = 0
self._collect_stoptime = 0
self._stop = False
self._notify()
def trigger(self):
t = time.time()
if t < self._collect_stoptime:
self._collect_count += 1
else:
self._mps.append((self._collect_stoptime, self._collect_count))
self._collect_count = 1
self._collect_stoptime = t + self._collect_interval
def _notify(self):
mintime = time.time() - self._window_interval
self._mps = [x for x in self._mps if x[0] > mintime]
if self._mps:
n = sum([x[1] for x in self._mps])
T = self._mps[-1][0] - self._mps[0][0] + self._collect_interval
else:
n, T = 0, self._collect_interval
logger.debug('Websocket messages per second: %1.1f' % (n / T))
if not self._stop:
loop = asyncio.get_event_loop()
loop.call_later(self._notify_interval, self._notify)
def stop(self):
self._stop = True
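# Illustrative sketch (not part of the original module): MessageCounter is
# driven purely by trigger() calls and logs its rate via the asyncio loop of
# the thread it was created on, so standalone usage (assuming an asyncio
# event loop is available) looks roughly like this:
#
#     counter = MessageCounter()   # schedules the periodic _notify() logging
#     for _ in range(100):
#         counter.trigger()        # call once per incoming websocket message
#     counter.stop()               # stop rescheduling the logging callback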
class WSHandler(WebSocketHandler):
""" Handler for websocket.
"""
# https://tools.ietf.org/html/rfc6455#section-7.4.1
known_reasons = {1000: 'client done',
1001: 'client closed',
1002: 'protocol error',
1003: 'could not accept data',
}
# --- callbacks
def open(self, path=None):
""" Called when a new connection is made.
"""
if not hasattr(self, 'close_code'): # old version of Tornado?
self.close_code, self.close_reason = None, None
self._session = None
self._mps_counter = MessageCounter()
# Don't collect messages to send them more efficiently, just send asap
# self.set_nodelay(True)
if isinstance(path, bytes):
path = path.decode()
self.app_name = path.strip('/')
logger.debug('New websocket connection %s' % path)
if manager.has_app_name(self.app_name):
self.application._io_loop.spawn_callback(self.pinger1)
else:
self.close(1003, "Could not associate socket with an app.")
# todo: @gen.coroutine?
def on_message(self, message):
""" Called when a new message is received from JS.
This handles one message per event loop iteration.
        We now have a very basic protocol for receiving messages;
we should at some point define a real formalized protocol.
"""
self._mps_counter.trigger()
try:
command = serializer.decode(message)
except Exception as err:
err.skip_tb = 1
            logger.exception(err)
            return  # cannot proceed without a decoded command
self._pongtime = time.time()
if self._session is None:
if command[0] == 'HI_FLEXX':
session_id = command[1]
try:
self._session = manager.connect_client(self, self.app_name,
session_id,
cookies=self.cookies)
except Exception as err:
self.close(1003, "Could not launch app: %r" % err)
raise
else:
try:
self._session._receive_command(command)
except Exception as err:
err.skip_tb = 1
logger.exception(err)
def on_close(self):
""" Called when the connection is closed.
"""
self.close_code = code = self.close_code or 0
reason = self.close_reason or self.known_reasons.get(code, '')
logger.debug('Websocket closed: %s (%i)' % (reason, code))
self._mps_counter.stop()
if self._session is not None:
manager.disconnect_client(self._session)
self._session = None # Allow cleaning up
@gen.coroutine
def pinger1(self):
""" Check for timeouts. This helps remove lingering false connections.
        This uses the websocket's native ping-pong mechanism. On the
browser side, pongs work even if JS is busy. On the Python side
we perform a check whether we were really waiting or whether Python
was too busy to detect the pong.
"""
self._pongtime = time.time()
self._pingtime = pingtime = 0
while self.close_code is None:
dt = config.ws_timeout
# Ping, but don't spam
if pingtime <= self._pongtime:
self.ping(b'x')
pingtime = self._pingtime = time.time()
iters_since_ping = 0
yield gen.sleep(dt / 5)
# Check pong status
iters_since_ping += 1
if iters_since_ping < 5:
pass # we might have missed the pong
elif time.time() - self._pongtime > dt:
# Delay is so big that connection probably dropped.
# Note that a browser sends a pong even if JS is busy
logger.warning('Closing connection due to lack of pong')
                self.close(1000, 'Connection timed out (no pong).')
return
def on_pong(self, data):
""" Implement the ws's on_pong() method. Called when our ping
is returned by the browser.
"""
self._pongtime = time.time()
# --- methods
def write_command(self, cmd):
assert isinstance(cmd, tuple) and len(cmd) >= 1
bb = serializer.encode(cmd)
try:
self.write_message(bb, binary=True)
except WebSocketClosedError:
self.close(1000, 'closed by client')
def close(self, *args):
try:
WebSocketHandler.close(self, *args)
except TypeError:
WebSocketHandler.close(self) # older Tornado
def close_this(self):
""" Call this to close the websocket
"""
self.close(1000, 'closed by server')
def check_origin(self, origin):
""" Handle cross-domain access; override default same origin policy.
"""
# http://www.tornadoweb.org/en/stable/_modules/tornado/websocket.html
#WebSocketHandler.check_origin
serving_host = self.request.headers.get("Host")
serving_hostname, _, serving_port = serving_host.partition(':')
connecting_host = urlparse(origin).netloc
connecting_hostname, _, connecting_port = connecting_host.partition(':')
serving_port = serving_port or '80'
connecting_port = connecting_port or '80'
if serving_hostname == 'localhost':
return True # Safe
elif serving_host == connecting_host:
return True # Passed most strict test, hooray!
elif serving_hostname == '0.0.0.0' and serving_port == connecting_port:
            return True  # host on all addresses; best we can do is check port
elif connecting_host in config.host_whitelist:
return True
else:
logger.warning('Connection refused from %s' % origin)
return False
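# Illustrative examples (not part of the original module) of how the policy
# in WSHandler.check_origin() above resolves, assuming config.host_whitelist
# is empty:
#
#   Host header       Origin                   allowed?
#   localhost:8080    http://evil.example      True   (serving on localhost)
#   myhost:8080       http://myhost:8080       True   (same host and port)
#   0.0.0.0:8080      http://myhost:8080       True   (any address, same port)
#   myhost:8080       http://other:8080        False  (refused and logged)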
|
from datetime import timedelta
import logging
from aioymaps import YandexMapsRequester
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, DEVICE_CLASS_TIMESTAMP
from homeassistant.helpers.aiohttp_client import async_create_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
STOP_NAME = "stop_name"
USER_AGENT = "Home Assistant"
ATTRIBUTION = "Data provided by maps.yandex.ru"
CONF_STOP_ID = "stop_id"
CONF_ROUTE = "routes"
DEFAULT_NAME = "Yandex Transport"
ICON = "mdi:bus"
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ROUTE, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Yandex transport sensor."""
stop_id = config[CONF_STOP_ID]
name = config[CONF_NAME]
routes = config[CONF_ROUTE]
client_session = async_create_clientsession(hass, requote_redirect_url=False)
data = YandexMapsRequester(user_agent=USER_AGENT, client_session=client_session)
async_add_entities([DiscoverYandexTransport(data, stop_id, routes, name)], True)
class DiscoverYandexTransport(Entity):
"""Implementation of yandex_transport sensor."""
def __init__(self, requester: YandexMapsRequester, stop_id, routes, name):
"""Initialize sensor."""
self.requester = requester
self._stop_id = stop_id
        self._routes = routes
self._state = None
self._name = name
self._attrs = None
async def async_update(self, *, tries=0):
"""Get the latest data from maps.yandex.ru and update the states."""
attrs = {}
closer_time = None
yandex_reply = await self.requester.get_stop_info(self._stop_id)
try:
data = yandex_reply["data"]
except KeyError as key_error:
_LOGGER.warning(
"Exception KeyError was captured, missing key is %s. Yandex returned: %s",
key_error,
yandex_reply,
)
if tries > 0:
return
await self.requester.set_new_session()
await self.async_update(tries=tries + 1)
return
stop_name = data["name"]
transport_list = data["transports"]
for transport in transport_list:
route = transport["name"]
for thread in transport["threads"]:
if self._routes and route not in self._routes:
# skip unnecessary route info
continue
if "Events" not in thread["BriefSchedule"]:
continue
for event in thread["BriefSchedule"]["Events"]:
if "Estimated" not in event:
continue
posix_time_next = int(event["Estimated"]["value"])
if closer_time is None or closer_time > posix_time_next:
closer_time = posix_time_next
if route not in attrs:
attrs[route] = []
attrs[route].append(event["Estimated"]["text"])
attrs[STOP_NAME] = stop_name
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
if closer_time is None:
self._state = None
else:
self._state = dt_util.utc_from_timestamp(closer_time).isoformat(
timespec="seconds"
)
self._attrs = attrs
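    # Illustrative sketch (not part of the original integration): the minimal
    # reply shape that async_update() above navigates; all values here are
    # purely hypothetical:
    #
    #     {"data": {"name": "Some stop",
    #               "transports": [{
    #                   "name": "34",
    #                   "threads": [{
    #                       "BriefSchedule": {"Events": [{
    #                           "Estimated": {"value": "1600000000",
    #                                         "text": "12:34"}}]}}]}]}}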
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
|
from homeassistant.components import ios
from homeassistant.const import PERCENTAGE
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
SENSOR_TYPES = {
"level": ["Battery Level", PERCENTAGE],
"state": ["Battery State", None],
}
DEFAULT_ICON_LEVEL = "mdi:battery"
DEFAULT_ICON_STATE = "mdi:power-plug"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the iOS sensor."""
    # Keep this stub in case someone accidentally adds "platform: ios" to the config
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up iOS from a config entry."""
dev = []
for device_name, device in ios.devices(hass).items():
for sensor_type in ("level", "state"):
dev.append(IOSSensor(sensor_type, device_name, device))
async_add_entities(dev, True)
class IOSSensor(Entity):
"""Representation of an iOS sensor."""
def __init__(self, sensor_type, device_name, device):
"""Initialize the sensor."""
self._device_name = device_name
self._name = f"{device_name} {SENSOR_TYPES[sensor_type][0]}"
self._device = device
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def device_info(self):
"""Return information about the device."""
return {
"identifiers": {
(
ios.DOMAIN,
self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_PERMANENT_ID],
)
},
"name": self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_NAME],
"manufacturer": "Apple",
"model": self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_TYPE],
"sw_version": self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_SYSTEM_VERSION],
}
@property
def name(self):
"""Return the name of the iOS sensor."""
device_name = self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_NAME]
return f"{device_name} {SENSOR_TYPES[self.type][0]}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unique_id(self):
"""Return the unique ID of this sensor."""
device_id = self._device[ios.ATTR_DEVICE_ID]
return f"{self.type}_{device_id}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the device state attributes."""
device = self._device[ios.ATTR_DEVICE]
device_battery = self._device[ios.ATTR_BATTERY]
return {
"Battery State": device_battery[ios.ATTR_BATTERY_STATE],
"Battery Level": device_battery[ios.ATTR_BATTERY_LEVEL],
"Device Type": device[ios.ATTR_DEVICE_TYPE],
"Device Name": device[ios.ATTR_DEVICE_NAME],
"Device Version": device[ios.ATTR_DEVICE_SYSTEM_VERSION],
}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
device_battery = self._device[ios.ATTR_BATTERY]
battery_state = device_battery[ios.ATTR_BATTERY_STATE]
battery_level = device_battery[ios.ATTR_BATTERY_LEVEL]
charging = True
icon_state = DEFAULT_ICON_STATE
if battery_state in (
ios.ATTR_BATTERY_STATE_FULL,
ios.ATTR_BATTERY_STATE_UNPLUGGED,
):
charging = False
icon_state = f"{DEFAULT_ICON_STATE}-off"
elif battery_state == ios.ATTR_BATTERY_STATE_UNKNOWN:
battery_level = None
charging = False
icon_state = f"{DEFAULT_ICON_LEVEL}-unknown"
if self.type == "state":
return icon_state
return icon_for_battery_level(battery_level=battery_level, charging=charging)
async def async_update(self):
"""Get the latest state of the sensor."""
self._device = ios.devices(self.hass).get(self._device_name)
self._state = self._device[ios.ATTR_BATTERY][self.type]
|
from mock import MagicMock, ANY
from arctic._util import are_equals, enable_sharding
from arctic.arctic import Arctic
def test_are_equals_not_df():
assert(are_equals(1.0, 2.0) is False)
assert(are_equals([1, 2, 3], [1, 2, 3]))
assert(are_equals("Hello", "World") is False)
def test_enable_sharding_hashed():
m = MagicMock(Arctic, autospec=True)
enable_sharding(m, "test", hashed=True)
m._conn.admin.command.assert_called_with('shardCollection', ANY, key={'symbol': 'hashed'})
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shlex
from absl import flags
from perfkitbenchmarker import vm_util
import six
ALI_PREFIX = ['aliyun']
ROOT = 'root'
FLAGS = flags.FLAGS
PASSWD_LEN = 20
REGION_HZ = 'cn-hangzhou'
ADD_USER_TEMPLATE = """#!/bin/bash
echo "{user_name} ALL = NOPASSWD: ALL" >> /etc/sudoers
useradd {user_name} --home /home/{user_name} --shell /bin/bash -m
mkdir /home/{user_name}/.ssh
echo "{public_key}" >> /home/{user_name}/.ssh/authorized_keys
chown -R {user_name}:{user_name} /home/{user_name}/.ssh
chmod 700 /home/{user_name}/.ssh
chmod 600 /home/{user_name}/.ssh/authorized_keys
"""
def GetEncodedCmd(cmd):
cmd_line = ' '.join(cmd)
cmd_args = shlex.split(cmd_line)
return cmd_args
def GetRegionByZone(zone):
if zone.find(REGION_HZ) != -1:
return REGION_HZ
s = zone.split('-')
if s[0] == 'cn':
s.pop()
return '-'.join(s)
else:
return zone[:-1]
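# Illustrative examples (not part of the original module) of the zone ->
# region mapping implemented in GetRegionByZone() above:
#   GetRegionByZone('cn-hangzhou-b')  -> 'cn-hangzhou'  (REGION_HZ match)
#   GetRegionByZone('cn-beijing-a')   -> 'cn-beijing'   (drop last 'cn-*' part)
#   GetRegionByZone('us-west-1a')     -> 'us-west-1'    (strip trailing letter)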
def AddTags(resource_id, resource_type, region, **kwargs):
"""Adds tags to an AliCloud resource created by PerfKitBenchmarker.
Args:
resource_id: An extant AliCloud resource to operate on.
resource_type: The type of the resource.
region: The AliCloud region 'resource_id' was created in.
**kwargs: dict. Key-value pairs to set on the instance.
"""
if not kwargs:
return
tag_cmd = ALI_PREFIX + [
'ecs', 'AddTags',
'--RegionId', region,
'--ResourceId', resource_id,
'--ResourceType', resource_type
]
for index, (key, value) in enumerate(six.iteritems(kwargs)):
tag_cmd.extend([
'--Tag.{0}.Key'.format(index + 1), str(key),
'--Tag.{0}.Value'.format(index + 1), str(value)
])
vm_util.IssueRetryableCommand(tag_cmd)
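# Illustrative example (not part of the original module): a call such as
# AddTags('i-123', 'instance', 'cn-beijing', owner='alice') issues a command
# equivalent to:
#   aliyun ecs AddTags --RegionId cn-beijing --ResourceId i-123 \
#       --ResourceType instance --Tag.1.Key owner --Tag.1.Value alice
# (the resource id and tag value above are hypothetical).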
def AddDefaultTags(resource_id, resource_type, region):
"""Adds tags to an AliCloud resource created by PerfKitBenchmarker.
  By default, resources are tagged with "owner" and "perfkitbenchmarker-run"
  key-value pairs.
Args:
resource_id: An extant AliCloud resource to operate on.
resource_type: The type of the 'resource_id'
region: The AliCloud region 'resource_id' was created in.
"""
tags = {'owner': FLAGS.owner, 'perfkitbenchmarker-run': FLAGS.run_uri}
AddTags(resource_id, resource_type, region, **tags)
def GetDrivePathPrefix():
if FLAGS.ali_io_optimized is None:
return '/dev/xvd'
elif FLAGS.ali_io_optimized:
return '/dev/vd'
|
import unittest
import pandas as pd
import numpy as np
from pgmpy.models import BayesianModel
from pgmpy.estimators import BayesianEstimator
from pgmpy.factors.discrete import TabularCPD
class TestBayesianEstimator(unittest.TestCase):
def setUp(self):
self.m1 = BayesianModel([("A", "C"), ("B", "C")])
self.d1 = pd.DataFrame(data={"A": [0, 0, 1], "B": [0, 1, 0], "C": [1, 1, 0]})
self.d2 = pd.DataFrame(
data={
"A": [0, 0, 1, 0, 2, 0, 2, 1, 0, 2],
"B": ["X", "Y", "X", "Y", "X", "Y", "X", "Y", "X", "Y"],
"C": [1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
}
)
self.est1 = BayesianEstimator(self.m1, self.d1)
self.est2 = BayesianEstimator(
self.m1, self.d1, state_names={"A": [0, 1, 2], "B": [0, 1], "C": [0, 1, 23]}
)
self.est3 = BayesianEstimator(self.m1, self.d2)
def test_estimate_cpd_dirichlet(self):
cpd_A = self.est1.estimate_cpd(
"A", prior_type="dirichlet", pseudo_counts=[[0], [1]]
)
cpd_A_exp = TabularCPD(
variable="A",
variable_card=2,
values=[[0.5], [0.5]],
state_names={"A": [0, 1]},
)
self.assertEqual(cpd_A, cpd_A_exp)
cpd_B = self.est1.estimate_cpd(
"B", prior_type="dirichlet", pseudo_counts=[[9], [3]]
)
cpd_B_exp = TabularCPD(
"B", 2, [[11.0 / 15], [4.0 / 15]], state_names={"B": [0, 1]}
)
self.assertEqual(cpd_B, cpd_B_exp)
cpd_C = self.est1.estimate_cpd(
"C",
prior_type="dirichlet",
pseudo_counts=[[0.4, 0.4, 0.4, 0.4], [0.6, 0.6, 0.6, 0.6]],
)
cpd_C_exp = TabularCPD(
"C",
2,
[[0.2, 0.2, 0.7, 0.4], [0.8, 0.8, 0.3, 0.6]],
evidence=["A", "B"],
evidence_card=[2, 2],
state_names={"A": [0, 1], "B": [0, 1], "C": [0, 1]},
)
self.assertEqual(cpd_C, cpd_C_exp)
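    # Worked check (illustrative note, not part of the original test suite)
    # for the dirichlet estimate of B above: d1 has B = [0, 1, 0], i.e.
    # observed counts (2, 1); adding the pseudo counts [[9], [3]] gives
    # (11, 4) out of 15 total observations, hence the expected values
    # 11/15 and 4/15 in cpd_B_exp.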
def test_estimate_cpd_improper_prior(self):
cpd_C = self.est1.estimate_cpd(
"C", prior_type="dirichlet", pseudo_counts=[[0, 0, 0, 0], [0, 0, 0, 0]]
)
cpd_C_correct = TabularCPD(
"C",
2,
[[0.0, 0.0, 1.0, np.NaN], [1.0, 1.0, 0.0, np.NaN]],
evidence=["A", "B"],
evidence_card=[2, 2],
state_names={"A": [0, 1], "B": [0, 1], "C": [0, 1]},
)
# manual comparison because np.NaN != np.NaN
self.assertTrue(
(
(cpd_C.values == cpd_C_correct.values)
| np.isnan(cpd_C.values) & np.isnan(cpd_C_correct.values)
).all()
)
def test_estimate_cpd_shortcuts(self):
cpd_C1 = self.est2.estimate_cpd(
"C", prior_type="BDeu", equivalent_sample_size=9
)
cpd_C1_correct = TabularCPD(
"C",
3,
[
[0.2, 0.2, 0.6, 1.0 / 3, 1.0 / 3, 1.0 / 3],
[0.6, 0.6, 0.2, 1.0 / 3, 1.0 / 3, 1.0 / 3],
[0.2, 0.2, 0.2, 1.0 / 3, 1.0 / 3, 1.0 / 3],
],
evidence=["A", "B"],
evidence_card=[3, 2],
state_names={"A": [0, 1, 2], "B": [0, 1], "C": [0, 1, 23]},
)
self.assertEqual(cpd_C1, cpd_C1_correct)
cpd_C2 = self.est3.estimate_cpd("C", prior_type="K2")
cpd_C2_correct = TabularCPD(
"C",
2,
[
[0.5, 0.6, 1.0 / 3, 2.0 / 3, 0.75, 2.0 / 3],
[0.5, 0.4, 2.0 / 3, 1.0 / 3, 0.25, 1.0 / 3],
],
evidence=["A", "B"],
evidence_card=[3, 2],
state_names={"A": [0, 1, 2], "B": ["X", "Y"], "C": [0, 1]},
)
self.assertEqual(cpd_C2, cpd_C2_correct)
def test_get_parameters(self):
cpds = set(
[
self.est3.estimate_cpd("A"),
self.est3.estimate_cpd("B"),
self.est3.estimate_cpd("C"),
]
)
self.assertSetEqual(set(self.est3.get_parameters()), cpds)
def test_get_parameters2(self):
pseudo_counts = {
"A": [[1], [2], [3]],
"B": [[4], [5]],
"C": [[6, 6, 6, 6, 6, 6], [7, 7, 7, 7, 7, 7]],
}
cpds = set(
[
self.est3.estimate_cpd(
"A", prior_type="dirichlet", pseudo_counts=pseudo_counts["A"]
),
self.est3.estimate_cpd(
"B", prior_type="dirichlet", pseudo_counts=pseudo_counts["B"]
),
self.est3.estimate_cpd(
"C", prior_type="dirichlet", pseudo_counts=pseudo_counts["C"]
),
]
)
self.assertSetEqual(
set(
self.est3.get_parameters(
prior_type="dirichlet", pseudo_counts=pseudo_counts
)
),
cpds,
)
def test_get_parameters3(self):
pseudo_counts = 0.1
cpds = set(
[
self.est3.estimate_cpd(
"A", prior_type="dirichlet", pseudo_counts=pseudo_counts
),
self.est3.estimate_cpd(
"B", prior_type="dirichlet", pseudo_counts=pseudo_counts
),
self.est3.estimate_cpd(
"C", prior_type="dirichlet", pseudo_counts=pseudo_counts
),
]
)
self.assertSetEqual(
set(
self.est3.get_parameters(
prior_type="dirichlet", pseudo_counts=pseudo_counts
)
),
cpds,
)
def tearDown(self):
del self.m1
del self.d1
del self.d2
del self.est1
del self.est2
|
import re
from simplipy.errors import SimplipyError
from simplipy.system import SystemStates
from simplipy.websocket import (
EVENT_ALARM_CANCELED,
EVENT_ALARM_TRIGGERED,
EVENT_ARMED_AWAY,
EVENT_ARMED_AWAY_BY_KEYPAD,
EVENT_ARMED_AWAY_BY_REMOTE,
EVENT_ARMED_HOME,
EVENT_AWAY_EXIT_DELAY_BY_KEYPAD,
EVENT_AWAY_EXIT_DELAY_BY_REMOTE,
EVENT_DISARMED_BY_MASTER_PIN,
EVENT_DISARMED_BY_REMOTE,
EVENT_HOME_EXIT_DELAY,
)
from homeassistant.components.alarm_control_panel import (
FORMAT_NUMBER,
FORMAT_TEXT,
AlarmControlPanelEntity,
)
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
CONF_CODE,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import callback
from . import SimpliSafeEntity
from .const import (
ATTR_ALARM_DURATION,
ATTR_ALARM_VOLUME,
ATTR_CHIME_VOLUME,
ATTR_ENTRY_DELAY_AWAY,
ATTR_ENTRY_DELAY_HOME,
ATTR_EXIT_DELAY_AWAY,
ATTR_EXIT_DELAY_HOME,
ATTR_LIGHT,
ATTR_VOICE_PROMPT_VOLUME,
DATA_CLIENT,
DOMAIN,
LOGGER,
VOLUME_STRING_MAP,
)
ATTR_BATTERY_BACKUP_POWER_LEVEL = "battery_backup_power_level"
ATTR_GSM_STRENGTH = "gsm_strength"
ATTR_PIN_NAME = "pin_name"
ATTR_RF_JAMMING = "rf_jamming"
ATTR_WALL_POWER_LEVEL = "wall_power_level"
ATTR_WIFI_STRENGTH = "wifi_strength"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up a SimpliSafe alarm control panel based on a config entry."""
simplisafe = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
async_add_entities(
[SimpliSafeAlarm(simplisafe, system) for system in simplisafe.systems.values()],
True,
)
class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity):
"""Representation of a SimpliSafe alarm."""
def __init__(self, simplisafe, system):
"""Initialize the SimpliSafe alarm."""
super().__init__(simplisafe, system, "Alarm Control Panel")
self._changed_by = None
self._last_event = None
if system.alarm_going_off:
self._state = STATE_ALARM_TRIGGERED
elif system.state == SystemStates.away:
self._state = STATE_ALARM_ARMED_AWAY
elif system.state in (
SystemStates.away_count,
SystemStates.exit_delay,
SystemStates.home_count,
):
self._state = STATE_ALARM_ARMING
elif system.state == SystemStates.home:
self._state = STATE_ALARM_ARMED_HOME
elif system.state == SystemStates.off:
self._state = STATE_ALARM_DISARMED
else:
self._state = None
for event_type in (
EVENT_ALARM_CANCELED,
EVENT_ALARM_TRIGGERED,
EVENT_ARMED_AWAY,
EVENT_ARMED_AWAY_BY_KEYPAD,
EVENT_ARMED_AWAY_BY_REMOTE,
EVENT_ARMED_HOME,
EVENT_AWAY_EXIT_DELAY_BY_KEYPAD,
EVENT_AWAY_EXIT_DELAY_BY_REMOTE,
EVENT_DISARMED_BY_MASTER_PIN,
EVENT_DISARMED_BY_REMOTE,
EVENT_HOME_EXIT_DELAY,
):
self.websocket_events_to_listen_for.append(event_type)
@property
def changed_by(self):
"""Return info about who changed the alarm last."""
return self._changed_by
@property
def code_format(self):
"""Return one or more digits/characters."""
if not self._simplisafe.config_entry.options.get(CONF_CODE):
return None
if isinstance(
self._simplisafe.config_entry.options[CONF_CODE], str
) and re.search("^\\d+$", self._simplisafe.config_entry.options[CONF_CODE]):
return FORMAT_NUMBER
return FORMAT_TEXT
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY
@callback
def _is_code_valid(self, code, state):
"""Validate that a code matches the required one."""
if not self._simplisafe.config_entry.options.get(CONF_CODE):
return True
if not code or code != self._simplisafe.config_entry.options[CONF_CODE]:
LOGGER.warning(
"Incorrect alarm code entered (target state: %s): %s", state, code
)
return False
return True
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
if not self._is_code_valid(code, STATE_ALARM_DISARMED):
return
try:
await self._system.set_off()
except SimplipyError as err:
LOGGER.error('Error while disarming "%s": %s', self._system.name, err)
return
self._state = STATE_ALARM_DISARMED
async def async_alarm_arm_home(self, code=None):
"""Send arm home command."""
if not self._is_code_valid(code, STATE_ALARM_ARMED_HOME):
return
try:
await self._system.set_home()
except SimplipyError as err:
LOGGER.error('Error while arming "%s" (home): %s', self._system.name, err)
return
self._state = STATE_ALARM_ARMED_HOME
async def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
if not self._is_code_valid(code, STATE_ALARM_ARMED_AWAY):
return
try:
await self._system.set_away()
except SimplipyError as err:
LOGGER.error('Error while arming "%s" (away): %s', self._system.name, err)
return
self._state = STATE_ALARM_ARMING
@callback
def async_update_from_rest_api(self):
"""Update the entity with the provided REST API data."""
if self._system.version == 3:
self._attrs.update(
{
ATTR_ALARM_DURATION: self._system.alarm_duration,
ATTR_ALARM_VOLUME: VOLUME_STRING_MAP[self._system.alarm_volume],
ATTR_BATTERY_BACKUP_POWER_LEVEL: self._system.battery_backup_power_level,
ATTR_CHIME_VOLUME: VOLUME_STRING_MAP[self._system.chime_volume],
ATTR_ENTRY_DELAY_AWAY: self._system.entry_delay_away,
ATTR_ENTRY_DELAY_HOME: self._system.entry_delay_home,
ATTR_EXIT_DELAY_AWAY: self._system.exit_delay_away,
ATTR_EXIT_DELAY_HOME: self._system.exit_delay_home,
ATTR_GSM_STRENGTH: self._system.gsm_strength,
ATTR_LIGHT: self._system.light,
ATTR_RF_JAMMING: self._system.rf_jamming,
ATTR_VOICE_PROMPT_VOLUME: VOLUME_STRING_MAP[
self._system.voice_prompt_volume
],
ATTR_WALL_POWER_LEVEL: self._system.wall_power_level,
ATTR_WIFI_STRENGTH: self._system.wifi_strength,
}
)
        # Although system state updates are designed to come via the websocket, the
# SimpliSafe cloud can sporadically fail to send those updates as expected; so,
# just in case, we synchronize the state via the REST API, too:
if self._system.state == SystemStates.alarm:
self._state = STATE_ALARM_TRIGGERED
elif self._system.state == SystemStates.away:
self._state = STATE_ALARM_ARMED_AWAY
elif self._system.state in (SystemStates.away_count, SystemStates.exit_delay):
self._state = STATE_ALARM_ARMING
elif self._system.state == SystemStates.home:
self._state = STATE_ALARM_ARMED_HOME
elif self._system.state == SystemStates.off:
self._state = STATE_ALARM_DISARMED
else:
self._state = None
@callback
def async_update_from_websocket_event(self, event):
"""Update the entity with the provided websocket API event data."""
if event.event_type in (
EVENT_ALARM_CANCELED,
EVENT_DISARMED_BY_MASTER_PIN,
EVENT_DISARMED_BY_REMOTE,
):
self._state = STATE_ALARM_DISARMED
elif event.event_type == EVENT_ALARM_TRIGGERED:
self._state = STATE_ALARM_TRIGGERED
elif event.event_type in (
EVENT_ARMED_AWAY,
EVENT_ARMED_AWAY_BY_KEYPAD,
EVENT_ARMED_AWAY_BY_REMOTE,
):
self._state = STATE_ALARM_ARMED_AWAY
elif event.event_type == EVENT_ARMED_HOME:
self._state = STATE_ALARM_ARMED_HOME
elif event.event_type in (
EVENT_AWAY_EXIT_DELAY_BY_KEYPAD,
EVENT_AWAY_EXIT_DELAY_BY_REMOTE,
EVENT_HOME_EXIT_DELAY,
):
self._state = STATE_ALARM_ARMING
else:
self._state = None
self._changed_by = event.changed_by
|
import datetime
from ...common.interfaces import AbstractInfoWidget
class BfgInfoWidget(AbstractInfoWidget):
''' Console widget '''
def __init__(self):
AbstractInfoWidget.__init__(self)
self.active_threads = 0
self.instances = 0
self.planned = 0
self.RPS = 0
self.selfload = 0
self.time_lag = 0
self.planned_rps_duration = 0
def get_index(self):
return 0
def on_aggregated_data(self, data, stat):
self.instances = stat["metrics"]["instances"]
self.RPS = data["overall"]["interval_real"]["len"]
self.selfload = 0 # TODO
self.time_lag = 0 # TODO
def render(self, screen):
res = ''
res += "Active instances: "
res += str(self.instances)
res += "\nPlanned requests: %s for %s\nActual responses: " % (
self.planned, datetime.timedelta(seconds=self.planned_rps_duration))
        if self.planned != self.RPS:
res += screen.markup.YELLOW + str(self.RPS) + screen.markup.RESET
else:
res += str(self.RPS)
res += "\n Accuracy: "
if self.selfload < 80:
res += screen.markup.RED + \
('%.2f' % self.selfload) + screen.markup.RESET
elif self.selfload < 95:
res += screen.markup.YELLOW + \
('%.2f' % self.selfload) + screen.markup.RESET
else:
res += ('%.2f' % self.selfload)
res += "%\n Time lag: "
res += str(datetime.timedelta(seconds=self.time_lag))
return res
|
from perfkitbenchmarker import data
AEROSPIKE_CLIENT = 'https://github.com/aerospike/aerospike-client-c.git'
CLIENT_DIR = 'aerospike-client-c'
CLIENT_VERSION = '4.0.4'
PATCH_FILE = 'aerospike.patch'
def _Install(vm):
"""Installs the aerospike client on the VM."""
vm.Install('build_tools')
vm.Install('lua5_1')
vm.Install('openssl')
clone_command = 'git clone %s'
vm.RemoteCommand(clone_command % AEROSPIKE_CLIENT)
build_command = ('cd %s && git checkout %s && git submodule update --init '
'&& make')
vm.RemoteCommand(build_command % (CLIENT_DIR, CLIENT_VERSION))
# Apply a patch to the client benchmark so we have access to average latency
# of requests. Switching over to YCSB should obviate this.
vm.PushDataFile(PATCH_FILE)
benchmark_dir = '%s/benchmarks/src/main' % CLIENT_DIR
vm.RemoteCommand('cp aerospike.patch %s' % benchmark_dir)
vm.RemoteCommand('cd %s && patch -p1 -f < aerospike.patch' % benchmark_dir)
vm.RemoteCommand('sed -i -e "s/lpthread/lpthread -lz/" '
'%s/benchmarks/Makefile' % CLIENT_DIR)
vm.RemoteCommand('cd %s/benchmarks && make' % CLIENT_DIR)
def AptInstall(vm):
"""Installs the aerospike client on the VM."""
vm.InstallPackages('netcat-openbsd')
_Install(vm)
def YumInstall(vm):
"""Installs the aerospike client on the VM."""
_Install(vm)
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
data.ResourcePath(PATCH_FILE)
def Uninstall(vm):
vm.RemoteCommand('sudo rm -rf aerospike-client-c')
|
import requests
import subprocess
from flask import current_app
from lemur.extensions import sentry
from requests.exceptions import ConnectionError, InvalidSchema
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from lemur.utils import mktempfile
from lemur.common.utils import parse_certificate
crl_cache = {}
def ocsp_verify(cert, cert_path, issuer_chain_path):
"""
Attempts to verify a certificate via OCSP. OCSP is a more modern version
of CRL in that it will query the OCSP URI in order to determine if the
certificate has been revoked
:param cert:
:param cert_path:
:param issuer_chain_path:
:return bool: True if certificate is valid, False otherwise
"""
command = ["openssl", "x509", "-noout", "-ocsp_uri", "-in", cert_path]
p1 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
url, err = p1.communicate()
if not url:
current_app.logger.debug(
"No OCSP URL in certificate {}".format(cert.serial_number)
)
return None
p2 = subprocess.Popen(
[
"openssl",
"ocsp",
"-issuer",
issuer_chain_path,
"-cert",
cert_path,
"-url",
url.strip(),
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
message, err = p2.communicate()
p_message = message.decode("utf-8")
if "error" in p_message or "Error" in p_message:
raise Exception("Got error when parsing OCSP url")
elif "revoked" in p_message:
current_app.logger.debug(
"OCSP reports certificate revoked: {}".format(cert.serial_number)
)
return False
elif "good" not in p_message:
raise Exception("Did not receive a valid response")
return True
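# Illustrative note (not part of the original module): the two subprocess
# calls in ocsp_verify() above are equivalent to roughly the following
# commands, with <cert>, <chain> and <url> standing in for the arguments:
#   openssl x509 -noout -ocsp_uri -in <cert>
#   openssl ocsp -issuer <chain> -cert <cert> -url <url>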
def crl_verify(cert, cert_path):
"""
Attempts to verify a certificate using CRL.
:param cert:
:param cert_path:
:return: True if certificate is valid, False otherwise
:raise Exception: If certificate does not have CRL
"""
try:
distribution_points = cert.extensions.get_extension_for_oid(
x509.OID_CRL_DISTRIBUTION_POINTS
).value
except x509.ExtensionNotFound:
current_app.logger.debug(
"No CRLDP extension in certificate {}".format(cert.serial_number)
)
return None
for p in distribution_points:
point = p.full_name[0].value
if point not in crl_cache:
current_app.logger.debug("Retrieving CRL: {}".format(point))
try:
response = requests.get(point)
if response.status_code != 200:
raise Exception("Unable to retrieve CRL: {0}".format(point))
except InvalidSchema:
# Unhandled URI scheme (like ldap://); skip this distribution point.
continue
except ConnectionError:
raise Exception("Unable to retrieve CRL: {0}".format(point))
crl_cache[point] = x509.load_der_x509_crl(
response.content, backend=default_backend()
)
else:
current_app.logger.debug("CRL point is cached {}".format(point))
for r in crl_cache[point]:
if cert.serial_number == r.serial_number:
try:
reason = r.extensions.get_extension_for_class(x509.CRLReason).value
# Handle "removeFromCRL" revoke reason as unrevoked;
# continue with the next distribution point.
# Per RFC 5280 section 6.3.3 (k):
# https://tools.ietf.org/html/rfc5280#section-6.3.3
if reason == x509.ReasonFlags.remove_from_crl:
break
except x509.ExtensionNotFound:
pass
current_app.logger.debug(
"CRL reports certificate " "revoked: {}".format(cert.serial_number)
)
return False
return True
def verify(cert_path, issuer_chain_path):
"""
Verify a certificate using OCSP and CRL
:param cert_path:
:param issuer_chain_path:
:return: True if valid, False otherwise
"""
with open(cert_path, "rt") as c:
try:
cert = parse_certificate(c.read())
except ValueError as e:
current_app.logger.error(e)
return None
    # OCSP is our main source of truth; in a lot of cases CRLs
# have been deprecated and are no longer updated
verify_result = None
try:
verify_result = ocsp_verify(cert, cert_path, issuer_chain_path)
except Exception as e:
sentry.captureException()
current_app.logger.exception(e)
if verify_result is None:
try:
verify_result = crl_verify(cert, cert_path)
except Exception as e:
sentry.captureException()
current_app.logger.exception(e)
if verify_result is None:
current_app.logger.debug("Failed to verify {}".format(cert.serial_number))
return verify_result
def verify_string(cert_string, issuer_string):
"""
    Verify a certificate given only its string value
:param cert_string:
:param issuer_string:
:return: True if valid, False otherwise
"""
with mktempfile() as cert_tmp:
with open(cert_tmp, "w") as f:
f.write(cert_string)
with mktempfile() as issuer_tmp:
with open(issuer_tmp, "w") as f:
f.write(issuer_string)
status = verify(cert_tmp, issuer_tmp)
return status
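# Illustrative sketch (not part of the original module): verify_string() is
# the convenience entry point; given PEM strings it writes both to temporary
# files and runs the OCSP-then-CRL strategy implemented in verify() above.
#
#     status = verify_string(pem_cert, pem_issuer_chain)
#     # True -> not revoked, False -> revoked, None -> could not be verified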
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import inception_utils
slim = tf.contrib.slim
trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
def inception_v1_base(inputs,
final_endpoint='Mixed_5c',
scope='InceptionV1'):
"""Defines the Inception V1 base architecture.
This architecture is defined in:
Going deeper with convolutions
Christian Szegedy, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed,
Dragomir Anguelov, Dumitru Erhan, Vincent Vanhoucke, Andrew Rabinovich.
http://arxiv.org/pdf/1409.4842v1.pdf.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
final_endpoint: specifies the endpoint to construct the network up to. It
can be one of ['Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1',
'Conv2d_2c_3x3', 'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c',
'MaxPool_4a_3x3', 'Mixed_4b', 'Mixed_4c', 'Mixed_4d', 'Mixed_4e',
'Mixed_4f', 'MaxPool_5a_2x2', 'Mixed_5b', 'Mixed_5c']
scope: Optional variable_scope.
Returns:
A dictionary from components of the network to the corresponding activation.
Raises:
ValueError: if final_endpoint is not set to one of the predefined values.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionV1', [inputs]):
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
weights_initializer=trunc_normal(0.01)):
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
stride=1, padding='SAME'):
end_point = 'Conv2d_1a_7x7'
net = slim.conv2d(inputs, 64, [7, 7], stride=2, scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'MaxPool_2a_3x3'
net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Conv2d_2b_1x1'
net = slim.conv2d(net, 64, [1, 1], scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Conv2d_2c_3x3'
net = slim.conv2d(net, 192, [3, 3], scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'MaxPool_3a_3x3'
net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 64, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 96, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 128, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 16, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 32, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 32, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 192, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 32, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 96, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'MaxPool_4a_3x3'
net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 96, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 208, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 16, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 48, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 112, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 224, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 24, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 64, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4d'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 128, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 256, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 24, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 64, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4e'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 112, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 144, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 288, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 32, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 64, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 64, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4f'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 256, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 320, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 32, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 128, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 128, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'MaxPool_5a_2x2'
net = slim.max_pool2d(net, [2, 2], stride=2, scope=end_point)
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 256, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 160, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 320, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 32, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 128, [3, 3], scope='Conv2d_0a_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 128, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, 384, [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, 192, [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, 384, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, 48, [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, 128, [3, 3], scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, 128, [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if final_endpoint == end_point: return net, end_points
raise ValueError('Unknown final endpoint %s' % final_endpoint)
def inception_v1(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.8,
prediction_fn=slim.softmax,
spatial_squeeze=True,
reuse=None,
scope='InceptionV1'):
"""Defines the Inception V1 architecture.
This architecture is defined in:
Going deeper with convolutions
Christian Szegedy, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed,
Dragomir Anguelov, Dumitru Erhan, Vincent Vanhoucke, Andrew Rabinovich.
http://arxiv.org/pdf/1409.4842v1.pdf.
The default image size used to train this network is 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether the network is being trained or not.
dropout_keep_prob: the percentage of activation values that are retained.
prediction_fn: a function to get predictions out of logits.
spatial_squeeze: if True, logits is of shape [B, C], if False logits is
of shape [B, 1, 1, C], where B is batch_size and C is number of classes.
reuse: whether or not the network and its variables should be reused. To be
able to reuse, 'scope' must be given.
scope: Optional variable_scope.
Returns:
logits: the pre-softmax activations, a tensor of size
[batch_size, num_classes]
end_points: a dictionary from components of the network to the corresponding
activation.
"""
# Final pooling and prediction
with tf.variable_scope(scope, 'InceptionV1', [inputs, num_classes],
reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_v1_base(inputs, scope=scope)
with tf.variable_scope('Logits'):
net = slim.avg_pool2d(net, [7, 7], stride=1, scope='AvgPool_0a_7x7')
net = slim.dropout(net,
dropout_keep_prob, scope='Dropout_0b')
logits = slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
normalizer_fn=None, scope='Conv2d_0c_1x1')
if spatial_squeeze:
logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze')
end_points['Logits'] = logits
end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
return logits, end_points
inception_v1.default_image_size = 224
inception_v1_arg_scope = inception_utils.inception_arg_scope
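# Illustrative usage sketch (not part of the original module): building the
# InceptionV1 graph above under its recommended arg_scope with TF1-style
# graph construction. The placeholder name and shapes are arbitrary demo
# values, not mandated by the network definition.
def _example_build_inception_v1():
    images = tf.placeholder(tf.float32, [None, 224, 224, 3], name='images')
    with slim.arg_scope(inception_v1_arg_scope()):
        logits, end_points = inception_v1(images, num_classes=1000,
                                          is_training=False)
    return logits, end_points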
|
from __future__ import absolute_import
from lark.exceptions import UnexpectedCharacters, UnexpectedInput, UnexpectedToken, ConfigurationError, assert_config
import sys, os, pickle, hashlib
from io import open
import tempfile
from warnings import warn
from .utils import STRING_TYPE, Serialize, SerializeMemoizer, FS, isascii, logger
from .load_grammar import load_grammar, FromPackageLoader
from .tree import Tree
from .common import LexerConf, ParserConf
from .lexer import Lexer, TraditionalLexer, TerminalDef, LexerThread
from .parse_tree_builder import ParseTreeBuilder
from .parser_frontends import get_frontend, _get_lexer_callbacks
from .grammar import Rule
import re
try:
import regex
except ImportError:
regex = None
###{standalone
class LarkOptions(Serialize):
"""Specifies the options for Lark
"""
OPTIONS_DOC = """
**=== General Options ===**
start
The start symbol. Either a string, or a list of strings for multiple possible starts (Default: "start")
debug
Display debug information and extra warnings. Use only when debugging (default: False)
When used with Earley, it generates a forest graph as "sppf.png", if 'dot' is installed.
transformer
Applies the transformer to every parse tree (equivalent to applying it after the parse, but faster)
propagate_positions
Propagates (line, column, end_line, end_column) attributes into all tree branches.
maybe_placeholders
When True, the ``[]`` operator returns ``None`` when not matched.
When ``False``, ``[]`` behaves like the ``?`` operator, and returns no value at all.
(default= ``False``. Recommended to set to ``True``)
cache
Cache the results of the Lark grammar analysis, for x2 to x3 faster loading. LALR only for now.
- When ``False``, does nothing (default)
- When ``True``, caches to a temporary file in the local directory
- When given a string, caches to the path pointed by the string
regex
When True, uses the ``regex`` module instead of the stdlib ``re``.
g_regex_flags
Flags that are applied to all terminals (both regex and strings)
keep_all_tokens
Prevent the tree builder from automagically removing "punctuation" tokens (default: False)
tree_class
Lark will produce trees comprised of instances of this class instead of the default ``lark.Tree``.
**=== Algorithm Options ===**
parser
Decides which parser engine to use. Accepts "earley" or "lalr". (Default: "earley").
(there is also a "cyk" option for legacy)
lexer
Decides whether or not to use a lexer stage
- "auto" (default): Choose for me based on the parser
- "standard": Use a standard lexer
- "contextual": Stronger lexer (only works with parser="lalr")
- "dynamic": Flexible and powerful (only with parser="earley")
- "dynamic_complete": Same as dynamic, but tries *every* variation of tokenizing possible.
ambiguity
Decides how to handle ambiguity in the parse. Only relevant if parser="earley"
- "resolve": The parser will automatically choose the simplest derivation
(it chooses consistently: greedy for tokens, non-greedy for rules)
- "explicit": The parser will return all derivations wrapped in "_ambig" tree nodes (i.e. a forest).
- "forest": The parser will return the root of the shared packed parse forest.
**=== Misc. / Domain Specific Options ===**
postlex
Lexer post-processing (Default: None) Only works with the standard and contextual lexers.
priority
How priorities should be evaluated - auto, none, normal, invert (Default: auto)
lexer_callbacks
Dictionary of callbacks for the lexer. May alter tokens during lexing. Use with caution.
use_bytes
Accept an input of type ``bytes`` instead of ``str`` (Python 3 only).
edit_terminals
A callback for editing the terminals before parse.
import_paths
A List of either paths or loader functions to specify from where grammars are imported
source_path
Override the source from where the grammar was loaded. Useful for relative imports and unconventional grammar loading
**=== End Options ===**
"""
if __doc__:
__doc__ += OPTIONS_DOC
# Adding a new option needs to be done in multiple places:
# - In the dictionary below. This is the primary truth of which options `Lark.__init__` accepts
# - In the docstring above. It is used both for the docstring of `LarkOptions` and `Lark`, and in readthedocs
# - In `lark-stubs/lark.pyi`:
# - As attribute to `LarkOptions`
# - As parameter to `Lark.__init__`
# - Potentially in `_LOAD_ALLOWED_OPTIONS` below this class, when the option doesn't change how the grammar is loaded
# - Potentially in `lark.tools.__init__`, if it makes sense, and it can easily be passed as a cmd argument
_defaults = {
'debug': False,
'keep_all_tokens': False,
'tree_class': None,
'cache': False,
'postlex': None,
'parser': 'earley',
'lexer': 'auto',
'transformer': None,
'start': 'start',
'priority': 'auto',
'ambiguity': 'auto',
'regex': False,
'propagate_positions': False,
'lexer_callbacks': {},
'maybe_placeholders': False,
'edit_terminals': None,
'g_regex_flags': 0,
'use_bytes': False,
'import_paths': [],
'source_path': None,
}
def __init__(self, options_dict):
o = dict(options_dict)
options = {}
for name, default in self._defaults.items():
if name in o:
value = o.pop(name)
if isinstance(default, bool) and name not in ('cache', 'use_bytes'):
value = bool(value)
else:
value = default
options[name] = value
if isinstance(options['start'], STRING_TYPE):
options['start'] = [options['start']]
self.__dict__['options'] = options
assert_config(self.parser, ('earley', 'lalr', 'cyk', None))
if self.parser == 'earley' and self.transformer:
raise ConfigurationError('Cannot specify an embedded transformer when using the Earley algorithm. '
'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. LALR)')
if o:
raise ConfigurationError("Unknown options: %s" % o.keys())
def __getattr__(self, name):
try:
return self.options[name]
except KeyError as e:
raise AttributeError(e)
def __setattr__(self, name, value):
assert_config(name, self.options.keys(), "%r isn't a valid option. Expected one of: %s")
self.options[name] = value
def serialize(self, memo):
return self.options
@classmethod
def deserialize(cls, data, memo):
return cls(data)
# Options that can be passed to the Lark parser, even when it was loaded from cache/standalone.
# These options are only used outside of `load_grammar`.
_LOAD_ALLOWED_OPTIONS = {'postlex', 'transformer', 'use_bytes', 'debug', 'g_regex_flags', 'regex', 'propagate_positions', 'tree_class'}
_VALID_PRIORITY_OPTIONS = ('auto', 'normal', 'invert', None)
_VALID_AMBIGUITY_OPTIONS = ('auto', 'resolve', 'explicit', 'forest')
class Lark(Serialize):
"""Main interface for the library.
It's mostly a thin wrapper for the many different parsers, and for the tree constructor.
Parameters:
grammar: a string or file-object containing the grammar spec (using Lark's ebnf syntax)
options: a dictionary controlling various aspects of Lark.
Example:
>>> Lark(r'''start: "foo" ''')
Lark(...)
"""
def __init__(self, grammar, **options):
self.options = LarkOptions(options)
# Set regex or re module
use_regex = self.options.regex
if use_regex:
if regex:
re_module = regex
else:
raise ImportError('`regex` module must be installed if calling `Lark(regex=True)`.')
else:
re_module = re
# Some, but not all file-like objects have a 'name' attribute
if self.options.source_path is None:
try:
self.source_path = grammar.name
except AttributeError:
self.source_path = '<string>'
else:
self.source_path = self.options.source_path
# Drain file-like objects to get their contents
try:
read = grammar.read
except AttributeError:
pass
else:
grammar = read()
assert isinstance(grammar, STRING_TYPE)
self.source_grammar = grammar
if self.options.use_bytes:
if not isascii(grammar):
raise ConfigurationError("Grammar must be ascii only, when use_bytes=True")
if sys.version_info[0] == 2 and self.options.use_bytes != 'force':
raise ConfigurationError("`use_bytes=True` may have issues on python2. "
"Use `use_bytes='force'` to use it at your own risk.")
cache_fn = None
if self.options.cache:
if self.options.parser != 'lalr':
raise ConfigurationError("cache only works with parser='lalr' for now")
if isinstance(self.options.cache, STRING_TYPE):
cache_fn = self.options.cache
else:
if self.options.cache is not True:
raise ConfigurationError("cache argument must be bool or str")
unhashable = ('transformer', 'postlex', 'lexer_callbacks', 'edit_terminals')
from . import __version__
options_str = ''.join(k+str(v) for k, v in options.items() if k not in unhashable)
s = grammar + options_str + __version__
md5 = hashlib.md5(s.encode()).hexdigest()
cache_fn = tempfile.gettempdir() + '/.lark_cache_%s.tmp' % md5
if FS.exists(cache_fn):
logger.debug('Loading grammar from cache: %s', cache_fn)
# Remove options that aren't relevant for loading from cache
for name in (set(options) - _LOAD_ALLOWED_OPTIONS):
del options[name]
with FS.open(cache_fn, 'rb') as f:
try:
self._load(f, **options)
except Exception:
raise RuntimeError("Failed to load Lark from cache: %r. Try to delete the file and run again." % cache_fn)
return
if self.options.lexer == 'auto':
if self.options.parser == 'lalr':
self.options.lexer = 'contextual'
elif self.options.parser == 'earley':
self.options.lexer = 'dynamic'
elif self.options.parser == 'cyk':
self.options.lexer = 'standard'
else:
assert False, self.options.parser
lexer = self.options.lexer
if isinstance(lexer, type):
assert issubclass(lexer, Lexer) # XXX Is this really important? Maybe just ensure interface compliance
else:
assert_config(lexer, ('standard', 'contextual', 'dynamic', 'dynamic_complete'))
if self.options.ambiguity == 'auto':
if self.options.parser == 'earley':
self.options.ambiguity = 'resolve'
else:
assert_config(self.options.parser, ('earley', 'cyk'), "%r doesn't support disambiguation. Use one of these parsers instead: %s")
if self.options.priority == 'auto':
self.options.priority = 'normal'
if self.options.priority not in _VALID_PRIORITY_OPTIONS:
raise ConfigurationError("invalid priority option: %r. Must be one of %r" % (self.options.priority, _VALID_PRIORITY_OPTIONS))
assert self.options.ambiguity not in ('resolve__antiscore_sum', ), 'resolve__antiscore_sum has been replaced with the option priority="invert"'
if self.options.ambiguity not in _VALID_AMBIGUITY_OPTIONS:
raise ConfigurationError("invalid ambiguity option: %r. Must be one of %r" % (self.options.ambiguity, _VALID_AMBIGUITY_OPTIONS))
# Parse the grammar file and compose the grammars
self.grammar = load_grammar(grammar, self.source_path, self.options.import_paths, self.options.keep_all_tokens)
if self.options.postlex is not None:
terminals_to_keep = set(self.options.postlex.always_accept)
else:
terminals_to_keep = set()
# Compile the EBNF grammar into BNF
self.terminals, self.rules, self.ignore_tokens = self.grammar.compile(self.options.start, terminals_to_keep)
if self.options.edit_terminals:
for t in self.terminals:
self.options.edit_terminals(t)
self._terminals_dict = {t.name: t for t in self.terminals}
# If the user asked to invert the priorities, negate them all here.
# This replaces the old 'resolve__antiscore_sum' option.
if self.options.priority == 'invert':
for rule in self.rules:
if rule.options.priority is not None:
rule.options.priority = -rule.options.priority
# Else, if the user asked to disable priorities, strip them from the
# rules. This allows the Earley parsers to skip an extra forest walk
# for improved performance, if you don't need them (or didn't specify any).
elif self.options.priority is None:
for rule in self.rules:
if rule.options.priority is not None:
rule.options.priority = None
# TODO Deprecate lexer_callbacks?
lexer_callbacks = (_get_lexer_callbacks(self.options.transformer, self.terminals)
if self.options.transformer
else {})
lexer_callbacks.update(self.options.lexer_callbacks)
self.lexer_conf = LexerConf(self.terminals, re_module, self.ignore_tokens, self.options.postlex, lexer_callbacks, self.options.g_regex_flags, use_bytes=self.options.use_bytes)
if self.options.parser:
self.parser = self._build_parser()
elif lexer:
self.lexer = self._build_lexer()
if cache_fn:
logger.debug('Saving grammar to cache: %s', cache_fn)
with FS.open(cache_fn, 'wb') as f:
self.save(f)
if __doc__:
__doc__ += "\n\n" + LarkOptions.OPTIONS_DOC
__serialize_fields__ = 'parser', 'rules', 'options'
def _build_lexer(self, dont_ignore=False):
lexer_conf = self.lexer_conf
if dont_ignore:
from copy import copy
lexer_conf = copy(lexer_conf)
lexer_conf.ignore = ()
return TraditionalLexer(lexer_conf)
def _prepare_callbacks(self):
self.parser_class = get_frontend(self.options.parser, self.options.lexer)
self._callbacks = None
# we don't need these callbacks if we aren't building a tree
if self.options.ambiguity != 'forest':
self._parse_tree_builder = ParseTreeBuilder(
self.rules,
self.options.tree_class or Tree,
self.options.propagate_positions,
self.options.parser != 'lalr' and self.options.ambiguity == 'explicit',
self.options.maybe_placeholders
)
self._callbacks = self._parse_tree_builder.create_callback(self.options.transformer)
def _build_parser(self):
self._prepare_callbacks()
parser_conf = ParserConf(self.rules, self._callbacks, self.options.start)
return self.parser_class(self.lexer_conf, parser_conf, options=self.options)
def save(self, f):
"""Saves the instance into the given file object
Useful for caching and multiprocessing.
"""
data, m = self.memo_serialize([TerminalDef, Rule])
pickle.dump({'data': data, 'memo': m}, f, protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, f):
"""Loads an instance from the given file object
Useful for caching and multiprocessing.
"""
inst = cls.__new__(cls)
return inst._load(f)
def _load(self, f, **kwargs):
if isinstance(f, dict):
d = f
else:
d = pickle.load(f)
memo = d['memo']
data = d['data']
assert memo
memo = SerializeMemoizer.deserialize(memo, {'Rule': Rule, 'TerminalDef': TerminalDef}, {})
options = dict(data['options'])
if (set(kwargs) - _LOAD_ALLOWED_OPTIONS) & set(LarkOptions._defaults):
raise ConfigurationError("Some options are not allowed when loading a Parser: {}"
.format(set(kwargs) - _LOAD_ALLOWED_OPTIONS))
options.update(kwargs)
self.options = LarkOptions.deserialize(options, memo)
self.rules = [Rule.deserialize(r, memo) for r in data['rules']]
self.source_path = '<deserialized>'
self._prepare_callbacks()
self.parser = self.parser_class.deserialize(
data['parser'],
memo,
self._callbacks,
self.options, # Not all, but multiple attributes are used
)
self.lexer_conf = self.parser.lexer_conf
self.terminals = self.parser.lexer_conf.terminals
self._terminals_dict = {t.name: t for t in self.terminals}
return self
@classmethod
def _load_from_dict(cls, data, memo, **kwargs):
inst = cls.__new__(cls)
return inst._load({'data': data, 'memo': memo}, **kwargs)
@classmethod
def open(cls, grammar_filename, rel_to=None, **options):
"""Create an instance of Lark with the grammar given by its filename
If ``rel_to`` is provided, the function will find the grammar filename in relation to it.
Example:
>>> Lark.open("grammar_file.lark", rel_to=__file__, parser="lalr")
Lark(...)
"""
if rel_to:
basepath = os.path.dirname(rel_to)
grammar_filename = os.path.join(basepath, grammar_filename)
with open(grammar_filename, encoding='utf8') as f:
return cls(f, **options)
@classmethod
def open_from_package(cls, package, grammar_path, search_paths=("",), **options):
"""Create an instance of Lark with the grammar loaded from within the package `package`.
This allows grammar loading from zipapps.
Imports in the grammar will use the `package` and `search_paths` provided, through `FromPackageLoader`
Example:
Lark.open_from_package(__name__, "example.lark", ("grammars",), parser=...)
"""
package = FromPackageLoader(package, search_paths)
full_path, text = package(None, grammar_path)
options.setdefault('source_path', full_path)
options.setdefault('import_paths', [])
options['import_paths'].append(package)
return cls(text, **options)
def __repr__(self):
return 'Lark(open(%r), parser=%r, lexer=%r, ...)' % (self.source_path, self.options.parser, self.options.lexer)
def lex(self, text, dont_ignore=False):
"""Only lex (and postlex) the text, without parsing it. Only relevant when lexer='standard'
When dont_ignore=True, the lexer will return all tokens, even those marked for %ignore.
"""
if not hasattr(self, 'lexer') or dont_ignore:
lexer = self._build_lexer(dont_ignore)
else:
lexer = self.lexer
lexer_thread = LexerThread(lexer, text)
stream = lexer_thread.lex(None)
if self.options.postlex:
return self.options.postlex.process(stream)
return stream
def get_terminal(self, name):
"Get information about a terminal"
return self._terminals_dict[name]
def parse(self, text, start=None, on_error=None):
"""Parse the given text, according to the options provided.
Parameters:
text (str): Text to be parsed.
start (str, optional): Required if Lark was given multiple possible start symbols (using the start option).
on_error (function, optional): if provided, will be called on UnexpectedToken error. Return true to resume parsing.
LALR only. See examples/advanced/error_puppet.py for an example of how to use on_error.
Returns:
If a transformer is supplied to ``__init__``, returns whatever is the
result of the transformation. Otherwise, returns a Tree instance.
"""
try:
return self.parser.parse(text, start=start)
except UnexpectedInput as e:
if on_error is None:
raise
while True:
if isinstance(e, UnexpectedCharacters):
s = e.puppet.lexer_state.state
p = s.line_ctr.char_pos
if not on_error(e):
raise e
if isinstance(e, UnexpectedCharacters):
# If the user didn't change the character position, then we should feed one character forward to avoid an infinite loop
if p == s.line_ctr.char_pos:
s.line_ctr.feed(s.text[p:p+1])
try:
return e.puppet.resume_parse()
except UnexpectedToken as e2:
if isinstance(e, UnexpectedToken) and e.token.type == e2.token.type == '$END' and e.puppet == e2.puppet:
# Prevent infinite loop
raise e2
e = e2
except UnexpectedCharacters as e2:
e = e2
@property
def source(self):
warn("Lark.source attribute has been renamed to Lark.source_path", DeprecationWarning)
return self.source_path
@source.setter
def source(self, value):
self.source_path = value
@property
def grammar_source(self):
warn("Lark.grammar_source attribute has been renamed to Lark.source_grammar", DeprecationWarning)
return self.source_grammar
@grammar_source.setter
def grammar_source(self, value):
self.source_grammar = value
###}
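# Illustrative usage sketch (not part of the original module): parsing a short
# input with a tiny, hypothetical grammar using the LALR parser. Guarded so it
# only runs when this module is executed directly.
if __name__ == '__main__':
    _demo_parser = Lark(r'''
        start: "hello" NAME
        NAME: /\w+/
        %ignore " "
    ''', parser='lalr')
    print(_demo_parser.parse("hello world").pretty())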
|
import aiohttp
import pytest
from homeassistant.components.hassio.handler import HassioAPIError
async def test_api_ping(hassio_handler, aioclient_mock):
"""Test setup with API ping."""
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"})
assert await hassio_handler.is_connected()
assert aioclient_mock.call_count == 1
async def test_api_ping_error(hassio_handler, aioclient_mock):
"""Test setup with API ping error."""
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "error"})
assert not (await hassio_handler.is_connected())
assert aioclient_mock.call_count == 1
async def test_api_ping_exception(hassio_handler, aioclient_mock):
"""Test setup with API ping exception."""
aioclient_mock.get("http://127.0.0.1/supervisor/ping", exc=aiohttp.ClientError())
assert not (await hassio_handler.is_connected())
assert aioclient_mock.call_count == 1
async def test_api_info(hassio_handler, aioclient_mock):
"""Test setup with API generic info."""
aioclient_mock.get(
"http://127.0.0.1/info",
json={
"result": "ok",
"data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
},
)
data = await hassio_handler.get_info()
assert aioclient_mock.call_count == 1
assert data["hassos"] is None
assert data["homeassistant"] == "0.110.0"
assert data["supervisor"] == "222"
async def test_api_info_error(hassio_handler, aioclient_mock):
"""Test setup with API generic info error."""
aioclient_mock.get(
"http://127.0.0.1/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_info()
assert aioclient_mock.call_count == 1
async def test_api_host_info(hassio_handler, aioclient_mock):
"""Test setup with API Host info."""
aioclient_mock.get(
"http://127.0.0.1/host/info",
json={
"result": "ok",
"data": {
"chassis": "vm",
"operating_system": "Debian GNU/Linux 10 (buster)",
"kernel": "4.19.0-6-amd64",
},
},
)
data = await hassio_handler.get_host_info()
assert aioclient_mock.call_count == 1
assert data["chassis"] == "vm"
assert data["kernel"] == "4.19.0-6-amd64"
assert data["operating_system"] == "Debian GNU/Linux 10 (buster)"
async def test_api_host_info_error(hassio_handler, aioclient_mock):
"""Test setup with API Host info error."""
aioclient_mock.get(
"http://127.0.0.1/host/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_host_info()
assert aioclient_mock.call_count == 1
async def test_api_core_info(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant Core info."""
aioclient_mock.get(
"http://127.0.0.1/core/info",
json={"result": "ok", "data": {"version_latest": "1.0.0"}},
)
data = await hassio_handler.get_core_info()
assert aioclient_mock.call_count == 1
assert data["version_latest"] == "1.0.0"
async def test_api_core_info_error(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant Core info error."""
aioclient_mock.get(
"http://127.0.0.1/core/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_core_info()
assert aioclient_mock.call_count == 1
async def test_api_homeassistant_stop(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant stop."""
aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})
assert await hassio_handler.stop_homeassistant()
assert aioclient_mock.call_count == 1
async def test_api_homeassistant_restart(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant restart."""
aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})
assert await hassio_handler.restart_homeassistant()
assert aioclient_mock.call_count == 1
async def test_api_addon_info(hassio_handler, aioclient_mock):
"""Test setup with API Add-on info."""
aioclient_mock.get(
"http://127.0.0.1/addons/test/info",
json={"result": "ok", "data": {"name": "bla"}},
)
data = await hassio_handler.get_addon_info("test")
assert data["name"] == "bla"
assert aioclient_mock.call_count == 1
async def test_api_discovery_message(hassio_handler, aioclient_mock):
"""Test setup with API discovery message."""
aioclient_mock.get(
"http://127.0.0.1/discovery/test",
json={"result": "ok", "data": {"service": "mqtt"}},
)
data = await hassio_handler.get_discovery_message("test")
assert data["service"] == "mqtt"
assert aioclient_mock.call_count == 1
async def test_api_retrieve_discovery(hassio_handler, aioclient_mock):
"""Test setup with API discovery message."""
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={"result": "ok", "data": {"discovery": [{"service": "mqtt"}]}},
)
data = await hassio_handler.retrieve_discovery_messages()
assert data["discovery"][-1]["service"] == "mqtt"
assert aioclient_mock.call_count == 1
async def test_api_ingress_panels(hassio_handler, aioclient_mock):
"""Test setup with API Ingress panels."""
aioclient_mock.get(
"http://127.0.0.1/ingress/panels",
json={
"result": "ok",
"data": {
"panels": {
"slug": {
"enable": True,
"title": "Test",
"icon": "mdi:test",
"admin": False,
}
}
},
},
)
data = await hassio_handler.get_ingress_panels()
assert aioclient_mock.call_count == 1
assert data["panels"]
assert "slug" in data["panels"]
|
import logging
from numato_gpio import NumatoGpioError
from homeassistant.const import (
CONF_DEVICES,
CONF_ID,
CONF_SWITCHES,
DEVICE_DEFAULT_NAME,
)
from homeassistant.helpers.entity import ToggleEntity
from . import CONF_INVERT_LOGIC, CONF_PORTS, DATA_API, DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the configured Numato USB GPIO switch ports."""
if discovery_info is None:
return
api = hass.data[DOMAIN][DATA_API]
switches = []
devices = hass.data[DOMAIN][CONF_DEVICES]
for device in [d for d in devices if CONF_SWITCHES in d]:
device_id = device[CONF_ID]
platform = device[CONF_SWITCHES]
invert_logic = platform[CONF_INVERT_LOGIC]
ports = platform[CONF_PORTS]
for port, port_name in ports.items():
try:
api.setup_output(device_id, port)
api.write_output(device_id, port, 1 if invert_logic else 0)
except NumatoGpioError as err:
_LOGGER.error(
"Failed to initialize switch '%s' on Numato device %s port %s: %s",
port_name,
device_id,
port,
err,
)
continue
switches.append(
NumatoGpioSwitch(
port_name,
device_id,
port,
invert_logic,
api,
)
)
add_entities(switches, True)
class NumatoGpioSwitch(ToggleEntity):
"""Representation of a Numato USB GPIO switch port."""
def __init__(self, name, device_id, port, invert_logic, api):
"""Initialize the port."""
self._name = name or DEVICE_DEFAULT_NAME
self._device_id = device_id
self._port = port
self._invert_logic = invert_logic
self._state = False
self._api = api
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if port is turned on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the port on."""
try:
self._api.write_output(
self._device_id, self._port, 0 if self._invert_logic else 1
)
self._state = True
self.schedule_update_ha_state()
except NumatoGpioError as err:
_LOGGER.error(
"Failed to turn on Numato device %s port %s: %s",
self._device_id,
self._port,
err,
)
def turn_off(self, **kwargs):
"""Turn the port off."""
try:
self._api.write_output(
self._device_id, self._port, 1 if self._invert_logic else 0
)
self._state = False
self.schedule_update_ha_state()
except NumatoGpioError as err:
_LOGGER.error(
"Failed to turn off Numato device %s port %s: %s",
self._device_id,
self._port,
err,
)
|
from scattertext.graphs.GraphStructure import GraphStructure
class TimeStructure(GraphStructure):
def __init__(self,
scatterplot_structure,
graph_renderer,
scatterplot_width=500,
scatterplot_height=700,
d3_url_struct=None,
protocol='http',
template_file_name='time_plot.html'):
GraphStructure.__init__(self,
scatterplot_structure,
graph_renderer,
scatterplot_width,
scatterplot_height,
d3_url_struct,
protocol, template_file_name)
def _replace_html_template(self, autocomplete_css, html_template, javascript_to_insert):
html_template = html_template.replace(
'<!-- EXTRA LIBS -->',
"<script src='../scattertext/scattertext/data/viz/scripts/timelines-chart.js'></script>\n<!--D3URL-->"
)
return GraphStructure._replace_html_template(self, autocomplete_css, html_template, javascript_to_insert)
|
from functools import partial
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_SENSOR_STATE,
ATTR_SENSOR_TYPE_BINARY_SENSOR as ENTITY_TYPE,
ATTR_SENSOR_UNIQUE_ID,
DATA_DEVICES,
DOMAIN,
)
from .entity import MobileAppEntity, sensor_id
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up mobile app binary sensor from a config entry."""
entities = []
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
for config in hass.data[DOMAIN][ENTITY_TYPE].values():
if config[CONF_WEBHOOK_ID] != webhook_id:
continue
device = hass.data[DOMAIN][DATA_DEVICES][webhook_id]
entities.append(MobileAppBinarySensor(config, device, config_entry))
async_add_entities(entities)
@callback
def handle_sensor_registration(webhook_id, data):
if data[CONF_WEBHOOK_ID] != webhook_id:
return
unique_id = sensor_id(data[CONF_WEBHOOK_ID], data[ATTR_SENSOR_UNIQUE_ID])
entity = hass.data[DOMAIN][ENTITY_TYPE][unique_id]
if "added" in entity:
return
entity["added"] = True
device = hass.data[DOMAIN][DATA_DEVICES][data[CONF_WEBHOOK_ID]]
async_add_entities([MobileAppBinarySensor(data, device, config_entry)])
async_dispatcher_connect(
hass,
f"{DOMAIN}_{ENTITY_TYPE}_register",
partial(handle_sensor_registration, webhook_id),
)
class MobileAppBinarySensor(MobileAppEntity, BinarySensorEntity):
"""Representation of a mobile app binary sensor."""
@property
def is_on(self):
"""Return the state of the binary sensor."""
return self._config[ATTR_SENSOR_STATE]
|
import contextlib
import json
from hashlib import sha256
import pkg_resources
import vobject
from radicale import utils
from radicale.item import filter as radicale_filter
INTERNAL_TYPES = ("multifilesystem",)
CACHE_DEPS = ("radicale", "vobject", "python-dateutil",)
CACHE_VERSION = (";".join(pkg_resources.get_distribution(pkg).version
for pkg in CACHE_DEPS) + ";").encode()
def load(configuration):
"""Load the storage module chosen in configuration."""
return utils.load_plugin(
INTERNAL_TYPES, "storage", "Storage", configuration)
class ComponentExistsError(ValueError):
def __init__(self, path):
message = "Component already exists: %r" % path
super().__init__(message)
class ComponentNotFoundError(ValueError):
def __init__(self, path):
message = "Component doesn't exist: %r" % path
super().__init__(message)
class BaseCollection:
@property
def path(self):
"""The sanitized path of the collection without leading or
trailing ``/``."""
raise NotImplementedError
@property
def owner(self):
"""The owner of the collection."""
return self.path.split("/", maxsplit=1)[0]
@property
def is_principal(self):
"""Collection is a principal."""
return bool(self.path) and "/" not in self.path
@property
def etag(self):
"""Encoded as quoted-string (see RFC 2616)."""
etag = sha256()
for item in self.get_all():
etag.update((item.href + "/" + item.etag).encode())
etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
return '"%s"' % etag.hexdigest()
def sync(self, old_token=None):
"""Get the current sync token and changed items for synchronization.
``old_token`` is an old sync token, which is used as the base of the
delta update. If the sync token is missing, all items are returned.
ValueError is raised for invalid or old tokens.
WARNING: This simple default implementation treats all sync tokens as
invalid.
"""
token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
if old_token:
raise ValueError("Sync tokens are not supported")
return token, (item.href for item in self.get_all())
def get_multi(self, hrefs):
"""Fetch multiple items.
It's not required to return the requested items in the correct order.
Duplicated hrefs can be ignored.
Returns tuples with the href and the item or None if the item doesn't
exist.
"""
raise NotImplementedError
def get_all(self):
"""Fetch all items."""
raise NotImplementedError
def get_filtered(self, filters):
"""Fetch all items with optional filtering.
This can largely improve performance of reports depending on
the filters and this implementation.
Returns tuples in the form ``(item, filters_matched)``.
``filters_matched`` is a bool that indicates if ``filters`` are fully
matched.
"""
tag, start, end, simple = radicale_filter.simplify_prefilters(
filters, collection_tag=self.get_meta("tag"))
for item in self.get_all():
if tag:
if tag != item.component_name:
continue
istart, iend = item.time_range
if istart >= end or iend <= start:
continue
item_simple = simple and (start <= istart or iend <= end)
else:
item_simple = simple
yield item, item_simple
def has_uid(self, uid):
"""Check if a UID exists in the collection."""
for item in self.get_all():
if item.uid == uid:
return True
return False
def upload(self, href, item):
"""Upload a new or replace an existing item."""
raise NotImplementedError
def delete(self, href=None):
"""Delete an item.
When ``href`` is ``None``, delete the collection.
"""
raise NotImplementedError
def get_meta(self, key=None):
"""Get metadata value for collection.
Return the value of the property ``key``. If ``key`` is ``None``, return
a dict with all properties.
"""
raise NotImplementedError
def set_meta(self, props):
"""Set metadata values for collection.
``props`` a dict with values for properties.
"""
raise NotImplementedError
@property
def last_modified(self):
"""Get the HTTP-datetime of when the collection was modified."""
raise NotImplementedError
def serialize(self):
"""Get the unicode string representing the whole collection."""
if self.get_meta("tag") == "VCALENDAR":
in_vcalendar = False
vtimezones = ""
included_tzids = set()
vtimezone = []
tzid = None
components = ""
# Concatenate all child elements of VCALENDAR from all items
# together, while preventing duplicated VTIMEZONE entries.
# VTIMEZONEs are only distinguished by their TZID; if different
# timezones share the same TZID this produces erroneous output.
# VObject fails at this too.
for item in self.get_all():
depth = 0
for line in item.serialize().split("\r\n"):
if line.startswith("BEGIN:"):
depth += 1
if depth == 1 and line == "BEGIN:VCALENDAR":
in_vcalendar = True
elif in_vcalendar:
if depth == 1 and line.startswith("END:"):
in_vcalendar = False
if depth == 2 and line == "BEGIN:VTIMEZONE":
vtimezone.append(line + "\r\n")
elif vtimezone:
vtimezone.append(line + "\r\n")
if depth == 2 and line.startswith("TZID:"):
tzid = line[len("TZID:"):]
elif depth == 2 and line.startswith("END:"):
if tzid is None or tzid not in included_tzids:
vtimezones += "".join(vtimezone)
included_tzids.add(tzid)
vtimezone.clear()
tzid = None
elif depth >= 2:
components += line + "\r\n"
if line.startswith("END:"):
depth -= 1
template = vobject.iCalendar()
displayname = self.get_meta("D:displayname")
if displayname:
template.add("X-WR-CALNAME")
template.x_wr_calname.value_param = "TEXT"
template.x_wr_calname.value = displayname
description = self.get_meta("C:calendar-description")
if description:
template.add("X-WR-CALDESC")
template.x_wr_caldesc.value_param = "TEXT"
template.x_wr_caldesc.value = description
template = template.serialize()
template_insert_pos = template.find("\r\nEND:VCALENDAR\r\n")
assert template_insert_pos != -1
template_insert_pos += 2
return (template[:template_insert_pos] +
vtimezones + components +
template[template_insert_pos:])
if self.get_meta("tag") == "VADDRESSBOOK":
return "".join((item.serialize() for item in self.get_all()))
return ""
class BaseStorage:
def __init__(self, configuration):
"""Initialize BaseStorage.
``configuration`` see ``radicale.config`` module.
The ``configuration`` must not change during the lifetime of
this object, it is kept as an internal reference.
"""
self.configuration = configuration
def discover(self, path, depth="0"):
"""Discover a list of collections under the given ``path``.
``path`` is sanitized.
If ``depth`` is "0", only the actual object under ``path`` is
returned.
If ``depth`` is anything but "0", it is considered as "1" and direct
children are included in the result.
The root collection "/" must always exist.
"""
raise NotImplementedError
def move(self, item, to_collection, to_href):
"""Move an object.
``item`` is the item to move.
``to_collection`` is the target collection.
``to_href`` is the target name in ``to_collection``. An item with the
same name might already exist.
"""
raise NotImplementedError
def create_collection(self, href, items=None, props=None):
"""Create a collection.
``href`` is the sanitized path.
If the collection already exists and neither ``items`` nor
``props`` are set, this method shouldn't do anything. Otherwise the
existing collection must be replaced.
``items`` is a list of vobject components.
``props`` are metadata values for the collection.
``props["tag"]`` is the type of collection (VCALENDAR or
VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
collection.
"""
raise NotImplementedError
@contextlib.contextmanager
def acquire_lock(self, mode, user=None):
"""Set a context manager to lock the whole storage.
``mode`` must either be "r" for shared access or "w" for exclusive
access.
``user`` is the name of the logged in user or empty.
"""
raise NotImplementedError
def verify(self):
"""Check the storage for errors."""
raise NotImplementedError
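# Illustrative sketch (not part of the original module): a minimal, read-only
# in-memory collection showing how a few of the BaseCollection hooks fit
# together. The class and attribute names are hypothetical, and only part of
# the interface is implemented; a real storage plugin must implement the rest.
class _ExampleMemoryCollection(BaseCollection):
    def __init__(self, path, items=(), meta=None):
        self._path = path
        self._items = list(items)
        self._meta = dict(meta or {})

    @property
    def path(self):
        return self._path

    def get_all(self):
        return iter(self._items)

    def get_meta(self, key=None):
        return self._meta if key is None else self._meta.get(key)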
|
import unittest
from pgmpy.independencies import Independencies, IndependenceAssertion
class TestIndependenceAssertion(unittest.TestCase):
def setUp(self):
self.assertion = IndependenceAssertion()
def test_return_list_if_str(self):
self.assertListEqual(self.assertion._return_list_if_str("U"), ["U"])
self.assertListEqual(self.assertion._return_list_if_str(["U", "V"]), ["U", "V"])
def test_get_assertion(self):
self.assertTupleEqual(
IndependenceAssertion("U", "V", "Z").get_assertion(), ({"U"}, {"V"}, {"Z"})
)
self.assertTupleEqual(
IndependenceAssertion("U", "V").get_assertion(), ({"U"}, {"V"}, set())
)
def test_init(self):
self.assertion1 = IndependenceAssertion("U", "V", "Z")
self.assertSetEqual(self.assertion1.event1, {"U"})
self.assertSetEqual(self.assertion1.event2, {"V"})
self.assertSetEqual(self.assertion1.event3, {"Z"})
self.assertion1 = IndependenceAssertion(["U", "V"], ["Y", "Z"], ["A", "B"])
self.assertSetEqual(self.assertion1.event1, {"U", "V"})
self.assertSetEqual(self.assertion1.event2, {"Y", "Z"})
self.assertSetEqual(self.assertion1.event3, {"A", "B"})
def test_init_exceptions(self):
self.assertRaises(ValueError, IndependenceAssertion, event2=["U"], event3="V")
self.assertRaises(ValueError, IndependenceAssertion, event2=["U"])
self.assertRaises(ValueError, IndependenceAssertion, event3=["Z"])
self.assertRaises(ValueError, IndependenceAssertion, event1=["U"])
self.assertRaises(ValueError, IndependenceAssertion, event1=["U"], event3=["Z"])
def tearDown(self):
del self.assertion
class TestIndependenciesAssertionEq(unittest.TestCase):
def setUp(self):
self.i1 = IndependenceAssertion("a", "b", "c")
self.i2 = IndependenceAssertion("a", "b")
self.i3 = IndependenceAssertion("a", ["b", "c", "d"])
self.i4 = IndependenceAssertion("a", ["b", "c", "d"], "e")
self.i5 = IndependenceAssertion("a", ["d", "c", "b"], "e")
self.i6 = IndependenceAssertion("a", ["d", "c"], ["e", "b"])
self.i7 = IndependenceAssertion("a", ["c", "d"], ["b", "e"])
self.i8 = IndependenceAssertion("a", ["f", "d"], ["b", "e"])
self.i9 = IndependenceAssertion("a", ["d", "k", "b"], "e")
self.i10 = IndependenceAssertion(["k", "b", "d"], "a", "e")
def test_eq1(self):
self.assertFalse(self.i1 == "a")
self.assertFalse(self.i2 == 1)
self.assertFalse(self.i4 == [2, "a"])
self.assertFalse(self.i6 == "c")
def test_eq2(self):
self.assertFalse(self.i1 == self.i2)
self.assertFalse(self.i1 == self.i3)
self.assertFalse(self.i2 == self.i4)
self.assertFalse(self.i3 == self.i6)
def test_eq3(self):
self.assertTrue(self.i4 == self.i5)
self.assertTrue(self.i6 == self.i7)
self.assertFalse(self.i7 == self.i8)
self.assertFalse(self.i4 == self.i9)
self.assertFalse(self.i5 == self.i9)
self.assertTrue(self.i10 == self.i9)
self.assertTrue(self.i10 != self.i8)
def tearDown(self):
del self.i1
del self.i2
del self.i3
del self.i4
del self.i5
del self.i6
del self.i7
del self.i8
del self.i9
del self.i10
class TestIndependencies(unittest.TestCase):
def setUp(self):
self.Independencies = Independencies()
self.Independencies3 = Independencies(
["a", ["b", "c", "d"], ["e", "f", "g"]], ["c", ["d", "e", "f"], ["g", "h"]]
)
self.Independencies4 = Independencies(
[["f", "d", "e"], "c", ["h", "g"]], [["b", "c", "d"], "a", ["f", "g", "e"]]
)
self.Independencies5 = Independencies(
["a", ["b", "c", "d"], ["e", "f", "g"]], ["c", ["d", "e", "f"], "g"]
)
def test_init(self):
self.Independencies1 = Independencies(["X", "Y", "Z"])
self.assertEqual(self.Independencies1, Independencies(["X", "Y", "Z"]))
self.Independencies2 = Independencies()
self.assertEqual(self.Independencies2, Independencies())
def test_add_assertions(self):
self.Independencies1 = Independencies(["X", "Y", "Z"])
self.assertEqual(self.Independencies1, Independencies(["X", "Y", "Z"]))
self.Independencies2 = Independencies(["A", "B", "C"], ["D", "E", "F"])
self.assertEqual(
self.Independencies2, Independencies(["A", "B", "C"], ["D", "E", "F"])
)
def test_get_assertions(self):
self.Independencies1 = Independencies(["X", "Y", "Z"])
self.assertEqual(
self.Independencies1.independencies, self.Independencies1.get_assertions()
)
self.Independencies2 = Independencies(["A", "B", "C"], ["D", "E", "F"])
self.assertEqual(
self.Independencies2.independencies, self.Independencies2.get_assertions()
)
def test_get_all_variables(self):
self.assertEqual(
self.Independencies3.get_all_variables(),
frozenset(("a", "b", "c", "d", "e", "f", "g", "h")),
)
self.assertEqual(
self.Independencies4.get_all_variables(),
frozenset(("f", "d", "e", "c", "h", "g", "b", "c", "a")),
)
self.assertEqual(
self.Independencies5.get_all_variables(),
frozenset(("a", "b", "c", "d", "e", "f", "g")),
)
def test_closure(self):
ind1 = Independencies(("A", ["B", "C"], "D"))
self.assertEqual(
ind1.closure(),
Independencies(
("A", ["B", "C"], "D"),
("A", "B", ["C", "D"]),
("A", "C", ["B", "D"]),
("A", "B", "D"),
("A", "C", "D"),
),
)
ind2 = Independencies(("W", ["X", "Y", "Z"]))
self.assertEqual(
ind2.closure(),
Independencies(
("W", "Y"),
("W", "Y", "X"),
("W", "Y", "Z"),
("W", "Y", ["X", "Z"]),
("W", ["Y", "X"]),
("W", "X", ["Y", "Z"]),
("W", ["X", "Z"], "Y"),
("W", "X"),
("W", ["X", "Z"]),
("W", ["Y", "Z"], "X"),
("W", ["Y", "X", "Z"]),
("W", "X", "Z"),
("W", ["Y", "Z"]),
("W", "Z", "X"),
("W", "Z"),
("W", ["Y", "X"], "Z"),
("W", "X", "Y"),
("W", "Z", ["Y", "X"]),
("W", "Z", "Y"),
),
)
ind3 = Independencies(
("c", "a", ["b", "e", "d"]),
(["e", "c"], "b", ["a", "d"]),
(["b", "d"], "e", "a"),
("e", ["b", "d"], "c"),
("e", ["b", "c"], "d"),
(["e", "c"], "a", "b"),
)
self.assertEqual(len(ind3.closure().get_assertions()), 78)
def test_entails(self):
ind1 = Independencies([["A", "B"], ["C", "D"], "E"])
ind2 = Independencies(["A", "C", "E"])
self.assertTrue(ind1.entails(ind2))
self.assertFalse(ind2.entails(ind1))
ind3 = Independencies(("W", ["X", "Y", "Z"]))
self.assertTrue(ind3.entails(ind3.closure()))
self.assertTrue(ind3.closure().entails(ind3))
def test_is_equivalent(self):
ind1 = Independencies(["X", ["Y", "W"], "Z"])
ind2 = Independencies(["X", "Y", "Z"], ["X", "W", "Z"])
ind3 = Independencies(["X", "Y", "Z"], ["X", "W", "Z"], ["X", "Y", ["W", "Z"]])
self.assertFalse(ind1.is_equivalent(ind2))
self.assertTrue(ind1.is_equivalent(ind3))
def test_eq(self):
self.assertTrue(self.Independencies3 == self.Independencies4)
self.assertFalse(self.Independencies3 != self.Independencies4)
self.assertTrue(self.Independencies3 != self.Independencies5)
self.assertFalse(self.Independencies4 == self.Independencies5)
self.assertFalse(Independencies() == Independencies(["A", "B", "C"]))
self.assertFalse(Independencies(["A", "B", "C"]) == Independencies())
self.assertTrue(Independencies() == Independencies())
def tearDown(self):
del self.Independencies
del self.Independencies3
del self.Independencies4
del self.Independencies5
if __name__ == "__main__":
unittest.main()
|
from lemur.plugins.lemur_aws.sts import sts_client
@sts_client("ec2")
def get_regions(**kwargs):
regions = kwargs["client"].describe_regions()
return [x["RegionName"] for x in regions["Regions"]]
@sts_client("ec2")
def get_all_instances(**kwargs):
"""
Fetches all instance objects for a given account and region.
"""
paginator = kwargs["client"].get_paginator("describe_instances")
return paginator.paginate()
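# Illustrative sketch (not part of the original module): walking the paginated
# describe_instances responses returned by get_all_instances(). The keyword
# arguments forwarded to the sts_client decorator (e.g. account_number/region)
# are assumptions and are not verified against that decorator here.
def _example_instance_ids(**sts_kwargs):
    ids = []
    for page in get_all_instances(**sts_kwargs):
        for reservation in page.get("Reservations", []):
            for instance in reservation.get("Instances", []):
                ids.append(instance["InstanceId"])
    return ids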
|
from unittest.mock import call
import pytest
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.core import State
from tests.common import MockConfigEntry, mock_restore_cache
from tests.components.rfxtrx.conftest import create_rfx_test_cfg
async def test_one_cover(hass, rfxtrx):
"""Test with 1 cover."""
entry_data = create_rfx_test_cfg(
devices={"0b1400cd0213c7f20d010f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("cover.lightwaverf_siemens_0213c7_242")
assert state
await hass.services.async_call(
"cover",
"open_cover",
{"entity_id": "cover.lightwaverf_siemens_0213c7_242"},
blocking=True,
)
await hass.services.async_call(
"cover",
"close_cover",
{"entity_id": "cover.lightwaverf_siemens_0213c7_242"},
blocking=True,
)
await hass.services.async_call(
"cover",
"stop_cover",
{"entity_id": "cover.lightwaverf_siemens_0213c7_242"},
blocking=True,
)
assert rfxtrx.transport.send.mock_calls == [
call(bytearray(b"\n\x14\x00\x00\x02\x13\xc7\xf2\x0f\x00\x00")),
call(bytearray(b"\n\x14\x00\x00\x02\x13\xc7\xf2\r\x00\x00")),
call(bytearray(b"\n\x14\x00\x00\x02\x13\xc7\xf2\x0e\x00\x00")),
]
@pytest.mark.parametrize("state", ["open", "closed"])
async def test_state_restore(hass, rfxtrx, state):
"""State restoration."""
entity_id = "cover.lightwaverf_siemens_0213c7_242"
mock_restore_cache(hass, [State(entity_id, state)])
entry_data = create_rfx_test_cfg(
devices={"0b1400cd0213c7f20d010f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == state
async def test_several_covers(hass, rfxtrx):
"""Test with 3 covers."""
entry_data = create_rfx_test_cfg(
devices={
"0b1400cd0213c7f20d010f51": {"signal_repetitions": 1},
"0A1400ADF394AB010D0060": {"signal_repetitions": 1},
"09190000009ba8010100": {"signal_repetitions": 1},
}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("cover.lightwaverf_siemens_0213c7_242")
assert state
assert state.state == "closed"
assert state.attributes.get("friendly_name") == "LightwaveRF, Siemens 0213c7:242"
state = hass.states.get("cover.lightwaverf_siemens_f394ab_1")
assert state
assert state.state == "closed"
assert state.attributes.get("friendly_name") == "LightwaveRF, Siemens f394ab:1"
state = hass.states.get("cover.rollertrol_009ba8_1")
assert state
assert state.state == "closed"
assert state.attributes.get("friendly_name") == "RollerTrol 009ba8:1"
async def test_discover_covers(hass, rfxtrx_automatic):
"""Test with discovery of covers."""
rfxtrx = rfxtrx_automatic
await rfxtrx.signal("0a140002f38cae010f0070")
state = hass.states.get("cover.lightwaverf_siemens_f38cae_1")
assert state
assert state.state == "open"
await rfxtrx.signal("0a1400adf394ab020e0060")
state = hass.states.get("cover.lightwaverf_siemens_f394ab_2")
assert state
assert state.state == "open"
async def test_duplicate_cover(hass, rfxtrx):
"""Test with 2 duplicate covers."""
entry_data = create_rfx_test_cfg(
devices={
"0b1400cd0213c7f20d010f51": {"signal_repetitions": 1},
"0b1400cd0213c7f20d010f50": {"signal_repetitions": 1},
}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("cover.lightwaverf_siemens_0213c7_242")
assert state
assert state.state == "closed"
assert state.attributes.get("friendly_name") == "LightwaveRF, Siemens 0213c7:242"
|
from struct import Struct
from collections import namedtuple
from math import modf
from datetime import datetime
from os import SEEK_END
import numpy as np
from ...utils import warn
def _read_teeg(f, teeg_offset):
"""
Read TEEG structure from an open CNT file.
# from TEEG structure in http://paulbourke.net/dataformats/eeg/
typedef struct {
char Teeg; /* Either 1 or 2 */
long Size; /* Total length of all the events */
long Offset; /* Hopefully always 0 */
} TEEG;
"""
# we use more descriptive names based on the TEEG doc comments
Teeg = namedtuple('Teeg', 'event_type total_length offset')
teeg_parser = Struct('<Bll')
f.seek(teeg_offset)
return Teeg(*teeg_parser.unpack(f.read(teeg_parser.size)))
CNTEventType1 = namedtuple('CNTEventType1',
('StimType KeyBoard KeyPad_Accept Offset'))
# typedef struct {
# unsigned short StimType; /* range 0-65535 */
# unsigned char KeyBoard; /* range 0-11 corresponding to fcn keys +1 */
# char KeyPad_Accept; /* 0->3 range 0-15 bit coded response pad */
# /* 4->7 values 0xd=Accept 0xc=Reject */
# long Offset; /* file offset of event */
# } EVENT1;
CNTEventType2 = namedtuple('CNTEventType2',
('StimType KeyBoard KeyPad_Accept Offset Type '
'Code Latency EpochEvent Accept2 Accuracy'))
# unsigned short StimType; /* range 0-65535 */
# unsigned char KeyBoard; /* range 0-11 corresponding to fcn keys +1 */
# char KeyPad_Accept; /* 0->3 range 0-15 bit coded response pad */
# /* 4->7 values 0xd=Accept 0xc=Reject */
# long Offset; /* file offset of event */
# short Type;
# short Code;
# float Latency;
# char EpochEvent;
# char Accept2;
# char Accuracy;
# needed for backward compat: EVENT type 3 has the same structure as type 2
CNTEventType3 = namedtuple('CNTEventType3',
('StimType KeyBoard KeyPad_Accept Offset Type '
'Code Latency EpochEvent Accept2 Accuracy'))
def _get_event_parser(event_type):
if event_type == 1:
event_maker = CNTEventType1
struct_pattern = '<HBcl'
elif event_type == 2:
event_maker = CNTEventType2
struct_pattern = '<HBclhhfccc'
elif event_type == 3:
event_maker = CNTEventType3
struct_pattern = '<HBclhhfccc' # Same as event type 2
else:
raise ValueError('unknown CNT event type %s' % event_type)
def parser(buffer):
struct = Struct(struct_pattern)
for chunk in struct.iter_unpack(buffer):
yield event_maker(*chunk)
return parser
def _session_date_2_meas_date(session_date, date_format):
try:
frac_part, int_part = modf(datetime
.strptime(session_date, date_format)
.timestamp())
except ValueError:
warn(' Could not parse meas date from the header. Setting to None.')
return None
else:
return (int_part, frac_part)
def _compute_robust_event_table_position(fid, data_format='int32'):
"""Compute `event_table_position`.
When recording, event_table_position is computed as an accumulation. If the
recording is large, this value overflows and ends up pointing
somewhere else (see gh-6535).
If the file is smaller than 2G the value in the SETUP is returned.
Otherwise, the address of the table position is computed from:
n_samples, n_channels, and the bytes size.
"""
SETUP_NCHANNELS_OFFSET = 370
SETUP_NSAMPLES_OFFSET = 864
SETUP_EVENTTABLEPOS_OFFSET = 886
fid_origin = fid.tell() # save the state
if fid.seek(0, SEEK_END) < 2e9:
fid.seek(SETUP_EVENTTABLEPOS_OFFSET)
(event_table_pos,) = np.frombuffer(fid.read(4), dtype='<i4')
else:
if data_format == 'auto':
            warn('Using `data_format=\'auto\'` for a CNT file larger'
                 ' than 2Gb is not guaranteed to work. Please pass'
                 ' \'int16\' or \'int32\' explicitly (assuming int32).')
n_bytes = 2 if data_format == 'int16' else 4
fid.seek(SETUP_NSAMPLES_OFFSET)
(n_samples,) = np.frombuffer(fid.read(4), dtype='<i4')
fid.seek(SETUP_NCHANNELS_OFFSET)
(n_channels,) = np.frombuffer(fid.read(2), dtype='<u2')
event_table_pos = (900 +
75 * int(n_channels) +
n_bytes * int(n_channels) * int(n_samples))
fid.seek(fid_origin) # restore the state
return event_table_pos
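# Hedged illustration (not part of the original reader): for files over 2 GB
# the event table address is rebuilt from the header layout as
#   900 (SETUP block) + 75 * n_channels (electrode structs)
#       + n_bytes * n_channels * n_samples (data block).
# The numbers below are invented purely to show that arithmetic.
def _example_event_table_position(n_channels=32, n_samples=1000000,
                                  data_format='int32'):
    n_bytes = 2 if data_format == 'int16' else 4
    return 900 + 75 * n_channels + n_bytes * n_channels * n_samples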
|
from ... import event
from . import Widget
class GroupWidget(Widget):
""" Widget to collect widgets in a named group.
It does not provide a layout. This is similar to a QGroupBox or an
HTML fieldset.
The ``node`` of this widget is a
`<fieldset> <https://developer.mozilla.org/docs/Web/HTML/Element/fieldset>`_.
"""
CSS = """
.flx-GroupWidget {
margin: 0;
padding: 5px;
border: 2px solid #ccc;
border-radius: 3px;
}
.flx-GroupWidget > .flx-Layout {
width: calc(100% - 10px);
height: calc(100% - 25px);
}
"""
def _create_dom(self):
global window
node = window.document.createElement('fieldset')
self._legend = window.document.createElement('legend')
node.appendChild(self._legend)
return node
def _render_dom(self):
nodes = [self._legend]
for widget in self.children:
nodes.append(widget.outernode)
return nodes
def _query_min_max_size(self):
w1, w2, h1, h2 = super()._query_min_max_size()
w1 += 10
h1 += 30
return w1, w2, h1, h2
@event.reaction('title')
def _title_changed(self, *events):
self._legend.textContent = '\u00A0' + self.title + '\u00A0'
|
import logging
from pymochad import device
from pymochad.exceptions import MochadException
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_NAME, CONF_PLATFORM
from homeassistant.helpers import config_validation as cv
from . import CONF_COMM_TYPE, DOMAIN, REQ_LOCK
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): DOMAIN,
CONF_DEVICES: [
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_ADDRESS): cv.x10_address,
vol.Optional(CONF_COMM_TYPE): cv.string,
}
],
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up X10 switches over a mochad controller."""
mochad_controller = hass.data[DOMAIN]
devs = config.get(CONF_DEVICES)
add_entities([MochadSwitch(hass, mochad_controller.ctrl, dev) for dev in devs])
return True
class MochadSwitch(SwitchEntity):
"""Representation of a X10 switch over Mochad."""
def __init__(self, hass, ctrl, dev):
"""Initialize a Mochad Switch Device."""
self._controller = ctrl
self._address = dev[CONF_ADDRESS]
self._name = dev.get(CONF_NAME, "x10_switch_dev_%s" % self._address)
self._comm_type = dev.get(CONF_COMM_TYPE, "pl")
self.switch = device.Device(ctrl, self._address, comm_type=self._comm_type)
# Init with false to avoid locking HA for long on CM19A (goes from rf
# to pl via TM751, but not other way around)
if self._comm_type == "pl":
self._state = self._get_device_status()
else:
self._state = False
@property
def name(self):
"""Get the name of the switch."""
return self._name
def turn_on(self, **kwargs):
"""Turn the switch on."""
_LOGGER.debug("Reconnect %s:%s", self._controller.server, self._controller.port)
with REQ_LOCK:
try:
# Recycle socket on new command to recover mochad connection
self._controller.reconnect()
self.switch.send_cmd("on")
# No read data on CM19A which is rf only
if self._comm_type == "pl":
self._controller.read_data()
self._state = True
except (MochadException, OSError) as exc:
_LOGGER.error("Error with mochad communication: %s", exc)
def turn_off(self, **kwargs):
"""Turn the switch off."""
_LOGGER.debug("Reconnect %s:%s", self._controller.server, self._controller.port)
with REQ_LOCK:
try:
# Recycle socket on new command to recover mochad connection
self._controller.reconnect()
self.switch.send_cmd("off")
# No read data on CM19A which is rf only
if self._comm_type == "pl":
self._controller.read_data()
self._state = False
except (MochadException, OSError) as exc:
_LOGGER.error("Error with mochad communication: %s", exc)
def _get_device_status(self):
"""Get the status of the switch from mochad."""
with REQ_LOCK:
status = self.switch.get_status().rstrip()
return status == "on"
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
|
from collections import defaultdict
import json
import numpy as np
import os
import PIL.Image
import PIL.ImageDraw
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.coco.coco_utils import get_coco
from chainercv import utils
try:
from pycocotools import mask as coco_mask
except ImportError:
pass
class COCOInstancesBaseDataset(GetterDataset):
def __init__(self, data_dir='auto', split='train', year='2017',
use_crowded=False):
if year == '2017' and split in ['minival', 'valminusminival']:
raise ValueError(
'coco2017 dataset does not support given split: {}'
.format(split))
super(COCOInstancesBaseDataset, self).__init__()
self.use_crowded = use_crowded
if split in ['val', 'minival', 'valminusminival']:
img_split = 'val'
else:
img_split = 'train'
if data_dir == 'auto':
data_dir = get_coco(split, img_split, year, 'instances')
self.img_root = os.path.join(
data_dir, 'images', '{}{}'.format(img_split, year))
anno_path = os.path.join(
data_dir, 'annotations', 'instances_{}{}.json'.format(split, year))
self.data_dir = data_dir
        with open(anno_path, 'r') as f:
            annos = json.load(f)
self.id_to_prop = {}
for prop in annos['images']:
self.id_to_prop[prop['id']] = prop
self.ids = sorted(list(self.id_to_prop.keys()))
self.cat_ids = [cat['id'] for cat in annos['categories']]
self.id_to_anno = defaultdict(list)
for anno in annos['annotations']:
self.id_to_anno[anno['image_id']].append(anno)
self.add_getter('img', self._get_image)
self.add_getter('mask', self._get_mask)
self.add_getter(
['bbox', 'label', 'area', 'crowded'],
self._get_annotations)
def __len__(self):
return len(self.ids)
def _get_image(self, i):
img_path = os.path.join(
self.img_root, self.id_to_prop[self.ids[i]]['file_name'])
img = utils.read_image(img_path, dtype=np.float32, color=True)
return img
def _get_mask(self, i):
# List[{'segmentation', 'area', 'iscrowd',
# 'image_id', 'bbox', 'category_id', 'id'}]
annotation = self.id_to_anno[self.ids[i]]
H = self.id_to_prop[self.ids[i]]['height']
W = self.id_to_prop[self.ids[i]]['width']
mask = []
crowded = []
for anno in annotation:
msk = self._segm_to_mask(anno['segmentation'], (H, W))
# FIXME: some of minival annotations are malformed.
if msk.shape != (H, W):
continue
mask.append(msk)
crowded.append(anno['iscrowd'])
        mask = np.array(mask, dtype=bool)
        crowded = np.array(crowded, dtype=bool)
        if len(mask) == 0:
            mask = np.zeros((0, H, W), dtype=bool)
if not self.use_crowded:
not_crowded = np.logical_not(crowded)
mask = mask[not_crowded]
return mask
def _get_annotations(self, i):
# List[{'segmentation', 'area', 'iscrowd',
# 'image_id', 'bbox', 'category_id', 'id'}]
annotation = self.id_to_anno[self.ids[i]]
bbox = np.array([ann['bbox'] for ann in annotation],
dtype=np.float32)
if len(bbox) == 0:
bbox = np.zeros((0, 4), dtype=np.float32)
# (x, y, width, height) -> (x_min, y_min, x_max, y_max)
bbox[:, 2] = bbox[:, 0] + bbox[:, 2]
bbox[:, 3] = bbox[:, 1] + bbox[:, 3]
# (x_min, y_min, x_max, y_max) -> (y_min, x_min, y_max, x_max)
bbox = bbox[:, [1, 0, 3, 2]]
label = np.array([self.cat_ids.index(ann['category_id'])
for ann in annotation], dtype=np.int32)
area = np.array([ann['area']
for ann in annotation], dtype=np.float32)
        crowded = np.array([ann['iscrowd']
                            for ann in annotation], dtype=bool)
# Remove invalid boxes
bbox_area = np.prod(bbox[:, 2:] - bbox[:, :2], axis=1)
keep_mask = np.logical_and(bbox[:, 0] <= bbox[:, 2],
bbox[:, 1] <= bbox[:, 3])
keep_mask = np.logical_and(keep_mask, bbox_area > 0)
if not self.use_crowded:
keep_mask = np.logical_and(keep_mask, np.logical_not(crowded))
bbox = bbox[keep_mask]
label = label[keep_mask]
area = area[keep_mask]
crowded = crowded[keep_mask]
return bbox, label, area, crowded
def _segm_to_mask(self, segm, size):
# Copied from pycocotools.coco.COCO.annToMask
H, W = size
if isinstance(segm, list):
# polygon -- a single object might consist of multiple parts
# we merge all parts into one mask rle code
mask = np.zeros((H, W), dtype=np.uint8)
mask = PIL.Image.fromarray(mask)
for sgm in segm:
xy = np.array(sgm).reshape((-1, 2))
xy = [tuple(xy_i) for xy_i in xy]
PIL.ImageDraw.Draw(mask).polygon(xy=xy, outline=1, fill=1)
mask = np.asarray(mask)
elif isinstance(segm['counts'], list):
rle = coco_mask.frPyObjects(segm, H, W)
mask = coco_mask.decode(rle)
else:
mask = coco_mask.decode(segm)
        return mask.astype(bool)
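# Hedged illustration (not part of chainercv): a standalone sketch of the
# coordinate conversion performed in _get_annotations above, turning COCO's
# (x, y, width, height) boxes into the (y_min, x_min, y_max, x_max) layout.
# The sample box is made up for demonstration only.
def _example_coco_bbox_conversion():
    bbox = np.array([[10., 20., 30., 40.]], dtype=np.float32)  # (x, y, w, h)
    bbox[:, 2] = bbox[:, 0] + bbox[:, 2]   # x_max = x + width
    bbox[:, 3] = bbox[:, 1] + bbox[:, 3]   # y_max = y + height
    bbox = bbox[:, [1, 0, 3, 2]]           # -> (y_min, x_min, y_max, x_max)
    return bbox                            # [[20., 10., 60., 40.]]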
|
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_DEVICE
import homeassistant.helpers.config_validation as cv
from .const import DATA_ENOCEAN, DOMAIN, ENOCEAN_DONGLE
from .dongle import EnOceanDongle
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.string})}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass, config):
"""Set up the EnOcean component."""
# support for text-based configuration (legacy)
if DOMAIN not in config:
return True
if hass.config_entries.async_entries(DOMAIN):
# We can only have one dongle. If there is already one in the config,
# there is no need to import the yaml based config.
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
async def async_setup_entry(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Set up an EnOcean dongle for the given entry."""
enocean_data = hass.data.setdefault(DATA_ENOCEAN, {})
usb_dongle = EnOceanDongle(hass, config_entry.data[CONF_DEVICE])
await usb_dongle.async_setup()
enocean_data[ENOCEAN_DONGLE] = usb_dongle
return True
async def async_unload_entry(hass, config_entry):
"""Unload ENOcean config entry."""
enocean_dongle = hass.data[DATA_ENOCEAN][ENOCEAN_DONGLE]
enocean_dongle.unload()
hass.data.pop(DATA_ENOCEAN)
return True
|
from builtins import range
import numpy as np
from hypertools.tools.reduce import reduce as reducer
from hypertools.plot.plot import plot
data = [np.random.multivariate_normal(np.zeros(4), np.eye(4), size=10) for i in range(2)]
reduced_data_2d = reducer(data,ndims=2)
reduced_data_1d = reducer(data,ndims=1)
def test_reduce_is_list():
reduced_data_3d = reducer(data)
assert type(reduced_data_3d) is list
def test_reduce_is_array():
reduced_data_3d = reducer(data, ndims=3)
assert isinstance(reduced_data_3d[0],np.ndarray)
def test_reduce_dims_3d():
reduced_data_3d = reducer(data, ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_dims_2d():
reduced_data_2d = reducer(data, ndims=2)
assert reduced_data_2d[0].shape==(10,2)
def test_reduce_dims_1d():
reduced_data_1d = reducer(data, ndims=1)
assert reduced_data_1d[0].shape==(10,1)
def test_reduce_geo():
geo = plot(data, show=False)
reduced_data_3d = reducer(geo, ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_PCA():
reduced_data_3d = reducer(data, reduce='PCA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_IncrementalPCA():
reduced_data_3d = reducer(data, reduce='IncrementalPCA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_SparsePCA():
reduced_data_3d = reducer(data, reduce='SparsePCA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_MiniBatchSparsePCA():
reduced_data_3d = reducer(data, reduce='MiniBatchSparsePCA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_KernelPCA():
reduced_data_3d = reducer(data, reduce='KernelPCA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_FastICA():
reduced_data_3d = reducer(data, reduce='FastICA', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_FactorAnalysis():
reduced_data_3d = reducer(data, reduce='FactorAnalysis', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_TruncatedSVD():
reduced_data_3d = reducer(data, reduce='TruncatedSVD', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_DictionaryLearning():
reduced_data_3d = reducer(data, reduce='DictionaryLearning', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_MiniBatchDictionaryLearning():
reduced_data_3d = reducer(data, reduce='MiniBatchDictionaryLearning', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_TSNE():
reduced_data_3d = reducer(data, reduce='TSNE', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_Isomap():
reduced_data_3d = reducer(data, reduce='Isomap', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_SpectralEmbedding():
reduced_data_3d = reducer(data, reduce='SpectralEmbedding', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_LocallyLinearEmbedding():
reduced_data_3d = reducer(data, reduce='LocallyLinearEmbedding', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_MDS():
reduced_data_3d = reducer(data, reduce='MDS', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_UMAP():
reduced_data_3d = reducer(data, reduce='UMAP', ndims=3)
assert reduced_data_3d[0].shape==(10,3)
def test_reduce_params_UMAP():
from umap import UMAP
data1 = np.random.rand(20, 10)
params = {'n_neighbors': 5, 'n_components': 2, 'metric': 'correlation', 'random_state': 1234}
    # testing override of ndims by n_components; should raise a UserWarning due to the conflict
hyp_data = reducer(data1, reduce={'model': 'UMAP', 'params': params}, ndims=3)
umap_data = UMAP(**params).fit_transform(data1)
np.testing.assert_array_equal(hyp_data, umap_data)
|
from ... import event
from .._widget import Widget, create_element
loop = event.loop
# todo: icons
# todo: tooltips
# todo: a variant that can load data dynamically from Python, for biggish data
class TreeWidget(Widget):
"""
A Widget that can be used to structure information in a list or a tree.
It's items are represented by its children, which may only be TreeItem
objects. Sub items can be created by instantiating TreeItems in the context
of another TreeItem.
When the items in the tree have no sub-items themselves, the TreeWidget is
in "list mode". Otherwise, items can be collapsed/expanded etc.
The ``node`` of this widget is a
`<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_
with some child elements and quite a bit of CSS for rendering.
**Style**
This widget can be fully styled using CSS, using the following CSS classes:
* ``flx-listmode`` is set on the widget's node if no items have sub items.
Style classes for a TreeItem's elements:
* ``flx-TreeItem`` indicates the row of an item (its text, icon, and checkbox).
* ``flx-TreeItem > collapsebut`` the element used to collapse/expand an item.
* ``flx-TreeItem > checkbut`` the element used to check/uncheck an item.
* ``flx-TreeItem > text`` the element that contains the text of the item.
* ``flx-TreeItem > title`` the element that contains the title of the item.
Style classes applied to the TreeItem, corresponding to its properties:
* ``visible-true`` and ``visible-false`` indicate visibility.
* ``selected-true`` and ``selected-false`` indicate selection state.
* ``checked-true``, ``checked-false`` and ``checked-null`` indicate checked
state, with the ``null`` variant indicating not-checkable.
* ``collapsed-true``, ``collapsed-false`` and ``collapsed-null`` indicate
collapse state, with the ``null`` variant indicating not-collapsable.
"""
DEFAULT_MIN_SIZE = 100, 50
CSS = """
/* ----- Tree Widget Mechanics ----- */
.flx-TreeWidget {
height: 100%;
overflow-y: scroll;
overflow-x: hidden;
cursor: default;
}
.flx-TreeWidget > ul {
position: absolute; /* avoid having an implicit width */
left: 0;
right: 0;
}
.flx-TreeWidget .flx-TreeItem {
display: inline-block;
margin: 0;
padding-left: 2px;
width: 100%;
user-select: none;
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
}
.flx-TreeWidget .flx-TreeItem > .text {
display: inline-block;
position: absolute;
right: 0;
}
.flx-TreeWidget .flx-TreeItem > .title:empty + .text {
position: static; /* .text width is not used*/
}
.flx-TreeWidget ul {
list-style-type: none;
padding: 0;
margin: 0;
}
.flx-TreeWidget li {
outline-offset: -1px;
}
.flx-TreeWidget li.visible-false {
display: none;
}
.flx-TreeWidget li.collapsed-true ul {
display: none;
}
/* collapse button */
.flx-TreeWidget .flx-TreeItem > .collapsebut {
display: inline-block;
width: 1.5em; /* must match with ul padding-left */
text-align: center;
margin-left: -1px; /* aligns better with indentation guide */
}
.flx-TreeWidget .flx-TreeItem.collapsed-null > .collapsebut {
visibility: hidden;
}
.flx-TreeWidget.flx-listmode .flx-TreeItem > .collapsebut {
display: none;
}
/* indentation guides */
.flx-TreeWidget ul {
padding-left: 0.75em;
}
.flx-TreeWidget > ul {
padding-left: 0em;
}
.flx-TreeWidget.flx-listmode ul {
padding-left: 0.25em;
}
/* ----- Tree Widget Style ----- */
.flx-TreeWidget {
border: 2px groove black;
padding: 3px;
}
.flx-TreeItem.selected-true {
background: rgba(128, 128, 128, 0.35);
}
.flx-TreeItem.highlighted-true {
box-shadow: inset 0 0 3px 1px rgba(0, 0, 255, 0.4);
}
.flx-TreeWidget .flx-TreeItem.collapsed-true > .collapsebut::after {
vertical-align: top;
content: '\\25B8'; /* small right triangle */
}
.flx-TreeWidget .flx-TreeItem.collapsed-false > .collapsebut::after {
vertical-align: top;
content: '\\25BE'; /* small down triangle */
}
.flx-TreeWidget .flx-TreeItem > .collapsebut {
color: rgba(128, 128, 128, 0.6);
}
.flx-TreeWidget li.collapsed-false > ul > li {
border-left: 1px solid rgba(128, 128, 128, 0.3);
}
.flx-TreeWidget li.collapsed-false.selected-true > ul > li {
border-left: 1px solid rgba(128, 128, 128, 0.6);
}
.flx-TreeItem.checked-null > .checkbut {
content: '\\2611\\00a0';
        /* display: none;  (could also be visibility: hidden) */
}
.flx-TreeItem.checked-true > .checkbut::after {
vertical-align: top;
content: '\\2611\\00a0';
}
.flx-TreeItem.checked-false > .checkbut::after {
vertical-align: top;
content: '\\2610\\00a0';
}
.flx-TreeWidget .flx-TreeItem > .text.hastitle {
width: 50%;
}
/* ----- End Tree Widget ----- */
"""
max_selected = event.IntProp(0, settable=True, doc="""
The maximum number of selected items:
* If 0 (default) there is no selection.
* If 1, there can be one selected item.
* If > 1, up to this number of items can be selected by clicking them.
* If -1, any number of items can be selected by holding Ctrl or Shift.
""")
def init(self):
self._highlight_on = False
self._last_highlighted_hint = ''
self._last_selected = None
def get_all_items(self):
""" Get a flat list of all TreeItem instances in this Tree
(including sub children and sub-sub children, etc.), in the order that
they are shown in the tree.
"""
items = []
def collect(x):
items.append(x)
for i in x.children:
if i:
collect(i)
for x in self.children:
collect(x)
return items
def _render_dom(self):
nodes = [i.outernode for i in self.children if isinstance(i, TreeItem)]
return [create_element('ul', {}, nodes)]
@event.reaction('children', 'children*.children')
def __check_listmode(self, *events):
listmode = True
for i in self.children:
listmode = listmode and len(i.children) == 0 and i.collapsed is None
if listmode:
self.node.classList.add('flx-listmode')
else:
self.node.classList.remove('flx-listmode')
@event.reaction('max_selected')
def __max_selected_changed(self, *events):
if self.max_selected == 0:
# Deselect all
for i in self.get_all_items():
i.set_selected(False)
elif self.max_selected < 0:
# No action needed
pass
else:
# Deselect all if the count exceeds the max
count = 0
for i in self.get_all_items():
count += int(i.selected)
if count > self.max_selected:
for i in self.children:
i.set_selected(False)
@event.reaction('!children**.pointer_click', '!children**.pointer_double_click')
def _handle_item_clicked(self, *events):
self._last_highlighted_hint = events[-1].source.id
        if self._highlight_on:  # highlight tracks clicks
self.highlight_show_item(events[-1].source)
if self.max_selected == 0:
# No selection allowed
pass
elif self.max_selected < 0:
# Select/deselect any, but only with CTRL and SHIFT
for ev in events:
item = ev.source
modifiers = ev.modifiers if ev.modifiers else []
if 'Shift' in modifiers: # Ctrl can also be in modifiers
# Select everything between last selected and current
if self._last_selected is not None:
if self._last_selected is not item:
mark_selected = False
for i in self.get_all_items():
if mark_selected == True: # noqa - PScript perf
if i is item or i is self._last_selected:
break
i.user_selected(True)
else:
if i is item or i is self._last_selected:
mark_selected = True
item.user_selected(True)
self._last_selected = item
elif 'Ctrl' in modifiers:
# Toggle
select = not item.selected
item.user_selected(select)
self._last_selected = item if select else None
else:
# Similar as when max_selected is 1
for i in self.get_all_items():
if i.selected and i is not item:
i.user_selected(False)
select = not item.selected
item.user_selected(select)
self._last_selected = item if select else None
elif self.max_selected == 1:
# Selecting one, deselects others
item = events[-1].source
gets_selected = not item.selected
if gets_selected:
for i in self.get_all_items():
if i.selected and i is not item:
i.user_selected(False)
item.user_selected(gets_selected) # set the item last
else:
# Select to a certain max
item = events[-1].source
if item.selected:
item.user_selected(False)
else:
count = 0
for i in self.get_all_items():
count += int(i.selected)
if count < self.max_selected:
item.user_selected(True)
# NOTE: this highlight API is currently not documented, as it lives
# in JS only. The big refactoring will change all that.
def highlight_hide(self):
""" Stop highlighting the "current" item.
"""
all_items = self._get_all_items_annotated()
self._de_highlight_and_get_highlighted_index(all_items)
self._highlight_on = False
def highlight_show_item(self, item):
""" Highlight the given item.
"""
classname = 'highlighted-true'
all_items = self._get_all_items_annotated()
self._highlight_on = True
self._de_highlight_and_get_highlighted_index(all_items)
item._row.classList.add(classname)
self._last_highlighted_hint = item.id
def highlight_show(self, step=0):
""" Highlight the "current" item, optionally moving step items.
"""
classname = 'highlighted-true'
all_items = self._get_all_items_annotated()
self._highlight_on = True
index1 = self._de_highlight_and_get_highlighted_index(all_items)
index2 = 0 if index1 is None else index1 + step
while 0 <= index2 < len(all_items):
visible, _ = all_items[index2]
if visible:
break
index2 += step
else:
index2 = index1
if index2 is not None:
_, item = all_items[index2]
item._row.classList.add(classname)
self._last_highlighted_hint = item.id
# Scroll into view when needed
y1 = item._row.offsetTop - 20
y2 = item._row.offsetTop + item._row.offsetHeight + 20
if self.node.scrollTop > y1:
self.node.scrollTop = y1
if self.node.scrollTop + self.node.offsetHeight < y2:
self.node.scrollTop = y2 - self.node.offsetHeight
def highlight_get(self):
""" Get the "current" item. This is the currently highlighted
item if there is one. Otherwise it can be the last highlighted item
or the last clicked item.
"""
classname = 'highlighted-true'
all_items = self._get_all_items_annotated()
index = self._de_highlight_and_get_highlighted_index(all_items)
if index is not None:
_, item = all_items[index]
item._row.classList.add(classname)
return item
def highlight_toggle_selected(self):
""" Convenience method to toggle the "selected" property of the
current item.
"""
item = self.highlight_get()
if item is not None:
self._handle_item_clicked(dict(source=item)) # simulate click
def highlight_toggle_checked(self):
""" Convenience method to toggle the "checked" property of the
current item.
"""
item = self.highlight_get()
if item is not None:
if item.checked is not None: # is it checkable?
item.user_checked(not item.checked)
def _de_highlight_and_get_highlighted_index(self, all_items):
""" Unhighlight all items and get the index of the item that was
highlighted, or which otherwise represents the "current" item, e.g.
because it was just clicked.
"""
classname = 'highlighted-true'
index = None
hint = None
for i in range(len(all_items)):
visible, item = all_items[i]
if item._row.classList.contains(classname):
item._row.classList.remove(classname)
if index is None:
index = i
if hint is None and item.id == self._last_highlighted_hint:
hint = i
if index is not None:
return index
else:
return hint
def _get_all_items_annotated(self):
""" Get a flat list of all TreeItem instances in this Tree,
including visibility information due to collapsed parents.
"""
items = []
def collect(x, parent_collapsed):
visible = x.visible and not parent_collapsed
items.append((visible, x))
for i in x.children:
if i:
collect(i, parent_collapsed or x.collapsed)
for x in self.children:
collect(x, False)
return items
class TreeItem(Widget):
""" An item to put in a TreeWidget. This widget should only be used inside
a TreeWidget or another TreeItem.
Items are collapsable/expandable if their ``collapsed`` property
is set to ``True`` or ``False`` (i.e. not ``None``), or if they
have sub items. Items are checkable if their ``checked`` property
is set to ``True`` or ``False`` (i.e. not ``None``). Items are
selectable depending on the selection policy defined by
``TreeWidget.max_selected``.
If needed, the ``_render_title()`` and ``_render_text()`` methods can
be overloaded to display items in richer ways. See the documentation
of ``Widget._render_dom()`` for details.
The ``outernode`` of this widget is a
`<li> <https://developer.mozilla.org/docs/Web/HTML/Element/li>`_
    (a list-item in the tree or parent item's ``<ul>``).
The ``node`` of this widget is a
`<span> <https://developer.mozilla.org/docs/Web/HTML/Element/span>`_
that represents the row for this item (but not its children).
"""
text = event.StringProp('', settable=True, doc="""
The text for this item. Can be used in combination with
``title`` to obtain two columns.
""")
title = event.StringProp('', settable=True, doc="""
The title for this item that appears before the text. Intended
for display of key-value pairs. If a title is given, the text is
positioned in a second (virtual) column of the tree widget.
""")
visible = event.BoolProp(True, settable=True, doc="""
Whether this item (and its sub items) is visible.
""")
selected = event.BoolProp(False, settable=True, doc="""
Whether this item is selected. Depending on the TreeWidget's
policy (max_selected), this can be set/unset on clicking the item.
""")
checked = event.TriStateProp(settable=True, doc="""
Whether this item is checked (i.e. has its checkbox set).
The value can be None, True or False. None (the default)
means that the item is not checkable.
""")
collapsed = event.TriStateProp(settable=True, doc="""
Whether this item is expanded (i.e. shows its children).
The value can be None, True or False. None (the default)
means that the item is not collapsable (unless it has sub items).
""")
@event.emitter
def user_selected(self, selected):
""" Event emitted when the user (un)selects this item. Has ``old_value``
and ``new_value`` attributes. One can call this emitter directly to
emulate a user-selection, but note that this bypasses the max_selected
policy.
"""
d = {'old_value': self.selected, 'new_value': selected}
self.set_selected(selected)
return d
@event.emitter
def user_checked(self, checked):
""" Event emitted when the user (un)checks this item. Has ``old_value``
and ``new_value`` attributes.
"""
d = {'old_value': self.checked, 'new_value': checked}
self.set_checked(checked)
return d
@event.emitter
def user_collapsed(self, collapsed):
""" Event emitted when the user (un)collapses this item. Has ``old_value``
and ``new_value`` attributes.
"""
d = {'old_value': self.collapsed, 'new_value': collapsed}
self.set_collapsed(collapsed)
return d
@event.action
def set_parent(self, parent, pos=None):
# Verify that this class is used correctly
# Note that this action is already called from the init by Widget.
if not (parent is None or
isinstance(parent, TreeItem) or
isinstance(parent, TreeWidget)):
raise RuntimeError('TreeItems can only be created in the context '
'of a TreeWidget or TreeItem.')
super().set_parent(parent, pos)
def _create_dom(self):
global window
node = window.document.createElement('li')
self._row = window.document.createElement('span') # we need this node
node.appendChild(self._row)
self._addEventListener(node, 'click', self._on_click)
self._addEventListener(node, 'dblclick', self._on_double_click)
return node, self._row
def _render_dom(self):
# We render more or less this:
# <li>
# <span class='flx-TreeItem'> # the row that represents the item
# <span class='padder'></span> # padding
# <span class='collapsebut'></span> # the collapse button
# <span class='checkbut'></span> # the check button
# <span class='title'></span> # the title text for this item
# <span class='text'></span> # the text for this item
# </span>
# <ul></ul> # to hold sub items
# </li>
subnodes = [item.outernode for item in self.children]
        # Get class names to apply to the li and row. We apply the classes to
# both to allow styling both depending on these values, but strictly
# speaking visible and collapsed are only needed for the li and
# selected and checked for the span.
cnames = []
collapsed = bool(self.collapsed) if len(self.children) > 0 else self.collapsed
for name, val in [('visible', self.visible),
('collapsed', collapsed),
('selected', self.selected),
('checked', self.checked),
]:
cnames.append(name + '-' + str(val))
cnames = ' '.join(cnames)
# Get title and text content
title, text = self._render_title(), self._render_text()
        # Note that the outernode (the <li>) has neither the flx-Widget nor the flx-TreeItem class
text_class = 'text hastitle' if len(title) > 0 else 'text'
return create_element('li', {'className': cnames},
create_element('span', {'className': 'flx-TreeItem ' + cnames},
create_element('span', {'className': 'padder'}),
create_element('span', {'className': 'collapsebut'}),
create_element('span', {'className': 'checkbut'}),
create_element('span', {'className': 'title'}, title),
create_element('span', {'className': text_class}, text),
),
create_element('ul', {}, subnodes),
)
def _render_title(self):
""" Return a node for title. Can be overloaded to e.g. format with html.
"""
return self.title
def _render_text(self):
""" Return a node for text. Can be overloaded.
"""
return self.text
def _on_click(self, e):
# Handle JS mouse click event
e.stopPropagation() # don't click parent items
if e.target.classList.contains('collapsebut'):
self.user_collapsed(not self.collapsed)
elif e.target.classList.contains('checkbut'):
self.user_checked(not self.checked)
else:
self.pointer_click(e)
def _on_double_click(self, e):
# Handle JS mouse double click event
e.stopPropagation() # don't click parent items
c1 = e.target.classList.contains('collapsebut')
c2 = e.target.classList.contains('checkbut')
if not (c1 or c2):
self.pointer_double_click(e)
|
import platform
import sys
from django import db
from django.conf import settings
from weblate.utils.management.base import BaseCommand
from weblate.utils.requirements import get_versions_list
class Command(BaseCommand):
help = "lists versions of required software components"
def write_item(self, prefix, value):
self.stdout.write(f" * {prefix}: {value}")
def handle(self, *args, **options):
"""Print versions of dependencies."""
for version in get_versions_list():
self.write_item(version[0], version[2])
self.write_item(
"Database backends",
", ".join(conn["ENGINE"] for conn in db.connections.databases.values()),
)
self.write_item(
"Cache backends",
", ".join(
"{}:{}".format(key, value["BACKEND"].split(".")[-1])
for key, value in settings.CACHES.items()
),
)
self.write_item(
"Email setup", f"{settings.EMAIL_BACKEND}: {settings.EMAIL_HOST}"
)
self.write_item(
"OS encoding",
"filesystem={}, default={}".format(
sys.getfilesystemencoding(), sys.getdefaultencoding()
),
)
self.write_item(
"Celery",
"{}, {}, {}".format(
getattr(settings, "CELERY_BROKER_URL", "N/A"),
getattr(settings, "CELERY_RESULT_BACKEND", "N/A"),
"eager" if settings.CELERY_TASK_ALWAYS_EAGER else "regular",
),
)
self.write_item(
"Platform",
"{} {} ({})".format(
platform.system(), platform.release(), platform.machine()
),
)
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
# TODO add valid states here
VALID_STATES = {STATE_ON, STATE_OFF}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if (
cur_state.state == state.state
and
# TODO this is an example attribute
cur_state.attributes.get("color") == state.attributes.get("color")
):
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
# TODO determine the services to call to achieve desired state
if state.state == STATE_ON:
service = SERVICE_TURN_ON
if "color" in state.attributes:
service_data["color"] = state.attributes["color"]
elif state.state == STATE_OFF:
service = SERVICE_TURN_OFF
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce NEW_NAME states."""
# TODO pick one and remove other one
# Reproduce states in parallel.
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
# Alternative: Reproduce states in sequence
# for state in states:
# await _async_reproduce_state(hass, state, context=context, reproduce_options=reproduce_options)
|
from collections.abc import Iterable
from itertools import combinations, chain
def _variable_or_iterable_to_set(x):
"""
Convert variable, set, or iterable x to a frozenset.
If x is None, returns the empty set.
Parameters
    ----------
x : None, str or Iterable[str]
Returns
-------
frozenset : frozenset representation of string or iterable input
"""
if x is None:
return frozenset([])
if isinstance(x, str):
return frozenset([x])
if not isinstance(x, Iterable) or not all(isinstance(xx, str) for xx in x):
raise ValueError(
f"{x} is expected to be either a string, set of strings, or an iterable of strings"
)
return frozenset(x)
def _powerset(iterable):
"""
https://docs.python.org/3/library/itertools.html#recipes
powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)
Parameters
----------
iterable: any iterable
Returns
-------
chain: a generator of the powerset of the input
"""
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
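# Hedged illustration (not part of the original module): quick sanity checks
# for the two helpers above, using throwaway inputs.
def _example_usage():
    assert _variable_or_iterable_to_set(None) == frozenset()
    assert _variable_or_iterable_to_set('x') == frozenset({'x'})
    assert _variable_or_iterable_to_set(['x', 'y']) == frozenset({'x', 'y'})
    assert list(_powerset([1, 2])) == [(), (1,), (2,), (1, 2)]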
|
from nad_receiver import NADReceiver, NADReceiverTCP, NADReceiverTelnet
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
DEFAULT_TYPE = "RS232"
DEFAULT_SERIAL_PORT = "/dev/ttyUSB0"
DEFAULT_PORT = 53
DEFAULT_NAME = "NAD Receiver"
DEFAULT_MIN_VOLUME = -92
DEFAULT_MAX_VOLUME = -20
DEFAULT_VOLUME_STEP = 4
SUPPORT_NAD = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_SELECT_SOURCE
)
CONF_TYPE = "type"
CONF_SERIAL_PORT = "serial_port" # for NADReceiver
CONF_PORT = "port" # for NADReceiverTelnet
CONF_MIN_VOLUME = "min_volume"
CONF_MAX_VOLUME = "max_volume"
CONF_VOLUME_STEP = "volume_step" # for NADReceiverTCP
CONF_SOURCE_DICT = "sources" # for NADReceiver
SOURCE_DICT_SCHEMA = vol.Schema({vol.Range(min=1, max=10): cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE): vol.In(
["RS232", "Telnet", "TCP"]
),
vol.Optional(CONF_SERIAL_PORT, default=DEFAULT_SERIAL_PORT): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MIN_VOLUME, default=DEFAULT_MIN_VOLUME): int,
vol.Optional(CONF_MAX_VOLUME, default=DEFAULT_MAX_VOLUME): int,
vol.Optional(CONF_SOURCE_DICT, default={}): SOURCE_DICT_SCHEMA,
vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NAD platform."""
if config.get(CONF_TYPE) in ("RS232", "Telnet"):
add_entities(
[NAD(config)],
True,
)
else:
add_entities(
[NADtcp(config)],
True,
)
class NAD(MediaPlayerEntity):
"""Representation of a NAD Receiver."""
def __init__(self, config):
"""Initialize the NAD Receiver device."""
self.config = config
self._instantiate_nad_receiver()
self._min_volume = config[CONF_MIN_VOLUME]
self._max_volume = config[CONF_MAX_VOLUME]
self._source_dict = config[CONF_SOURCE_DICT]
self._reverse_mapping = {value: key for key, value in self._source_dict.items()}
self._volume = self._state = self._mute = self._source = None
def _instantiate_nad_receiver(self) -> NADReceiver:
if self.config[CONF_TYPE] == "RS232":
self._nad_receiver = NADReceiver(self.config[CONF_SERIAL_PORT])
else:
host = self.config.get(CONF_HOST)
port = self.config[CONF_PORT]
self._nad_receiver = NADReceiverTelnet(host, port)
@property
def name(self):
"""Return the name of the device."""
return self.config[CONF_NAME]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Return the icon for the device."""
return "mdi:speaker-multiple"
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_NAD
def turn_off(self):
"""Turn the media player off."""
self._nad_receiver.main_power("=", "Off")
def turn_on(self):
"""Turn the media player on."""
self._nad_receiver.main_power("=", "On")
def volume_up(self):
"""Volume up the media player."""
self._nad_receiver.main_volume("+")
def volume_down(self):
"""Volume down the media player."""
self._nad_receiver.main_volume("-")
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._nad_receiver.main_volume("=", self.calc_db(volume))
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
if mute:
self._nad_receiver.main_mute("=", "On")
else:
self._nad_receiver.main_mute("=", "Off")
def select_source(self, source):
"""Select input source."""
self._nad_receiver.main_source("=", self._reverse_mapping.get(source))
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return sorted(list(self._reverse_mapping))
@property
def available(self):
"""Return if device is available."""
return self._state is not None
def update(self) -> None:
"""Retrieve latest state."""
power_state = self._nad_receiver.main_power("?")
if not power_state:
self._state = None
return
        self._state = STATE_ON if power_state == "On" else STATE_OFF
if self._state == STATE_ON:
self._mute = self._nad_receiver.main_mute("?") == "On"
volume = self._nad_receiver.main_volume("?")
# Some receivers cannot report the volume, e.g. C 356BEE,
# instead they only support stepping the volume up or down
self._volume = self.calc_volume(volume) if volume is not None else None
self._source = self._source_dict.get(self._nad_receiver.main_source("?"))
def calc_volume(self, decibel):
"""
Calculate the volume given the decibel.
Return the volume (0..1).
"""
return abs(self._min_volume - decibel) / abs(
self._min_volume - self._max_volume
)
def calc_db(self, volume):
"""
Calculate the decibel given the volume.
Return the dB.
"""
return self._min_volume + round(
abs(self._min_volume - self._max_volume) * volume
)
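# Hedged illustration (not part of the original integration): with the default
# -92..-20 dB limits, calc_volume and calc_db above are inverses of each
# other. The numbers below are only a worked example of that arithmetic.
def _example_volume_mapping(min_db=DEFAULT_MIN_VOLUME, max_db=DEFAULT_MAX_VOLUME):
    span = abs(min_db - max_db)              # 72 dB with the defaults
    volume = abs(min_db - (-56)) / span      # -56 dB maps to 0.5
    decibel = min_db + round(span * volume)  # 0.5 maps back to -56 dB
    return volume, decibel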
class NADtcp(MediaPlayerEntity):
"""Representation of a NAD Digital amplifier."""
def __init__(self, config):
"""Initialize the amplifier."""
self._name = config[CONF_NAME]
self._nad_receiver = NADReceiverTCP(config.get(CONF_HOST))
self._min_vol = (config[CONF_MIN_VOLUME] + 90) * 2 # from dB to nad vol (0-200)
self._max_vol = (config[CONF_MAX_VOLUME] + 90) * 2 # from dB to nad vol (0-200)
self._volume_step = config[CONF_VOLUME_STEP]
self._state = None
self._mute = None
self._nad_volume = None
self._volume = None
self._source = None
self._source_list = self._nad_receiver.available_sources()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_NAD
def turn_off(self):
"""Turn the media player off."""
self._nad_receiver.power_off()
def turn_on(self):
"""Turn the media player on."""
self._nad_receiver.power_on()
def volume_up(self):
"""Step volume up in the configured increments."""
self._nad_receiver.set_volume(self._nad_volume + 2 * self._volume_step)
def volume_down(self):
"""Step volume down in the configured increments."""
self._nad_receiver.set_volume(self._nad_volume - 2 * self._volume_step)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
nad_volume_to_set = int(
round(volume * (self._max_vol - self._min_vol) + self._min_vol)
)
self._nad_receiver.set_volume(nad_volume_to_set)
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
if mute:
self._nad_receiver.mute()
else:
self._nad_receiver.unmute()
def select_source(self, source):
"""Select input source."""
self._nad_receiver.select_source(source)
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._nad_receiver.available_sources()
def update(self):
"""Get the latest details from the device."""
try:
nad_status = self._nad_receiver.status()
except OSError:
return
if nad_status is None:
return
# Update on/off state
if nad_status["power"]:
self._state = STATE_ON
else:
self._state = STATE_OFF
# Update current volume
self._volume = self.nad_vol_to_internal_vol(nad_status["volume"])
self._nad_volume = nad_status["volume"]
# Update muted state
self._mute = nad_status["muted"]
# Update current source
self._source = nad_status["source"]
def nad_vol_to_internal_vol(self, nad_volume):
"""Convert nad volume range (0-200) to internal volume range.
Takes into account configured min and max volume.
"""
if nad_volume < self._min_vol:
volume_internal = 0.0
elif nad_volume > self._max_vol:
volume_internal = 1.0
else:
volume_internal = (nad_volume - self._min_vol) / (
self._max_vol - self._min_vol
)
return volume_internal
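# Hedged illustration (not part of the original integration): a standalone
# sketch of the dB -> device-volume -> 0..1 mapping used by NADtcp above,
# with the default -92..-20 dB limits. All numbers are for demonstration only.
def _example_tcp_volume_scaling():
    min_vol = (DEFAULT_MIN_VOLUME + 90) * 2  # -92 dB -> -4 on the 0-200 scale
    max_vol = (DEFAULT_MAX_VOLUME + 90) * 2  # -20 dB -> 140
    nad_volume = 68                          # halfway between the two limits
    return (nad_volume - min_vol) / (max_vol - min_vol)  # -> 0.5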
|
import numpy as np
import os
import shutil
import tempfile
import unittest
from chainer import testing
from chainercv.datasets import directory_parsing_label_names
from chainercv.datasets import DirectoryParsingLabelDataset
from chainercv.utils import assert_is_label_dataset
from chainercv.utils import write_image
def _save_img_file(path, size, color):
if color:
img = np.random.randint(
0, 255, size=(3,) + size, dtype=np.uint8)
else:
img = np.random.randint(
0, 255, size=(1,) + size, dtype=np.uint8)
write_image(img, path)
def _setup_depth_one_dummy_data(tmp_dir, n_class, n_img_per_class,
size, color, suffix):
for i in range(n_class):
class_dir = os.path.join(tmp_dir, 'class_{}'.format(i))
os.makedirs(class_dir)
for j in range(n_img_per_class):
path = os.path.join(class_dir, 'img{}.{}'.format(j, suffix))
_save_img_file(path, size, color)
open(os.path.join(class_dir, 'dummy_file.XXX'), 'a').close()
def _setup_depth_two_dummy_data(tmp_dir, n_class, n_img_per_class,
n_sub_directory, size, color, suffix):
for i in range(n_class):
class_dir = os.path.join(tmp_dir, 'class_{}'.format(i))
os.makedirs(class_dir)
for j in range(n_sub_directory):
nested_dir = os.path.join(class_dir, 'nested_{}'.format(j))
os.makedirs(nested_dir)
for k in range(n_img_per_class):
path = os.path.join(
nested_dir, 'img{}.{}'.format(k, suffix))
_save_img_file(path, size, color)
open(os.path.join(nested_dir, 'dummy_file.XXX'), 'a').close()
@testing.parameterize(*testing.product({
'size': [(48, 32)],
'color': [True, False],
'n_class': [2, 3],
'suffix': ['bmp', 'jpg', 'png', 'ppm', 'jpeg'],
'depth': [1, 2]}
))
class TestDirectoryParsingLabelDataset(unittest.TestCase):
n_img_per_class = 5
n_sub_directory = 6
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
if self.depth == 1:
_setup_depth_one_dummy_data(self.tmp_dir, self.n_class,
self.n_img_per_class, self.size,
self.color, self.suffix)
elif self.depth == 2:
_setup_depth_two_dummy_data(self.tmp_dir, self.n_class,
self.n_img_per_class,
self.n_sub_directory, self.size,
self.color, self.suffix)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def test_directory_parsing_label_dataset(self):
dataset = DirectoryParsingLabelDataset(
self.tmp_dir, color=self.color)
if self.depth == 1:
            expected_length = self.n_img_per_class * self.n_class
        elif self.depth == 2:
            expected_length = \
                self.n_img_per_class * self.n_sub_directory * self.n_class
        self.assertEqual(len(dataset), expected_length)
assert_is_label_dataset(dataset, self.n_class, color=self.color)
label_names = directory_parsing_label_names(self.tmp_dir)
self.assertEqual(
label_names, ['class_{}'.format(i) for i in range(self.n_class)])
if self.depth == 1:
self.assertEqual(
dataset.img_paths,
['{}/class_{}/img{}.{}'.format(self.tmp_dir, i, j, self.suffix)
for i in range(self.n_class)
for j in range(self.n_img_per_class)])
elif self.depth == 2:
self.assertEqual(
dataset.img_paths,
['{}/class_{}/nested_{}/img{}.{}'.format(
self.tmp_dir, i, j, k, self.suffix)
for i in range(self.n_class)
for j in range(self.n_sub_directory)
for k in range(self.n_img_per_class)])
class TestNumericalSortDirectoryParsingLabelDataset(
unittest.TestCase):
n_class = 11
n_img_per_class = 1
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
for i in range(self.n_class):
class_dir = os.path.join(self.tmp_dir, '{}'.format(i))
os.makedirs(class_dir)
_save_img_file(os.path.join(class_dir, 'img_0.png'),
(48, 32), color=True)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def test_numerical_sort(self):
dataset = DirectoryParsingLabelDataset(
self.tmp_dir, numerical_sort=True)
assert_is_label_dataset(dataset, self.n_class)
label_names = directory_parsing_label_names(
self.tmp_dir, numerical_sort=True)
self.assertEqual(
label_names, ['{}'.format(i) for i in range(self.n_class)])
testing.run_module(__name__, __file__)
|
import argparse
import logging
from collections import defaultdict
from paasta_tools.config_utils import AutoConfigUpdater
from paasta_tools.contrib.paasta_update_soa_memcpu import get_report_from_splunk
from paasta_tools.utils import DEFAULT_SOA_CONFIGS_GIT_URL
from paasta_tools.utils import format_git_url
from paasta_tools.utils import load_system_paasta_config
NULL = "null"
def parse_args():
parser = argparse.ArgumentParser(description="")
parser.add_argument(
"-s",
"--splunk-creds",
help="Service credentials for Splunk API, user:pass",
dest="splunk_creds",
required=True,
)
parser.add_argument(
"-f",
"--criteria-filter",
help="Filter Splunk search results criteria field. Default: *",
dest="criteria_filter",
required=False,
default="*",
)
parser.add_argument(
"-c",
"--csv-report",
help="Splunk csv file from which to pull data.",
required=True,
dest="csv_report",
)
parser.add_argument(
"--app",
help="Splunk app of the CSV file",
default="yelp_computeinfra",
required=False,
dest="splunk_app",
)
parser.add_argument(
"--git-remote",
help="Master git repo for soaconfigs",
default=None,
dest="git_remote",
)
parser.add_argument(
"--branch",
help="Branch name to push to. Defaults to master",
default="master",
required=False,
dest="branch",
)
parser.add_argument(
"--push-to-remote",
help="Actually push to remote. Otherwise files will only be modified and validated.",
action="store_true",
dest="push_to_remote",
)
parser.add_argument(
"--local-dir",
help="Act on configs in the local directory rather than cloning the git_remote",
required=False,
default=None,
dest="local_dir",
)
parser.add_argument(
"--source-id",
help="String to attribute the changes in the commit message. Defaults to csv report name",
required=False,
default=None,
dest="source_id",
)
parser.add_argument(
"-v",
"--verbose",
help="Logging verbosity",
action="store_true",
dest="verbose",
)
return parser.parse_args()
def get_default_git_remote():
system_paasta_config = load_system_paasta_config()
repo_config = system_paasta_config.get_git_repo_config("yelpsoa-configs")
default_git_remote = format_git_url(
system_paasta_config.get_git_config()["git_user"],
repo_config.get("git_server", DEFAULT_SOA_CONFIGS_GIT_URL),
repo_config["repo_name"],
)
return default_git_remote
def get_recommendation_from_result(result):
rec = {}
cpus = result.get("cpus")
if cpus and cpus != NULL:
rec["cpus"] = float(cpus)
mem = result.get("mem")
if mem and mem != NULL:
rec["mem"] = max(128, round(float(mem)))
disk = result.get("disk")
if disk and disk != NULL:
rec["disk"] = max(128, round(float(disk)))
hacheck_cpus = result.get("hacheck_cpus")
if hacheck_cpus and hacheck_cpus != NULL:
hacheck_cpus_value = max(0.1, min(float(hacheck_cpus), 1))
rec["sidecar_resource_requirements"] = {
"hacheck": {
"requests": {"cpu": hacheck_cpus_value,},
"limits": {"cpu": hacheck_cpus_value,},
},
}
return rec
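# Hedged illustration (not part of the original script): a made-up Splunk row
# showing how get_recommendation_from_result rounds and clamps values. The
# field values are invented for demonstration only.
def _example_recommendation():
    result = {"cpus": "0.4", "mem": "100.2", "disk": "null", "hacheck_cpus": "2.5"}
    return get_recommendation_from_result(result)
    # -> {"cpus": 0.4, "mem": 128,
    #     "sidecar_resource_requirements": {"hacheck": {
    #         "requests": {"cpu": 1}, "limits": {"cpu": 1}}}}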
def get_recommendations_by_service_file(results):
results_by_service_file = defaultdict(dict)
for result in results.values():
key = (
result["service"],
result["cluster"],
) # e.g. (foo, marathon-norcal-stagef)
rec = get_recommendation_from_result(result)
if not rec:
continue
results_by_service_file[key][result["instance"]] = rec
return results_by_service_file
def get_extra_message(splunk_search_string):
return f"""This review is based on results from the following Splunk search:\n
{splunk_search_string}
"""
def main(args):
report = get_report_from_splunk(
args.splunk_creds, args.splunk_app, args.csv_report, args.criteria_filter
)
extra_message = get_extra_message(report["search"])
config_source = args.source_id or args.csv_report
results = get_recommendations_by_service_file(report["results"])
updater = AutoConfigUpdater(
config_source=config_source,
git_remote=args.git_remote or get_default_git_remote(),
branch=args.branch,
working_dir=args.local_dir or "/nail/tmp",
do_clone=args.local_dir is None,
)
with updater:
for (service, extra_info), instance_recommendations in results.items():
existing_recommendations = updater.get_existing_configs(service, extra_info)
for instance_name, recommendation in instance_recommendations.items():
existing_recommendations.setdefault(instance_name, {})
existing_recommendations[instance_name].update(recommendation)
updater.write_configs(service, extra_info, existing_recommendations)
if args.push_to_remote:
updater.commit_to_remote(extra_message=extra_message)
else:
updater.validate()
if __name__ == "__main__":
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
main(args)
|
from urllib.parse import quote
from django.conf import settings
from requests.exceptions import HTTPError
from weblate.machinery.base import MachineTranslation, MissingConfiguration
AMAGAMA_LIVE = "https://amagama-live.translatehouse.org/api/v1"
class TMServerTranslation(MachineTranslation):
"""tmserver machine translation support."""
name = "tmserver"
def __init__(self):
"""Check configuration."""
super().__init__()
self.url = self.get_server_url()
@staticmethod
def get_server_url():
"""Return URL of a server."""
if settings.MT_TMSERVER is None:
raise MissingConfiguration("Not configured tmserver URL")
return settings.MT_TMSERVER.rstrip("/")
def map_language_code(self, code):
"""Convert language to service specific code."""
return super().map_language_code(code).replace("-", "_").lower()
def download_languages(self):
"""Download list of supported languages from a service."""
try:
# This will raise exception in DEBUG mode
response = self.request("get", f"{self.url}/languages/")
data = response.json()
except HTTPError as error:
if error.response.status_code == 404:
return []
raise
return [
(src, tgt)
for src in data["sourceLanguages"]
for tgt in data["targetLanguages"]
]
def is_supported(self, source, language):
"""Check whether given language combination is supported."""
if not self.supported_languages:
# Fallback for old tmserver which does not export list of
# supported languages
return True
return (source, language) in self.supported_languages
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
url = "{}/{}/{}/unit/{}".format(
self.url,
quote(source, b""),
quote(language, b""),
quote(text[:500].replace("\r", " ").encode(), b""),
)
response = self.request("get", url)
payload = response.json()
for line in payload:
yield {
"text": line["target"],
"quality": int(line["quality"]),
"service": self.name,
"source": line["source"],
}
class AmagamaTranslation(TMServerTranslation):
"""Specific instance of tmserver ran by Virtaal authors."""
name = "Amagama"
@staticmethod
def get_server_url():
return AMAGAMA_LIVE
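# Hedged illustration (not part of weblate): download_translations above builds
# lookup URLs of the form <server>/<source>/<target>/unit/<text>, with every
# path segment percent-encoded (safe="") so spaces and slashes survive. The
# server URL below is a made-up example.
def _example_tmserver_url(base="http://tmserver.example", source="en",
                          target="cs", text="Hello world"):
    return "{}/{}/{}/unit/{}".format(
        base, quote(source, b""), quote(target, b""), quote(text.encode(), b"")
    )  # -> "http://tmserver.example/en/cs/unit/Hello%20world"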
|
_resolve_mongodb_hook = lambda env: env
_log_exception_hook = lambda *args, **kwargs: None
_get_auth_hook = lambda *args, **kwargs: None
def get_mongodb_uri(host):
"""
Return the MongoDB URI for the passed in host-alias / environment.
Allows an indirection point for mapping aliases to particular
MongoDB instances.
"""
return _resolve_mongodb_hook(host)
def register_resolve_mongodb_hook(hook):
global _resolve_mongodb_hook
_resolve_mongodb_hook = hook
def log_exception(fn_name, exception, retry_count, **kwargs):
"""
External exception logging hook.
"""
_log_exception_hook(fn_name, exception, retry_count, **kwargs)
def register_log_exception_hook(hook):
global _log_exception_hook
_log_exception_hook = hook
def register_get_auth_hook(hook):
global _get_auth_hook
_get_auth_hook = hook
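# Hedged illustration (not part of the original module): the hooks above are
# plain module-level indirection points. A deployment might register a mapping
# like the one below; the host alias and URI are invented examples.
def _example_register_hooks():
    def resolve(host):
        return {"research": "mongodb://research-db.example:27017"}.get(host, host)
    register_resolve_mongodb_hook(resolve)
    return get_mongodb_uri("research")  # -> "mongodb://research-db.example:27017"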
|
import unittest
from wordbatch.extractors import WordBag
from wordbatch.pipelines import WordBatch
class TestWordBatch(unittest.TestCase):
def test_wordbatch(self):
WordBatch(extractor=(WordBag, {
"hash_ngrams":2,
"hash_ngrams_weights":[0.5, -1.0],
"hash_size":2**23,
"norm":'l2',
"tf":'log',
"idf":50.0}))
|
import pytest
from homeassistant.components.mobile_app.const import DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
@pytest.fixture
async def setup_push_receiver(hass, aioclient_mock):
"""Fixture that sets up a mocked push receiver."""
push_url = "https://mobile-push.home-assistant.dev/push"
from datetime import datetime, timedelta
now = datetime.now() + timedelta(hours=24)
iso_time = now.strftime("%Y-%m-%dT%H:%M:%SZ")
aioclient_mock.post(
push_url,
json={
"rateLimits": {
"attempts": 1,
"successful": 1,
"errors": 0,
"total": 1,
"maximum": 150,
"remaining": 149,
"resetsAt": iso_time,
}
},
)
entry = MockConfigEntry(
connection_class="cloud_push",
data={
"app_data": {"push_token": "PUSH_TOKEN", "push_url": push_url},
"app_id": "io.homeassistant.mobile_app",
"app_name": "mobile_app tests",
"app_version": "1.0",
"device_id": "4d5e6f",
"device_name": "Test",
"manufacturer": "Home Assistant",
"model": "mobile_app",
"os_name": "Linux",
"os_version": "5.0.6",
"secret": "123abc",
"supports_encryption": False,
"user_id": "1a2b3c",
"webhook_id": "webhook_id",
},
domain=DOMAIN,
source="registration",
title="mobile_app test entry",
version=1,
)
entry.add_to_hass(hass)
await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
loaded_late_entry = MockConfigEntry(
connection_class="cloud_push",
data={
"app_data": {"push_token": "PUSH_TOKEN2", "push_url": f"{push_url}2"},
"app_id": "io.homeassistant.mobile_app",
"app_name": "mobile_app tests",
"app_version": "1.0",
"device_id": "4d5e6f2",
"device_name": "Loaded Late",
"manufacturer": "Home Assistant",
"model": "mobile_app",
"os_name": "Linux",
"os_version": "5.0.6",
"secret": "123abc2",
"supports_encryption": False,
"user_id": "1a2b3c2",
"webhook_id": "webhook_id_2",
},
domain=DOMAIN,
source="registration",
title="mobile_app 2 test entry",
version=1,
)
loaded_late_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(loaded_late_entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service("notify", "mobile_app_loaded_late")
assert await hass.config_entries.async_remove(loaded_late_entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service("notify", "mobile_app_test")
assert not hass.services.has_service("notify", "mobile_app_loaded_late")
loaded_late_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(loaded_late_entry.entry_id)
await hass.async_block_till_done()
assert hass.services.has_service("notify", "mobile_app_test")
assert hass.services.has_service("notify", "mobile_app_loaded_late")
async def test_notify_works(hass, aioclient_mock, setup_push_receiver):
"""Test notify works."""
assert hass.services.has_service("notify", "mobile_app_test") is True
assert await hass.services.async_call(
"notify", "mobile_app_test", {"message": "Hello world"}, blocking=True
)
assert len(aioclient_mock.mock_calls) == 1
call = aioclient_mock.mock_calls
call_json = call[0][2]
assert call_json["push_token"] == "PUSH_TOKEN"
assert call_json["message"] == "Hello world"
assert call_json["registration_info"]["app_id"] == "io.homeassistant.mobile_app"
assert call_json["registration_info"]["app_version"] == "1.0"
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from ntpd import NtpdCollector
##########################################################################
class TestNtpdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NtpdCollector', {})
self.collector = NtpdCollector(config, None)
def test_import(self):
self.assertTrue(NtpdCollector)
@patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
ntpq_data = Mock(
return_value=self.getFixture('ntpq').getvalue())
ntpdc_kerninfo_data = Mock(
return_value=self.getFixture('ntpdc_kerninfo').getvalue())
ntpdc_sysinfo_data = Mock(
return_value=self.getFixture('ntpdc_sysinfo').getvalue())
collector_mock = patch.multiple(
NtpdCollector,
get_ntpq_output=ntpq_data,
get_ntpdc_kerninfo_output=ntpdc_kerninfo_data,
get_ntpdc_sysinfo_output=ntpdc_sysinfo_data)
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'jitter': 0.026,
'when': 39,
'stratum': 2,
'reach': 377,
'delay': 0.127,
'poll': 1024,
'max_error': 0.039793,
'est_error': 5.1e-05,
'frequency': -14.24,
'offset': -5.427e-06,
'root_distance': 0.07663,
'root_dispersion': 0.09311
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
ntpq_data = Mock(return_value='')
ntpdc_kerninfo_data = Mock(return_value='')
ntpdc_sysinfo_data = Mock(return_value='')
collector_mock = patch.multiple(
NtpdCollector,
get_ntpq_output=ntpq_data,
get_ntpdc_kerninfo_output=ntpdc_kerninfo_data,
get_ntpdc_sysinfo_output=ntpdc_sysinfo_data)
collector_mock.start()
self.collector.collect()
collector_mock.stop()
self.assertPublishedMany(publish_mock, {})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import numpy as np
import pytest
from tensornetwork.block_sparse.charge import (U1Charge, fuse_charges,
charge_equal, BaseCharge)
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparsetensor import (ChargeArray,
BlockSparseTensor)
from tensornetwork.block_sparse.blocksparse_utils import _find_diagonal_sparse_blocks #pylint: disable=line-too-long
from tensornetwork.block_sparse.utils import unique
from tensornetwork import ncon
from tensornetwork.block_sparse.linalg import (norm, diag, reshape, transpose,
conj, svd, qr, eigh, eig, inv,
sqrt, trace, eye, pinv, sign)
import tensornetwork.block_sparse.linalg as linalg
np_dtypes = [np.float64, np.complex128]
np_tensordot_dtypes = [np.float64, np.complex128]
@pytest.mark.parametrize('dtype', np_dtypes)
def test_norm(dtype):
np.random.seed(10)
Ds = np.asarray([8, 9, 10, 11])
rank = Ds.shape[0]
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), flows[n])
for n in range(4)
]
arr = BlockSparseTensor.random(indices, dtype=dtype)
dense_norm = np.linalg.norm(arr.todense())
np.testing.assert_allclose(norm(arr), dense_norm)
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('Ds', [[200, 100], [100, 200]])
@pytest.mark.parametrize('flow', [False, True])
def test_get_diag(dtype, num_charges, Ds, flow):
np.random.seed(10)
np_flow = -np.int((np.int(flow) - 0.5) * 2)
indices = [
Index(
BaseCharge(
np.random.randint(-2, 3, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flow) for n in range(2)
]
arr = BlockSparseTensor.random(indices, dtype=dtype)
fused = fuse_charges(arr.flat_charges, arr.flat_flows)
inds = np.nonzero(fused == np.zeros((1, num_charges), dtype=np.int16))[0]
# pylint: disable=no-member
left, _ = np.divmod(inds, Ds[1])
unique_charges = unique(np_flow * (indices[0]._charges[0].charges[left, :]))
diagonal = diag(arr)
sparse_blocks, _, block_shapes = _find_diagonal_sparse_blocks(
arr.flat_charges, arr.flat_flows, 1)
data = np.concatenate([
np.diag(np.reshape(arr.data[sparse_blocks[n]], block_shapes[:, n]))
for n in range(len(sparse_blocks))
])
np.testing.assert_allclose(data, diagonal.data)
np.testing.assert_allclose(unique_charges,
diagonal.flat_charges[0].unique_charges)
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('Ds', [[0, 100], [100, 0]])
def test_get_empty_diag(dtype, num_charges, Ds):
np.random.seed(10)
indices = [
Index(
BaseCharge(
np.random.randint(-2, 3, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), False) for n in range(2)
]
arr = BlockSparseTensor.random(indices, dtype=dtype)
diagonal = diag(arr)
np.testing.assert_allclose([], diagonal.data)
for c in diagonal.flat_charges:
assert len(c) == 0
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('flow', [False, True])
def test_create_diag(dtype, num_charges, flow):
np.random.seed(10)
D = 200
index = Index(
BaseCharge(
np.random.randint(-2, 3, (D, num_charges)),
charge_types=[U1Charge] * num_charges), flow)
arr = ChargeArray.random([index], dtype=dtype)
diagarr = diag(arr)
dense = np.ravel(diagarr.todense())
np.testing.assert_allclose(
np.sort(dense[dense != 0.0]), np.sort(diagarr.data[diagarr.data != 0.0]))
sparse_blocks, charges, block_shapes = _find_diagonal_sparse_blocks(
diagarr.flat_charges, diagarr.flat_flows, 1)
for n, block in enumerate(sparse_blocks):
shape = block_shapes[:, n]
block_diag = np.diag(np.reshape(diagarr.data[block], shape))
np.testing.assert_allclose(
arr.data[np.squeeze((index._charges[0] * flow) == charges[n])],
block_diag)
def test_diag_raises():
np.random.seed(10)
Ds = [8, 9, 10]
rank = len(Ds)
indices = [
Index(
BaseCharge(
np.random.randint(-2, 3, (Ds[n], 1)), charge_types=[U1Charge]),
False) for n in range(rank)
]
arr = BlockSparseTensor.random(indices)
chargearr = ChargeArray.random([indices[0], indices[1]])
with pytest.raises(ValueError):
diag(arr)
with pytest.raises(ValueError):
diag(chargearr)
@pytest.mark.parametrize('dtype', np_dtypes)
def test_tn_reshape(dtype):
np.random.seed(10)
Ds = [8, 9, 10, 11]
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), False)
for n in range(4)
]
arr = BlockSparseTensor.random(indices, dtype=dtype)
arr2 = reshape(arr, [72, 110])
for n in range(2):
for m in range(2):
assert charge_equal(arr2.charges[n][m], indices[n * 2 + m].charges)
np.testing.assert_allclose(arr2.shape, [72, 110])
np.testing.assert_allclose(arr2._order, [[0, 1], [2, 3]])
np.testing.assert_allclose(arr2.flows, [[False, False], [False, False]])
assert arr2.ndim == 2
arr3 = reshape(arr, Ds)
for n in range(4):
assert charge_equal(arr3.charges[n][0], indices[n].charges)
np.testing.assert_allclose(arr3.shape, Ds)
np.testing.assert_allclose(arr3._order, [[0], [1], [2], [3]])
np.testing.assert_allclose(arr3.flows, [[False], [False], [False], [False]])
assert arr3.ndim == 4
def test_tn_transpose():
np.random.seed(10)
Ds = np.array([8, 9, 10, 11])
flows = [True, False, True, False]
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), flows[n])
for n in range(4)
]
arr = BlockSparseTensor.random(indices)
order = [2, 1, 0, 3]
arr2 = transpose(arr, order)
np.testing.assert_allclose(Ds[order], arr2.shape)
np.testing.assert_allclose(arr2._order, [[2], [1], [0], [3]])
np.testing.assert_allclose(arr2.flows, [[True], [False], [True], [False]])
def test_tn_transpose_reshape():
np.random.seed(10)
Ds = np.array([8, 9, 10, 11])
flows = [True, False, True, False]
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), flows[n])
for n in range(4)
]
arr = BlockSparseTensor.random(indices)
arr2 = transpose(arr, [2, 0, 1, 3])
arr3 = reshape(arr2, [80, 99])
np.testing.assert_allclose(arr3.shape, [80, 99])
np.testing.assert_allclose(arr3._order, [[2, 0], [1, 3]])
np.testing.assert_allclose(arr3.flows, [[True, True], [False, False]])
arr4 = transpose(arr3, [1, 0])
np.testing.assert_allclose(arr4.shape, [99, 80])
np.testing.assert_allclose(arr4._order, [[1, 3], [2, 0]])
np.testing.assert_allclose(arr4.flows, [[False, False], [True, True]])
arr5 = reshape(arr4, [9, 11, 10, 8])
np.testing.assert_allclose(arr5.shape, [9, 11, 10, 8])
np.testing.assert_allclose(arr5._order, [[1], [3], [2], [0]])
np.testing.assert_allclose(arr5.flows, [[False], [False], [True], [True]])
@pytest.mark.parametrize('dtype', np_dtypes)
def test_tn_conj(dtype):
np.random.seed(10)
indices = [
Index(U1Charge.random(dimension=10, minval=-5, maxval=5), False)
for _ in range(4)
]
a = BlockSparseTensor.randn(indices, dtype=dtype)
b = conj(a)
np.testing.assert_allclose(b.data, np.conj(a.data))
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("Ds, R1", [([20, 21], 1), ([18, 19, 20], 2),
([18, 19, 20], 1), ([0, 10], 1),
([10, 0], 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_svd_prod(dtype, Ds, R1, num_charges):
np.random.seed(10)
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
d1 = np.prod(Ds[:R1])
d2 = np.prod(Ds[R1:])
A = A.reshape([d1, d2])
U, S, V = svd(A, full_matrices=False)
A_ = U @ diag(S) @ V
assert A_.dtype == A.dtype
np.testing.assert_allclose(A.data, A_.data)
for n in range(len(A._charges)):
assert charge_equal(A_._charges[n], A._charges[n])
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("Ds, R1", [([20, 21], 1), ([18, 19, 20], 2),
([18, 19, 20], 1), ([0, 10], 1),
([10, 0], 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_svd_singvals(dtype, Ds, R1, num_charges):
np.random.seed(10)
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
d1 = np.prod(Ds[:R1])
d2 = np.prod(Ds[R1:])
A = A.reshape([d1, d2])
_, S1, _ = svd(A, full_matrices=False)
S2 = svd(A, full_matrices=False, compute_uv=False)
np.testing.assert_allclose(S1.data, S2.data)
Sdense = np.linalg.svd(A.todense(), compute_uv=False)
np.testing.assert_allclose(
np.sort(Sdense[Sdense > 1E-15]), np.sort(S2.data[S2.data > 0.0]))
def test_svd_raises():
np.random.seed(10)
dtype = np.float64
Ds = [10, 11, 12]
R = len(Ds)
charges = [
BaseCharge(np.random.randint(-5, 6, (Ds[n], 1)), charge_types=[U1Charge])
for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
with pytest.raises(NotImplementedError):
svd(A, full_matrices=False, compute_uv=False)
# A sanity check that does not use symmetries (all charges are 0)
def test_qr_r_mode():
Ds = [10, 11]
dtype = np.float64
np.random.seed(10)
rank = len(Ds)
charges = [
BaseCharge(np.zeros((Ds[n], 1)), charge_types=[U1Charge] * 1)
for n in range(rank)
]
flows = [True] * rank
A = BlockSparseTensor.random(
[Index(charges[n], flows[n]) for n in range(rank)], dtype=dtype)
d1 = np.prod(Ds[:1])
d2 = np.prod(Ds[1:])
A = A.reshape([d1, d2])
R = qr(A, mode='r')
R_np = np.linalg.qr(A.todense(), mode='r')
np.testing.assert_allclose(
np.abs(np.diag(R.todense())), np.abs(np.diag(R_np)))
@pytest.mark.parametrize("mode", ['complete', 'reduced'])
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("Ds, R1", [([20, 21], 1), ([18, 19, 20], 2),
([18, 19, 20], 1), ([10, 0], 1),
([0, 10], 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_qr_prod(dtype, Ds, R1, mode, num_charges):
np.random.seed(10)
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
d1 = np.prod(Ds[:R1])
d2 = np.prod(Ds[R1:])
A = A.reshape([d1, d2])
Q, R = qr(A, mode=mode)
A_ = Q @ R
assert A_.dtype == A.dtype
np.testing.assert_allclose(A.data, A_.data)
for n in range(len(A._charges)):
assert charge_equal(A_._charges[n], A._charges[n])
def test_qr_raises():
np.random.seed(10)
dtype = np.float64
num_charges = 1
Ds = [20, 21, 22]
R1 = 2
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [True] * R
A = BlockSparseTensor.random([Index(charges[n], flows[n]) for n in range(R)],
dtype=dtype)
d1 = np.prod(Ds[:R1])
d2 = np.prod(Ds[R1:])
B = A.reshape([d1, d2])
with pytest.raises(ValueError, match='unknown value'):
qr(B, mode='fake_mode')
with pytest.raises(NotImplementedError, match="mode `raw`"):
qr(B, mode='raw')
with pytest.raises(NotImplementedError, match="qr currently"):
qr(A)
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("Ds", [[20], [9, 10], [6, 7, 8], [0]])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_eigh_prod(dtype, Ds, num_charges):
np.random.seed(10)
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [False] * R
inds = [Index(charges[n], flows[n]) for n in range(R)]
A = BlockSparseTensor.random(
inds + [i.copy().flip_flow() for i in inds], dtype=dtype)
dims = np.prod(Ds)
A = A.reshape([dims, dims])
B = A + A.T.conj()
E, V = eigh(B)
B_ = V @ diag(E) @ V.conj().T
np.testing.assert_allclose(B.contiguous(inplace=True).data, B_.data)
for n in range(len(B._charges)):
assert charge_equal(B_._charges[n], B._charges[n])
def test_eigh_raises():
np.random.seed(10)
num_charges = 1
D = 20
R = 3
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [False] * R
inds = [Index(charges[n], flows[n]) for n in range(R)]
A = BlockSparseTensor.random(inds)
with pytest.raises(NotImplementedError):
eigh(A)
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_inv(dtype, num_charges):
np.random.seed(10)
R = 2
D = 10
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
flows = [True, False]
A = BlockSparseTensor.random([Index(charge, flows[n]) for n in range(R)],
(-0.5, 0.5),
dtype=dtype)
invA = inv(A)
left_eye = invA @ A
blocks, _, shapes = _find_diagonal_sparse_blocks(left_eye.flat_charges,
left_eye.flat_flows, 1)
for n, block in enumerate(blocks):
t = np.reshape(left_eye.data[block], shapes[:, n])
assert np.linalg.norm(t - np.eye(t.shape[0], t.shape[1])) < 1E-12
right_eye = A @ invA
blocks, _, shapes = _find_diagonal_sparse_blocks(right_eye.flat_charges,
right_eye.flat_flows, 1)
for n, block in enumerate(blocks):
t = np.reshape(right_eye.data[block], shapes[:, n])
assert np.linalg.norm(t - np.eye(t.shape[0], t.shape[1])) < 1E-12
def test_inv_raises():
num_charges = 1
np.random.seed(10)
R = 3
D = 10
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
A = BlockSparseTensor.random([Index(charge, False) for n in range(R)],
(-0.5, 0.5))
with pytest.raises(ValueError):
inv(A)
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("Ds", [[20], [9, 10], [6, 7, 8], [0]])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_eig_prod(dtype, Ds, num_charges):
np.random.seed(10)
R = len(Ds)
charges = [
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [False] * R
inds = [Index(charges[n], flows[n]) for n in range(R)]
A = BlockSparseTensor.random(
inds + [i.copy().flip_flow() for i in inds], dtype=dtype)
dims = np.prod(Ds)
A = A.reshape([dims, dims])
E, V = eig(A)
A_ = V @ diag(E) @ inv(V)
np.testing.assert_allclose(A.contiguous(inplace=True).data, A_.data)
def test_eig_raises():
np.random.seed(10)
num_charges = 1
D = 20
R = 3
charges = [
BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
flows = [False] * R
inds = [Index(charges[n], flows[n]) for n in range(R)]
A = BlockSparseTensor.random(inds)
with pytest.raises(NotImplementedError):
eig(A)
# Note: the case num_charges=4 is most likely testing empty tensors
@pytest.mark.parametrize("dtype", np_tensordot_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize("Ds", [[20], [9, 10], [6, 7, 8], [9, 8, 0, 10]])
def test_sqrt(dtype, num_charges, Ds):
np.random.seed(10)
R = len(Ds)
flows = np.random.choice([True, False], replace=True, size=R)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(R)
]
arr = BlockSparseTensor.random(indices, dtype=dtype)
sqrtarr = sqrt(arr)
np.testing.assert_allclose(sqrtarr.data, np.sqrt(arr.data))
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('D', [0, 10])
def test_eye(dtype, num_charges, D):
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
flow = False
index = Index(charge, flow)
A = eye(index, dtype=dtype)
blocks, _, shapes = _find_diagonal_sparse_blocks(A.flat_charges, A.flat_flows,
1)
for n, block in enumerate(blocks):
t = np.reshape(A.data[block], shapes[:, n])
np.testing.assert_almost_equal(t, np.eye(t.shape[0], t.shape[1]))
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('D', [0, 100])
def test_trace_matrix(dtype, num_charges, D):
np.random.seed(10)
R = 2
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
flows = [True, False]
matrix = BlockSparseTensor.random([Index(charge, flows[n]) for n in range(R)],
dtype=dtype)
res = trace(matrix)
res_dense = np.trace(matrix.todense())
np.testing.assert_allclose(res.data, res_dense)
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('D1, D2', [(10, 12), (0, 10)])
def test_trace_tensor(dtype, num_charges, D1, D2):
np.random.seed(10)
charge1 = BaseCharge(
np.random.randint(-5, 6, (D1, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
charge2 = BaseCharge(
np.random.randint(-5, 6, (D2, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
indices = [Index(charge1, False), Index(charge2, False), Index(charge1, True)]
tensor = BlockSparseTensor.random(indices, dtype=dtype)
res = trace(tensor, (0, 2))
assert res.sparse_shape[0] == indices[1]
res_dense = np.trace(tensor.todense(), axis1=0, axis2=2)
np.testing.assert_allclose(res.todense(), res_dense)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_trace_raises(num_charges):
np.random.seed(10)
D = 20
charge1 = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
A1 = BlockSparseTensor.random([Index(charge1, False)])
with pytest.raises(ValueError, match="trace can only"):
trace(A1)
charge2 = BaseCharge(
np.random.randint(-5, 6, (D + 1, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
indices = [
Index(charge1, False),
Index(charge2, True),
Index(charge1, False)
]
A2 = BlockSparseTensor.random(indices)
with pytest.raises(ValueError, match="not matching"):
trace(A2, axes=(0, 1))
with pytest.raises(ValueError, match="non-matching flows"):
trace(A2, axes=(0, 2))
with pytest.raises(ValueError, match="has to be 2"):
trace(A2, axes=(0, 1, 2))
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_pinv(dtype, num_charges):
np.random.seed(10)
R = 2
D = 10
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
flows = [True, False]
A = BlockSparseTensor.random([Index(charge, flows[n]) for n in range(R)],
(-0.5, 0.5),
dtype=dtype)
invA = pinv(A)
left_eye = invA @ A
blocks, _, shapes = _find_diagonal_sparse_blocks(left_eye.flat_charges,
left_eye.flat_flows, 1)
for n, block in enumerate(blocks):
t = np.reshape(left_eye.data[block], shapes[:, n])
assert np.linalg.norm(t - np.eye(t.shape[0], t.shape[1])) < 1E-12
right_eye = A @ invA
blocks, _, shapes = _find_diagonal_sparse_blocks(right_eye.flat_charges,
right_eye.flat_flows, 1)
for n, block in enumerate(blocks):
t = np.reshape(right_eye.data[block], shapes[:, n])
assert np.linalg.norm(t - np.eye(t.shape[0], t.shape[1])) < 1E-12
def test_pinv_raises():
num_charges = 1
np.random.seed(10)
R = 3
D = 10
charge = BaseCharge(
np.random.randint(-5, 6, (D, num_charges), dtype=np.int16),
charge_types=[U1Charge] * num_charges)
A = BlockSparseTensor.random([Index(charge, False) for n in range(R)],
(-0.5, 0.5))
with pytest.raises(ValueError):
pinv(A)
def test_abs():
np.random.seed(10)
Ds = np.array([8, 9, 10, 11])
flows = [True, False, True, False]
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), flows[n])
for n in range(4)
]
arr = BlockSparseTensor.random(indices)
np.testing.assert_allclose(linalg.abs(arr).data, np.abs(arr.data))
def test_sign():
np.random.seed(10)
Ds = np.array([8, 9, 10, 11])
flows = [True, False, True, False]
indices = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), flows[n])
for n in range(4)
]
arr = BlockSparseTensor.random(indices)
np.testing.assert_allclose(sign(arr).data, np.sign(arr.data))
|
from __future__ import with_statement
import logging
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import MetaData
from sqlalchemy import pool
from flask import current_app
from alembic import context
USE_TWOPHASE = False
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
bind_names = []
for bind in current_app.config.get("SQLALCHEMY_BINDS"):
context.config.set_section_option(
bind, "sqlalchemy.url",
str(current_app.extensions['migrate'].db.get_engine(
current_app, bind).url).replace('%', '%%'))
bind_names.append(bind)
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata(bind):
"""Return the metadata for a bind."""
if bind == '':
bind = None
m = MetaData()
for t in target_metadata.tables.values():
if t.info.get('bind_key') == bind:
t.tometadata(m)
return m
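# Illustrative note (assumed Flask-SQLAlchemy usage, not part of this env.py):
# a model routed to a named bind typically declares __bind_key__, which ends up
# in Table.info['bind_key'] -- the value get_metadata() matches against above.
#
#     class AuditLog(db.Model):
#         __bind_key__ = 'audit'
#         id = db.Column(db.Integer, primary_key=True)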
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
# for the --sql use case, run migrations for each URL into
# individual files.
engines = {
'': {
'url': context.config.get_main_option('sqlalchemy.url')
}
}
for name in bind_names:
engines[name] = rec = {}
rec['url'] = context.config.get_section_option(name, "sqlalchemy.url")
for name, rec in engines.items():
logger.info("Migrating database %s" % (name or '<default>'))
file_ = "%s.sql" % name
logger.info("Writing output to %s" % file_)
with open(file_, 'w') as buffer:
context.configure(
url=rec['url'],
output_buffer=buffer,
target_metadata=get_metadata(name),
literal_binds=True,
)
with context.begin_transaction():
context.run_migrations(engine_name=name)
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if len(script.upgrade_ops_list) >= len(bind_names) + 1:
empty = True
for upgrade_ops in script.upgrade_ops_list:
if not upgrade_ops.is_empty():
empty = False
if empty:
directives[:] = []
logger.info('No changes in schema detected.')
# for the direct-to-DB use case, start a transaction on all
# engines, then run all migrations, then commit all transactions.
engines = {
'': {
'engine': engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool,
)
}
}
for name in bind_names:
engines[name] = rec = {}
rec['engine'] = engine_from_config(
context.config.get_section(name),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
for name, rec in engines.items():
engine = rec['engine']
rec['connection'] = conn = engine.connect()
if USE_TWOPHASE:
rec['transaction'] = conn.begin_twophase()
else:
rec['transaction'] = conn.begin()
try:
for name, rec in engines.items():
logger.info("Migrating database %s" % (name or '<default>'))
context.configure(
connection=rec['connection'],
upgrade_token="%s_upgrades" % name,
downgrade_token="%s_downgrades" % name,
target_metadata=get_metadata(name),
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)
context.run_migrations(engine_name=name)
if USE_TWOPHASE:
for rec in engines.values():
rec['transaction'].prepare()
for rec in engines.values():
rec['transaction'].commit()
except: # noqa: E722
for rec in engines.values():
rec['transaction'].rollback()
raise
finally:
for rec in engines.values():
rec['connection'].close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
|
import asyncio
import os
import shutil
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
DOMAIN as DOMAIN_MP,
SERVICE_PLAY_MEDIA,
)
import homeassistant.components.tts as tts
from homeassistant.config import async_process_ha_core_config
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import assert_setup_component, get_test_home_assistant, mock_service
from tests.components.tts.test_init import mutagen_mock # noqa: F401
class TestTTSGooglePlatform:
"""Test the Google speech component."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
asyncio.run_coroutine_threadsafe(
async_process_ha_core_config(
self.hass, {"internal_url": "http://example.local:8123"}
),
self.hass.loop,
)
self.url = "https://translate.google.com/translate_tts"
self.url_param = {
"tl": "en",
"q": "90%25%20of%20I%20person%20is%20on%20front%20of%20your%20door.",
"tk": 5,
"client": "tw-ob",
"textlen": 41,
"total": 1,
"idx": 0,
"ie": "UTF-8",
}
def teardown_method(self):
"""Stop everything that was started."""
default_tts = self.hass.config.path(tts.DEFAULT_CACHE_DIR)
if os.path.isdir(default_tts):
shutil.rmtree(default_tts)
self.hass.stop()
def test_setup_component(self):
"""Test setup component."""
config = {tts.DOMAIN: {"platform": "google_translate"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say(self, mock_calculate, aioclient_mock):
"""Test service call say."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
aioclient_mock.get(self.url, params=self.url_param, status=200, content=b"test")
config = {tts.DOMAIN: {"platform": "google_translate"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_translate_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "90% of I person is on front of your door.",
},
)
self.hass.block_till_done()
assert len(calls) == 1
assert len(aioclient_mock.mock_calls) == 1
assert calls[0].data[ATTR_MEDIA_CONTENT_ID].find(".mp3") != -1
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say_german_config(self, mock_calculate, aioclient_mock):
"""Test service call say with german code in the config."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
self.url_param["tl"] = "de"
aioclient_mock.get(self.url, params=self.url_param, status=200, content=b"test")
config = {tts.DOMAIN: {"platform": "google_translate", "language": "de"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_translate_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "90% of I person is on front of your door.",
},
)
self.hass.block_till_done()
assert len(calls) == 1
assert len(aioclient_mock.mock_calls) == 1
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say_german_service(self, mock_calculate, aioclient_mock):
"""Test service call say with german code in the service."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
self.url_param["tl"] = "de"
aioclient_mock.get(self.url, params=self.url_param, status=200, content=b"test")
config = {
tts.DOMAIN: {"platform": "google_translate", "service_name": "google_say"}
}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "90% of I person is on front of your door.",
tts.ATTR_LANGUAGE: "de",
},
)
self.hass.block_till_done()
assert len(calls) == 1
assert len(aioclient_mock.mock_calls) == 1
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say_error(self, mock_calculate, aioclient_mock):
"""Test service call say with http response 400."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
aioclient_mock.get(self.url, params=self.url_param, status=400, content=b"test")
config = {tts.DOMAIN: {"platform": "google_translate"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_translate_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "90% of I person is on front of your door.",
},
)
self.hass.block_till_done()
assert len(calls) == 0
assert len(aioclient_mock.mock_calls) == 1
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say_timeout(self, mock_calculate, aioclient_mock):
"""Test service call say with http timeout."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
aioclient_mock.get(self.url, params=self.url_param, exc=asyncio.TimeoutError())
config = {tts.DOMAIN: {"platform": "google_translate"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_translate_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "90% of I person is on front of your door.",
},
)
self.hass.block_till_done()
assert len(calls) == 0
assert len(aioclient_mock.mock_calls) == 1
@patch("gtts_token.gtts_token.Token.calculate_token", autospec=True, return_value=5)
def test_service_say_long_size(self, mock_calculate, aioclient_mock):
"""Test service call say with a lot of text."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
self.url_param["total"] = 9
self.url_param["q"] = "I%20person%20is%20on%20front%20of%20your%20door"
self.url_param["textlen"] = 33
for idx in range(9):
self.url_param["idx"] = idx
aioclient_mock.get(
self.url, params=self.url_param, status=200, content=b"test"
)
config = {
tts.DOMAIN: {"platform": "google_translate", "service_name": "google_say"}
}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
self.hass.services.call(
tts.DOMAIN,
"google_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: (
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
"I person is on front of your door."
),
},
)
self.hass.block_till_done()
assert len(calls) == 1
assert len(aioclient_mock.mock_calls) == 9
assert calls[0].data[ATTR_MEDIA_CONTENT_ID].find(".mp3") != -1
|
import os
from unittest import mock
from django.test import TestCase
from django.utils.encoding import force_str
from tablib.core import UnsupportedFormat
from import_export.formats import base_formats
class FormatTest(TestCase):
@mock.patch('import_export.formats.base_formats.HTML.get_format', side_effect=ImportError)
def test_format_non_available1(self, mocked):
self.assertFalse(base_formats.HTML.is_available())
@mock.patch('import_export.formats.base_formats.HTML.get_format', side_effect=UnsupportedFormat)
def test_format_non_available2(self, mocked):
self.assertFalse(base_formats.HTML.is_available())
def test_format_available(self):
self.assertTrue(base_formats.CSV.is_available())
class XLSTest(TestCase):
def test_binary_format(self):
self.assertTrue(base_formats.XLS().is_binary())
class XLSXTest(TestCase):
def setUp(self):
self.format = base_formats.XLSX()
def test_binary_format(self):
self.assertTrue(self.format.is_binary())
def test_import(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books.xlsx')
with open(filename, self.format.get_read_mode()) as in_stream:
self.format.create_dataset(in_stream.read())
class CSVTest(TestCase):
def setUp(self):
self.format = base_formats.CSV()
def test_import_dos(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-dos.csv')
with open(filename, self.format.get_read_mode()) as in_stream:
actual = in_stream.read()
expected = 'id,name,author_email\n1,Some book,[email protected]\n'
self.assertEqual(actual, expected)
def test_import_mac(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-mac.csv')
with open(filename, self.format.get_read_mode()) as in_stream:
actual = in_stream.read()
expected = 'id,name,author_email\n1,Some book,[email protected]\n'
self.assertEqual(actual, expected)
def test_import_unicode(self):
        # Regression test for UnicodeEncodeError when importing a unicode CSV (issue 347)
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.csv')
with open(filename, self.format.get_read_mode()) as in_stream:
data = force_str(in_stream.read())
base_formats.CSV().create_dataset(data)
class TSVTest(TestCase):
def setUp(self):
self.format = base_formats.TSV()
def test_import_mac(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-mac.tsv')
with open(filename, self.format.get_read_mode()) as in_stream:
actual = in_stream.read()
expected = 'id\tname\tauthor_email\n1\tSome book\[email protected]\n'
self.assertEqual(actual, expected)
def test_import_unicode(self):
        # Regression test for UnicodeEncodeError when importing a unicode TSV
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.tsv')
with open(filename, self.format.get_read_mode()) as in_stream:
data = force_str(in_stream.read())
base_formats.TSV().create_dataset(data)
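# Illustrative sketch only (not part of the test suite): is_binary() drives the
# read mode used when the fixture files above are opened; a binary format such
# as XLSX is typically read with "rb", while a text format such as CSV uses
# "r". The helper just surfaces those modes for the real format classes.
def _example_read_modes():
    return {
        "xlsx": base_formats.XLSX().get_read_mode(),
        "csv": base_formats.CSV().get_read_mode(),
    }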
|
import asyncio
from datetime import timedelta
import logging
from typing import Optional
import voluptuous as vol
from yeelight import Bulb, BulbException, discover_bulbs
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_DEVICES,
CONF_HOST,
CONF_ID,
CONF_NAME,
CONF_SCAN_INTERVAL,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
_LOGGER = logging.getLogger(__name__)
DOMAIN = "yeelight"
DATA_YEELIGHT = DOMAIN
DATA_UPDATED = "yeelight_{}_data_updated"
DEVICE_INITIALIZED = f"{DOMAIN}_device_initialized"
DEFAULT_NAME = "Yeelight"
DEFAULT_TRANSITION = 350
DEFAULT_MODE_MUSIC = False
DEFAULT_SAVE_ON_CHANGE = False
DEFAULT_NIGHTLIGHT_SWITCH = False
CONF_MODEL = "model"
CONF_TRANSITION = "transition"
CONF_SAVE_ON_CHANGE = "save_on_change"
CONF_MODE_MUSIC = "use_music_mode"
CONF_FLOW_PARAMS = "flow_params"
CONF_CUSTOM_EFFECTS = "custom_effects"
CONF_NIGHTLIGHT_SWITCH_TYPE = "nightlight_switch_type"
CONF_NIGHTLIGHT_SWITCH = "nightlight_switch"
CONF_DEVICE = "device"
DATA_CONFIG_ENTRIES = "config_entries"
DATA_CUSTOM_EFFECTS = "custom_effects"
DATA_SCAN_INTERVAL = "scan_interval"
DATA_DEVICE = "device"
DATA_UNSUB_UPDATE_LISTENER = "unsub_update_listener"
ATTR_COUNT = "count"
ATTR_ACTION = "action"
ATTR_TRANSITIONS = "transitions"
ACTION_RECOVER = "recover"
ACTION_STAY = "stay"
ACTION_OFF = "off"
ACTIVE_MODE_NIGHTLIGHT = "1"
ACTIVE_COLOR_FLOWING = "1"
NIGHTLIGHT_SWITCH_TYPE_LIGHT = "light"
SCAN_INTERVAL = timedelta(seconds=30)
DISCOVERY_INTERVAL = timedelta(seconds=60)
YEELIGHT_RGB_TRANSITION = "RGBTransition"
YEELIGHT_HSV_TRANSACTION = "HSVTransition"
YEELIGHT_TEMPERATURE_TRANSACTION = "TemperatureTransition"
YEELIGHT_SLEEP_TRANSACTION = "SleepTransition"
YEELIGHT_FLOW_TRANSITION_SCHEMA = {
vol.Optional(ATTR_COUNT, default=0): cv.positive_int,
vol.Optional(ATTR_ACTION, default=ACTION_RECOVER): vol.Any(
ACTION_RECOVER, ACTION_OFF, ACTION_STAY
),
vol.Required(ATTR_TRANSITIONS): [
{
vol.Exclusive(YEELIGHT_RGB_TRANSITION, CONF_TRANSITION): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Exclusive(YEELIGHT_HSV_TRANSACTION, CONF_TRANSITION): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Exclusive(YEELIGHT_TEMPERATURE_TRANSACTION, CONF_TRANSITION): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Exclusive(YEELIGHT_SLEEP_TRANSACTION, CONF_TRANSITION): vol.All(
cv.ensure_list, [cv.positive_int]
),
}
],
}
DEVICE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TRANSITION, default=DEFAULT_TRANSITION): cv.positive_int,
vol.Optional(CONF_MODE_MUSIC, default=False): cv.boolean,
vol.Optional(CONF_SAVE_ON_CHANGE, default=False): cv.boolean,
vol.Optional(CONF_NIGHTLIGHT_SWITCH_TYPE): vol.Any(
NIGHTLIGHT_SWITCH_TYPE_LIGHT
),
vol.Optional(CONF_MODEL): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA},
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
vol.Optional(CONF_CUSTOM_EFFECTS): [
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_FLOW_PARAMS): YEELIGHT_FLOW_TRANSITION_SCHEMA,
}
],
}
)
},
extra=vol.ALLOW_EXTRA,
)
UPDATE_REQUEST_PROPERTIES = [
"power",
"main_power",
"bright",
"ct",
"rgb",
"hue",
"sat",
"color_mode",
"flowing",
"bg_power",
"bg_lmode",
"bg_flowing",
"bg_ct",
"bg_bright",
"bg_hue",
"bg_sat",
"bg_rgb",
"nl_br",
"active_mode",
]
PLATFORMS = ["binary_sensor", "light"]
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the Yeelight bulbs."""
conf = config.get(DOMAIN, {})
hass.data[DOMAIN] = {
DATA_CUSTOM_EFFECTS: conf.get(CONF_CUSTOM_EFFECTS, {}),
DATA_CONFIG_ENTRIES: {},
DATA_SCAN_INTERVAL: conf.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL),
}
# Import manually configured devices
for host, device_config in config.get(DOMAIN, {}).get(CONF_DEVICES, {}).items():
_LOGGER.debug("Importing configured %s", host)
entry_config = {
CONF_HOST: host,
**device_config,
}
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=entry_config,
),
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Yeelight from a config entry."""
async def _initialize(host: str, capabilities: Optional[dict] = None) -> None:
device = await _async_setup_device(hass, host, entry, capabilities)
hass.data[DOMAIN][DATA_CONFIG_ENTRIES][entry.entry_id][DATA_DEVICE] = device
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
# Move options from data for imported entries
# Initialize options with default values for other entries
if not entry.options:
hass.config_entries.async_update_entry(
entry,
data={
CONF_HOST: entry.data.get(CONF_HOST),
CONF_ID: entry.data.get(CONF_ID),
},
options={
CONF_NAME: entry.data.get(CONF_NAME, ""),
CONF_MODEL: entry.data.get(CONF_MODEL, ""),
CONF_TRANSITION: entry.data.get(CONF_TRANSITION, DEFAULT_TRANSITION),
CONF_MODE_MUSIC: entry.data.get(CONF_MODE_MUSIC, DEFAULT_MODE_MUSIC),
CONF_SAVE_ON_CHANGE: entry.data.get(
CONF_SAVE_ON_CHANGE, DEFAULT_SAVE_ON_CHANGE
),
CONF_NIGHTLIGHT_SWITCH: entry.data.get(
CONF_NIGHTLIGHT_SWITCH, DEFAULT_NIGHTLIGHT_SWITCH
),
},
)
hass.data[DOMAIN][DATA_CONFIG_ENTRIES][entry.entry_id] = {
DATA_UNSUB_UPDATE_LISTENER: entry.add_update_listener(_async_update_listener)
}
if entry.data.get(CONF_HOST):
# manually added device
await _initialize(entry.data[CONF_HOST])
else:
# discovery
scanner = YeelightScanner.async_get(hass)
scanner.async_register_callback(entry.data[CONF_ID], _initialize)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
data = hass.data[DOMAIN][DATA_CONFIG_ENTRIES].pop(entry.entry_id)
data[DATA_UNSUB_UPDATE_LISTENER]()
data[DATA_DEVICE].async_unload()
if entry.data[CONF_ID]:
# discovery
scanner = YeelightScanner.async_get(hass)
scanner.async_unregister_callback(entry.data[CONF_ID])
return unload_ok
async def _async_setup_device(
hass: HomeAssistant,
host: str,
entry: ConfigEntry,
capabilities: Optional[dict],
) -> "YeelightDevice":
# Get model from config and capabilities
model = entry.options.get(CONF_MODEL)
if not model and capabilities is not None:
model = capabilities.get("model")
# Set up device
bulb = Bulb(host, model=model or None)
if capabilities is None:
capabilities = await hass.async_add_executor_job(bulb.get_capabilities)
device = YeelightDevice(hass, host, entry.options, bulb, capabilities)
await hass.async_add_executor_job(device.update)
await device.async_setup()
return device
@callback
def _async_unique_name(capabilities: dict) -> str:
"""Generate name from capabilities."""
model = capabilities["model"]
unique_id = capabilities["id"]
return f"yeelight_{model}_{unique_id}"
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
class YeelightScanner:
"""Scan for Yeelight devices."""
_scanner = None
@classmethod
@callback
def async_get(cls, hass: HomeAssistant):
"""Get scanner instance."""
if cls._scanner is None:
cls._scanner = cls(hass)
return cls._scanner
def __init__(self, hass: HomeAssistant):
"""Initialize class."""
self._hass = hass
self._seen = {}
self._callbacks = {}
self._scan_task = None
async def _async_scan(self):
_LOGGER.debug("Yeelight scanning")
# Run 3 times as packets can get lost
for _ in range(3):
devices = await self._hass.async_add_executor_job(discover_bulbs)
for device in devices:
unique_id = device["capabilities"]["id"]
if unique_id in self._seen:
continue
host = device["ip"]
self._seen[unique_id] = host
_LOGGER.debug("Yeelight discovered at %s", host)
if unique_id in self._callbacks:
self._hass.async_create_task(self._callbacks[unique_id](host))
self._callbacks.pop(unique_id)
if len(self._callbacks) == 0:
self._async_stop_scan()
await asyncio.sleep(SCAN_INTERVAL.seconds)
self._scan_task = self._hass.loop.create_task(self._async_scan())
@callback
def _async_start_scan(self):
"""Start scanning for Yeelight devices."""
_LOGGER.debug("Start scanning")
        # Use the loop directly so Home Assistant does not track this task
self._scan_task = self._hass.loop.create_task(self._async_scan())
@callback
def _async_stop_scan(self):
"""Stop scanning."""
_LOGGER.debug("Stop scanning")
if self._scan_task is not None:
self._scan_task.cancel()
self._scan_task = None
@callback
def async_register_callback(self, unique_id, callback_func):
"""Register callback function."""
host = self._seen.get(unique_id)
if host is not None:
self._hass.async_create_task(callback_func(host))
else:
self._callbacks[unique_id] = callback_func
if len(self._callbacks) == 1:
self._async_start_scan()
@callback
def async_unregister_callback(self, unique_id):
"""Unregister callback function."""
if unique_id not in self._callbacks:
return
self._callbacks.pop(unique_id)
if len(self._callbacks) == 0:
self._async_stop_scan()
class YeelightDevice:
"""Represents single Yeelight device."""
def __init__(self, hass, host, config, bulb, capabilities):
"""Initialize device."""
self._hass = hass
self._config = config
self._host = host
self._bulb_device = bulb
self._capabilities = capabilities or {}
self._device_type = None
self._available = False
self._remove_time_tracker = None
self._name = host # Default name is host
if capabilities:
# Generate name from model and id when capabilities is available
self._name = _async_unique_name(capabilities)
if config.get(CONF_NAME):
# Override default name when name is set in config
self._name = config[CONF_NAME]
@property
def bulb(self):
"""Return bulb device."""
return self._bulb_device
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def config(self):
"""Return device config."""
return self._config
@property
def host(self):
"""Return hostname."""
return self._host
@property
def available(self):
"""Return true is device is available."""
return self._available
@property
def model(self):
"""Return configured/autodetected device model."""
return self._bulb_device.model
@property
def fw_version(self):
"""Return the firmware version."""
return self._capabilities.get("fw_ver")
@property
def is_nightlight_supported(self) -> bool:
"""
Return true / false if nightlight is supported.
Uses brightness as it appears to be supported in both ceiling and other lights.
"""
return self._nightlight_brightness is not None
@property
def is_nightlight_enabled(self) -> bool:
"""Return true / false if nightlight is currently enabled."""
if self.bulb is None:
return False
# Only ceiling lights have active_mode, from SDK docs:
# active_mode 0: daylight mode / 1: moonlight mode (ceiling light only)
if self._active_mode is not None:
return self._active_mode == ACTIVE_MODE_NIGHTLIGHT
if self._nightlight_brightness is not None:
return int(self._nightlight_brightness) > 0
return False
@property
def is_color_flow_enabled(self) -> bool:
"""Return true / false if color flow is currently running."""
return self._color_flow == ACTIVE_COLOR_FLOWING
@property
def _active_mode(self):
return self.bulb.last_properties.get("active_mode")
@property
def _color_flow(self):
return self.bulb.last_properties.get("flowing")
@property
def _nightlight_brightness(self):
return self.bulb.last_properties.get("nl_br")
@property
def type(self):
"""Return bulb type."""
if not self._device_type:
self._device_type = self.bulb.bulb_type
return self._device_type
def turn_on(self, duration=DEFAULT_TRANSITION, light_type=None, power_mode=None):
"""Turn on device."""
try:
self.bulb.turn_on(
duration=duration, light_type=light_type, power_mode=power_mode
)
except BulbException as ex:
_LOGGER.error("Unable to turn the bulb on: %s", ex)
def turn_off(self, duration=DEFAULT_TRANSITION, light_type=None):
"""Turn off device."""
try:
self.bulb.turn_off(duration=duration, light_type=light_type)
except BulbException as ex:
_LOGGER.error(
"Unable to turn the bulb off: %s, %s: %s", self._host, self.name, ex
)
def _update_properties(self):
"""Read new properties from the device."""
if not self.bulb:
return
try:
self.bulb.get_properties(UPDATE_REQUEST_PROPERTIES)
self._available = True
except BulbException as ex:
if self._available: # just inform once
_LOGGER.error(
"Unable to update device %s, %s: %s", self._host, self.name, ex
)
self._available = False
return self._available
def _get_capabilities(self):
"""Request device capabilities."""
try:
self.bulb.get_capabilities()
_LOGGER.debug(
"Device %s, %s capabilities: %s",
self._host,
self.name,
self.bulb.capabilities,
)
except BulbException as ex:
_LOGGER.error(
"Unable to get device capabilities %s, %s: %s",
self._host,
self.name,
ex,
)
def update(self):
"""Update device properties and send data updated signal."""
self._update_properties()
dispatcher_send(self._hass, DATA_UPDATED.format(self._host))
async def async_setup(self):
"""Set up the device."""
async def _async_update(_):
await self._hass.async_add_executor_job(self.update)
await _async_update(None)
self._remove_time_tracker = async_track_time_interval(
self._hass, _async_update, self._hass.data[DOMAIN][DATA_SCAN_INTERVAL]
)
@callback
def async_unload(self):
"""Unload the device."""
self._remove_time_tracker()
class YeelightEntity(Entity):
"""Represents single Yeelight entity."""
def __init__(self, device: YeelightDevice, entry: ConfigEntry):
"""Initialize the entity."""
self._device = device
self._unique_id = entry.entry_id
if entry.unique_id is not None:
# Use entry unique id (device id) whenever possible
self._unique_id = entry.unique_id
@property
def unique_id(self) -> str:
"""Return the unique ID."""
return self._unique_id
@property
def device_info(self) -> dict:
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self._unique_id)},
"name": self._device.name,
"manufacturer": "Yeelight",
"model": self._device.model,
"sw_version": self._device.fw_version,
}
@property
def available(self) -> bool:
"""Return if bulb is available."""
return self._device.available
@property
def should_poll(self) -> bool:
"""No polling needed."""
return False
def update(self) -> None:
"""Update the entity."""
self._device.update()
|
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_CLASS,
CONF_ENTITY_ID,
CONF_NAME,
STATE_UNKNOWN,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_state_change_event
_LOGGER = logging.getLogger(__name__)
ATTR_HYSTERESIS = "hysteresis"
ATTR_LOWER = "lower"
ATTR_POSITION = "position"
ATTR_SENSOR_VALUE = "sensor_value"
ATTR_TYPE = "type"
ATTR_UPPER = "upper"
CONF_HYSTERESIS = "hysteresis"
CONF_LOWER = "lower"
CONF_UPPER = "upper"
DEFAULT_NAME = "Threshold"
DEFAULT_HYSTERESIS = 0.0
POSITION_ABOVE = "above"
POSITION_BELOW = "below"
POSITION_IN_RANGE = "in_range"
POSITION_UNKNOWN = "unknown"
TYPE_LOWER = "lower"
TYPE_RANGE = "range"
TYPE_UPPER = "upper"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(float),
vol.Optional(CONF_LOWER): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UPPER): vol.Coerce(float),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Threshold sensor."""
entity_id = config.get(CONF_ENTITY_ID)
name = config.get(CONF_NAME)
lower = config.get(CONF_LOWER)
upper = config.get(CONF_UPPER)
hysteresis = config.get(CONF_HYSTERESIS)
device_class = config.get(CONF_DEVICE_CLASS)
async_add_entities(
[
ThresholdSensor(
hass, entity_id, name, lower, upper, hysteresis, device_class
)
],
True,
)
class ThresholdSensor(BinarySensorEntity):
"""Representation of a Threshold sensor."""
def __init__(self, hass, entity_id, name, lower, upper, hysteresis, device_class):
"""Initialize the Threshold sensor."""
self._hass = hass
self._entity_id = entity_id
self._name = name
self._threshold_lower = lower
self._threshold_upper = upper
self._hysteresis = hysteresis
self._device_class = device_class
self._state_position = None
self._state = False
self.sensor_value = None
@callback
def async_threshold_sensor_state_listener(event):
"""Handle sensor state changes."""
new_state = event.data.get("new_state")
if new_state is None:
return
try:
self.sensor_value = (
None if new_state.state == STATE_UNKNOWN else float(new_state.state)
)
except (ValueError, TypeError):
self.sensor_value = None
_LOGGER.warning("State is not numerical")
hass.async_add_job(self.async_update_ha_state, True)
async_track_state_change_event(
hass, [entity_id], async_threshold_sensor_state_listener
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def device_class(self):
"""Return the sensor class of the sensor."""
return self._device_class
@property
def threshold_type(self):
"""Return the type of threshold this sensor represents."""
if self._threshold_lower is not None and self._threshold_upper is not None:
return TYPE_RANGE
if self._threshold_lower is not None:
return TYPE_LOWER
if self._threshold_upper is not None:
return TYPE_UPPER
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {
ATTR_ENTITY_ID: self._entity_id,
ATTR_HYSTERESIS: self._hysteresis,
ATTR_LOWER: self._threshold_lower,
ATTR_POSITION: self._state_position,
ATTR_SENSOR_VALUE: self.sensor_value,
ATTR_TYPE: self.threshold_type,
ATTR_UPPER: self._threshold_upper,
}
async def async_update(self):
"""Get the latest data and updates the states."""
def below(threshold):
"""Determine if the sensor value is below a threshold."""
return self.sensor_value < (threshold - self._hysteresis)
def above(threshold):
"""Determine if the sensor value is above a threshold."""
return self.sensor_value > (threshold + self._hysteresis)
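        # Worked example (illustrative values, not part of the component):
        # with lower=10 and hysteresis=0.5, below(10) is true only under 9.5
        # and above(10) is true only over 10.5, so readings between 9.5 and
        # 10.5 keep the previous position and the sensor does not flap.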
if self.sensor_value is None:
self._state_position = POSITION_UNKNOWN
self._state = False
elif self.threshold_type == TYPE_LOWER:
if below(self._threshold_lower):
self._state_position = POSITION_BELOW
self._state = True
elif above(self._threshold_lower):
self._state_position = POSITION_ABOVE
self._state = False
elif self.threshold_type == TYPE_UPPER:
if above(self._threshold_upper):
self._state_position = POSITION_ABOVE
self._state = True
elif below(self._threshold_upper):
self._state_position = POSITION_BELOW
self._state = False
elif self.threshold_type == TYPE_RANGE:
if below(self._threshold_lower):
self._state_position = POSITION_BELOW
self._state = False
if above(self._threshold_upper):
self._state_position = POSITION_ABOVE
self._state = False
elif above(self._threshold_lower) and below(self._threshold_upper):
self._state_position = POSITION_IN_RANGE
self._state = True
|
import unittest
import itertools
import numpy
import dedupe
DATA = {100: {"name": "Bob", "age": "50"},
105: {"name": "Charlie", "age": "75"},
110: {"name": "Meredith", "age": "40"},
115: {"name": "Sue", "age": "10"},
120: {"name": "Jimmy", "age": "20"},
125: {"name": "Jimbo", "age": "21"},
130: {"name": "Willy", "age": "35"},
135: {"name": "William", "age": "35"},
140: {"name": "Martha", "age": "19"},
145: {"name": "Kyle", "age": "27"}
}
DATA_SAMPLE = (({'age': '27', 'name': 'Kyle'},
{'age': '50', 'name': 'Bob'}),
({'age': '27', 'name': 'Kyle'},
{'age': '35', 'name': 'William'}),
({'age': '10', 'name': 'Sue'},
{'age': '35', 'name': 'William'}),
({'age': '27', 'name': 'Kyle'},
{'age': '20', 'name': 'Jimmy'}),
({'age': '75', 'name': 'Charlie'},
{'age': '21', 'name': 'Jimbo'}))
class DataModelTest(unittest.TestCase):
def test_data_model(self):
DataModel = dedupe.datamodel.DataModel
self.assertRaises(TypeError, DataModel)
data_model = DataModel([{'field': 'a',
'variable name': 'a',
'type': 'String'},
{'field': 'b',
'variable name': 'b',
'type': 'String'},
{'type': 'Interaction',
'interaction variables': ['a', 'b']}])
assert data_model._interaction_indices == [[0, 1]]
data_model = DataModel([{'field': 'a',
'variable name': 'a',
'type': 'String',
'has missing': True},
{'field': 'b',
'variable name': 'b',
'type': 'String'},
{'type': 'Interaction',
'interaction variables': ['a', 'b']}])
assert data_model._missing_field_indices == [0, 2]
data_model = DataModel([{'field': 'a',
'variable name': 'a',
'type': 'String',
'has missing': False},
{'field': 'b',
'variable name': 'b',
'type': 'String'},
{'type': 'Interaction',
'interaction variables': ['a', 'b']}])
assert data_model._missing_field_indices == []
class ConnectedComponentsTest(unittest.TestCase):
def test_components(self):
G = numpy.array([((1, 2), .1),
((2, 3), .2),
((4, 5), .2),
((4, 6), .2),
((7, 9), .2),
((8, 9), .2),
((10, 11), .2),
((12, 13), .2),
((12, 14), .5),
((11, 12), .2)],
dtype=[('pairs', 'i4', 2), ('score', 'f4')])
components = dedupe.clustering.connected_components
G_components = {frozenset(tuple(edge) for edge, _ in component)
for component in components(G, 30000)}
assert G_components == {frozenset(((1, 2), (2, 3))),
frozenset(((4, 5), (4, 6))),
frozenset(
((12, 13), (12, 14), (10, 11), (11, 12))),
frozenset(((7, 9), (8, 9)))}
class ClusteringTest(unittest.TestCase):
def setUp(self):
# Fully connected star network
self.dupes = numpy.array([((1, 2), .86),
((1, 3), .72),
((1, 4), .2),
((1, 5), .6),
((2, 3), .86),
((2, 4), .2),
((2, 5), .72),
((3, 4), .3),
((3, 5), .5),
((4, 5), .72),
((10, 11), .9)],
dtype=[('pairs', 'i4', 2),
('score', 'f4')])
# Dupes with Ids as String
self.str_dupes = numpy.array([(('1', '2'), .86),
(('1', '3'), .72),
(('1', '4'), .2),
(('1', '5'), .6),
(('2', '3'), .86),
(('2', '4'), .2),
(('2', '5'), .72),
(('3', '4'), .3),
(('3', '5'), .5),
(('4', '5'), .72)],
dtype=[('pairs', 'S4', 2),
('score', 'f4')])
self.bipartite_dupes = (((1, 5), .1),
((1, 6), .72),
((1, 7), .2),
((1, 8), .6),
((2, 5), .2),
((2, 6), .2),
((2, 7), .72),
((2, 8), .3),
((3, 5), .24),
((3, 6), .72),
((3, 7), .24),
((3, 8), .65),
((4, 5), .63),
((4, 6), .96),
((4, 7), .23),
((5, 8), .24))
def clusterEquals(self, x, y):
if [] == x == y:
return True
if len(x) != len(y):
return False
for cluster_a, cluster_b in zip(x, y):
if cluster_a[0] != cluster_b[0]:
return False
for score_a, score_b in zip(cluster_a[1], cluster_b[1]):
if abs(score_a - score_b) > 0.001:
return False
        return True
def test_hierarchical(self):
hierarchical = dedupe.clustering.cluster
assert self.clusterEquals(list(hierarchical(self.dupes, 1)),
[])
assert self.clusterEquals(list(hierarchical(self.dupes, 0.5)),
[((1, 2, 3),
(0.778,
0.860,
0.778)),
((4, 5),
(0.720,
0.720)),
((10, 11),
(0.899,
0.899))])
print(hierarchical(self.dupes, 0.0))
assert self.clusterEquals(list(hierarchical(self.dupes, 0)),
[((1, 2, 3, 4, 5),
(0.526,
0.564,
0.542,
0.320,
0.623)),
((10, 11),
(0.899,
0.899))])
assert list(hierarchical(self.str_dupes, 1)) == []
assert list(zip(*hierarchical(self.str_dupes, 0.5)))[0] == ((b'1', b'2', b'3'),
(b'4', b'5'))
assert list(zip(*hierarchical(self.str_dupes, 0)))[0] == ((b'1', b'2', b'3', b'4', b'5'),)
def test_greedy_matching(self):
greedyMatch = dedupe.clustering.greedyMatching
bipartite_dupes = numpy.array(list(self.bipartite_dupes),
dtype=[('ids', int, 2),
('score', float)])
assert list(greedyMatch(bipartite_dupes)) == [((4, 6), 0.96),
((2, 7), 0.72),
((3, 8), 0.65),
((1, 5), 0.1)]
def test_gazette_matching(self):
gazetteMatch = dedupe.clustering.gazetteMatching
blocked_dupes = itertools.groupby(self.bipartite_dupes,
key=lambda x: x[0][0])
def to_numpy(x):
return numpy.array(x, dtype=[('ids', int, 2),
('score', float)])
blocked_dupes = [to_numpy(list(block)) for _, block in blocked_dupes]
target = [(((1, 6), 0.72), ((1, 8), 0.6)),
(((2, 7), 0.72), ((2, 8), 0.3)),
(((3, 6), 0.72), ((3, 8), 0.65)),
(((4, 6), 0.96), ((4, 5), 0.63)),
(((5, 8), 0.24),)]
assert [tuple((tuple(pair), score) for pair, score in each.tolist())
for each in gazetteMatch(blocked_dupes, n_matches=2)] == target
class PredicatesTest(unittest.TestCase):
def test_predicates_correctness(self):
field = '123 16th st'
assert dedupe.predicates.sortedAcronym(field) == ('11s',)
assert dedupe.predicates.wholeFieldPredicate(field) == ('123 16th st',)
assert dedupe.predicates.firstTokenPredicate(field) == ('123',)
assert dedupe.predicates.firstTokenPredicate('') == ()
assert dedupe.predicates.firstTokenPredicate('123/') == ('123',)
assert dedupe.predicates.tokenFieldPredicate(' ') == set([])
assert dedupe.predicates.tokenFieldPredicate(
field) == set(['123', '16th', 'st'])
assert dedupe.predicates.commonIntegerPredicate(
field) == set(['123', '16'])
assert dedupe.predicates.commonIntegerPredicate('foo') == set([])
assert dedupe.predicates.firstIntegerPredicate('foo') == ()
assert dedupe.predicates.firstIntegerPredicate('1foo') == ('1',)
assert dedupe.predicates.firstIntegerPredicate('f1oo') == ()
assert dedupe.predicates.sameThreeCharStartPredicate(field) == ('123',)
assert dedupe.predicates.sameThreeCharStartPredicate('12') == ('12', )
assert dedupe.predicates.commonFourGram('12') == set([])
assert dedupe.predicates.sameFiveCharStartPredicate(
field) == ('12316',)
assert dedupe.predicates.sameSevenCharStartPredicate(
field) == ('12316th',)
assert dedupe.predicates.nearIntegersPredicate(
field) == set(['15', '17', '16', '122', '123', '124'])
assert dedupe.predicates.commonFourGram(field) == set(
['1231', '2316', '316t', '16th', '6ths', 'thst'])
assert dedupe.predicates.commonSixGram(field) == set(
['12316t', '2316th', '316ths', '16thst'])
assert dedupe.predicates.initials(field, 12) == ('123 16th st',)
assert dedupe.predicates.initials(field, 7) == ('123 16t',)
assert dedupe.predicates.ngrams(
field, 3) == ['123', '23 ', '3 1', ' 16', '16t', '6th', 'th ', 'h s', ' st']
assert dedupe.predicates.commonTwoElementsPredicate(
(1, 2, 3)) == set(('1 2', '2 3'))
assert dedupe.predicates.commonTwoElementsPredicate((1,)) == set([])
assert dedupe.predicates.commonThreeElementsPredicate(
(1, 2, 3)) == set(('1 2 3',))
assert dedupe.predicates.commonThreeElementsPredicate((1,)) == set([])
assert dedupe.predicates.fingerprint(
'time sandwich') == (u'sandwichtime',)
assert dedupe.predicates.oneGramFingerprint(
'sandwich time') == (u'acdehimnstw',)
assert dedupe.predicates.twoGramFingerprint(
'sandwich time') == (u'anchdwhticimmendsatiwi',)
assert dedupe.predicates.twoGramFingerprint('1') == ()
assert dedupe.predicates.commonTwoTokens(
'foo bar') == set([u'foo bar'])
assert dedupe.predicates.commonTwoTokens('foo') == set([])
if __name__ == "__main__":
unittest.main()
|
from datetime import timedelta
import pytest
from homeassistant.components.homekit.const import (
ATTR_VALUE,
TYPE_FAUCET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_VALVE,
)
from homeassistant.components.homekit.type_switches import (
DockVacuum,
Outlet,
Switch,
Valve,
)
from homeassistant.components.vacuum import (
DOMAIN as VACUUM_DOMAIN,
SERVICE_RETURN_TO_BASE,
SERVICE_START,
STATE_CLEANING,
STATE_DOCKED,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_TYPE, STATE_OFF, STATE_ON
from homeassistant.core import split_entity_id
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed, async_mock_service
async def test_outlet_set_state(hass, hk_driver, events):
"""Test if Outlet accessory and HA are updated accordingly."""
entity_id = "switch.outlet_test"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Outlet(hass, hk_driver, "Outlet", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 7 # Outlet
assert acc.char_on.value is False
assert acc.char_outlet_in_use.value is True
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.char_on.value is True
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_on.value is False
# Set from HomeKit
call_turn_on = async_mock_service(hass, "switch", "turn_on")
call_turn_off = async_mock_service(hass, "switch", "turn_off")
await hass.async_add_executor_job(acc.char_on.client_update_value, True)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_on.client_update_value, False)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
@pytest.mark.parametrize(
"entity_id, attrs",
[
("automation.test", {}),
("input_boolean.test", {}),
("remote.test", {}),
("script.test", {}),
("switch.test", {}),
],
)
async def test_switch_set_state(hass, hk_driver, entity_id, attrs, events):
"""Test if accessory and HA are updated accordingly."""
domain = split_entity_id(entity_id)[0]
hass.states.async_set(entity_id, None, attrs)
await hass.async_block_till_done()
acc = Switch(hass, hk_driver, "Switch", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 8 # Switch
assert acc.activate_only is False
assert acc.char_on.value is False
hass.states.async_set(entity_id, STATE_ON, attrs)
await hass.async_block_till_done()
assert acc.char_on.value is True
hass.states.async_set(entity_id, STATE_OFF, attrs)
await hass.async_block_till_done()
assert acc.char_on.value is False
# Set from HomeKit
call_turn_on = async_mock_service(hass, domain, "turn_on")
call_turn_off = async_mock_service(hass, domain, "turn_off")
await hass.async_add_executor_job(acc.char_on.client_update_value, True)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_on.client_update_value, False)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
async def test_valve_set_state(hass, hk_driver, events):
"""Test if Valve accessory and HA are updated accordingly."""
entity_id = "switch.valve_test"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET})
await acc.run_handler()
await hass.async_block_till_done()
assert acc.category == 29 # Faucet
assert acc.char_valve_type.value == 3 # Water faucet
acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_SHOWER})
await acc.run_handler()
await hass.async_block_till_done()
assert acc.category == 30 # Shower
assert acc.char_valve_type.value == 2 # Shower head
acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_SPRINKLER})
await acc.run_handler()
await hass.async_block_till_done()
assert acc.category == 28 # Sprinkler
assert acc.char_valve_type.value == 1 # Irrigation
acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_VALVE})
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 29 # Faucet
assert acc.char_active.value == 0
assert acc.char_in_use.value == 0
assert acc.char_valve_type.value == 0 # Generic Valve
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.char_active.value == 1
assert acc.char_in_use.value == 1
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_active.value == 0
assert acc.char_in_use.value == 0
# Set from HomeKit
call_turn_on = async_mock_service(hass, "switch", "turn_on")
call_turn_off = async_mock_service(hass, "switch", "turn_off")
await hass.async_add_executor_job(acc.char_active.client_update_value, 1)
await hass.async_block_till_done()
assert acc.char_in_use.value == 1
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_active.client_update_value, 0)
await hass.async_block_till_done()
assert acc.char_in_use.value == 0
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
async def test_vacuum_set_state(hass, hk_driver, events):
"""Test if Vacuum accessory and HA are updated accordingly."""
entity_id = "vacuum.roomba"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = DockVacuum(hass, hk_driver, "DockVacuum", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 8 # Switch
assert acc.char_on.value == 0
hass.states.async_set(entity_id, STATE_CLEANING)
await hass.async_block_till_done()
assert acc.char_on.value == 1
hass.states.async_set(entity_id, STATE_DOCKED)
await hass.async_block_till_done()
assert acc.char_on.value == 0
# Set from HomeKit
call_start = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_START)
call_return_to_base = async_mock_service(
hass, VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE
)
await hass.async_add_executor_job(acc.char_on.client_update_value, 1)
await hass.async_block_till_done()
assert acc.char_on.value == 1
assert call_start
assert call_start[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_on.client_update_value, 0)
await hass.async_block_till_done()
assert acc.char_on.value == 0
assert call_return_to_base
assert call_return_to_base[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
async def test_reset_switch(hass, hk_driver, events):
"""Test if switch accessory is reset correctly."""
domain = "scene"
entity_id = "scene.test"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Switch(hass, hk_driver, "Switch", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.activate_only is True
assert acc.char_on.value is False
call_turn_on = async_mock_service(hass, domain, "turn_on")
call_turn_off = async_mock_service(hass, domain, "turn_off")
await hass.async_add_executor_job(acc.char_on.client_update_value, True)
await hass.async_block_till_done()
assert acc.char_on.value is True
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert acc.char_on.value is False
assert len(events) == 1
assert not call_turn_off
await hass.async_add_executor_job(acc.char_on.client_update_value, False)
await hass.async_block_till_done()
assert acc.char_on.value is False
assert len(events) == 1
async def test_reset_switch_reload(hass, hk_driver, events):
"""Test reset switch after script reload."""
entity_id = "script.test"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Switch(hass, hk_driver, "Switch", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.activate_only is False
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
assert acc.char_on.value is False
|
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .common import (
ATTR_CONFIG,
CONF_DIMMER,
CONF_DISCOVERY,
CONF_LIGHT,
CONF_STRIP,
CONF_SWITCH,
SmartDevices,
async_discover_devices,
get_static_devices,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "tplink"
TPLINK_HOST_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string})
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_LIGHT, default=[]): vol.All(
cv.ensure_list, [TPLINK_HOST_SCHEMA]
),
vol.Optional(CONF_SWITCH, default=[]): vol.All(
cv.ensure_list, [TPLINK_HOST_SCHEMA]
),
vol.Optional(CONF_STRIP, default=[]): vol.All(
cv.ensure_list, [TPLINK_HOST_SCHEMA]
),
vol.Optional(CONF_DIMMER, default=[]): vol.All(
cv.ensure_list, [TPLINK_HOST_SCHEMA]
),
vol.Optional(CONF_DISCOVERY, default=True): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
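# A minimal sketch of a matching configuration.yaml entry, assuming the
# CONF_* constants imported above map to the YAML keys shown; the host
# addresses are illustrative assumptions:
#
#   tplink:
#     discovery: true
#     light:
#       - host: 192.168.1.10
#     switch:
#       - host: 192.168.1.11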
async def async_setup(hass, config):
"""Set up the TP-Link component."""
conf = config.get(DOMAIN)
hass.data[DOMAIN] = {}
hass.data[DOMAIN][ATTR_CONFIG] = conf
if conf is not None:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigType):
"""Set up TPLink from a config entry."""
config_data = hass.data[DOMAIN].get(ATTR_CONFIG)
# These will contain the initialized devices
lights = hass.data[DOMAIN][CONF_LIGHT] = []
switches = hass.data[DOMAIN][CONF_SWITCH] = []
# Add static devices
static_devices = SmartDevices()
if config_data is not None:
static_devices = get_static_devices(config_data)
lights.extend(static_devices.lights)
switches.extend(static_devices.switches)
# Add discovered devices
if config_data is None or config_data[CONF_DISCOVERY]:
discovered_devices = await async_discover_devices(hass, static_devices)
lights.extend(discovered_devices.lights)
switches.extend(discovered_devices.switches)
forward_setup = hass.config_entries.async_forward_entry_setup
if lights:
_LOGGER.debug(
"Got %s lights: %s", len(lights), ", ".join([d.host for d in lights])
)
hass.async_create_task(forward_setup(config_entry, "light"))
if switches:
_LOGGER.debug(
"Got %s switches: %s", len(switches), ", ".join([d.host for d in switches])
)
hass.async_create_task(forward_setup(config_entry, "switch"))
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
forward_unload = hass.config_entries.async_forward_entry_unload
remove_lights = remove_switches = False
if hass.data[DOMAIN][CONF_LIGHT]:
remove_lights = await forward_unload(entry, "light")
if hass.data[DOMAIN][CONF_SWITCH]:
remove_switches = await forward_unload(entry, "switch")
if remove_lights or remove_switches:
hass.data[DOMAIN].clear()
return True
# We were not able to unload the platforms, either because there
# were none or one of the forward_unloads failed.
return False
|
import logging
import math
import random
import string
from ..core import compat
from ..core import driver
from ..core import exceptions
from nose import SkipTest # noqa
from nose import tools
logger = logging.getLogger(__name__)
class Driver(object):
def __init__(self, scheme=None, path=None, config=None):
self.scheme = scheme
self.path = path
self.config = config
    def setUp(self):
        # Load the requested driver
        storage = driver.fetch(self.scheme)
self._storage = storage(self.path, self.config)
def tearDown(self):
pass
def gen_random_string(self, length=16):
return ''.join([random.choice(string.ascii_uppercase + string.digits)
for x in range(length)]).lower()
def simplehelp(self, path, content, expected, size=0):
self._storage.put_content(path, content)
assert self._storage.get_content(path) == expected
assert self._storage.get_content(path) == expected
if size:
assert self._storage.get_size(path) == size
def unicodehelp(self, path, content, expected):
self._storage.put_unicode(path, content)
assert self._storage.get_unicode(path) == expected
assert self._storage.get_unicode(path) == expected
def jsonhelp(self, path, content, expected):
self._storage.put_json(path, content)
assert self._storage.get_json(path) == expected
assert self._storage.get_json(path) == expected
def test_exists_non_existent(self):
filename = self.gen_random_string()
assert not self._storage.exists(filename)
def test_exists_existent(self):
filename = self.gen_random_string()
self._storage.put_content(filename, b'')
assert self._storage.exists(filename)
# get / put
def test_write_read_1(self):
filename = self.gen_random_string()
content = b'a'
expected = b'a'
self.simplehelp(filename, content, expected, len(expected))
def test_write_read_2(self):
filename = self.gen_random_string()
content = b'\xc3\x9f'
expected = b'\xc3\x9f'
self.simplehelp(filename, content, expected, len(expected))
def test_write_read_3(self):
filename = self.gen_random_string()
content = u'ß'.encode('utf8')
expected = b'\xc3\x9f'
self.simplehelp(filename, content, expected, len(expected))
def test_write_read_4(self):
filename = self.gen_random_string()
content = 'ß'
if compat.is_py2:
content = content.decode('utf8')
content = content.encode('utf8')
expected = b'\xc3\x9f'
self.simplehelp(filename, content, expected, len(expected))
def test_write_read_5(self):
filename = self.gen_random_string()
content = self.gen_random_string().encode('utf8')
expected = content
self.simplehelp(filename, content, expected, len(expected))
def test_write_read_6(self):
filename = self.gen_random_string()
content = self.gen_random_string(1024 * 1024).encode('utf8')
expected = content
self.simplehelp(filename, content, expected, len(expected))
# get / put unicode
def test_unicode_1(self):
filename = self.gen_random_string()
content = 'a'
expected = u'a'
self.unicodehelp(filename, content, expected)
def test_unicode_2(self):
filename = self.gen_random_string()
content = b'\xc3\x9f'.decode('utf8')
expected = u'ß'
self.unicodehelp(filename, content, expected)
def test_unicode_3(self):
filename = self.gen_random_string()
content = u'ß'
expected = u'ß'
self.unicodehelp(filename, content, expected)
def test_unicode_4(self):
filename = self.gen_random_string()
content = 'ß'
if compat.is_py2:
content = content.decode('utf8')
expected = u'ß'
self.unicodehelp(filename, content, expected)
def test_unicode_5(self):
filename = self.gen_random_string()
content = self.gen_random_string()
expected = content
self.unicodehelp(filename, content, expected)
def test_unicode_6(self):
filename = self.gen_random_string()
content = self.gen_random_string(1024 * 1024)
expected = content
self.unicodehelp(filename, content, expected)
# JSON
def test_json(self):
filename = self.gen_random_string()
content = {u"ß": u"ß"}
expected = {u"ß": u"ß"}
self.jsonhelp(filename, content, expected)
# Removes
def test_remove_existent(self):
filename = self.gen_random_string()
content = self.gen_random_string().encode('utf8')
self._storage.put_content(filename, content)
self._storage.remove(filename)
assert not self._storage.exists(filename)
def test_remove_folder(self):
dirname = self.gen_random_string()
filename1 = self.gen_random_string()
filename2 = self.gen_random_string()
content = self.gen_random_string().encode('utf8')
self._storage.put_content('%s/%s' % (dirname, filename1), content)
self._storage.put_content('%s/%s' % (dirname, filename2), content)
self._storage.remove(dirname)
assert not self._storage.exists(filename1)
assert not self._storage.exists(filename2)
assert not self._storage.exists(dirname)
        # Check that the cache does not return stale content
try:
self._storage.get_content(filename1)
assert False
except Exception:
pass
try:
self._storage.get_content(filename2)
assert False
except Exception:
pass
@tools.raises(exceptions.FileNotFoundError)
def test_remove_inexistent(self):
filename = self.gen_random_string()
self._storage.remove(filename)
@tools.raises(exceptions.FileNotFoundError)
def test_read_inexistent(self):
filename = self.gen_random_string()
self._storage.get_content(filename)
@tools.raises(exceptions.FileNotFoundError)
def test_get_size_inexistent(self):
filename = self.gen_random_string()
self._storage.get_size(filename)
def test_stream(self):
filename = self.gen_random_string()
        # test payload (nominally 7MB; the size multiplier below is commented out)
content = self.gen_random_string(7).encode('utf8') # * 1024 * 1024
# test exists
io = compat.StringIO(content)
logger.debug("%s should NOT exists still" % filename)
assert not self._storage.exists(filename)
self._storage.stream_write(filename, io)
io.close()
logger.debug("%s should exist now" % filename)
assert self._storage.exists(filename)
# test read / write
data = compat.bytes()
for buf in self._storage.stream_read(filename):
data += buf
assert content == data
        # test bytes_range only if the storage backend supports it
if self._storage.supports_bytes_range:
b = random.randint(0, math.floor(len(content) / 2))
bytes_range = (b, random.randint(b + 1, len(content) - 1))
data = compat.bytes()
for buf in self._storage.stream_read(filename, bytes_range):
data += buf
expected_content = content[bytes_range[0]:bytes_range[1] + 1]
assert data == expected_content
# logger.debug("Content length is %s" % len(content))
# logger.debug("And retrieved content length should equal it: %s" %
# len(data))
# logger.debug("got content %s" % content)
# logger.debug("got data %s" % data)
# test remove
self._storage.remove(filename)
assert not self._storage.exists(filename)
@tools.raises(exceptions.FileNotFoundError)
def test_stream_read_inexistent(self):
filename = self.gen_random_string()
data = compat.bytes()
for buf in self._storage.stream_read(filename):
data += buf
@tools.raises(exceptions.FileNotFoundError)
def test_inexistent_list_directory(self):
notexist = self.gen_random_string()
iterator = self._storage.list_directory(notexist)
next(iterator)
    # XXX only elliptics returns StopIteration for now - though we should
    # probably return that for all
@tools.raises(exceptions.FileNotFoundError, StopIteration)
def test_empty_list_directory(self):
path = self.gen_random_string()
content = self.gen_random_string().encode('utf8')
self._storage.put_content(path, content)
iterator = self._storage.list_directory(path)
next(iterator)
def test_list_directory(self):
base = self.gen_random_string()
filename1 = self.gen_random_string()
filename2 = self.gen_random_string()
fb1 = '%s/%s' % (base, filename1)
fb2 = '%s/%s' % (base, filename2)
content = self.gen_random_string().encode('utf8')
self._storage.put_content(fb1, content)
self._storage.put_content(fb2, content)
assert sorted([fb1, fb2]
) == sorted(list(self._storage.list_directory(base)))
def test_list_directory_with_subdir(self):
if self.scheme == 's3':
raise SkipTest("Check GH #596.")
base = self.gen_random_string()
dir1 = self.gen_random_string()
dir2 = self.gen_random_string()
filename1 = self.gen_random_string()
filename2 = self.gen_random_string()
fd1 = '%s/%s' % (base, dir1)
fd2 = '%s/%s' % (base, dir2)
fb1 = '%s/%s' % (fd1, filename1)
fb2 = '%s/%s' % (fd2, filename2)
content = self.gen_random_string().encode('utf8')
self._storage.put_content(fb1, content)
self._storage.put_content(fb2, content)
assert sorted([fd1, fd2]
) == sorted(list(self._storage.list_directory(base)))
# def test_root_list_directory(self):
# fb1 = self.gen_random_string()
# fb2 = self.gen_random_string()
# content = self.gen_random_string()
# self._storage.put_content(fb1, content)
# self._storage.put_content(fb2, content)
# print(list(self._storage.list_directory()))
# assert sorted([fb1, fb2]
# ) == sorted(list(self._storage.list_directory()))
@tools.raises(exceptions.FileNotFoundError, StopIteration)
def test_empty_after_remove_list_directory(self):
base = self.gen_random_string()
filename1 = self.gen_random_string()
filename2 = self.gen_random_string()
fb1 = '%s/%s' % (base, filename1)
fb2 = '%s/%s' % (base, filename2)
content = self.gen_random_string().encode('utf8')
self._storage.put_content(fb1, content)
self._storage.put_content(fb2, content)
self._storage.remove(fb1)
self._storage.remove(fb2)
iterator = self._storage.list_directory(base)
next(iterator)
def test_paths(self):
namespace = 'namespace'
repository = 'repository'
tag = 'sometag'
image_id = 'imageid'
p = self._storage.images_list_path(namespace, repository)
assert not self._storage.exists(p)
p = self._storage.image_json_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_mark_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_checksum_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_layer_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_ancestry_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_files_path(image_id)
assert not self._storage.exists(p)
p = self._storage.image_diff_path(image_id)
assert not self._storage.exists(p)
p = self._storage.repository_path(namespace, repository)
assert not self._storage.exists(p)
p = self._storage.tag_path(namespace, repository)
assert not self._storage.exists(p)
p = self._storage.tag_path(namespace, repository, tag)
assert not self._storage.exists(p)
p = self._storage.repository_json_path(namespace, repository)
assert not self._storage.exists(p)
p = self._storage.repository_tag_json_path(namespace, repository, tag)
assert not self._storage.exists(p)
p = self._storage.index_images_path(namespace, repository)
assert not self._storage.exists(p)
p = self._storage.private_flag_path(namespace, repository)
assert not self._storage.exists(p)
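# A minimal sketch of how a concrete backend test is expected to reuse this
# mixin; the 'file' scheme and the temporary path are illustrative
# assumptions, not a registered driver:
#
#   import unittest
#
#   class TestLocalDriver(Driver, unittest.TestCase):
#       def __init__(self, *args, **kwargs):
#           Driver.__init__(self, scheme='file',
#                           path='/tmp/registry-test', config=None)
#           unittest.TestCase.__init__(self, *args, **kwargs)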
|
import abc
import typing
from pathlib import Path
import dill
import numpy as np
import keras
import keras.backend as K
import pandas as pd
import matchzoo
from matchzoo import DataGenerator
from matchzoo.engine import hyper_spaces
from matchzoo.engine.base_preprocessor import BasePreprocessor
from matchzoo.engine.base_metric import BaseMetric
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.param import Param
from matchzoo import tasks
class BaseModel(abc.ABC):
"""
Abstract base class of all MatchZoo models.
MatchZoo models are wrapped over keras models, and the actual keras model
built can be accessed by `model.backend`. `params` is a set of model
hyper-parameters that deterministically builds a model. In other words,
`params['model_class'](params=params)` of the same `params` always create
models with the same structure.
:param params: Model hyper-parameters. (default: return value from
:meth:`get_default_params`)
:param backend: A keras model as the model backend. Usually not passed as
an argument.
Example:
>>> BaseModel() # doctest: +ELLIPSIS
Traceback (most recent call last):
...
TypeError: Can't instantiate abstract class BaseModel ...
>>> class MyModel(BaseModel):
... def build(self):
... pass
>>> isinstance(MyModel(), BaseModel)
True
"""
BACKEND_WEIGHTS_FILENAME = 'backend_weights.h5'
PARAMS_FILENAME = 'params.dill'
def __init__(
self,
params: typing.Optional[ParamTable] = None,
backend: typing.Optional[keras.models.Model] = None
):
"""Init."""
self._params = params or self.get_default_params()
self._backend = backend
@classmethod
def get_default_params(
cls,
with_embedding=False,
with_multi_layer_perceptron=False
) -> ParamTable:
"""
Model default parameters.
The common usage is to instantiate :class:`matchzoo.engine.ModelParams`
        first, then set the model-specific parameters.
Examples:
>>> class MyModel(BaseModel):
... def build(self):
... print(self._params['num_eggs'], 'eggs')
... print('and', self._params['ham_type'])
...
... @classmethod
... def get_default_params(cls):
... params = ParamTable()
... params.add(Param('num_eggs', 512))
... params.add(Param('ham_type', 'Parma Ham'))
... return params
>>> my_model = MyModel()
>>> my_model.build()
512 eggs
and Parma Ham
Notice that all parameters must be serialisable for the entire model
to be serialisable. Therefore, it's strongly recommended to use python
native data types to store parameters.
:return: model parameters
"""
params = ParamTable()
params.add(Param(
name='model_class', value=cls,
desc="Model class. Used internally for save/load. "
"Changing this may cause unexpected behaviors."
))
params.add(Param(
name='input_shapes',
desc="Dependent on the model and data. Should be set manually."
))
params.add(Param(
name='task',
desc="Decides model output shape, loss, and metrics."
))
params.add(Param(
name='optimizer', value='adam',
))
if with_embedding:
params.add(Param(
name='with_embedding', value=True,
desc="A flag used help `auto` module. Shouldn't be changed."
))
params.add(Param(
name='embedding_input_dim',
desc='Usually equals vocab size + 1. Should be set manually.'
))
params.add(Param(
name='embedding_output_dim',
desc='Should be set manually.'
))
params.add(Param(
name='embedding_trainable', value=True,
desc='`True` to enable embedding layer training, '
'`False` to freeze embedding parameters.'
))
if with_multi_layer_perceptron:
params.add(Param(
name='with_multi_layer_perceptron', value=True,
desc="A flag of whether a multiple layer perceptron is used. "
"Shouldn't be changed."
))
params.add(Param(
name='mlp_num_units', value=128,
desc="Number of units in first `mlp_num_layers` layers.",
hyper_space=hyper_spaces.quniform(8, 256, 8)
))
params.add(Param(
name='mlp_num_layers', value=3,
desc="Number of layers of the multiple layer percetron.",
hyper_space=hyper_spaces.quniform(1, 6)
))
params.add(Param(
name='mlp_num_fan_out', value=64,
desc="Number of units of the layer that connects the multiple "
"layer percetron and the output.",
hyper_space=hyper_spaces.quniform(4, 128, 4)
))
params.add(Param(
name='mlp_activation_func', value='relu',
desc='Activation function used in the multiple '
'layer perceptron.'
))
return params
@classmethod
def get_default_preprocessor(cls) -> BasePreprocessor:
"""
Model default preprocessor.
The preprocessor's transform should produce a correctly shaped data
pack that can be used for training. Some extra configuration (e.g.
        setting `input_shapes` in :class:`matchzoo.models.DSSMModel`) may be
required on the user's end.
:return: Default preprocessor.
"""
return matchzoo.preprocessors.BasicPreprocessor()
@property
def params(self) -> ParamTable:
""":return: model parameters."""
return self._params
@params.setter
def params(self, val):
self._params = val
@property
def backend(self) -> keras.models.Model:
""":return model backend, a keras model instance."""
if not self._backend:
raise ValueError("Backend not found."
"Please build the model first.")
else:
return self._backend
@abc.abstractmethod
def build(self):
"""Build model, each subclass need to impelemnt this method."""
def compile(self):
"""
Compile model for training.
Only `keras` native metrics are compiled together with backend.
MatchZoo metrics are evaluated only through :meth:`evaluate`.
        Notice that `keras` counts `loss` as one of the metrics while MatchZoo
:class:`matchzoo.engine.BaseTask` does not.
Examples:
>>> from matchzoo import models
>>> model = models.Naive()
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.params['task'].metrics = ['mse', 'map']
>>> model.params['task'].metrics
['mse', mean_average_precision(0.0)]
>>> model.build()
>>> model.compile()
"""
self._backend.compile(optimizer=self._params['optimizer'],
loss=self._params['task'].loss)
def fit(
self,
x: typing.Union[np.ndarray, typing.List[np.ndarray], dict],
y: np.ndarray,
batch_size: int = 128,
epochs: int = 1,
verbose: int = 1,
**kwargs
) -> keras.callbacks.History:
"""
Fit the model.
See :meth:`keras.models.Model.fit` for more details.
:param x: input data.
:param y: labels.
:param batch_size: number of samples per gradient update.
:param epochs: number of epochs to train the model.
:param verbose: 0, 1, or 2. Verbosity mode. 0 = silent, 1 = verbose,
2 = one log line per epoch.
        Keyword arguments not listed above will be propagated to keras's fit.
:return: A `keras.callbacks.History` instance. Its history attribute
contains all information collected during training.
"""
return self._backend.fit(x=x, y=y,
batch_size=batch_size, epochs=epochs,
verbose=verbose, **kwargs)
def fit_generator(
self,
generator: matchzoo.DataGenerator,
epochs: int = 1,
verbose: int = 1,
**kwargs
) -> keras.callbacks.History:
"""
Fit the model with matchzoo `generator`.
See :meth:`keras.models.Model.fit_generator` for more details.
:param generator: A generator, an instance of
:class:`engine.DataGenerator`.
:param epochs: Number of epochs to train the model.
:param verbose: 0, 1, or 2. Verbosity mode. 0 = silent, 1 = verbose,
2 = one log line per epoch.
:return: A `keras.callbacks.History` instance. Its history attribute
contains all information collected during training.
"""
return self._backend.fit_generator(
generator=generator,
epochs=epochs,
verbose=verbose, **kwargs
)
def evaluate(
self,
x: typing.Dict[str, np.ndarray],
y: np.ndarray,
batch_size: int = 128
) -> typing.Dict[BaseMetric, float]:
"""
Evaluate the model.
:param x: Input data.
:param y: Labels.
        :param batch_size: Number of samples per batch used by `predict`.
(default: 128)
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> preprocessor = mz.preprocessors.NaivePreprocessor()
>>> data_pack = preprocessor.fit_transform(data_pack, verbose=0)
>>> m = mz.models.DenseBaseline()
>>> m.params['task'] = mz.tasks.Ranking()
>>> m.params['task'].metrics = [
... 'acc', 'mse', 'mae', 'ce',
... 'average_precision', 'precision', 'dcg', 'ndcg',
... 'mean_reciprocal_rank', 'mean_average_precision', 'mrr',
... 'map', 'MAP',
... mz.metrics.AveragePrecision(threshold=1),
... mz.metrics.Precision(k=2, threshold=2),
... mz.metrics.DiscountedCumulativeGain(k=2),
... mz.metrics.NormalizedDiscountedCumulativeGain(
... k=3, threshold=-1),
... mz.metrics.MeanReciprocalRank(threshold=2),
... mz.metrics.MeanAveragePrecision(threshold=3)
... ]
>>> m.guess_and_fill_missing_params(verbose=0)
>>> m.build()
>>> m.compile()
>>> x, y = data_pack.unpack()
>>> evals = m.evaluate(x, y)
>>> type(evals)
<class 'dict'>
"""
result = dict()
matchzoo_metrics, keras_metrics = self._separate_metrics()
y_pred = self.predict(x, batch_size)
for metric in keras_metrics:
metric_func = keras.metrics.get(metric)
result[metric] = K.eval(K.mean(
metric_func(K.variable(y), K.variable(y_pred))))
if matchzoo_metrics:
if not isinstance(self.params['task'], tasks.Ranking):
raise ValueError("Matchzoo metrics only works on ranking.")
for metric in matchzoo_metrics:
result[metric] = self._eval_metric_on_data_frame(
metric, x['id_left'], y, y_pred)
return result
def evaluate_generator(
self,
generator: DataGenerator,
batch_size: int = 128
) -> typing.Dict['BaseMetric', float]:
"""
Evaluate the model.
        :param generator: DataGenerator to evaluate.
:param batch_size: Batch size. (default: 128)
"""
x, y = generator[:]
return self.evaluate(x, y, batch_size=batch_size)
def _separate_metrics(self):
matchzoo_metrics = []
keras_metrics = []
for metric in self._params['task'].metrics:
if isinstance(metric, BaseMetric):
matchzoo_metrics.append(metric)
else:
keras_metrics.append(metric)
return matchzoo_metrics, keras_metrics
@classmethod
def _eval_metric_on_data_frame(
cls,
metric: BaseMetric,
id_left: typing.Union[list, np.array],
y: typing.Union[list, np.array],
y_pred: typing.Union[list, np.array]
):
eval_df = pd.DataFrame(data={
'id': id_left,
'true': y.squeeze(),
'pred': y_pred.squeeze()
})
assert isinstance(metric, BaseMetric)
val = eval_df.groupby(by='id').apply(
lambda df: metric(df['true'].values, df['pred'].values)
).mean()
return val
def predict(
self,
x: typing.Dict[str, np.ndarray],
batch_size=128
) -> np.ndarray:
"""
Generate output predictions for the input samples.
See :meth:`keras.models.Model.predict` for more details.
:param x: input data
:param batch_size: number of samples per gradient update
:return: numpy array(s) of predictions
"""
return self._backend.predict(x=x, batch_size=batch_size)
def save(self, dirpath: typing.Union[str, Path]):
"""
Save the model.
A saved model is represented as a directory with two files. One is a
        model parameters file saved by `dill`, and the other one is a model
h5 file saved by `keras`.
:param dirpath: directory path of the saved model
Example:
>>> import matchzoo as mz
>>> model = mz.models.Naive()
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
>>> model.save('temp-model')
>>> import shutil
>>> shutil.rmtree('temp-model')
"""
dirpath = Path(dirpath)
params_path = dirpath.joinpath(self.PARAMS_FILENAME)
weights_path = dirpath.joinpath(self.BACKEND_WEIGHTS_FILENAME)
if not dirpath.exists():
dirpath.mkdir(parents=True)
else:
            raise FileExistsError(f'{dirpath} already exists, failed to save.')
self._backend.save_weights(weights_path)
with open(params_path, mode='wb') as params_file:
dill.dump(self._params, params_file)
def get_embedding_layer(
self, name: str = 'embedding'
) -> keras.layers.Layer:
"""
Get the embedding layer.
All MatchZoo models with a single embedding layer set the embedding
layer name to `embedding`, and this method should return that layer.
:param name: Name of the embedding layer. (default: `embedding`)
"""
for layer in self._backend.layers:
if layer.name == name:
return layer
raise ValueError(f"Layer {name} not found. Initialize your embedding "
f"layer with `name='{name}'`.")
def load_embedding_matrix(
self,
embedding_matrix: np.ndarray,
name: str = 'embedding'
):
"""
Load an embedding matrix.
Load an embedding matrix into the model's embedding layer. The name
of the embedding layer is specified by `name`. For models with only
one embedding layer, set `name='embedding'` when creating the keras
        layer, and use the default `name` when loading the matrix. For models
with more than one embedding layers, initialize keras layer with
different layer names, and set `name` accordingly to load a matrix
to a chosen layer.
:param embedding_matrix: Embedding matrix to be loaded.
:param name: Name of the layer. (default: 'embedding')
"""
self.get_embedding_layer(name).set_weights([embedding_matrix])
def guess_and_fill_missing_params(self, verbose=1):
"""
Guess and fill missing parameters in :attr:`params`.
Use this method to automatically fill-in other hyper parameters.
This involves some guessing so the parameter it fills could be
wrong. For example, the default task is `Ranking`, and if we do not
        set it to `Classification` manually for data packs prepared for
classification, then the shape of the model output and the data will
mismatch.
:param verbose: Verbosity.
"""
self._params.get('task').set_default(tasks.Ranking(), verbose)
self._params.get('input_shapes').set_default([(30,), (30,)], verbose)
if 'with_embedding' in self._params:
self._params.get('embedding_input_dim').set_default(300, verbose)
self._params.get('embedding_output_dim').set_default(300, verbose)
def _set_param_default(self, name: str,
default_val: str, verbose: int = 0):
if self._params[name] is None:
self._params[name] = default_val
if verbose:
print(f"Parameter \"{name}\" set to {default_val}.")
def _make_inputs(self) -> list:
input_left = keras.layers.Input(
name='text_left',
shape=self._params['input_shapes'][0]
)
input_right = keras.layers.Input(
name='text_right',
shape=self._params['input_shapes'][1]
)
return [input_left, input_right]
def _make_output_layer(self) -> keras.layers.Layer:
""":return: a correctly shaped keras dense layer for model output."""
task = self._params['task']
if isinstance(task, tasks.Classification):
return keras.layers.Dense(task.num_classes, activation='softmax')
elif isinstance(task, tasks.Ranking):
return keras.layers.Dense(1, activation='linear')
else:
raise ValueError(f"{task} is not a valid task type."
f"Must be in `Ranking` and `Classification`.")
def _make_embedding_layer(
self,
name: str = 'embedding',
**kwargs
) -> keras.layers.Layer:
return keras.layers.Embedding(
self._params['embedding_input_dim'],
self._params['embedding_output_dim'],
trainable=self._params['embedding_trainable'],
name=name,
**kwargs
)
def _make_multi_layer_perceptron_layer(self) -> keras.layers.Layer:
# TODO: do not create new layers for a second call
if not self._params['with_multi_layer_perceptron']:
raise AttributeError(
                'Parameter `with_multi_layer_perceptron` not set.')
def _wrapper(x):
activation = self._params['mlp_activation_func']
for _ in range(self._params['mlp_num_layers']):
x = keras.layers.Dense(self._params['mlp_num_units'],
activation=activation)(x)
return keras.layers.Dense(self._params['mlp_num_fan_out'],
activation=activation)(x)
return _wrapper
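    # A minimal sketch of how a subclass's `build` might compose the helper
    # methods above; the pooling layers and the dot-product scoring are
    # illustrative assumptions, not the API of any concrete MatchZoo model:
    #
    #     def build(self):
    #         query, doc = self._make_inputs()
    #         embedding = self._make_embedding_layer()
    #         q_vec = keras.layers.GlobalAveragePooling1D()(embedding(query))
    #         d_vec = keras.layers.GlobalAveragePooling1D()(embedding(doc))
    #         score = keras.layers.Dot(axes=1)([q_vec, d_vec])
    #         x_out = self._make_output_layer()(score)
    #         self._backend = keras.models.Model(
    #             inputs=[query, doc], outputs=x_out)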
def load_model(dirpath: typing.Union[str, Path]) -> BaseModel:
"""
Load a model. The reverse function of :meth:`BaseModel.save`.
:param dirpath: directory path of the saved model
:return: a :class:`BaseModel` instance
Example:
>>> import matchzoo as mz
>>> model = mz.models.Naive()
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
>>> model.save('my-model')
>>> model.params.keys() == mz.load_model('my-model').params.keys()
True
>>> import shutil
>>> shutil.rmtree('my-model')
"""
dirpath = Path(dirpath)
params_path = dirpath.joinpath(BaseModel.PARAMS_FILENAME)
weights_path = dirpath.joinpath(BaseModel.BACKEND_WEIGHTS_FILENAME)
with open(params_path, mode='rb') as params_file:
params = dill.load(params_file)
model_instance = params['model_class'](params=params)
model_instance.build()
model_instance.compile()
model_instance.backend.load_weights(weights_path)
return model_instance
|
import os
import sys
import subprocess
import errno
from .base_classes import Environment, Command, Container, LatexObject, \
UnsafeCommand, SpecialArguments
from .package import Package
from .errors import CompilerError
from .utils import dumps_list, rm_temp_dir, NoEscape
import pylatex.config as cf
class Document(Environment):
r"""
A class that contains a full LaTeX document.
If needed, you can append stuff to the preamble or the packages.
For instance, if you need to use ``\maketitle`` you can add the title,
author and date commands to the preamble to make it work.
"""
def __init__(self, default_filepath='default_filepath', *,
documentclass='article', document_options=None, fontenc='T1',
inputenc='utf8', font_size="normalsize", lmodern=True,
textcomp=True, microtype=None, page_numbers=True, indent=None,
geometry_options=None, data=None):
r"""
Args
----
default_filepath: str
The default path to save files.
documentclass: str or `~.Command`
The LaTeX class of the document.
document_options: str or `list`
The options to supply to the documentclass
fontenc: str
The option for the fontenc package. If it is `None`, the fontenc
package will not be loaded at all.
inputenc: str
The option for the inputenc package. If it is `None`, the inputenc
package will not be loaded at all.
font_size: str
The font size to declare as normalsize
lmodern: bool
Use the Latin Modern font. This is a font that contains more glyphs
than the standard LaTeX font.
textcomp: bool
            Adds even more glyphs, for instance the Euro (€) sign.
        microtype: bool
            When `True`, loads the microtype package for improved typography.
            If it is `None` it will use the value from the active config.
page_numbers: bool
            Whether to number the pages; also loads the ``lastpage`` package so
            the last page of the document can be referenced.
indent: bool
Determines whether or not the document requires indentation. If it
            is `None` it will use the value from the active config, which is
`True` by default.
geometry_options: dict
The options to supply to the geometry package
data: list
Initial content of the document.
"""
self.default_filepath = default_filepath
if isinstance(documentclass, Command):
self.documentclass = documentclass
else:
self.documentclass = Command('documentclass',
arguments=documentclass,
options=document_options)
if indent is None:
indent = cf.active.indent
if microtype is None:
microtype = cf.active.microtype
# These variables are used by the __repr__ method
self._fontenc = fontenc
self._inputenc = inputenc
self._lmodern = lmodern
self._indent = indent
self._microtype = microtype
packages = []
if fontenc is not None:
packages.append(Package('fontenc', options=fontenc))
if inputenc is not None:
packages.append(Package('inputenc', options=inputenc))
if lmodern:
packages.append(Package('lmodern'))
if textcomp:
packages.append(Package('textcomp'))
if page_numbers:
packages.append(Package('lastpage'))
if not indent:
packages.append(Package('parskip'))
if microtype:
packages.append(Package('microtype'))
if geometry_options is not None:
packages.append(Package('geometry'))
# Make sure we don't add this options command for an empty list,
# because that breaks.
if geometry_options:
packages.append(Command(
'geometry',
arguments=SpecialArguments(geometry_options),
))
super().__init__(data=data)
# Usually the name is the class name, but if we create our own
# document class, \begin{document} gets messed up.
self._latex_name = 'document'
self.packages |= packages
self.variables = []
self.preamble = []
if not page_numbers:
self.change_document_style("empty")
# No colors have been added to the document yet
self.color = False
self.meta_data = False
self.append(Command(command=font_size))
def _propagate_packages(self):
r"""Propogate packages.
Make sure that all the packages included in the previous containers
are part of the full list of packages.
"""
super()._propagate_packages()
for item in (self.preamble):
if isinstance(item, LatexObject):
if isinstance(item, Container):
item._propagate_packages()
for p in item.packages:
self.packages.add(p)
def dumps(self):
"""Represent the document as a string in LaTeX syntax.
Returns
-------
str
"""
head = self.documentclass.dumps() + '%\n'
head += self.dumps_packages() + '%\n'
head += dumps_list(self.variables) + '%\n'
head += dumps_list(self.preamble) + '%\n'
return head + '%\n' + super().dumps()
def generate_tex(self, filepath=None):
"""Generate a .tex file for the document.
Args
----
filepath: str
The name of the file (without .tex), if this is not supplied the
default filepath attribute is used as the path.
"""
super().generate_tex(self._select_filepath(filepath))
def generate_pdf(self, filepath=None, *, clean=True, clean_tex=True,
compiler=None, compiler_args=None, silent=True):
"""Generate a pdf file from the document.
Args
----
filepath: str
The name of the file (without .pdf), if it is `None` the
``default_filepath`` attribute will be used.
clean: bool
            Whether non-pdf files that are created during compilation
should be removed.
clean_tex: bool
Also remove the generated tex file.
compiler: `str` or `None`
The name of the LaTeX compiler to use. If it is None, PyLaTeX will
            choose a fitting one on its own, starting with ``latexmk`` and
            falling back to ``pdflatex``.
compiler_args: `list` or `None`
Extra arguments that should be passed to the LaTeX compiler. If
this is None it defaults to an empty list.
silent: bool
Whether to hide compiler output
"""
if compiler_args is None:
compiler_args = []
# In case of newer python with the use of the cwd parameter
# one can avoid to physically change the directory
# to the destination folder
python_cwd_available = sys.version_info >= (3, 6)
filepath = self._select_filepath(filepath)
if not os.path.basename(filepath):
filepath = os.path.join(os.path.abspath(filepath),
'default_basename')
else:
filepath = os.path.abspath(filepath)
cur_dir = os.getcwd()
dest_dir = os.path.dirname(filepath)
if not python_cwd_available:
os.chdir(dest_dir)
self.generate_tex(filepath)
if compiler is not None:
compilers = ((compiler, []),)
else:
latexmk_args = ['--pdf']
compilers = (
('latexmk', latexmk_args),
('pdflatex', [])
)
main_arguments = ['--interaction=nonstopmode', filepath + '.tex']
check_output_kwargs = {}
if python_cwd_available:
check_output_kwargs = {'cwd': dest_dir}
os_error = None
for compiler, arguments in compilers:
command = [compiler] + arguments + compiler_args + main_arguments
try:
output = subprocess.check_output(command,
stderr=subprocess.STDOUT,
**check_output_kwargs)
except (OSError, IOError) as e:
# Use FileNotFoundError when python 2 is dropped
os_error = e
if os_error.errno == errno.ENOENT:
# If compiler does not exist, try next in the list
continue
raise
except subprocess.CalledProcessError as e:
# For all other errors print the output and raise the error
print(e.output.decode())
raise
else:
if not silent:
print(output.decode())
if clean:
try:
# Try latexmk cleaning first
subprocess.check_output(['latexmk', '-c', filepath],
stderr=subprocess.STDOUT,
**check_output_kwargs)
except (OSError, IOError, subprocess.CalledProcessError):
# Otherwise just remove some file extensions.
extensions = ['aux', 'log', 'out', 'fls',
'fdb_latexmk']
for ext in extensions:
try:
os.remove(filepath + '.' + ext)
except (OSError, IOError) as e:
# Use FileNotFoundError when python 2 is dropped
if e.errno != errno.ENOENT:
raise
rm_temp_dir()
if clean_tex:
os.remove(filepath + '.tex') # Remove generated tex file
# Compilation has finished, so no further compilers have to be
# tried
break
else:
# Notify user that none of the compilers worked.
raise(CompilerError(
                'No LaTeX compiler was found\n'
                'Either specify a LaTeX compiler '
                'or make sure you have latexmk or pdflatex installed.'
))
if not python_cwd_available:
os.chdir(cur_dir)
def _select_filepath(self, filepath):
"""Make a choice between ``filepath`` and ``self.default_filepath``.
Args
----
filepath: str
the filepath to be compared with ``self.default_filepath``
Returns
-------
str
The selected filepath
"""
if filepath is None:
return self.default_filepath
else:
if os.path.basename(filepath) == '':
filepath = os.path.join(filepath, os.path.basename(
self.default_filepath))
return filepath
def change_page_style(self, style):
r"""Alternate page styles of the current page.
Args
----
style: str
value to set for the page style of the current page
"""
self.append(Command("thispagestyle", arguments=style))
def change_document_style(self, style):
r"""Alternate page style for the entire document.
Args
----
style: str
value to set for the document style
"""
self.append(Command("pagestyle", arguments=style))
def add_color(self, name, model, description):
r"""Add a color that can be used throughout the document.
Args
----
name: str
Name to set for the color
model: str
The color model to use when defining the color
description: str
The values to use to define the color
"""
if self.color is False:
self.packages.append(Package("color"))
self.color = True
self.preamble.append(Command("definecolor", arguments=[name,
model,
description]))
def change_length(self, parameter, value):
r"""Change the length of a certain parameter to a certain value.
Args
----
parameter: str
The name of the parameter to change the length for
value: str
The value to set the parameter to
"""
self.preamble.append(UnsafeCommand('setlength',
arguments=[parameter, value]))
def set_variable(self, name, value):
r"""Add a variable which can be used inside the document.
Variables are defined before the preamble. If a variable with that name
has already been set, the new value will override it for future uses.
This is done by appending ``\renewcommand`` to the document.
Args
----
name: str
The name to set for the variable
value: str
The value to set for the variable
"""
name_arg = "\\" + name
variable_exists = False
for variable in self.variables:
if name_arg == variable.arguments._positional_args[0]:
variable_exists = True
break
if variable_exists:
renew = Command(command="renewcommand",
arguments=[NoEscape(name_arg), value])
self.append(renew)
else:
new = Command(command="newcommand",
arguments=[NoEscape(name_arg), value])
self.variables.append(new)
|
import sys
from twisted.internet import reactor, ssl
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
from autobahn.twisted.resource import WebSocketResource
class PingServerProtocol(WebSocketServerProtocol):
def doPing(self):
if self.run:
self.sendPing()
self.factory.pingsSent[self.peer] += 1
print("Ping sent to {} - {}".format(self.peer, self.factory.pingsSent[self.peer]))
reactor.callLater(1, self.doPing)
def onPong(self, payload):
self.factory.pongsReceived[self.peer] += 1
print("Pong received from {} - {}".format(self.peer, self.factory.pongsReceived[self.peer]))
def onOpen(self):
self.factory.pingsSent[self.peer] = 0
self.factory.pongsReceived[self.peer] = 0
self.run = True
self.doPing()
def onClose(self, wasClean, code, reason):
self.run = False
class PingServerFactory(WebSocketServerFactory):
def __init__(self, uri):
WebSocketServerFactory.__init__(self, uri)
self.pingsSent = {}
self.pongsReceived = {}
if __name__ == '__main__':
log.startLogging(sys.stdout)
contextFactory = ssl.DefaultOpenSSLContextFactory('keys/server.key',
'keys/server.crt')
factory = PingServerFactory("wss://127.0.0.1:9000")
factory.protocol = PingServerProtocol
listenWS(factory, contextFactory)
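    # The same factory is also exposed as a WebSocket resource mounted at /ws
    # on the HTTPS site below, so clients can connect on port 9000 or 8080.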
resource = WebSocketResource(factory)
root = File(".")
# note that Twisted uses bytes for URLs, which mostly affects Python3
root.putChild(b"ws", resource)
site = Site(root)
reactor.listenSSL(8080, site, contextFactory)
# reactor.listenTCP(8080, site)
reactor.run()
|
from typing import Any, Dict, Optional
from elgato import Elgato, ElgatoError, Info
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import CONF_SERIAL_NUMBER, DOMAIN # pylint: disable=unused-import
class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a Elgato Key Light config flow."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
if user_input is None:
return self._show_setup_form()
try:
info = await self._get_elgato_info(
user_input[CONF_HOST], user_input[CONF_PORT]
)
except ElgatoError:
return self._show_setup_form({"base": "cannot_connect"})
# Check if already configured
await self.async_set_unique_id(info.serial_number)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=info.serial_number,
data={
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
CONF_SERIAL_NUMBER: info.serial_number,
},
)
async def async_step_zeroconf(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle zeroconf discovery."""
if user_input is None:
return self.async_abort(reason="cannot_connect")
try:
info = await self._get_elgato_info(
user_input[CONF_HOST], user_input[CONF_PORT]
)
except ElgatoError:
return self.async_abort(reason="cannot_connect")
# Check if already configured
await self.async_set_unique_id(info.serial_number)
self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]})
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
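        # Stash the discovered connection details in the flow context so the
        # confirm step can create the entry without asking the user again.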
self.context.update(
{
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
CONF_SERIAL_NUMBER: info.serial_number,
"title_placeholders": {"serial_number": info.serial_number},
}
)
# Prepare configuration flow
return self._show_confirm_dialog()
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
async def async_step_zeroconf_confirm(
self, user_input: ConfigType = None
) -> Dict[str, Any]:
"""Handle a flow initiated by zeroconf."""
if user_input is None:
return self._show_confirm_dialog()
try:
info = await self._get_elgato_info(
self.context.get(CONF_HOST), self.context.get(CONF_PORT)
)
except ElgatoError:
return self.async_abort(reason="cannot_connect")
# Check if already configured
await self.async_set_unique_id(info.serial_number)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=self.context.get(CONF_SERIAL_NUMBER),
data={
CONF_HOST: self.context.get(CONF_HOST),
CONF_PORT: self.context.get(CONF_PORT),
CONF_SERIAL_NUMBER: self.context.get(CONF_SERIAL_NUMBER),
},
)
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
"""Show the setup form to the user."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_PORT, default=9123): int,
}
),
errors=errors or {},
)
def _show_confirm_dialog(self) -> Dict[str, Any]:
"""Show the confirm dialog to the user."""
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
serial_number = self.context.get(CONF_SERIAL_NUMBER)
return self.async_show_form(
step_id="zeroconf_confirm",
description_placeholders={"serial_number": serial_number},
)
async def _get_elgato_info(self, host: str, port: int) -> Info:
"""Get device information from an Elgato Key Light device."""
session = async_get_clientsession(self.hass)
elgato = Elgato(
host,
port=port,
session=session,
)
return await elgato.info()
|
import logging
from pyhap.const import (
CATEGORY_FAUCET,
CATEGORY_OUTLET,
CATEGORY_SHOWER_HEAD,
CATEGORY_SPRINKLER,
CATEGORY_SWITCH,
)
from homeassistant.components.switch import DOMAIN
from homeassistant.components.vacuum import (
DOMAIN as VACUUM_DOMAIN,
SERVICE_RETURN_TO_BASE,
SERVICE_START,
STATE_CLEANING,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_TYPE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
from homeassistant.core import callback, split_entity_id
from homeassistant.helpers.event import call_later
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_ACTIVE,
CHAR_IN_USE,
CHAR_ON,
CHAR_OUTLET_IN_USE,
CHAR_VALVE_TYPE,
SERV_OUTLET,
SERV_SWITCH,
SERV_VALVE,
TYPE_FAUCET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_VALVE,
)
_LOGGER = logging.getLogger(__name__)
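# Maps the configured valve type to a tuple of
# (HomeKit accessory category, HAP ValveType characteristic value).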
VALVE_TYPE = {
TYPE_FAUCET: (CATEGORY_FAUCET, 3),
TYPE_SHOWER: (CATEGORY_SHOWER_HEAD, 2),
TYPE_SPRINKLER: (CATEGORY_SPRINKLER, 1),
TYPE_VALVE: (CATEGORY_FAUCET, 0),
}
@TYPES.register("Outlet")
class Outlet(HomeAccessory):
"""Generate an Outlet accessory."""
def __init__(self, *args):
"""Initialize an Outlet accessory object."""
super().__init__(*args, category=CATEGORY_OUTLET)
state = self.hass.states.get(self.entity_id)
serv_outlet = self.add_preload_service(SERV_OUTLET)
self.char_on = serv_outlet.configure_char(
CHAR_ON, value=False, setter_callback=self.set_state
)
self.char_outlet_in_use = serv_outlet.configure_char(
CHAR_OUTLET_IN_USE, value=True
)
# Set the state so it is in sync on initial
# GET to avoid an event storm after homekit startup
self.async_update_state(state)
def set_state(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
params = {ATTR_ENTITY_ID: self.entity_id}
service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
self.call_service(DOMAIN, service, params)
@callback
def async_update_state(self, new_state):
"""Update switch state after state changed."""
current_state = new_state.state == STATE_ON
if self.char_on.value is not current_state:
_LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state)
self.char_on.set_value(current_state)
@TYPES.register("Switch")
class Switch(HomeAccessory):
"""Generate a Switch accessory."""
def __init__(self, *args):
"""Initialize a Switch accessory object."""
super().__init__(*args, category=CATEGORY_SWITCH)
self._domain = split_entity_id(self.entity_id)[0]
state = self.hass.states.get(self.entity_id)
        self.activate_only = self.is_activate(state)
serv_switch = self.add_preload_service(SERV_SWITCH)
self.char_on = serv_switch.configure_char(
CHAR_ON, value=False, setter_callback=self.set_state
)
# Set the state so it is in sync on initial
# GET to avoid an event storm after homekit startup
self.async_update_state(state)
def is_activate(self, state):
"""Check if entity is activate only."""
if self._domain == "scene":
return True
return False
def reset_switch(self, *args):
"""Reset switch to emulate activate click."""
_LOGGER.debug("%s: Reset switch to off", self.entity_id)
if self.char_on.value is not False:
self.char_on.set_value(False)
def set_state(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
if self.activate_only and not value:
_LOGGER.debug("%s: Ignoring turn_off call", self.entity_id)
return
params = {ATTR_ENTITY_ID: self.entity_id}
service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
self.call_service(self._domain, service, params)
if self.activate_only:
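            # Activate-only entities (e.g. scenes) have no "off" state, so flip
            # the HomeKit switch back to off shortly after it is triggered.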
call_later(self.hass, 1, self.reset_switch)
@callback
def async_update_state(self, new_state):
"""Update switch state after state changed."""
self.activate_only = self.is_activate(new_state)
if self.activate_only:
_LOGGER.debug(
"%s: Ignore state change, entity is activate only", self.entity_id
)
return
current_state = new_state.state == STATE_ON
if self.char_on.value is not current_state:
_LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state)
self.char_on.set_value(current_state)
@TYPES.register("DockVacuum")
class DockVacuum(Switch):
"""Generate a Switch accessory."""
def set_state(self, value):
"""Move switch state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
params = {ATTR_ENTITY_ID: self.entity_id}
service = SERVICE_START if value else SERVICE_RETURN_TO_BASE
self.call_service(VACUUM_DOMAIN, service, params)
@callback
def async_update_state(self, new_state):
"""Update switch state after state changed."""
current_state = new_state.state in (STATE_CLEANING, STATE_ON)
if self.char_on.value is not current_state:
_LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state)
self.char_on.set_value(current_state)
@TYPES.register("Valve")
class Valve(HomeAccessory):
"""Generate a Valve accessory."""
def __init__(self, *args):
"""Initialize a Valve accessory object."""
super().__init__(*args)
state = self.hass.states.get(self.entity_id)
valve_type = self.config[CONF_TYPE]
self.category = VALVE_TYPE[valve_type][0]
serv_valve = self.add_preload_service(SERV_VALVE)
self.char_active = serv_valve.configure_char(
CHAR_ACTIVE, value=False, setter_callback=self.set_state
)
self.char_in_use = serv_valve.configure_char(CHAR_IN_USE, value=False)
self.char_valve_type = serv_valve.configure_char(
CHAR_VALVE_TYPE, value=VALVE_TYPE[valve_type][1]
)
# Set the state so it is in sync on initial
# GET to avoid an event storm after homekit startup
self.async_update_state(state)
def set_state(self, value):
"""Move value state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
self.char_in_use.set_value(value)
params = {ATTR_ENTITY_ID: self.entity_id}
service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
self.call_service(DOMAIN, service, params)
@callback
def async_update_state(self, new_state):
"""Update switch state after state changed."""
current_state = 1 if new_state.state == STATE_ON else 0
if self.char_active.value != current_state:
_LOGGER.debug("%s: Set active state to %s", self.entity_id, current_state)
self.char_active.set_value(current_state)
if self.char_in_use.value != current_state:
_LOGGER.debug("%s: Set in_use state to %s", self.entity_id, current_state)
self.char_in_use.set_value(current_state)
|
import os
import os.path as op
import shutil
import zipfile
from sys import stdout
import numpy as np
from ...channels import make_standard_montage
from ...epochs import EpochsArray
from ...io.meas_info import create_info
from ...utils import _fetch_file, _check_pandas_installed, verbose
from ..utils import _get_path, _do_path_update
# root url for LIMO files
root_url = 'https://files.de-1.osf.io/v1/resources/52rea/providers/osfstorage/'
# subject identifier
subject_ids = {'S1': '5cde823c8d6e050018595862',
'S2': '5cde825e23fec40017e0561a',
'S3': '5cf7eedee650a2001ad560f2',
'S4': '5cf7eee7d4c7d700193defcb',
'S5': '5cf7eeece650a20017d5b153',
'S6': '5cf8300fe650a20018d59cef',
'S7': '5cf83018a542b8001bc7c75f',
'S8': '5cf8301ea542b8001ac7cc47',
'S9': '5cf830243a4d9500178a692b',
'S10': '5cf83029e650a20017d600b1',
'S11': '5cf834bfa542b8001bc7cae0',
'S12': '5cf834c53a4d9500188a6311',
'S13': '5cf834caa542b8001cc8149b',
'S14': '5cf834cf3a4d9500178a6c6c',
'S15': '5cf834d63a4d9500168ae5d6',
'S16': '5cf834dbe650a20018d5a123',
'S17': '5cf834e23a4d9500198a911f',
'S18': '5cf834e73a4d9500198a9122'}
@verbose
def data_path(subject, path=None, force_update=False, update_path=None,
verbose=None):
"""Get path to local copy of LIMO dataset URL.
This is a low-level function useful for getting a local copy of the
remote LIMO dataset [1]_. The complete dataset is available at
datashare.is.ed.ac.uk/ [2]_.
Parameters
----------
subject : int
        Subject to download. Must be of class int in the range from 1 to 18.
path : None | str
Location of where to look for the LIMO data storing directory.
If None, the environment variable or config parameter
``MNE_DATASETS_LIMO_PATH`` is used. If it doesn't exist, the
"~/mne_data" directory is used. If the LIMO dataset
is not found under the given path, the data
will be automatically downloaded to the specified folder.
force_update : bool
Force update of the dataset even if a local copy exists.
update_path : bool | None
If True, set the MNE_DATASETS_LIMO_PATH in mne-python
config to the given path. If None, the user is prompted.
%(verbose)s
Returns
-------
path : str
Local path to the given data file.
Notes
-----
For example, one could do:
>>> from mne.datasets import limo
>>> limo.data_path(subject=1, path=os.getenv('HOME') + '/datasets') # doctest:+SKIP
This would download the LIMO data file to the 'datasets' folder,
and prompt the user to save the 'datasets' path to the mne-python config,
if it isn't there already.
References
----------
.. [1] Guillaume, Rousselet. (2016). LIMO EEG Dataset, [dataset].
University of Edinburgh, Centre for Clinical Brain Sciences.
https://doi.org/10.7488/ds/1556.
.. [2] https://datashare.is.ed.ac.uk/handle/10283/2189?show=full
""" # noqa: E501
# set destination path for download
key = 'MNE_DATASETS_LIMO_PATH'
name = 'LIMO'
path = _get_path(path, key, name)
limo_dir = op.join(path, 'MNE-limo-data')
subject_id = 'S%s' % subject
destination = op.join(limo_dir, '%s.zip') % subject_id
# url for subject in question
url = root_url + subject_ids[subject_id] + '/?zip='
# check if LIMO directory exists; update if desired
if not op.isdir(limo_dir) or force_update:
if op.isdir(limo_dir):
shutil.rmtree(limo_dir)
if not op.isdir(limo_dir):
os.makedirs(limo_dir)
# check if subject in question exists
if not op.isdir(op.join(limo_dir, subject_id)):
os.makedirs(op.join(limo_dir, subject_id))
_fetch_file(url, destination, print_destination=False)
# check if download is a zip-folder
if any(group.endswith(".zip") for group in op.splitext(destination)):
if not op.isdir(op.join(limo_dir, subject_id)):
os.makedirs(op.join(limo_dir, subject_id))
with zipfile.ZipFile(destination) as z1:
files = [op.join(limo_dir, file) for file in z1.namelist()]
stdout.write('Decompressing %g files from\n'
'"%s" ...' % (len(files), destination))
z1.extractall(op.join(limo_dir, subject_id))
stdout.write(' [done]\n')
z1.close()
os.remove(destination)
# update path if desired
_do_path_update(path, update_path, key, name)
return limo_dir
@verbose
def load_data(subject, path=None, force_update=False, update_path=None,
verbose=None):
"""Fetch subjects epochs data for the LIMO data set.
Parameters
----------
subject : int
        Subject to use. Must be of class int in the range from 1 to 18.
path : str
Location of where to look for the LIMO data.
If None, the environment variable or config parameter
``MNE_DATASETS_LIMO_PATH`` is used. If it doesn't exist, the
"~/mne_data" directory is used.
force_update : bool
Force update of the dataset even if a local copy exists.
update_path : bool | None
If True, set the MNE_DATASETS_LIMO_PATH in mne-python
config to the given path. If None, the user is prompted.
%(verbose)s
Returns
-------
epochs : instance of Epochs
The epochs.
""" # noqa: E501
pd = _check_pandas_installed()
from scipy.io import loadmat
# subject in question
if isinstance(subject, int) and 1 <= subject <= 18:
subj = 'S%i' % subject
else:
raise ValueError('subject must be an int in the range from 1 to 18')
# set limo path, download and decompress files if not found
limo_path = data_path(subject, path, force_update, update_path)
# -- 1) import .mat files
# epochs info
fname_info = op.join(limo_path, subj, 'LIMO.mat')
data_info = loadmat(fname_info)
# number of epochs per condition
design = data_info['LIMO']['design'][0][0]['X'][0][0]
data_info = data_info['LIMO']['data'][0][0][0][0]
# epochs data
fname_eeg = op.join(limo_path, subj, 'Yr.mat')
data = loadmat(fname_eeg)
# -- 2) get epochs information from structure
# sampling rate
sfreq = data_info['sampling_rate'][0][0]
# tmin and tmax
tmin = data_info['start'][0][0]
# create events matrix
sample = np.arange(len(design))
prev_id = np.zeros(len(design))
ev_id = design[:, 1]
events = np.array([sample, prev_id, ev_id]).astype(int).T
# event ids, such that Face B == 1
event_id = {'Face/A': 0, 'Face/B': 1}
# -- 3) extract channel labels from LIMO structure
# get individual labels
labels = data_info['chanlocs']['labels']
labels = [label for label, *_ in labels[0]]
# get montage
montage = make_standard_montage('biosemi128')
# add external electrodes (e.g., eogs)
ch_names = montage.ch_names + ['EXG1', 'EXG2', 'EXG3', 'EXG4']
# match individual labels to labels in montage
found_inds = [ind for ind, name in enumerate(ch_names) if name in labels]
missing_chans = [name for name in ch_names if name not in labels]
assert labels == [ch_names[ind] for ind in found_inds]
# -- 4) extract data from subjects Yr structure
# data is stored as channels x time points x epochs
# data['Yr'].shape # <-- see here
    # transpose to epochs x channels x time points
data = np.transpose(data['Yr'], (2, 0, 1))
# initialize data in expected order
temp_data = np.empty((data.shape[0], len(ch_names), data.shape[2]))
# copy over the non-missing data
for source, target in enumerate(found_inds):
# avoid copy when fancy indexing
temp_data[:, target, :] = data[:, source, :]
# data to V (to match MNE's format)
data = temp_data / 1e6
# create list containing channel types
types = ["eog" if ch.startswith("EXG") else "eeg" for ch in ch_names]
# -- 5) Create custom info for mne epochs structure
# create info
info = create_info(ch_names, sfreq, types).set_montage(montage)
# get faces and noise variables from design matrix
event_list = list(events[:, 2])
faces = ['B' if event else 'A' for event in event_list]
noise = list(design[:, 2])
# create epochs metadata
metadata = {'face': faces, 'phase-coherence': noise}
metadata = pd.DataFrame(metadata)
# -- 6) Create custom epochs array
epochs = EpochsArray(data, info, events, tmin, event_id, metadata=metadata)
epochs.info['bads'] = missing_chans # missing channels are marked as bad.
return epochs
|
import sys
from django.core.management.base import BaseCommand
from django.utils.encoding import smart_str
from zinnia.models.entry import Entry
from zinnia.signals import disconnect_entry_signals
class Command(BaseCommand):
"""
Command for re-counting the discussions on entries
in case of problems.
"""
help = 'Refresh all the discussion counts on entries'
def write_out(self, message, verbosity_level=1):
"""
        Convenience method for outputting.
"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush()
def handle(self, *args, **options):
disconnect_entry_signals()
self.verbosity = int(options.get('verbosity', 1))
for entry in Entry.objects.all():
self.write_out('Processing %s\n' % entry.title)
changed = False
comment_count = entry.comments.count()
pingback_count = entry.pingbacks.count()
trackback_count = entry.trackbacks.count()
if entry.comment_count != comment_count:
changed = True
self.write_out('- %s comments found, %s before\n' % (
comment_count, entry.comment_count))
entry.comment_count = comment_count
if entry.pingback_count != pingback_count:
changed = True
self.write_out('- %s pingbacks found, %s before\n' % (
pingback_count, entry.pingback_count))
entry.pingback_count = pingback_count
if entry.trackback_count != trackback_count:
changed = True
self.write_out('- %s trackbacks found, %s before\n' % (
trackback_count, entry.trackback_count))
entry.trackback_count = trackback_count
if changed:
self.write_out('- Updating...\n')
entry.save()
|
from roomba import RoombaConnectionError
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.roomba.const import (
CONF_BLID,
CONF_CONTINUOUS,
CONF_DELAY,
DOMAIN,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from tests.async_mock import MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
VALID_CONFIG = {CONF_HOST: "1.2.3.4", CONF_BLID: "blid", CONF_PASSWORD: "password"}
VALID_YAML_CONFIG = {
CONF_HOST: "1.2.3.4",
CONF_BLID: "blid",
CONF_PASSWORD: "password",
CONF_CONTINUOUS: True,
CONF_DELAY: 1,
}
def _create_mocked_roomba(
roomba_connected=None, master_state=None, connect=None, disconnect=None
):
mocked_roomba = MagicMock()
type(mocked_roomba).roomba_connected = PropertyMock(return_value=roomba_connected)
type(mocked_roomba).master_state = PropertyMock(return_value=master_state)
type(mocked_roomba).connect = MagicMock(side_effect=connect)
type(mocked_roomba).disconnect = MagicMock(side_effect=disconnect)
return mocked_roomba
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
mocked_roomba = _create_mocked_roomba(
roomba_connected=True,
master_state={"state": {"reported": {"name": "myroomba"}}},
)
with patch(
"homeassistant.components.roomba.config_flow.Roomba",
return_value=mocked_roomba,
), patch(
"homeassistant.components.roomba.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.roomba.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
VALID_CONFIG,
)
await hass.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "myroomba"
assert result2["result"].unique_id == "blid"
assert result2["data"] == {
CONF_BLID: "blid",
CONF_CONTINUOUS: True,
CONF_DELAY: 1,
CONF_HOST: "1.2.3.4",
CONF_PASSWORD: "password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mocked_roomba = _create_mocked_roomba(
connect=RoombaConnectionError,
roomba_connected=True,
master_state={"state": {"reported": {"name": "myroomba"}}},
)
with patch(
"homeassistant.components.roomba.config_flow.Roomba",
return_value=mocked_roomba,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
VALID_CONFIG,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_import(hass):
"""Test we can import yaml config."""
mocked_roomba = _create_mocked_roomba(
roomba_connected=True,
master_state={"state": {"reported": {"name": "imported_roomba"}}},
)
with patch(
"homeassistant.components.roomba.config_flow.Roomba",
return_value=mocked_roomba,
), patch(
"homeassistant.components.roomba.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.roomba.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=VALID_YAML_CONFIG.copy(),
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == "blid"
assert result["title"] == "imported_roomba"
assert result["data"] == {
CONF_BLID: "blid",
CONF_CONTINUOUS: True,
CONF_DELAY: 1,
CONF_HOST: "1.2.3.4",
CONF_PASSWORD: "password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import_dupe(hass):
"""Test we get abort on duplicate import."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(domain=DOMAIN, data=VALID_CONFIG, unique_id="blid")
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=VALID_YAML_CONFIG.copy(),
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
|
import numpy as np
from .unit import Unit
class MatchingHistogram(Unit):
"""
MatchingHistogramUnit Class.
:param bin_size: The number of bins of the matching histogram.
:param embedding_matrix: The word embedding matrix applied to calculate
the matching histogram.
:param normalize: Boolean, normalize the embedding or not.
    :param mode: The type of the histogram; it should be one of 'CH', 'NH',
        or 'LCH'.
Examples:
>>> embedding_matrix = np.array([[1.0, -1.0], [1.0, 2.0], [1.0, 3.0]])
>>> text_left = [0, 1]
>>> text_right = [1, 2]
>>> histogram = MatchingHistogram(3, embedding_matrix, True, 'CH')
>>> histogram.transform([text_left, text_right])
[[3.0, 1.0, 1.0], [1.0, 2.0, 2.0]]
"""
def __init__(self, bin_size: int = 30, embedding_matrix=None,
normalize=True, mode: str = 'LCH'):
"""The constructor."""
self._hist_bin_size = bin_size
self._embedding_matrix = embedding_matrix
if normalize:
self._normalize_embedding()
self._mode = mode
def _normalize_embedding(self):
"""Normalize the embedding matrix."""
l2_norm = np.sqrt(
(self._embedding_matrix * self._embedding_matrix).sum(axis=1)
)
self._embedding_matrix = \
self._embedding_matrix / l2_norm[:, np.newaxis]
def transform(self, input_: list) -> list:
"""Transform the input text."""
text_left, text_right = input_
matching_hist = np.ones((len(text_left), self._hist_bin_size),
dtype=np.float32)
embed_left = self._embedding_matrix[text_left]
embed_right = self._embedding_matrix[text_right]
matching_matrix = embed_left.dot(np.transpose(embed_right))
for (i, j), value in np.ndenumerate(matching_matrix):
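            # Map each similarity value from [-1, 1] onto a bin index in
            # [0, bin_size - 1] and count it in that bin.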
bin_index = int((value + 1.) / 2. * (self._hist_bin_size - 1.))
matching_hist[i][bin_index] += 1.0
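        # 'CH' keeps the raw counts, 'NH' normalizes each row to sum to one,
        # and 'LCH' takes the log of the (one-initialized) counts.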
if self._mode == 'NH':
matching_sum = matching_hist.sum(axis=1)
matching_hist = matching_hist / matching_sum[:, np.newaxis]
elif self._mode == 'LCH':
matching_hist = np.log(matching_hist)
return matching_hist.tolist()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from compare_gan.architectures import abstract_arch
from compare_gan.architectures import arch_ops as ops
from six.moves import range
import tensorflow as tf
def unpool(value, name="unpool"):
"""Unpooling operation.
N-dimensional version of the unpooling operation from
https://www.robots.ox.ac.uk/~vgg/rg/papers/Dosovitskiy_Learning_to_Generate_2015_CVPR_paper.pdf
Taken from: https://github.com/tensorflow/tensorflow/issues/2169
Args:
value: a Tensor of shape [b, d0, d1, ..., dn, ch]
name: name of the op
Returns:
A Tensor of shape [b, 2*d0, 2*d1, ..., 2*dn, ch]
"""
with tf.name_scope(name) as scope:
sh = value.get_shape().as_list()
dim = len(sh[1:-1])
out = (tf.reshape(value, [-1] + sh[-dim:]))
for i in range(dim, 0, -1):
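      # Concatenate zeros along each axis; the final reshape interleaves them
      # so every spatial dimension is doubled, with zeros in the new positions.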
out = tf.concat([out, tf.zeros_like(out)], i)
out_size = [-1] + [s * 2 for s in sh[1:-1]] + [sh[-1]]
out = tf.reshape(out, out_size, name=scope)
return out
def validate_image_inputs(inputs, validate_power2=True):
inputs.get_shape().assert_has_rank(4)
inputs.get_shape()[1:3].assert_is_fully_defined()
if inputs.get_shape()[1] != inputs.get_shape()[2]:
raise ValueError("Input tensor does not have equal width and height: ",
inputs.get_shape()[1:3])
width = inputs.get_shape().as_list()[1]
if validate_power2 and math.log(width, 2) != int(math.log(width, 2)):
raise ValueError("Input tensor `width` is not a power of 2: ", width)
class ResNetBlock(object):
"""ResNet block with options for various normalizations."""
def __init__(self,
name,
in_channels,
out_channels,
scale,
is_gen_block,
layer_norm=False,
spectral_norm=False,
batch_norm=None):
"""Constructs a new ResNet block.
Args:
      name: Scope name for the ResNet block.
in_channels: Integer, the input channel size.
out_channels: Integer, the output channel size.
scale: Whether or not to scale up or down, choose from "up", "down" or
"none".
is_gen_block: Boolean, deciding whether this is a generator or
discriminator block.
layer_norm: Apply layer norm before both convolutions.
spectral_norm: Use spectral normalization for all weights.
batch_norm: Function for batch normalization.
"""
assert scale in ["up", "down", "none"]
self._name = name
self._in_channels = in_channels
self._out_channels = out_channels
self._scale = scale
# In SN paper, if they upscale in generator they do this in the first conv.
# For discriminator downsampling happens after second conv.
self._scale1 = scale if is_gen_block else "none"
self._scale2 = "none" if is_gen_block else scale
self._layer_norm = layer_norm
self._spectral_norm = spectral_norm
self.batch_norm = batch_norm
def __call__(self, inputs, z, y, is_training):
return self.apply(inputs=inputs, z=z, y=y, is_training=is_training)
def _get_conv(self, inputs, in_channels, out_channels, scale, suffix,
kernel_size=(3, 3), strides=(1, 1)):
"""Performs a convolution in the ResNet block."""
if inputs.get_shape().as_list()[-1] != in_channels:
raise ValueError("Unexpected number of input channels.")
if scale not in ["up", "down", "none"]:
raise ValueError(
"Scale: got {}, expected 'up', 'down', or 'none'.".format(scale))
outputs = inputs
if scale == "up":
outputs = unpool(outputs)
outputs = ops.conv2d(
outputs,
output_dim=out_channels,
k_h=kernel_size[0], k_w=kernel_size[1],
d_h=strides[0], d_w=strides[1],
use_sn=self._spectral_norm,
name="{}_{}".format("same" if scale == "none" else scale, suffix))
if scale == "down":
outputs = tf.nn.pool(outputs, [2, 2], "AVG", "SAME", strides=[2, 2],
name="pool_%s" % suffix)
return outputs
def apply(self, inputs, z, y, is_training):
""""ResNet block containing possible down/up sampling, shared for G / D.
Args:
inputs: a 3d input tensor of feature map.
z: the latent vector for potential self-modulation. Can be None if use_sbn
is set to False.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
      is_training: boolean, whether or not this is called during training.
Returns:
output: a 3d output tensor of feature map.
"""
if inputs.get_shape().as_list()[-1] != self._in_channels:
raise ValueError("Unexpected number of input channels.")
with tf.variable_scope(self._name, values=[inputs]):
output = inputs
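      # Pre-activation residual block: the shortcut gets its own (possibly
      # rescaled) convolution, while the main path applies norm -> ReLU -> conv
      # twice before being added back onto the shortcut.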
shortcut = self._get_conv(
output, self._in_channels, self._out_channels, self._scale,
suffix="conv_shortcut")
output = self.batch_norm(
output, z=z, y=y, is_training=is_training, name="bn1")
if self._layer_norm:
output = ops.layer_norm(output, is_training=is_training, scope="ln1")
output = tf.nn.relu(output)
output = self._get_conv(
output, self._in_channels, self._out_channels, self._scale1,
suffix="conv1")
output = self.batch_norm(
output, z=z, y=y, is_training=is_training, name="bn2")
if self._layer_norm:
output = ops.layer_norm(output, is_training=is_training, scope="ln2")
output = tf.nn.relu(output)
output = self._get_conv(
output, self._out_channels, self._out_channels, self._scale2,
suffix="conv2")
# Combine skip-connection with the convolved part.
output += shortcut
return output
class ResNetGenerator(abstract_arch.AbstractGenerator):
"""Abstract base class for generators based on the ResNet architecture."""
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["up", "none"]:
raise ValueError(
"Unknown generator ResNet block scaling: {}.".format(scale))
return ResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
is_gen_block=True,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
class ResNetDiscriminator(abstract_arch.AbstractDiscriminator):
"""Abstract base class for discriminators based on the ResNet architecture."""
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["down", "none"]:
raise ValueError(
"Unknown discriminator ResNet block scaling: {}.".format(scale))
return ResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
is_gen_block=False,
layer_norm=self._layer_norm,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
|
import pytest
from molecule.command import idempotence
@pytest.fixture
def _patched_is_idempotent(mocker):
return mocker.patch(
'molecule.command.idempotence.Idempotence._is_idempotent')
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
config_instance.state.change_state('converged', True)
return idempotence.Idempotence(config_instance)
def test_execute(mocker, patched_logger_info, patched_ansible_converge,
_patched_is_idempotent, patched_logger_success, _instance):
_instance.execute()
x = [
mocker.call("Scenario: 'default'"),
mocker.call("Action: 'idempotence'"),
]
assert x == patched_logger_info.mock_calls
patched_ansible_converge.assert_called_once_with(out=None, err=None)
_patched_is_idempotent.assert_called_once_with(
'patched-ansible-converge-stdout')
msg = 'Idempotence completed successfully.'
patched_logger_success.assert_called_once_with(msg)
def test_execute_raises_when_not_converged(
patched_logger_critical, patched_ansible_converge, _instance):
_instance._config.state.change_state('converged', False)
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
msg = 'Instances not converged. Please converge instances first.'
patched_logger_critical.assert_called_once_with(msg)
def test_execute_raises_when_fails_idempotence(
mocker, patched_logger_critical, patched_ansible_converge,
_patched_is_idempotent, _instance):
_patched_is_idempotent.return_value = False
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
msg = 'Idempotence test failed because of the following tasks:\n'
patched_logger_critical.assert_called_once_with(msg)
def test_is_idempotent(_instance):
output = """
PLAY RECAP ***********************************************************
check-command-01: ok=3 changed=0 unreachable=0 failed=0
"""
assert _instance._is_idempotent(output)
def test_is_idempotent_not_idempotent(_instance):
output = """
PLAY RECAP ***********************************************************
check-command-01: ok=2 changed=1 unreachable=0 failed=0
check-command-02: ok=2 changed=1 unreachable=0 failed=0
"""
assert not _instance._is_idempotent(output)
def test_non_idempotent_tasks_idempotent(_instance):
output = """
PLAY [all] ***********************************************************
GATHERING FACTS ******************************************************
ok: [check-command-01]
TASK: [Idempotence test] *********************************************
ok: [check-command-01]
PLAY RECAP ***********************************************************
check-command-01: ok=3 changed=0 unreachable=0 failed=0
"""
result = _instance._non_idempotent_tasks(output)
assert result == []
def test_non_idempotent_tasks_not_idempotent(_instance):
output = """
PLAY [all] ***********************************************************
GATHERING FACTS ******************************************************
ok: [check-command-01]
ok: [check-command-02]
TASK: [Idempotence test] *********************************************
changed: [check-command-01]
changed: [check-command-02]
PLAY RECAP ***********************************************************
check-command-01: ok=2 changed=1 unreachable=0 failed=0
check-command-02: ok=2 changed=1 unreachable=0 failed=0
"""
result = _instance._non_idempotent_tasks(output)
assert result == [
'* [check-command-01] => Idempotence test',
'* [check-command-02] => Idempotence test'
]
|
import aiohttp
from sisyphus_control import Track
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SHUFFLE_SET,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.exceptions import PlatformNotReady
from . import DATA_SISYPHUS
MEDIA_TYPE_TRACK = "sisyphus_track"
SUPPORTED_FEATURES = (
SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_PAUSE
| SUPPORT_SHUFFLE_SET
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_PLAY
)
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a media player entity for a Sisyphus table."""
host = discovery_info[CONF_HOST]
try:
table_holder = hass.data[DATA_SISYPHUS][host]
table = await table_holder.get_table()
except aiohttp.ClientError as err:
raise PlatformNotReady() from err
add_entities([SisyphusPlayer(table_holder.name, host, table)], True)
class SisyphusPlayer(MediaPlayerEntity):
"""Representation of a Sisyphus table as a media player device."""
def __init__(self, name, host, table):
"""Initialize the Sisyphus media device."""
self._name = name
self._host = host
self._table = table
async def async_added_to_hass(self):
"""Add listeners after this object has been initialized."""
self._table.add_listener(self.async_write_ha_state)
@property
def unique_id(self):
"""Return the UUID of the table."""
return self._table.id
@property
def available(self):
"""Return true if the table is responding to heartbeats."""
return self._table.is_connected
@property
def name(self):
"""Return the name of the table."""
return self._name
@property
def state(self):
"""Return the current state of the table; sleeping maps to off."""
if self._table.state in ["homing", "playing"]:
return STATE_PLAYING
if self._table.state == "paused":
if self._table.is_sleeping:
return STATE_OFF
return STATE_PAUSED
if self._table.state == "waiting":
return STATE_IDLE
return None
@property
def volume_level(self):
"""Return the current playback speed (0..1)."""
return self._table.speed
@property
def shuffle(self):
"""Return True if the current playlist is in shuffle mode."""
return self._table.is_shuffle
async def async_set_shuffle(self, shuffle):
"""Change the shuffle mode of the current playlist."""
await self._table.set_shuffle(shuffle)
@property
def media_playlist(self):
"""Return the name of the current playlist."""
return self._table.active_playlist.name if self._table.active_playlist else None
@property
def media_title(self):
"""Return the title of the current track."""
return self._table.active_track.name if self._table.active_track else None
@property
def media_content_type(self):
"""Return the content type currently playing; i.e. a Sisyphus track."""
return MEDIA_TYPE_TRACK
@property
def media_content_id(self):
"""Return the track ID of the current track."""
return self._table.active_track.id if self._table.active_track else None
@property
def supported_features(self):
"""Return the features supported by this table."""
return SUPPORTED_FEATURES
@property
def media_image_url(self):
"""Return the URL for a thumbnail image of the current track."""
if self._table.active_track:
return self._table.active_track.get_thumbnail_url(Track.ThumbnailSize.LARGE)
        return super().media_image_url
async def async_turn_on(self):
"""Wake up a sleeping table."""
await self._table.wakeup()
async def async_turn_off(self):
"""Put the table to sleep."""
await self._table.sleep()
async def async_volume_down(self):
"""Slow down playback."""
await self._table.set_speed(max(0, self._table.speed - 0.1))
async def async_volume_up(self):
"""Speed up playback."""
await self._table.set_speed(min(1.0, self._table.speed + 0.1))
async def async_set_volume_level(self, volume):
"""Set playback speed (0..1)."""
await self._table.set_speed(volume)
async def async_media_play(self):
"""Start playing."""
await self._table.play()
async def async_media_pause(self):
"""Pause."""
await self._table.pause()
async def async_media_next_track(self):
"""Skip to next track."""
cur_track_index = self._get_current_track_index()
await self._table.active_playlist.play(
self._table.active_playlist.tracks[cur_track_index + 1]
)
async def async_media_previous_track(self):
"""Skip to previous track."""
cur_track_index = self._get_current_track_index()
await self._table.active_playlist.play(
self._table.active_playlist.tracks[cur_track_index - 1]
)
def _get_current_track_index(self):
for index, track in enumerate(self._table.active_playlist.tracks):
if track.id == self._table.active_track.id:
return index
return -1
|
import logging
from orvibo.s20 import S20, S20Exception, discover
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_DISCOVERY,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_SWITCHES,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Orvibo S20 Switch"
DEFAULT_DISCOVERY = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SWITCHES, default=[]): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
],
),
vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean,
}
)
def setup_platform(hass, config, add_entities_callback, discovery_info=None):
"""Set up S20 switches."""
switch_data = {}
switches = []
switch_conf = config.get(CONF_SWITCHES, [config])
if config.get(CONF_DISCOVERY):
_LOGGER.info("Discovering S20 switches ...")
switch_data.update(discover())
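    # Manually configured switches override any discovered entry with the
    # same host.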
for switch in switch_conf:
switch_data[switch.get(CONF_HOST)] = switch
for host, data in switch_data.items():
try:
switches.append(
S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC)))
)
_LOGGER.info("Initialized S20 at %s", host)
except S20Exception:
_LOGGER.error("S20 at %s couldn't be initialized", host)
add_entities_callback(switches)
class S20Switch(SwitchEntity):
"""Representation of an S20 switch."""
def __init__(self, name, s20):
"""Initialize the S20 device."""
self._name = name
self._s20 = s20
self._state = False
self._exc = S20Exception
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def update(self):
"""Update device state."""
try:
self._state = self._s20.on
except self._exc:
_LOGGER.exception("Error while fetching S20 state")
def turn_on(self, **kwargs):
"""Turn the device on."""
try:
self._s20.on = True
except self._exc:
_LOGGER.exception("Error while turning on S20")
def turn_off(self, **kwargs):
"""Turn the device off."""
try:
self._s20.on = False
except self._exc:
_LOGGER.exception("Error while turning off S20")
|
from copy import deepcopy
from os import makedirs
import os.path as op
import re
from shutil import copy
import numpy as np
import pytest
from numpy.testing import assert_equal, assert_allclose
from mne import (make_bem_model, read_bem_surfaces, write_bem_surfaces,
make_bem_solution, read_bem_solution, write_bem_solution,
make_sphere_model, Transform, Info, write_surface)
from mne.preprocessing.maxfilter import fit_sphere_to_headshape
from mne.io.constants import FIFF
from mne.transforms import translation
from mne.datasets import testing
from mne.utils import (run_tests_if_main, catch_logging, requires_h5py)
from mne.bem import (_ico_downsample, _get_ico_map, _order_surfaces,
_assert_complete_surface, _assert_inside,
_check_surface_size, _bem_find_surface)
from mne.surface import read_surface
from mne.io import read_info
fname_raw = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data',
'test_raw.fif')
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
fname_bem_3 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-320-320-bem.fif')
fname_bem_1 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-bem.fif')
fname_bem_sol_3 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-320-320-bem-sol.fif')
fname_bem_sol_1 = op.join(subjects_dir, 'sample', 'bem',
'sample-320-bem-sol.fif')
def _compare_bem_surfaces(surfs_1, surfs_2):
"""Compare BEM surfaces."""
names = ['id', 'nn', 'rr', 'coord_frame', 'tris', 'sigma', 'ntri', 'np']
ignores = ['tri_cent', 'tri_nn', 'tri_area', 'neighbor_tri']
for s0, s1 in zip(surfs_1, surfs_2):
assert_equal(set(names), set(s0.keys()) - set(ignores))
assert_equal(set(names), set(s1.keys()) - set(ignores))
for name in names:
assert_allclose(s0[name], s1[name], rtol=1e-3, atol=1e-6,
err_msg='Mismatch: "%s"' % name)
def _compare_bem_solutions(sol_a, sol_b):
"""Compare BEM solutions."""
# compare the surfaces we used
_compare_bem_surfaces(sol_a['surfs'], sol_b['surfs'])
# compare the actual solutions
names = ['bem_method', 'field_mult', 'gamma', 'is_sphere',
'nsol', 'sigma', 'source_mult', 'solution']
assert_equal(set(sol_a.keys()), set(sol_b.keys()))
assert_equal(set(names + ['surfs']), set(sol_b.keys()))
for key in names:
assert_allclose(sol_a[key], sol_b[key], rtol=1e-3, atol=1e-5,
err_msg='Mismatch: %s' % key)
@testing.requires_testing_data
@requires_h5py
@pytest.mark.parametrize('ext', ('fif', 'h5'))
def test_io_bem(tmpdir, ext):
"""Test reading and writing of bem surfaces and solutions."""
import h5py
temp_bem = op.join(str(tmpdir), f'temp-bem.{ext}')
# model
with pytest.raises(ValueError, match='BEM data not found'):
read_bem_surfaces(fname_raw)
with pytest.raises(ValueError, match='surface with id 10'):
read_bem_surfaces(fname_bem_3, s_id=10)
surf = read_bem_surfaces(fname_bem_3, patch_stats=True)
surf = read_bem_surfaces(fname_bem_3, patch_stats=False)
write_bem_surfaces(temp_bem, surf[0])
with pytest.raises(IOError, match='exists'):
write_bem_surfaces(temp_bem, surf[0])
write_bem_surfaces(temp_bem, surf[0], overwrite=True)
if ext == 'h5':
with h5py.File(temp_bem, 'r'): # make sure it's valid
pass
surf_read = read_bem_surfaces(temp_bem, patch_stats=False)
_compare_bem_surfaces(surf, surf_read)
# solution
with pytest.raises(RuntimeError, match='No BEM solution found'):
read_bem_solution(fname_bem_3)
temp_sol = op.join(str(tmpdir), f'temp-sol.{ext}')
sol = read_bem_solution(fname_bem_sol_3)
assert 'BEM' in repr(sol)
write_bem_solution(temp_sol, sol)
sol_read = read_bem_solution(temp_sol)
_compare_bem_solutions(sol, sol_read)
sol = read_bem_solution(fname_bem_sol_1)
with pytest.raises(RuntimeError, match='BEM model does not have'):
_bem_find_surface(sol, 3)
def test_make_sphere_model():
"""Test making a sphere model."""
info = read_info(fname_raw)
pytest.raises(ValueError, make_sphere_model, 'foo', 'auto', info)
pytest.raises(ValueError, make_sphere_model, 'auto', 'auto', None)
pytest.raises(ValueError, make_sphere_model, 'auto', 'auto', info,
relative_radii=(), sigmas=())
with pytest.raises(ValueError, match='relative_radii.*must match.*sigmas'):
make_sphere_model('auto', 'auto', info, relative_radii=(1,))
# here we just make sure it works -- the functionality is actually
# tested more extensively e.g. in the forward and dipole code
with catch_logging() as log:
bem = make_sphere_model('auto', 'auto', info, verbose=True)
log = log.getvalue()
assert ' RV = ' in log
for line in log.split('\n'):
if ' RV = ' in line:
val = float(line.split()[-2])
assert val < 0.01 # actually decent fitting
break
assert '3 layers' in repr(bem)
assert 'Sphere ' in repr(bem)
assert ' mm' in repr(bem)
bem = make_sphere_model('auto', None, info)
assert 'no layers' in repr(bem)
assert 'Sphere ' in repr(bem)
with pytest.raises(ValueError, match='at least 2 sigmas.*head_radius'):
make_sphere_model(sigmas=(0.33,), relative_radii=(1.0,))
@testing.requires_testing_data
@pytest.mark.parametrize('kwargs, fname', [
[dict(), fname_bem_3],
[dict(conductivity=[0.3]), fname_bem_1],
])
def test_make_bem_model(tmpdir, kwargs, fname):
"""Test BEM model creation from Python with I/O."""
fname_temp = tmpdir.join('temp-bem.fif')
with catch_logging() as log:
model = make_bem_model('sample', ico=2, subjects_dir=subjects_dir,
verbose=True, **kwargs)
log = log.getvalue()
if len(kwargs.get('conductivity', (0, 0, 0))) == 1:
assert 'distance' not in log
else:
assert re.search(r'urfaces is approximately *3\.4 mm', log) is not None
assert re.search(r'inner skull CM is *0\.65 *-9\.62 *43\.85 mm',
log) is not None
model_c = read_bem_surfaces(fname)
_compare_bem_surfaces(model, model_c)
write_bem_surfaces(fname_temp, model)
model_read = read_bem_surfaces(fname_temp)
_compare_bem_surfaces(model, model_c)
_compare_bem_surfaces(model_read, model_c)
# bad conductivity
with pytest.raises(ValueError, match='conductivity must be'):
make_bem_model('sample', 4, [0.3, 0.006], subjects_dir=subjects_dir)
@testing.requires_testing_data
def test_bem_model_topology(tmpdir):
"""Test BEM model topological checks."""
# bad topology (not enough neighboring tris)
makedirs(tmpdir.join('foo', 'bem'))
for fname in ('inner_skull', 'outer_skull', 'outer_skin'):
fname += '.surf'
copy(op.join(subjects_dir, 'sample', 'bem', fname),
str(tmpdir.join('foo', 'bem', fname)))
outer_fname = tmpdir.join('foo', 'bem', 'outer_skull.surf')
rr, tris = read_surface(outer_fname)
tris = tris[:-1]
write_surface(outer_fname, rr, tris[:-1], overwrite=True)
with pytest.raises(RuntimeError, match='Surface outer skull is not compl'):
make_bem_model('foo', None, subjects_dir=tmpdir)
# Now get past this error to reach gh-6127 (not enough neighbor tris)
rr_bad = np.concatenate([rr, np.mean(rr, axis=0, keepdims=True)], axis=0)
write_surface(outer_fname, rr_bad, tris, overwrite=True)
with pytest.raises(RuntimeError, match='Surface outer skull.*triangles'):
make_bem_model('foo', None, subjects_dir=tmpdir)
@pytest.mark.slowtest
@testing.requires_testing_data
@pytest.mark.parametrize('cond, fname', [
[(0.3,), fname_bem_sol_1],
[(0.3, 0.006, 0.3), fname_bem_sol_3],
])
def test_bem_solution(tmpdir, cond, fname):
"""Test making a BEM solution from Python with I/O."""
# test degenerate conditions
surf = read_bem_surfaces(fname_bem_1)[0]
pytest.raises(RuntimeError, _ico_downsample, surf, 10) # bad dec grade
s_bad = dict(tris=surf['tris'][1:], ntri=surf['ntri'] - 1, rr=surf['rr'])
pytest.raises(RuntimeError, _ico_downsample, s_bad, 1) # not isomorphic
s_bad = dict(tris=surf['tris'].copy(), ntri=surf['ntri'],
rr=surf['rr']) # bad triangulation
s_bad['tris'][0] = [0, 0, 0]
pytest.raises(RuntimeError, _ico_downsample, s_bad, 1)
s_bad['id'] = 1
pytest.raises(RuntimeError, _assert_complete_surface, s_bad)
s_bad = dict(tris=surf['tris'], ntri=surf['ntri'], rr=surf['rr'].copy())
s_bad['rr'][0] = 0.
pytest.raises(RuntimeError, _get_ico_map, surf, s_bad)
surfs = read_bem_surfaces(fname_bem_3)
pytest.raises(RuntimeError, _assert_inside, surfs[0], surfs[1]) # outside
surfs[0]['id'] = 100 # bad surfs
pytest.raises(RuntimeError, _order_surfaces, surfs)
surfs[1]['rr'] /= 1000.
pytest.raises(RuntimeError, _check_surface_size, surfs[1])
# actually test functionality
fname_temp = op.join(str(tmpdir), 'temp-bem-sol.fif')
# use a model and solution made in Python
for model_type in ('python', 'c'):
if model_type == 'python':
model = make_bem_model('sample', conductivity=cond, ico=2,
subjects_dir=subjects_dir)
else:
model = fname_bem_1 if len(cond) == 1 else fname_bem_3
solution = make_bem_solution(model, verbose=True)
solution_c = read_bem_solution(fname)
_compare_bem_solutions(solution, solution_c)
write_bem_solution(fname_temp, solution)
solution_read = read_bem_solution(fname_temp)
_compare_bem_solutions(solution, solution_c)
_compare_bem_solutions(solution_read, solution_c)
def test_fit_sphere_to_headshape():
"""Test fitting a sphere to digitization points."""
# Create points of various kinds
rad = 0.09
big_rad = 0.12
center = np.array([0.0005, -0.01, 0.04])
dev_trans = np.array([0., -0.005, -0.01])
dev_center = center - dev_trans
dig = [
# Left auricular
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'ident': FIFF.FIFFV_POINT_LPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'r': np.array([-1.0, 0.0, 0.0])},
# Nasion
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'ident': FIFF.FIFFV_POINT_NASION,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'r': np.array([0.0, 1.0, 0.0])},
# Right auricular
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'ident': FIFF.FIFFV_POINT_RPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'r': np.array([1.0, 0.0, 0.0])},
# Top of the head (extra point)
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EXTRA,
'ident': 0,
'r': np.array([0.0, 0.0, 1.0])},
# EEG points
# Fz
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EEG,
'ident': 0,
'r': np.array([0, .72, .69])},
# F3
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EEG,
'ident': 1,
'r': np.array([-.55, .67, .50])},
# F4
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EEG,
'ident': 2,
'r': np.array([.55, .67, .50])},
# Cz
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EEG,
'ident': 3,
'r': np.array([0.0, 0.0, 1.0])},
# Pz
{'coord_frame': FIFF.FIFFV_COORD_HEAD,
'kind': FIFF.FIFFV_POINT_EEG,
'ident': 4,
'r': np.array([0, -.72, .69])},
]
for d in dig:
d['r'] *= rad
d['r'] += center
# Device to head transformation (rotate .2 rad over X-axis)
dev_head_t = Transform('meg', 'head', translation(*(dev_trans)))
info = Info(dig=dig, dev_head_t=dev_head_t)
# Degenerate conditions
pytest.raises(ValueError, fit_sphere_to_headshape, info,
dig_kinds=(FIFF.FIFFV_POINT_HPI,))
pytest.raises(ValueError, fit_sphere_to_headshape, info,
dig_kinds='foo', units='m')
info['dig'][0]['coord_frame'] = FIFF.FIFFV_COORD_DEVICE
pytest.raises(RuntimeError, fit_sphere_to_headshape, info, units='m')
info['dig'][0]['coord_frame'] = FIFF.FIFFV_COORD_HEAD
    # Test with 4 points that match a perfect sphere
dig_kinds = (FIFF.FIFFV_POINT_CARDINAL, FIFF.FIFFV_POINT_EXTRA)
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
units='m')
kwargs = dict(rtol=1e-3, atol=1e-5)
assert_allclose(r, rad, **kwargs)
assert_allclose(oh, center, **kwargs)
assert_allclose(od, dev_center, **kwargs)
# Test with all points
dig_kinds = ('cardinal', FIFF.FIFFV_POINT_EXTRA, 'eeg')
kwargs = dict(rtol=1e-3, atol=1e-3)
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
units='m')
assert_allclose(r, rad, **kwargs)
assert_allclose(oh, center, **kwargs)
assert_allclose(od, dev_center, **kwargs)
# Test with some noisy EEG points only.
dig_kinds = 'eeg'
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r, oh, od = fit_sphere_to_headshape(info, dig_kinds=dig_kinds,
units='m')
kwargs = dict(rtol=1e-3, atol=1e-2)
assert_allclose(r, rad, **kwargs)
assert_allclose(oh, center, **kwargs)
assert_allclose(od, center, **kwargs)
# Test big size
dig_kinds = ('cardinal', 'extra')
info_big = deepcopy(info)
for d in info_big['dig']:
d['r'] -= center
d['r'] *= big_rad / rad
d['r'] += center
with pytest.warns(RuntimeWarning, match='Estimated head size'):
r, oh, od = fit_sphere_to_headshape(info_big, dig_kinds=dig_kinds,
units='mm')
assert_allclose(oh, center * 1000, atol=1e-3)
assert_allclose(r, big_rad * 1000, atol=1e-3)
del info_big
# Test offcenter
dig_kinds = ('cardinal', 'extra')
info_shift = deepcopy(info)
shift_center = np.array([0., -0.03, 0.])
for d in info_shift['dig']:
d['r'] -= center
d['r'] += shift_center
with pytest.warns(RuntimeWarning, match='from head frame origin'):
r, oh, od = fit_sphere_to_headshape(
info_shift, dig_kinds=dig_kinds, units='m')
assert_allclose(oh, shift_center, atol=1e-6)
assert_allclose(r, rad, atol=1e-6)
# Test "auto" mode (default)
# Should try "extra", fail, and go on to EEG
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r, oh, od = fit_sphere_to_headshape(info, units='m')
kwargs = dict(rtol=1e-3, atol=1e-3)
assert_allclose(r, rad, **kwargs)
assert_allclose(oh, center, **kwargs)
assert_allclose(od, dev_center, **kwargs)
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r2, oh2, od2 = fit_sphere_to_headshape(info, units='m')
assert_allclose(r, r2, atol=1e-7)
assert_allclose(oh, oh2, atol=1e-7)
assert_allclose(od, od2, atol=1e-7)
# this one should pass, 1 EXTRA point and 3 EEG (but the fit is terrible)
info = Info(dig=dig[:7], dev_head_t=dev_head_t)
with pytest.warns(RuntimeWarning, match='Only .* head digitization'):
r, oh, od = fit_sphere_to_headshape(info, units='m')
    # this one should fail, 1 EXTRA point and only 2 EEG points is not enough
info = Info(dig=dig[:6], dev_head_t=dev_head_t)
pytest.raises(ValueError, fit_sphere_to_headshape, info, units='m')
pytest.raises(TypeError, fit_sphere_to_headshape, 1, units='m')
run_tests_if_main()
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
LIGHT_LUX,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
HUMIDITY_ICON = "mdi:water-percent"
TEMP_C_ICON = "mdi:thermometer"
BRIGHTNESS_ICON = "mdi:brightness-6"
CO2_ICON = "mdi:molecule-co2"
class HomeKitHumiditySensor(HomeKitEntity):
"""Representation of a Homekit humidity sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT]
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_HUMIDITY
@property
def name(self):
"""Return the name of the device."""
return f"{super().name} Humidity"
@property
def icon(self):
"""Return the sensor icon."""
return HUMIDITY_ICON
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
return PERCENTAGE
@property
def state(self):
"""Return the current humidity."""
return self.service.value(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT)
class HomeKitTemperatureSensor(HomeKitEntity):
"""Representation of a Homekit temperature sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.TEMPERATURE_CURRENT]
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_TEMPERATURE
@property
def name(self):
"""Return the name of the device."""
return f"{super().name} Temperature"
@property
def icon(self):
"""Return the sensor icon."""
return TEMP_C_ICON
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
return TEMP_CELSIUS
@property
def state(self):
"""Return the current temperature in Celsius."""
return self.service.value(CharacteristicsTypes.TEMPERATURE_CURRENT)
class HomeKitLightSensor(HomeKitEntity):
"""Representation of a Homekit light level sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.LIGHT_LEVEL_CURRENT]
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_ILLUMINANCE
@property
def name(self):
"""Return the name of the device."""
return f"{super().name} Light Level"
@property
def icon(self):
"""Return the sensor icon."""
return BRIGHTNESS_ICON
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
return LIGHT_LUX
@property
def state(self):
"""Return the current light level in lux."""
return self.service.value(CharacteristicsTypes.LIGHT_LEVEL_CURRENT)
class HomeKitCarbonDioxideSensor(HomeKitEntity):
"""Representation of a Homekit Carbon Dioxide sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.CARBON_DIOXIDE_LEVEL]
@property
def name(self):
"""Return the name of the device."""
return f"{super().name} CO2"
@property
def icon(self):
"""Return the sensor icon."""
return CO2_ICON
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
return CONCENTRATION_PARTS_PER_MILLION
@property
def state(self):
"""Return the current CO2 level in ppm."""
return self.service.value(CharacteristicsTypes.CARBON_DIOXIDE_LEVEL)
class HomeKitBatterySensor(HomeKitEntity):
"""Representation of a Homekit battery sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [
CharacteristicsTypes.BATTERY_LEVEL,
CharacteristicsTypes.STATUS_LO_BATT,
CharacteristicsTypes.CHARGING_STATE,
]
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def name(self):
"""Return the name of the device."""
return f"{super().name} Battery"
@property
def icon(self):
"""Return the sensor icon."""
if not self.available or self.state is None:
return "mdi:battery-unknown"
        # This is similar to the logic in helpers.icon, but we let the
        # device decide when the low-battery (mdi:battery-alert) icon applies.
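        # For example, following the logic below: state 54 while discharging
        # maps to "mdi:battery-50", state 54 while charging maps to
        # "mdi:battery-charging-60", and a set low-battery flag maps to
        # "mdi:battery-alert" (values are illustrative).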
icon = "mdi:battery"
if self.is_charging and self.state > 10:
percentage = int(round(self.state / 20 - 0.01)) * 20
icon += f"-charging-{percentage}"
elif self.is_charging:
icon += "-outline"
elif self.is_low_battery:
icon += "-alert"
elif self.state < 95:
percentage = max(int(round(self.state / 10 - 0.01)) * 10, 10)
icon += f"-{percentage}"
return icon
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
return PERCENTAGE
@property
def is_low_battery(self):
"""Return true if battery level is low."""
return self.service.value(CharacteristicsTypes.STATUS_LO_BATT) == 1
@property
def is_charging(self):
"""Return true if currently charing."""
# 0 = not charging
# 1 = charging
# 2 = not chargeable
return self.service.value(CharacteristicsTypes.CHARGING_STATE) == 1
@property
def state(self):
"""Return the current battery level percentage."""
return self.service.value(CharacteristicsTypes.BATTERY_LEVEL)
ENTITY_TYPES = {
"humidity": HomeKitHumiditySensor,
"temperature": HomeKitTemperatureSensor,
"light": HomeKitLightSensor,
"carbon-dioxide": HomeKitCarbonDioxideSensor,
"battery": HomeKitBatterySensor,
}
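# A rough sketch of the data assumed by async_add_service below (field values
# are illustrative): aiohomekit describes each accessory service as a mapping
# such as {"stype": "temperature", "iid": 12}, and the "stype" value is looked
# up in ENTITY_TYPES to choose the entity class; unknown types return False so
# they can be handled elsewhere.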
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit sensors."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
entity_class = ENTITY_TYPES.get(service["stype"])
if not entity_class:
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.utils import ConstantStubLink
@testing.parameterize(
{'outputs': {'shape': (3, 4), 'dtype': np.float32}},
{'outputs': ({'shape': (3, 4), 'dtype': np.float32},)},
{'outputs': (
{'shape': (3, 4), 'dtype': np.float32},
{'shape': (3,), 'dtype': np.int32})},
)
class TestConstantStubLink(unittest.TestCase):
def setUp(self):
if isinstance(self.outputs, tuple):
self.outputs = tuple(
np.random.uniform(size=output['shape']).astype(output['dtype'])
for output in self.outputs)
else:
self.outputs = np.random.uniform(
size=self.outputs['shape']).astype(self.outputs['dtype'])
self.link = ConstantStubLink(self.outputs)
def _check(self, xp):
self.assertIsInstance(self.link, chainer.Link)
self.assertEqual(self.link.xp, xp)
outputs = self.link('ignored', -1, 'inputs', 1.0)
if isinstance(self.outputs, tuple):
originals = self.outputs
outputs = outputs
else:
originals = self.outputs,
outputs = outputs,
self.assertEqual(len(originals), len(outputs))
for orig, out in zip(originals, outputs):
self.assertIsInstance(out, chainer.Variable)
self.assertEqual(out.shape, orig.shape)
self.assertEqual(out.dtype, orig.dtype)
self.assertEqual(
chainer.backends.cuda.get_array_module(out.array), xp)
out.to_cpu()
np.testing.assert_equal(out.array, orig)
def test_cpu(self):
self._check(np)
@attr.gpu
def test_gpu(self):
self.link.to_gpu()
self._check(chainer.backends.cuda.cupy)
@attr.gpu
def test_gpu_to_cpu(self):
self.link.to_gpu()
self.link.to_cpu()
self._check(np)
class TestConstantStubLinkInvalidArgument(unittest.TestCase):
def test_string(self):
with self.assertRaises(ValueError):
ConstantStubLink('invalid')
def test_list(self):
with self.assertRaises(ValueError):
ConstantStubLink([np.empty((3, 4))])
testing.run_module(__name__, __file__)
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class ZinniaConfig(AppConfig):
"""
Config for Zinnia application.
"""
name = 'zinnia'
label = 'zinnia'
verbose_name = _('Weblog')
def ready(self):
from django_comments.moderation import moderator
from zinnia.signals import connect_entry_signals
from zinnia.signals import connect_discussion_signals
from zinnia.moderator import EntryCommentModerator
entry_klass = self.get_model('Entry')
# Register the comment moderator on Entry
moderator.register(entry_klass, EntryCommentModerator)
# Connect the signals
connect_entry_signals()
connect_discussion_signals()
|
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
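# The model classes below are imported lazily (inside lazy_import) rather than
# at module import time, presumably to avoid circular imports between the
# generated model modules.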
def lazy_import():
from paasta_tools.paastaapi.model.envoy_status import EnvoyStatus
from paasta_tools.paastaapi.model.marathon_app_status import MarathonAppStatus
from paasta_tools.paastaapi.model.marathon_autoscaling_info import MarathonAutoscalingInfo
from paasta_tools.paastaapi.model.marathon_mesos_status import MarathonMesosStatus
from paasta_tools.paastaapi.model.smartstack_status import SmartstackStatus
globals()['EnvoyStatus'] = EnvoyStatus
globals()['MarathonAppStatus'] = MarathonAppStatus
globals()['MarathonAutoscalingInfo'] = MarathonAutoscalingInfo
globals()['MarathonMesosStatus'] = MarathonMesosStatus
globals()['SmartstackStatus'] = SmartstackStatus
class InstanceStatusMarathon(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute;
          for a top-level attribute var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute;
          for a top-level attribute var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('bounce_method',): {
'BRUTAL': "brutal",
'UPTHENDOWN': "upthendown",
'DOWNTHENUP': "downthenup",
'CROSSOVER': "crossover",
},
('desired_state',): {
'START': "start",
'STOP': "stop",
},
('deploy_status',): {
'RUNNING': "Running",
'DEPLOYING': "Deploying",
'STOPPED': "Stopped",
'DELAYED': "Delayed",
'WAITING': "Waiting",
'WAITING_FOR_BOUNCE': "Waiting for bounce",
'NOTRUNNING': "NotRunning",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'app_count': (int,), # noqa: E501
'bounce_method': (str,), # noqa: E501
'desired_state': (str,), # noqa: E501
'active_shas': ([[str, none_type]],), # noqa: E501
'app_statuses': ([MarathonAppStatus],), # noqa: E501
'autoscaling_info': (MarathonAutoscalingInfo,), # noqa: E501
'backoff_seconds': (int,), # noqa: E501
'deploy_status': (str,), # noqa: E501
'desired_app_id': (str,), # noqa: E501
'app_id': (str,), # noqa: E501
'envoy': (EnvoyStatus,), # noqa: E501
'error_message': (str,), # noqa: E501
'expected_instance_count': (int,), # noqa: E501
'mesos': (MarathonMesosStatus,), # noqa: E501
'running_instance_count': (int,), # noqa: E501
'smartstack': (SmartstackStatus,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'app_count': 'app_count', # noqa: E501
'bounce_method': 'bounce_method', # noqa: E501
'desired_state': 'desired_state', # noqa: E501
'active_shas': 'active_shas', # noqa: E501
'app_statuses': 'app_statuses', # noqa: E501
'autoscaling_info': 'autoscaling_info', # noqa: E501
'backoff_seconds': 'backoff_seconds', # noqa: E501
'deploy_status': 'deploy_status', # noqa: E501
'desired_app_id': 'desired_app_id', # noqa: E501
'app_id': 'app_id', # noqa: E501
'envoy': 'envoy', # noqa: E501
'error_message': 'error_message', # noqa: E501
'expected_instance_count': 'expected_instance_count', # noqa: E501
'mesos': 'mesos', # noqa: E501
'running_instance_count': 'running_instance_count', # noqa: E501
'smartstack': 'smartstack', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, app_count, bounce_method, desired_state, *args, **kwargs): # noqa: E501
"""InstanceStatusMarathon - a model defined in OpenAPI
Args:
app_count (int): The number of different running versions of the same service (0 for stopped, 1 for running and 1+ for bouncing)
bounce_method (str): Method to transit between new and old versions of a service
desired_state (str): Desired state of a service, for Marathon
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
                When traveling through a discriminator, the composed schema
                that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            active_shas ([[str, none_type]]): List of git/config SHAs running. [optional]  # noqa: E501
app_statuses ([MarathonAppStatus]): Statuses of each app of the service. [optional] # noqa: E501
autoscaling_info (MarathonAutoscalingInfo): [optional] # noqa: E501
backoff_seconds (int): backoff in seconds before launching the next task. [optional] # noqa: E501
deploy_status (str): Deploy status of a marathon service. [optional] # noqa: E501
desired_app_id (str): ID of the desired version of a service instance. [optional] # noqa: E501
app_id (str): ID of the current version of a service instance. [optional] # noqa: E501
envoy (EnvoyStatus): [optional] # noqa: E501
error_message (str): Error message when a marathon job ID cannot be found. [optional] # noqa: E501
expected_instance_count (int): The number of desired instances of the service. [optional] # noqa: E501
mesos (MarathonMesosStatus): [optional] # noqa: E501
running_instance_count (int): The number of actual running instances of the service. [optional] # noqa: E501
smartstack (SmartstackStatus): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.app_count = app_count
self.bounce_method = bounce_method
self.desired_state = desired_state
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
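# A minimal construction sketch (values are illustrative; per the docstring
# above only app_count, bounce_method and desired_state are required, and the
# enum-like strings must match allowed_values):
#   status = InstanceStatusMarathon(
#       app_count=1,
#       bounce_method="crossover",
#       desired_state="start",
#       deploy_status="Running",
#       expected_instance_count=3,
#   )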
|
from Handler import Handler
try:
import pika
except ImportError:
pika = None
class rmqHandler (Handler):
"""
    Implements the abstract Handler class, sending data to a RabbitMQ topic
    exchange. The routing key will be the full name of the metric being sent.
    Based on the rmqHandler and zmqHandler code.
"""
def __init__(self, config=None):
"""
Create a new instance of rmqHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
# Initialize Data
self.connection = None
self.channel = None
# Initialize Options
self.server = self.config.get('server', '127.0.0.1')
self.port = int(self.config.get('port', 5672))
self.topic_exchange = self.config.get('topic_exchange', 'diamond')
self.vhost = self.config.get('vhost', '')
self.user = self.config.get('user', 'guest')
self.password = self.config.get('password', 'guest')
self.routing_key = self.config.get('routing_key', 'metric')
self.custom_routing_key = self.config.get(
'custom_routing_key', 'diamond')
if not pika:
self.log.error('pika import failed. Handler disabled')
self.enabled = False
return
# Create rabbitMQ topic exchange and bind
try:
self._bind()
except pika.exceptions.AMQPConnectionError:
self.log.error('Failed to bind to rabbitMQ topic exchange')
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(rmqHandler, self).get_default_config_help()
config.update({
'server': '',
'topic_exchange': '',
'vhost': '',
'user': '',
'password': '',
'routing_key': '',
'custom_routing_key': '',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(rmqHandler, self).get_default_config()
config.update({
'server': '127.0.0.1',
'topic_exchange': 'diamond',
'vhost': '/',
'user': 'guest',
'password': 'guest',
'port': '5672',
})
return config
def _bind(self):
"""
Create socket and bind
"""
credentials = pika.PlainCredentials(self.user, self.password)
params = pika.ConnectionParameters(credentials=credentials,
host=self.server,
virtual_host=self.vhost,
port=self.port)
self.connection = pika.BlockingConnection(params)
self.channel = self.connection.channel()
# NOTE : PIKA version uses 'exchange_type' instead of 'type'
self.channel.exchange_declare(exchange=self.topic_exchange,
exchange_type="topic")
def __del__(self):
"""
Destroy instance of the rmqHandler class
"""
try:
self.connection.close()
except AttributeError:
pass
def process(self, metric):
"""
Process a metric and send it to RabbitMQ topic exchange
"""
# Send the data as ......
if not pika:
return
routingKeyDic = {
'metric': lambda: metric.path,
'custom': lambda: self.custom_routing_key,
            # This option and the ones below are not strictly needed, because
            # RabbitMQ topic exchanges let consumers bind with wildcard
            # patterns on routing keys, but they are a good example of how to
            # expose more routing keys.
'host': lambda: metric.host,
'metric.path': metric.getMetricPath,
'path.prefix': metric.getPathPrefix,
'collector.path': metric.getCollectorPath,
}
try:
self.channel.basic_publish(
exchange=self.topic_exchange,
routing_key=routingKeyDic[self.routing_key](),
body="%s" % metric)
except Exception: # Rough connection re-try logic.
self.log.info(
"Failed publishing to rabbitMQ. Attempting reconnect")
self._bind()
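# A sketch of the options this handler reads (names follow __init__ and
# get_default_config above; the values shown are illustrative only):
#   server=rabbit.example.com, port=5672, vhost=/, user=guest, password=guest,
#   topic_exchange=diamond, custom_routing_key=diamond,
#   routing_key=metric | custom | host | metric.path | path.prefix | collector.path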
|
import pytest
from homeassistant.components.risco import (
LAST_EVENT_TIMESTAMP_KEY,
CannotConnectError,
UnauthorizedError,
)
from homeassistant.components.risco.const import DOMAIN, EVENTS_COORDINATOR
from .util import TEST_CONFIG, setup_risco
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry
ENTITY_IDS = {
"Alarm": "sensor.risco_test_site_name_alarm_events",
"Status": "sensor.risco_test_site_name_status_events",
"Trouble": "sensor.risco_test_site_name_trouble_events",
"Other": "sensor.risco_test_site_name_other_events",
}
TEST_EVENTS = [
MagicMock(
time="2020-09-02T10:00:00Z",
category_id=4,
category_name="System Status",
type_id=16,
type_name="disarmed",
name="'user' disarmed 'partition'",
text="",
partition_id=0,
zone_id=None,
user_id=3,
group=None,
priority=2,
raw={},
),
MagicMock(
time="2020-09-02T09:00:00Z",
category_id=7,
category_name="Troubles",
type_id=36,
type_name="service needed",
name="Device Fault",
text="Service is needed.",
partition_id=None,
zone_id=None,
user_id=None,
group=None,
priority=1,
raw={},
),
MagicMock(
time="2020-09-02T08:00:00Z",
category_id=2,
category_name="Alarms",
type_id=3,
type_name="triggered",
name="Alarm is on",
text="Yes it is.",
partition_id=0,
zone_id=12,
user_id=None,
group=None,
priority=0,
raw={},
),
MagicMock(
time="2020-09-02T07:00:00Z",
category_id=4,
category_name="System Status",
type_id=119,
type_name="group arm",
name="You armed a group",
text="",
partition_id=0,
zone_id=None,
user_id=1,
group="C",
priority=2,
raw={},
),
MagicMock(
time="2020-09-02T06:00:00Z",
category_id=8,
category_name="Made up",
type_id=200,
type_name="also made up",
name="really made up",
text="",
partition_id=2,
zone_id=None,
user_id=1,
group=None,
priority=2,
raw={},
),
]
CATEGORIES_TO_EVENTS = {
"Alarm": 2,
"Status": 0,
"Trouble": 1,
"Other": 4,
}
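# Maps each sensor category to the index of the TEST_EVENTS entry expected as
# that sensor's state, i.e. the newest event of that category above.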
@pytest.fixture
def empty_alarm():
    """Fixture to mock an empty alarm."""
    with patch(
        "homeassistant.components.risco.RiscoAPI.get_state",
        return_value=MagicMock(partitions={}, zones={}),
):
yield
async def test_cannot_connect(hass):
"""Test connection error."""
with patch(
"homeassistant.components.risco.RiscoAPI.login",
side_effect=CannotConnectError,
):
config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
registry = await hass.helpers.entity_registry.async_get_registry()
for id in ENTITY_IDS.values():
assert not registry.async_is_registered(id)
async def test_unauthorized(hass):
"""Test unauthorized error."""
with patch(
"homeassistant.components.risco.RiscoAPI.login",
side_effect=UnauthorizedError,
):
config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
registry = await hass.helpers.entity_registry.async_get_registry()
for id in ENTITY_IDS.values():
assert not registry.async_is_registered(id)
def _check_state(hass, category, entity_id):
    event = TEST_EVENTS[CATEGORIES_TO_EVENTS[category]]
    state = hass.states.get(entity_id)
    assert state.state == event.time
    assert state.attributes["category_id"] == event.category_id
    assert state.attributes["category_name"] == event.category_name
    assert state.attributes["type_id"] == event.type_id
    assert state.attributes["type_name"] == event.type_name
    assert state.attributes["name"] == event.name
    assert state.attributes["text"] == event.text
    assert state.attributes["partition_id"] == event.partition_id
    assert state.attributes["zone_id"] == event.zone_id
    assert state.attributes["user_id"] == event.user_id
    assert state.attributes["group"] == event.group
    assert state.attributes["priority"] == event.priority
    assert state.attributes["raw"] == event.raw
async def test_setup(hass, empty_alarm):
"""Test entity setup."""
registry = await hass.helpers.entity_registry.async_get_registry()
for id in ENTITY_IDS.values():
assert not registry.async_is_registered(id)
with patch(
"homeassistant.components.risco.RiscoAPI.get_events",
return_value=TEST_EVENTS,
), patch(
"homeassistant.components.risco.Store.async_save",
) as save_mock:
entry = await setup_risco(hass)
await hass.async_block_till_done()
save_mock.assert_awaited_once_with(
{LAST_EVENT_TIMESTAMP_KEY: TEST_EVENTS[0].time}
)
for id in ENTITY_IDS.values():
assert registry.async_is_registered(id)
for category, entity_id in ENTITY_IDS.items():
_check_state(hass, category, entity_id)
coordinator = hass.data[DOMAIN][entry.entry_id][EVENTS_COORDINATOR]
with patch(
"homeassistant.components.risco.RiscoAPI.get_events", return_value=[]
) as events_mock, patch(
"homeassistant.components.risco.Store.async_load",
return_value={LAST_EVENT_TIMESTAMP_KEY: TEST_EVENTS[0].time},
):
await coordinator.async_refresh()
await hass.async_block_till_done()
events_mock.assert_awaited_once_with(TEST_EVENTS[0].time, 10)
for category, entity_id in ENTITY_IDS.items():
_check_state(hass, category, entity_id)
|
from collections import defaultdict
import colorsys
import random
import sys
import time
import threading
from openrazer.client import DeviceManager
from openrazer.client import constants as razer_constants
# Quit flag; set when the user exits so the effect threads stop and effects are restored.
quit = False
# Create a DeviceManager. This is used to get specific devices.
device_manager = DeviceManager()
print("Found {} Razer devices".format(len(device_manager.devices)))
# Keep only devices that support per-key (advanced) effects. Build a new list
# instead of removing entries from the list while iterating over it, which
# would skip devices.
devices = []
for device in device_manager.devices:
    if not device.fx.advanced:
        print("Skipping device " + device.name + " (" + device.serial + ")")
        continue
    devices.append(device)
print()
# Disable daemon effect syncing.
# Without this, the daemon will try to set the lighting effect to every device.
device_manager.sync_effects = False
# Helper function to generate interesting colors
def random_color():
rgb = colorsys.hsv_to_rgb(random.uniform(0, 1), random.uniform(0.5, 1), 1)
    # Clamp to 0-255: int(256 * 1.0) would overflow the byte range.
    return tuple(map(lambda x: min(255, int(256 * x)), rgb))
# Handle the starlight effect for a single key
def starlight_key(device, row, col, active):
color = random_color()
hue = random.uniform(0, 1)
start_time = time.time()
fade_time = 2
elapsed = 0
while elapsed < fade_time:
value = 1 - elapsed / fade_time
rgb = colorsys.hsv_to_rgb(hue, 1, value)
        color = tuple(map(lambda x: min(255, int(256 * x)), rgb))
device.fx.advanced.matrix[row, col] = color
# print(device, color)
time.sleep(1 / 60)
elapsed = time.time() - start_time
device.fx.advanced.matrix[row, col] = (0, 0, 0)
active[(row, col)] = False
# Handle the starlight effect for an entire device
def starlight_effect(device):
rows, cols = device.fx.advanced.rows, device.fx.advanced.cols
active = defaultdict(bool)
device.fx.advanced.matrix.reset()
device.fx.advanced.draw()
while True:
row, col = random.randrange(rows), random.randrange(cols)
if not active[(row, col)]:
active[(row, col)] = True
threading.Thread(target=starlight_key, args=(device, row, col, active)).start()
time.sleep(0.1)
if quit:
break
device.fx.advanced.restore()
# Spawn a manager thread for each device and wait on all of them.
threads = []
for device in devices:
t = threading.Thread(target=starlight_effect, args=(device,), daemon=True)
t.start()
threads.append(t)
# If there are still threads, update each device.
try:
    while any(t.is_alive() for t in threads):
for device in devices:
device.fx.advanced.draw()
time.sleep(1 / 60)
except KeyboardInterrupt:
quit = True
for t in threads:
t.join()
sys.exit(0)
|