from homeassistant.const import CONF_NAME, STATE_UNAVAILABLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DATA_UPDATED, DOMAIN, SENSOR_TYPES
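# Each SENSOR_TYPES value is indexed below as (api data key, name suffix,
# unit of measurement, icon) -- layout inferred from its usage in this module.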
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Glances sensors."""
client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = []
    for sensor_type, sensor_details in SENSOR_TYPES.items():
        if sensor_details[0] not in client.api.data:
            continue
        if sensor_details[0] == "fs":
            # fs provides a list of attached disks
            for disk in client.api.data[sensor_details[0]]:
                dev.append(
                    GlancesSensor(
                        client,
                        name,
                        disk["mnt_point"],
                        sensor_details[1],
                        sensor_type,
                        sensor_details,
                    )
                )
        elif sensor_details[0] == "sensors":
            # sensors provides temperatures for different devices
            for sensor in client.api.data[sensor_details[0]]:
                dev.append(
                    GlancesSensor(
                        client,
                        name,
                        sensor["label"],
                        sensor_details[1],
                        sensor_type,
                        sensor_details,
                    )
                )
        elif client.api.data[sensor_details[0]]:
            dev.append(
                GlancesSensor(
                    client,
                    name,
                    "",
                    sensor_details[1],
                    sensor_type,
                    sensor_details,
                )
            )
async_add_entities(dev, True)
class GlancesSensor(Entity):
"""Implementation of a Glances sensor."""
def __init__(
self,
glances_data,
name,
sensor_name_prefix,
sensor_name_suffix,
sensor_type,
sensor_details,
):
"""Initialize the sensor."""
self.glances_data = glances_data
self._sensor_name_prefix = sensor_name_prefix
self._sensor_name_suffix = sensor_name_suffix
self._name = name
self.type = sensor_type
self._state = None
self.sensor_details = sensor_details
self.unsub_update = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._sensor_name_prefix} {self._sensor_name_suffix}"
@property
def unique_id(self):
"""Set unique_id for sensor."""
return f"{self.glances_data.host}-{self.name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.sensor_details[3]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.sensor_details[2]
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self.glances_data.available
@property
def state(self):
"""Return the state of the resources."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.unsub_update = async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
    async def async_will_remove_from_hass(self):
"""Unsubscribe from update dispatcher."""
if self.unsub_update:
self.unsub_update()
self.unsub_update = None
    async def async_update(self):
        """Get the latest data from the REST API."""
        value = self.glances_data.api.data
        if value is None:
            return
        if self.sensor_details[0] == "fs":
            disk = None
            for var in value["fs"]:
                if var["mnt_point"] == self._sensor_name_prefix:
                    disk = var
                    break
            if disk is None:
                # No disk matches this sensor's mount point; keep the old state.
                return
            if self.type == "disk_use_percent":
                self._state = disk["percent"]
            elif self.type == "disk_use":
                self._state = round(disk["used"] / 1024 ** 3, 1)
            elif self.type == "disk_free":
                try:
                    self._state = round(disk["free"] / 1024 ** 3, 1)
                except KeyError:
                    self._state = round(
                        (disk["size"] - disk["used"]) / 1024 ** 3,
                        1,
                    )
        elif self.type == "sensor_temp":
            for sensor in value["sensors"]:
                if sensor["label"] == self._sensor_name_prefix:
                    self._state = sensor["value"]
                    break
        elif self.type == "memory_use_percent":
            self._state = value["mem"]["percent"]
        elif self.type == "memory_use":
            self._state = round(value["mem"]["used"] / 1024 ** 2, 1)
        elif self.type == "memory_free":
            self._state = round(value["mem"]["free"] / 1024 ** 2, 1)
        elif self.type == "swap_use_percent":
            self._state = value["memswap"]["percent"]
        elif self.type == "swap_use":
            self._state = round(value["memswap"]["used"] / 1024 ** 3, 1)
        elif self.type == "swap_free":
            self._state = round(value["memswap"]["free"] / 1024 ** 3, 1)
        elif self.type == "processor_load":
            # Windows systems don't provide load details
            try:
                self._state = value["load"]["min15"]
            except KeyError:
                self._state = value["cpu"]["total"]
        elif self.type == "process_running":
            self._state = value["processcount"]["running"]
        elif self.type == "process_total":
            self._state = value["processcount"]["total"]
        elif self.type == "process_thread":
            self._state = value["processcount"]["thread"]
        elif self.type == "process_sleeping":
            self._state = value["processcount"]["sleeping"]
        elif self.type == "cpu_use_percent":
            self._state = value["quicklook"]["cpu"]
        elif self.type == "docker_active":
            count = 0
            try:
                for container in value["docker"]["containers"]:
                    if (
                        container["Status"] == "running"
                        or "Up" in container["Status"]
                    ):
                        count += 1
                self._state = count
            except KeyError:
                self._state = count
        elif self.type == "docker_cpu_use":
            cpu_use = 0.0
            try:
                for container in value["docker"]["containers"]:
                    if (
                        container["Status"] == "running"
                        or "Up" in container["Status"]
                    ):
                        cpu_use += container["cpu"]["total"]
                self._state = round(cpu_use, 1)
            except KeyError:
                self._state = STATE_UNAVAILABLE
        elif self.type == "docker_memory_use":
            mem_use = 0.0
            try:
                for container in value["docker"]["containers"]:
                    if (
                        container["Status"] == "running"
                        or "Up" in container["Status"]
                    ):
                        mem_use += container["memory"]["usage"]
                self._state = round(mem_use / 1024 ** 2, 1)
            except KeyError:
                self._state = STATE_UNAVAILABLE
|
from homeassistant.components.risco.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_PIN, CONF_USERNAME
from tests.async_mock import PropertyMock, patch
from tests.common import MockConfigEntry
TEST_CONFIG = {
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_PIN: "1234",
}
TEST_SITE_UUID = "test-site-uuid"
TEST_SITE_NAME = "test-site-name"
async def setup_risco(hass, options=None):
    """Set up a Risco integration for testing."""
    config_entry = MockConfigEntry(
        domain=DOMAIN, data=TEST_CONFIG, options=options or {}
    )
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.risco.RiscoAPI.login",
return_value=True,
), patch(
"homeassistant.components.risco.RiscoAPI.site_uuid",
new_callable=PropertyMock(return_value=TEST_SITE_UUID),
), patch(
"homeassistant.components.risco.RiscoAPI.site_name",
new_callable=PropertyMock(return_value=TEST_SITE_NAME),
), patch(
"homeassistant.components.risco.RiscoAPI.close"
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry
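# Illustrative use from a test body:
#     config_entry = await setup_risco(hass)
#     assert config_entry.data[CONF_USERNAME] == TEST_CONFIG[CONF_USERNAME]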
|
from flexx import event
class Temperature(event.Component):
""" Temperature object with a settable prop for both Celcius and
Fahrenheit.
"""
c = event.FloatProp(doc='Temperature in degrees Celcius')
f = event.FloatProp(doc='Temperature in degrees Fahrenheit')
@event.action
def set_c(self, t):
t = float(t)
self._mutate_c(t)
self._mutate_f(t * 1.8 + 32)
@event.action
def set_f(self, t):
t = float(t)
self._mutate_f(t)
self._mutate_c((t - 32) / 1.8)
@event.reaction
def on_temp_change(self):
# This gets called once with two events when either C or F is changed.
        print(' temp in Celsius: %1.1f C' % self.c)
print(' temp in Fahrenheit: %1.1f F' % self.f)
t = Temperature()
print('Water is freezing:')
t.set_c(0)
event.loop.iter()
print('Average annual temp in California:')
t.set_f(59.4)
event.loop.iter()
|
from pygal import StackedLine
def test_stacked_line():
"""Test stacked line"""
stacked = StackedLine()
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_reverse():
"""Test stack from top stacked line"""
stacked = StackedLine(stack_from_top=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('11 (+1)', '14 (+2)', '10', '12')
)
def test_stacked_line_log():
"""Test logarithmic stacked line"""
stacked = StackedLine(logarithmic=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_interpolate():
"""Test interpolated stacked line"""
stacked = StackedLine(interpolate='cubic')
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
|
import numpy as np
from scipy.stats import rankdata
class CornerScore(object):
@staticmethod
def get_scores(cat_word_counts, not_cat_word_counts):
pos = CornerScore.get_scores_for_category(cat_word_counts, not_cat_word_counts)
neg = CornerScore.get_scores_for_category(not_cat_word_counts, cat_word_counts)
scores = CornerScore._balance_scores(pos, neg)
return scores
@staticmethod
def _balance_scores(cat_scores, not_cat_scores):
scores = np.zeros(len(cat_scores))
scores[cat_scores < not_cat_scores] \
= np.sqrt(2) - cat_scores[cat_scores < not_cat_scores]
scores[not_cat_scores < cat_scores] \
= -(np.sqrt(2) - not_cat_scores[not_cat_scores < cat_scores])
return ((scores / np.sqrt(2)) + 1.) / 2
@staticmethod
def get_scores_for_category(cat_word_counts, not_cat_word_counts):
cat_pctls = CornerScore._get_percentiles_from_freqs(cat_word_counts)
not_cat_pctls = CornerScore._get_percentiles_from_freqs(not_cat_word_counts)
return CornerScore._distance_from_upper_left(cat_pctls, not_cat_pctls)
@staticmethod
def _distance_from_upper_left(cat_pctls, not_cat_pctls):
return np.linalg.norm(np.array([1, 0]) - np.array(list(zip(cat_pctls, not_cat_pctls))),
axis=1)
@staticmethod
def _get_percentiles_from_freqs(freqs):
return rankdata(freqs) * 1. / len(freqs)
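# Quick sanity check with hypothetical counts: a word frequent in the category
# but rare outside it scores above 0.5, and vice versa, e.g.
#     CornerScore.get_scores(np.array([10, 1]), np.array([1, 10]))
# returns roughly array([0.82, 0.18]).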
|
import logging
from typing import Any, Callable, List, Optional
import attr
from homeassistant.components.switch import (
DEVICE_CLASS_SWITCH,
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from . import HuaweiLteBaseEntity
from .const import DOMAIN, KEY_DIALUP_MOBILE_DATASWITCH
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up from config entry."""
router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]]
switches: List[Entity] = []
if router.data.get(KEY_DIALUP_MOBILE_DATASWITCH):
switches.append(HuaweiLteMobileDataSwitch(router))
async_add_entities(switches, True)
@attr.s
class HuaweiLteBaseSwitch(HuaweiLteBaseEntity, SwitchEntity):
"""Huawei LTE switch device base class."""
key: str
item: str
_raw_state: Optional[str] = attr.ib(init=False, default=None)
def _turn(self, state: bool) -> None:
raise NotImplementedError
def turn_on(self, **kwargs: Any) -> None:
"""Turn switch on."""
self._turn(state=True)
def turn_off(self, **kwargs: Any) -> None:
"""Turn switch off."""
self._turn(state=False)
@property
def device_class(self) -> str:
"""Return device class."""
return DEVICE_CLASS_SWITCH
async def async_added_to_hass(self) -> None:
"""Subscribe to needed data on add."""
await super().async_added_to_hass()
self.router.subscriptions[self.key].add(f"{SWITCH_DOMAIN}/{self.item}")
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from needed data on remove."""
await super().async_will_remove_from_hass()
self.router.subscriptions[self.key].remove(f"{SWITCH_DOMAIN}/{self.item}")
async def async_update(self) -> None:
"""Update state."""
try:
value = self.router.data[self.key][self.item]
except KeyError:
_LOGGER.debug("%s[%s] not in data", self.key, self.item)
self._available = False
return
self._available = True
self._raw_state = str(value)
@attr.s
class HuaweiLteMobileDataSwitch(HuaweiLteBaseSwitch):
"""Huawei LTE mobile data switch device."""
def __attrs_post_init__(self) -> None:
"""Initialize identifiers."""
self.key = KEY_DIALUP_MOBILE_DATASWITCH
self.item = "dataswitch"
@property
def _entity_name(self) -> str:
return "Mobile data"
@property
def _device_unique_id(self) -> str:
return f"{self.key}.{self.item}"
@property
def is_on(self) -> bool:
"""Return whether the switch is on."""
return self._raw_state == "1"
def _turn(self, state: bool) -> None:
value = 1 if state else 0
self.router.client.dial_up.set_mobile_dataswitch(dataswitch=value)
self._raw_state = str(value)
self.schedule_update_ha_state()
@property
def icon(self) -> str:
"""Return switch icon."""
return "mdi:signal" if self.is_on else "mdi:signal-off"
|
import json
import os
import tempfile
import homeassistant.components.command_line.switch as command_line
import homeassistant.components.switch as switch
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
async def test_state_none(hass):
"""Test with none state."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "switch_status")
test_switch = {
"command_on": f"echo 1 > {path}",
"command_off": f"echo 0 > {path}",
}
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "command_line",
"switches": {"test": test_switch},
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_ON == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
async def test_state_value(hass):
"""Test with state value."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "switch_status")
test_switch = {
"command_state": f"cat {path}",
"command_on": f"echo 1 > {path}",
"command_off": f"echo 0 > {path}",
"value_template": '{{ value=="1" }}',
}
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "command_line",
"switches": {"test": test_switch},
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_ON == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
async def test_state_json_value(hass):
"""Test with state JSON value."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "switch_status")
oncmd = json.dumps({"status": "ok"})
offcmd = json.dumps({"status": "nope"})
test_switch = {
"command_state": f"cat {path}",
"command_on": f"echo '{oncmd}' > {path}",
"command_off": f"echo '{offcmd}' > {path}",
"value_template": '{{ value_json.status=="ok" }}',
}
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "command_line",
"switches": {"test": test_switch},
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_ON == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
async def test_state_code(hass):
"""Test with state code."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "switch_status")
test_switch = {
"command_state": f"cat {path}",
"command_on": f"echo 1 > {path}",
"command_off": f"echo 0 > {path}",
}
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "command_line",
"switches": {"test": test_switch},
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert STATE_OFF == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_ON == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.test"},
blocking=True,
)
state = hass.states.get("switch.test")
assert STATE_ON == state.state
def test_assumed_state_should_be_true_if_command_state_is_none(hass):
"""Test with state value."""
    # args: hass, device_name, friendly_name, command_on, command_off,
    # command_state, value_template, timeout
init_args = [
hass,
"test_device_name",
"Test friendly name!",
"echo 'on command'",
"echo 'off command'",
None,
None,
15,
]
no_state_device = command_line.CommandSwitch(*init_args)
assert no_state_device.assumed_state
# Set state command
init_args[-3] = "cat {}"
state_device = command_line.CommandSwitch(*init_args)
assert not state_device.assumed_state
def test_entity_id_set_correctly(hass):
"""Test that entity_id is set correctly from object_id."""
init_args = [
hass,
"test_device_name",
"Test friendly name!",
"echo 'on command'",
"echo 'off command'",
False,
None,
15,
]
test_switch = command_line.CommandSwitch(*init_args)
assert test_switch.entity_id == "switch.test_device_name"
assert test_switch.name == "Test friendly name!"
|
from aiopvapi.resources.shade import ATTR_TYPE
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
DEVICE_FIRMWARE,
DEVICE_MAC_ADDRESS,
DEVICE_MODEL,
DEVICE_NAME,
DEVICE_SERIAL_NUMBER,
DOMAIN,
FIRMWARE_BUILD,
FIRMWARE_IN_SHADE,
FIRMWARE_REVISION,
FIRMWARE_SUB_REVISION,
MANUFACTURER,
)
class HDEntity(CoordinatorEntity):
"""Base class for hunter douglas entities."""
def __init__(self, coordinator, device_info, unique_id):
"""Initialize the entity."""
super().__init__(coordinator)
self._unique_id = unique_id
self._device_info = device_info
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def device_info(self):
"""Return the device_info of the device."""
firmware = self._device_info[DEVICE_FIRMWARE]
sw_version = f"{firmware[FIRMWARE_REVISION]}.{firmware[FIRMWARE_SUB_REVISION]}.{firmware[FIRMWARE_BUILD]}"
return {
"identifiers": {(DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER])},
"connections": {
(dr.CONNECTION_NETWORK_MAC, self._device_info[DEVICE_MAC_ADDRESS])
},
"name": self._device_info[DEVICE_NAME],
"model": self._device_info[DEVICE_MODEL],
"sw_version": sw_version,
"manufacturer": MANUFACTURER,
}
class ShadeEntity(HDEntity):
"""Base class for hunter douglas shade entities."""
def __init__(self, coordinator, device_info, shade, shade_name):
"""Initialize the shade."""
super().__init__(coordinator, device_info, shade.id)
self._shade_name = shade_name
self._shade = shade
@property
def device_info(self):
"""Return the device_info of the device."""
device_info = {
"identifiers": {(DOMAIN, self._shade.id)},
"name": self._shade_name,
"manufacturer": MANUFACTURER,
"via_device": (DOMAIN, self._device_info[DEVICE_SERIAL_NUMBER]),
}
if FIRMWARE_IN_SHADE not in self._shade.raw_data:
return device_info
firmware = self._shade.raw_data[FIRMWARE_IN_SHADE]
sw_version = f"{firmware[FIRMWARE_REVISION]}.{firmware[FIRMWARE_SUB_REVISION]}.{firmware[FIRMWARE_BUILD]}"
model = self._shade.raw_data[ATTR_TYPE]
for shade in self._shade.shade_types:
if shade.shade_type == model:
model = shade.description
break
device_info["sw_version"] = sw_version
device_info["model"] = model
return device_info
|
from homeassistant.components.plex.const import (
CONF_SERVER,
CONF_SERVER_IDENTIFIER,
DOMAIN,
PLEX_SERVER_CONFIG,
SERVICE_REFRESH_LIBRARY,
SERVICE_SCAN_CLIENTS,
)
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
CONF_TOKEN,
CONF_URL,
CONF_VERIFY_SSL,
)
from .const import MOCK_SERVERS, MOCK_TOKEN
from .mock_classes import MockPlexLibrarySection
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_refresh_library(hass, mock_plex_server, setup_plex_server):
"""Test refresh_library service call."""
# Test with non-existent server
with patch.object(MockPlexLibrarySection, "update") as mock_update:
assert await hass.services.async_call(
DOMAIN,
SERVICE_REFRESH_LIBRARY,
{"server_name": "Not a Server", "library_name": "Movies"},
True,
)
assert not mock_update.called
# Test with non-existent library
with patch.object(MockPlexLibrarySection, "update") as mock_update:
assert await hass.services.async_call(
DOMAIN,
SERVICE_REFRESH_LIBRARY,
{"library_name": "Not a Library"},
True,
)
assert not mock_update.called
# Test with valid library
with patch.object(MockPlexLibrarySection, "update") as mock_update:
assert await hass.services.async_call(
DOMAIN,
SERVICE_REFRESH_LIBRARY,
{"library_name": "Movies"},
True,
)
assert mock_update.called
# Add a second configured server
entry_2 = MockConfigEntry(
domain=DOMAIN,
data={
CONF_SERVER: MOCK_SERVERS[1][CONF_SERVER],
PLEX_SERVER_CONFIG: {
CONF_TOKEN: MOCK_TOKEN,
CONF_URL: f"https://{MOCK_SERVERS[1][CONF_HOST]}:{MOCK_SERVERS[1][CONF_PORT]}",
CONF_VERIFY_SSL: True,
},
CONF_SERVER_IDENTIFIER: MOCK_SERVERS[1][CONF_SERVER_IDENTIFIER],
},
)
await setup_plex_server(config_entry=entry_2)
# Test multiple servers available but none specified
with patch.object(MockPlexLibrarySection, "update") as mock_update:
assert await hass.services.async_call(
DOMAIN,
SERVICE_REFRESH_LIBRARY,
{"library_name": "Movies"},
True,
)
assert not mock_update.called
async def test_scan_clients(hass, mock_plex_server):
"""Test scan_for_clients service call."""
assert await hass.services.async_call(
DOMAIN,
SERVICE_SCAN_CLIENTS,
blocking=True,
)
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import nfs_service
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_DEFAULT_NFS_TIER = 'foo'
class _DemoNfsService(nfs_service.BaseNfsService):
CLOUD = 'mock'
NFS_TIERS = (_DEFAULT_NFS_TIER,)
def __init__(self, disk_spec, zone):
super(_DemoNfsService, self).__init__(disk_spec, zone)
self.is_ready_called = False
def _IsReady(self):
return True
def GetRemoteAddress(self):
return 'remote1'
def _Create(self):
pass
def _Delete(self):
pass
class _DemoNfsServiceWithDefaultNfsVersion(_DemoNfsService):
CLOUD = 'mock2'
DEFAULT_NFS_VERSION = '4.1'
class NfsServiceTest(pkb_common_test_case.PkbCommonTestCase):
def _SetFlags(self, nfs_tier=''):
FLAGS['default_timeout'].parse(10)
FLAGS['nfs_tier'].parse(nfs_tier)
def _NewNfsResource(self, nfs_tier=''):
self._SetFlags(nfs_tier=nfs_tier)
return _DemoNfsService(disk.BaseDiskSpec('test_component'), 'us-west1-a')
def testNewNfsResource(self):
nfs = self._NewNfsResource(_DEFAULT_NFS_TIER)
self.assertEqual(_DEFAULT_NFS_TIER, nfs.nfs_tier)
self.assertIsNone(nfs.DEFAULT_NFS_VERSION)
def testNewNfsResourceBadNfsTier(self):
with self.assertRaises(errors.Config.InvalidValue):
self._NewNfsResource('NonExistentNfsTier')
def testNewNfsResourceNfsTierNotSet(self):
nfs = self._NewNfsResource()
self.assertIsNone(nfs.nfs_tier)
def testRegistry(self):
nfs_class = nfs_service.GetNfsServiceClass(_DemoNfsService.CLOUD)
self.assertEqual(_DemoNfsService, nfs_class)
def testCreateNfsDisk(self):
nfs = self._NewNfsResource()
nfs_disk = nfs.CreateNfsDisk()
self.assertEqual('remote1:/', nfs_disk.device_path)
self.assertIsNone(nfs_disk.nfs_version)
def testDefaultNfsVersion(self):
self._SetFlags()
nfs = _DemoNfsServiceWithDefaultNfsVersion(
disk.BaseDiskSpec('test_component'), 'us-west1-a')
nfs_disk = nfs.CreateNfsDisk()
self.assertEqual('4.1', nfs_disk.nfs_version)
class UnmanagedNfsServiceTest(pkb_common_test_case.PkbCommonTestCase):
def _setUpDiskSpec(self):
disk_spec = disk.BaseDiskSpec('test_disk_spec')
disk_spec.device_path = '/test_dir'
self.disk_spec = disk_spec
def _setUpMockServerVm(self):
self.mock_server_vm = mock.Mock(internal_ip='1.1.1.1')
self.mock_server_vm.RemoteCommand.return_value = None, None, None
def setUp(self):
super(UnmanagedNfsServiceTest, self).setUp()
self._setUpDiskSpec()
self._setUpMockServerVm()
self.nfs_service = nfs_service.UnmanagedNfsService(self.disk_spec,
self.mock_server_vm)
def testNewUnmanagedNfsService(self):
self.assertIsNotNone(self.nfs_service)
self.assertIsNotNone(self.nfs_service.server_vm)
self.assertIsNotNone(self.nfs_service.disk_spec)
self.assertEqual(self.nfs_service.server_directory,
self.disk_spec.device_path)
def testCreateNfsDisk(self):
nfs_disk = self.nfs_service.CreateNfsDisk()
self.assertEqual(nfs_disk.device_path, '1.1.1.1:/test_dir')
def testGetRemoteAddress(self):
self.assertEqual(self.nfs_service.GetRemoteAddress(), '1.1.1.1')
if __name__ == '__main__':
unittest.main()
|
from homeassistant.components import logbook
from homeassistant.components.homekit.const import (
ATTR_DISPLAY_NAME,
ATTR_VALUE,
DOMAIN as DOMAIN_HOMEKIT,
EVENT_HOMEKIT_CHANGED,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.components.logbook.test_init import MockLazyEventPartialState
async def test_humanify_homekit_changed_event(hass, hk_driver):
"""Test humanifying HomeKit changed event."""
hass.config.components.add("recorder")
with patch("homeassistant.components.homekit.HomeKit"):
assert await async_setup_component(hass, "homekit", {"homekit": {}})
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
event1, event2 = list(
logbook.humanify(
hass,
[
MockLazyEventPartialState(
EVENT_HOMEKIT_CHANGED,
{
ATTR_ENTITY_ID: "lock.front_door",
ATTR_DISPLAY_NAME: "Front Door",
ATTR_SERVICE: "lock",
},
),
MockLazyEventPartialState(
EVENT_HOMEKIT_CHANGED,
{
ATTR_ENTITY_ID: "cover.window",
ATTR_DISPLAY_NAME: "Window",
ATTR_SERVICE: "set_cover_position",
ATTR_VALUE: 75,
},
),
],
entity_attr_cache,
{},
)
)
assert event1["name"] == "HomeKit"
assert event1["domain"] == DOMAIN_HOMEKIT
assert event1["message"] == "send command lock for Front Door"
assert event1["entity_id"] == "lock.front_door"
assert event2["name"] == "HomeKit"
assert event2["domain"] == DOMAIN_HOMEKIT
assert event2["message"] == "send command set_cover_position to 75 for Window"
assert event2["entity_id"] == "cover.window"
|
from pygal.adapters import none_to_zero, positive
from pygal.graph.graph import Graph
from pygal.util import alter, cut, decorate
class Treemap(Graph):
"""Treemap graph class"""
_adapters = [positive, none_to_zero]
def _rect(self, serie, serie_node, rects, val, x, y, w, h, i):
rx, ry = self.view((x, y))
rw, rh = self.view((x + w, y + h))
rw -= rx
rh -= ry
metadata = serie.metadata.get(i)
val = self._format(serie, i)
rect = decorate(
self.svg, self.svg.node(rects, class_="rect"), metadata
)
alter(
self.svg.node(
rect,
'rect',
x=rx,
y=ry,
width=rw,
height=rh,
class_='rect reactive tooltip-trigger'
), metadata
)
self._tooltip_data(
rect, val, rx + rw / 2, ry + rh / 2, 'centered',
self._get_x_label(i)
)
self._static_value(serie_node, val, rx + rw / 2, ry + rh / 2, metadata)
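    # _binary_tree recursively splits the data into two groups of roughly equal
    # weight, slicing the current rectangle along its longer side, so each value
    # ends up with an area proportional to its share of the total.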
def _binary_tree(self, data, total, x, y, w, h, parent=None):
if total == 0:
return
if len(data) == 1:
if parent:
i, datum = data[0]
serie, serie_node, rects = parent
self._rect(serie, serie_node, rects, datum, x, y, w, h, i)
else:
datum = data[0]
serie_node = self.svg.serie(datum)
self._binary_tree(
list(enumerate(datum.values)), total, x, y, w, h, (
datum, serie_node,
self.svg.node(serie_node['plot'], class_="rects")
)
)
return
midpoint = total / 2
pivot_index = 1
running_sum = 0
for i, elt in enumerate(data):
if running_sum >= midpoint:
pivot_index = i
break
running_sum += elt[1] if parent else sum(elt.values)
half1 = data[:pivot_index]
half2 = data[pivot_index:]
if parent:
half1_sum = sum(cut(half1, 1))
half2_sum = sum(cut(half2, 1))
else:
half1_sum = sum(map(sum, map(lambda x: x.values, half1)))
half2_sum = sum(map(sum, map(lambda x: x.values, half2)))
pivot_pct = half1_sum / total
if h > w:
y_pivot = pivot_pct * h
self._binary_tree(half1, half1_sum, x, y, w, y_pivot, parent)
self._binary_tree(
half2, half2_sum, x, y + y_pivot, w, h - y_pivot, parent
)
else:
x_pivot = pivot_pct * w
self._binary_tree(half1, half1_sum, x, y, x_pivot, h, parent)
self._binary_tree(
half2, half2_sum, x + x_pivot, y, w - x_pivot, h, parent
)
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
pass
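    # The view box below is sized so that w * h == total while w / h matches
    # the drawable area's aspect ratio; one unit of value maps to one unit of
    # view area.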
def _plot(self):
total = sum(map(sum, map(lambda x: x.values, self.series)))
if total == 0:
return
gw = self.width - self.margin_box.x
gh = self.height - self.margin_box.y
self.view.box.xmin = self.view.box.ymin = x = y = 0
self.view.box.xmax = w = (total * gw / gh)**.5
self.view.box.ymax = h = total / w
self.view.box.fix()
self._binary_tree(self.series, total, x, y, w, h)
|
import hashlib
from typing import Optional
from asyncio import sleep # noqa
__all__ = (
'sleep',
'peer2str',
'transport_channel_id',
)
def transport_channel_id(transport, is_server: bool, channel_id_type: Optional[str] = None) -> bytes:
"""
Application-layer user authentication protocols are vulnerable to generic
credential forwarding attacks, where an authentication credential sent by
a client C to a server M may then be used by M to impersonate C at another
server S. To prevent such credential forwarding attacks, modern authentication
protocols rely on channel bindings. For example, WAMP-cryptosign can use
the tls-unique channel identifier provided by the TLS layer to strongly bind
authentication credentials to the underlying channel, so that a credential
received on one TLS channel cannot be forwarded on another.
:param transport: The asyncio TLS transport to extract the TLS channel ID from.
:param is_server: Flag indicating the transport is for a server.
:param channel_id_type: TLS channel ID type, currently only "tls-unique" is supported.
:returns: The TLS channel id (32 bytes).
"""
if channel_id_type is None:
return b'\x00' * 32
if channel_id_type not in ['tls-unique']:
raise Exception("invalid channel ID type {}".format(channel_id_type))
ssl_obj = transport.get_extra_info('ssl_object')
if ssl_obj is None:
raise Exception("TLS transport channel_id for tls-unique requested, but ssl_obj not found on transport")
if not hasattr(ssl_obj, 'get_channel_binding'):
raise Exception("TLS transport channel_id for tls-unique requested, but get_channel_binding not found on ssl_obj")
# https://python.readthedocs.io/en/latest/library/ssl.html#ssl.SSLSocket.get_channel_binding
# https://tools.ietf.org/html/rfc5929.html
tls_finished_msg = ssl_obj.get_channel_binding(cb_type='tls-unique')
m = hashlib.sha256()
m.update(tls_finished_msg)
channel_id = m.digest()
return channel_id
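# A minimal usage sketch (illustrative; assumes an established asyncio TLS
# transport bound to the name ``transport``):
#
#     channel_id = transport_channel_id(transport, is_server=False,
#                                       channel_id_type='tls-unique')
#     assert len(channel_id) == 32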
def peer2str(peer):
if isinstance(peer, tuple):
ip_ver = 4 if len(peer) == 2 else 6
return "tcp{2}:{0}:{1}".format(peer[0], peer[1], ip_ver)
elif isinstance(peer, str):
return "unix:{0}".format(peer)
else:
return "?:{0}".format(peer)
def get_serializers():
    from autobahn.wamp import serializer
    candidates = ['CBORSerializer', 'MsgPackSerializer', 'UBJSONSerializer', 'JsonSerializer']
    # Keep only the serializers actually available in this installation.
    return [getattr(serializer, name) for name in candidates if hasattr(serializer, name)]
|
from __future__ import print_function
def main(command, fullname=False):
global _stash
rt = globals()['_stash'].runtime
try:
filename = rt.find_script_file(command)
if not fullname:
filename = _stash.libcore.collapseuser(filename)
print(filename)
except Exception:
pass
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('command', help='name of the command to be located')
ap.add_argument('-f', '--fullname', action='store_true', help='show full path')
ns = ap.parse_args()
main(ns.command, ns.fullname)
|
import pytest
from bs4 import BeautifulSoup
from django.core.mail import EmailMessage
from django.urls import reverse
from django.utils.dateparse import parse_datetime
from django.utils.timezone import datetime
from post_office.models import Email
from shop.models.cart import CartItemModel
from shop.models.order import OrderModel, OrderItemModel
from shop.models.delivery import DeliveryModel, DeliveryItemModel
from shop.models.notification import Notify
from shop.views.checkout import CheckoutViewSet
from shop.views.order import OrderView
@pytest.fixture(name='order')
@pytest.mark.django_db
def test_purchase(api_rf, empty_cart, commodity_factory, notification_factory):
# fill the cart
product = commodity_factory()
CartItemModel.objects.create(cart=empty_cart, product=product, product_code=product.product_code, quantity=1)
product = commodity_factory()
CartItemModel.objects.create(cart=empty_cart, product=product, product_code=product.product_code, quantity=3)
assert empty_cart.num_items == 2
data = {
'payment_method': {
'payment_modifier': 'forward-fund-payment',
'plugin_order': 1,
},
'shipping_method': {
'shipping_modifier': 'self-collection',
'plugin_order': 2,
}
}
# add notification on an order awaiting payment
notification_factory(
transition_target='awaiting_payment',
notify=Notify.CUSTOMER,
recipient=empty_cart.customer.user,
)
# select the payment method
request = api_rf.put('/shop/api/checkout/upload', data=data, format='json')
request.user = empty_cart.customer.user
request.customer = empty_cart.customer
response = CheckoutViewSet.as_view({'put': 'upload'})(request)
assert response.status_code == 200
# perform the purchase
request = api_rf.post('/shop/api/checkout/purchase')
request.user = empty_cart.customer.user
request.customer = empty_cart.customer
assert request.customer.orders.count() == 0
empty_cart.update(request)
response = CheckoutViewSet.as_view({'post': 'purchase'})(request)
assert response.status_code == 200
assert 'expression' in response.data
assert request.customer.orders.count() == 1
order = request.customer.orders.first()
assert order.items.count() == 2
assert order.total == empty_cart.total
assert order.subtotal == empty_cart.subtotal
assert order.extra['payment_modifier'] == 'forward-fund-payment'
assert order.extra['shipping_modifier'] == 'self-collection'
assert empty_cart.items.count() == 0
# check that a confirmation email has been queued
email = Email.objects.first()
assert email is not None
message = email.email_message()
assert isinstance(message, EmailMessage)
assert product.product_name in message.body
assert "Subtotal: {}".format(order.subtotal) in message.body
assert "Total: {}".format(order.total) in message.body
return order
@pytest.mark.django_db
def test_addendum(api_rf, order):
data = {'annotation': "client comment"}
request = api_rf.post('/pages/order', data=data, format='json')
request.customer = order.customer
response = OrderView.as_view(many=False)(request, slug=order.get_number(), secret=order.secret)
assert response.status_code == 200
order = OrderModel.objects.get(slug=response.data['number'])
addendum = order.extra.get('addendum')
assert isinstance(addendum, list)
assert isinstance(parse_datetime(addendum[0][0]), datetime)
assert addendum[0][1] == "client comment"
@pytest.fixture(name='paid_order')
@pytest.mark.django_db
def test_add_forward_fund(admin_client, order):
assert order.status == 'awaiting_payment'
url = reverse('admin:testshop_order_change', args=(order.pk,))
response = admin_client.get(url)
assert response.status_code == 200
data = _extract_form_data(response.content)
# add a row for an Order payment
half_total = order.total / 2
data.update({
'orderpayment_set-TOTAL_FORMS': '1',
'orderpayment_set-0-amount': str(half_total.as_decimal()),
'orderpayment_set-0-transaction_id': 'payment-tx-id-1',
'orderpayment_set-0-payment_method': 'forward-fund-payment',
'_save': 'Save',
})
response = admin_client.post(url, data)
assert response.status_code == 302
order.prepayment_deposited() # mark as partially paid
assert order.status == 'awaiting_payment'
assert order.is_fully_paid() is False
assert order.amount_paid == half_total
# reload admin page for Order
response = admin_client.get(url)
assert response.status_code == 200
data = _extract_form_data(response.content)
# add a row for the second half of the payment
half_total = order.total - half_total.__class__(half_total.as_decimal())
data.update({
'orderpayment_set-TOTAL_FORMS': '2',
'orderpayment_set-1-amount': str(half_total.as_decimal()),
'orderpayment_set-1-transaction_id': 'payment-tx-id-2',
'orderpayment_set-1-payment_method': 'forward-fund-payment',
'_save': "Save",
})
response = admin_client.post(url, data)
assert response.status_code == 302
order.prepayment_deposited() # mark as fully paid
assert order.status == 'prepayment_deposited'
assert order.is_fully_paid() is True
assert order.amount_paid >= order.total
return order
@pytest.mark.django_db
def test_fulfill_order_partially(admin_client, paid_order):
assert paid_order.status == 'prepayment_deposited'
url = reverse('admin:testshop_order_change', args=(paid_order.pk,))
response = admin_client.get(url)
assert response.status_code == 200
data = _extract_form_data(response.content)
assert int(data['items-TOTAL_FORMS']) == 2
data.update({
'_fsmtransition-status-pick_goods': "Pick the goods",
})
response = admin_client.post(url, data)
assert response.status_code == 302
assert response.url == url
response = admin_client.get(url)
assert response.status_code == 200
order = OrderModel.objects.get(pk=paid_order.pk)
assert not DeliveryModel.objects.filter(order=order).exists()
assert order.status == 'pick_goods'
data = _extract_form_data(response.content)
assert int(data['items-TOTAL_FORMS']) == 2
assert int(data['items-0-deliver_quantity']) == 1
assert int(data['items-1-deliver_quantity']) == 3
data.update({
'items-0-canceled': 'checked',
'items-1-deliver_quantity': '1',
'_fsmtransition-status-pack_goods': "Pack the goods",
})
response = admin_client.post(url, data)
assert response.status_code == 302
assert response.url == url
order = OrderModel.objects.get(pk=paid_order.pk)
assert DeliveryModel.objects.filter(order=order).count() == 1
delivery = DeliveryModel.objects.filter(order=order).first()
order_item = OrderItemModel.objects.get(pk=data['items-0-id'])
assert order_item.canceled is True
order_item = OrderItemModel.objects.get(pk=data['items-1-id'])
assert order_item.canceled is False
assert DeliveryItemModel.objects.filter(delivery=delivery).count() == 1
delivery_item = DeliveryItemModel.objects.filter(delivery=delivery).first()
    assert delivery_item.item_id == order_item.id
    assert delivery_item.quantity == 1
response = admin_client.get(url)
assert response.status_code == 200
assert order.status == 'pack_goods'
data = _extract_form_data(response.content)
assert int(data['delivery_set-TOTAL_FORMS']) == 1
data.update({
'delivery_set-0-shipping_id': 'A1',
'_fsmtransition-status-ship_goods': "Ship the goods",
})
response = admin_client.post(url, data)
assert response.status_code == 302
assert response.url == url
delivery.refresh_from_db()
assert delivery.get_number() == order.get_number() + " / 1"
assert delivery.shipping_id == 'A1'
response = admin_client.get(url)
assert response.status_code == 200
order = OrderModel.objects.get(pk=paid_order.pk)
assert order.status == 'ready_for_delivery'
data = _extract_form_data(response.content)
data.update({
'_fsmtransition-status-pick_goods': "Pick the goods",
})
response = admin_client.post(url, data)
assert response.status_code == 302
response = admin_client.get(url)
assert response.status_code == 200
order = OrderModel.objects.get(pk=order.pk)
assert order.status == 'pick_goods'
data = _extract_form_data(response.content)
assert int(data['items-TOTAL_FORMS']) == 2
assert data['items-0-canceled'] == 'checked'
assert int(data['items-0-deliver_quantity']) == 1
assert int(data['items-1-deliver_quantity']) == 2
data.update({
'_fsmtransition-status-pack_goods': "Pack the goods",
})
response = admin_client.post(url, data)
assert response.status_code == 302
assert DeliveryModel.objects.filter(order=order).count() == 2
delivery = DeliveryModel.objects.filter(order=order).last()
assert delivery.get_number() == order.get_number() + " / 2"
assert DeliveryItemModel.objects.filter(delivery=delivery).count() == 1
delivery_item = DeliveryItemModel.objects.filter(delivery=delivery).first()
    assert delivery_item.item_id == order_item.id
    assert delivery_item.quantity == 2
def _extract_form_data(html_content):
data = {}
soup = BeautifulSoup(html_content, 'html.parser')
for input_field in soup.form.find_all(['input', 'textarea']):
name = input_field.attrs['name']
if not name.startswith('_'):
if input_field.attrs['type'] == 'checkbox':
value = 'checked' if 'checked' in input_field.attrs else ''
else:
value = input_field.attrs.get('value', '')
data.update({name: value})
for select_field in soup.form.find_all('select'):
name = select_field.attrs['name']
for option in select_field.find_all('option'):
if 'selected' in option.attrs:
data.update({name: option.attrs.get('value', '')})
break
return data
|
import pickle
from heapq import heappush
from time import time
from unittest.mock import Mock
from kombu.clocks import LamportClock, timetuple
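# kombu's LamportClock.adjust(received) applies the standard Lamport rule
# value = max(local, received) + 1, which the assertions below depend on.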
class test_LamportClock:
def test_clocks(self):
c1 = LamportClock()
c2 = LamportClock()
c1.forward()
c2.forward()
c1.forward()
c1.forward()
c2.adjust(c1.value)
assert c2.value == c1.value + 1
assert repr(c1)
c2_val = c2.value
c2.forward()
c2.forward()
c2.adjust(c1.value)
assert c2.value == c2_val + 2 + 1
c1.adjust(c2.value)
assert c1.value == c2.value + 1
def test_sort(self):
c = LamportClock()
pid1 = 'a.example.com:312'
pid2 = 'b.example.com:311'
events = []
m1 = (c.forward(), pid1)
heappush(events, m1)
m2 = (c.forward(), pid2)
heappush(events, m2)
m3 = (c.forward(), pid1)
heappush(events, m3)
m4 = (30, pid1)
heappush(events, m4)
m5 = (30, pid2)
heappush(events, m5)
assert str(c) == str(c.value)
assert c.sort_heap(events) == m1
assert c.sort_heap([m4, m5]) == m4
assert c.sort_heap([m4, m5, m1]) == m4
class test_timetuple:
def test_repr(self):
x = timetuple(133, time(), 'id', Mock())
assert repr(x)
def test_pickleable(self):
x = timetuple(133, time(), 'id', 'obj')
assert pickle.loads(pickle.dumps(x)) == tuple(x)
def test_order(self):
t1 = time()
t2 = time() + 300 # windows clock not reliable
a = timetuple(133, t1, 'A', 'obj')
b = timetuple(140, t1, 'A', 'obj')
assert a.__getnewargs__()
assert a.clock == 133
assert a.timestamp == t1
assert a.id == 'A'
assert a.obj == 'obj'
assert a <= b
assert b >= a
assert (timetuple(134, time(), 'A', 'obj').__lt__(tuple()) is
NotImplemented)
assert timetuple(134, t2, 'A', 'obj') > timetuple(133, t1, 'A', 'obj')
assert timetuple(134, t1, 'B', 'obj') > timetuple(134, t1, 'A', 'obj')
assert (timetuple(None, t2, 'B', 'obj') >
timetuple(None, t1, 'A', 'obj'))
|
import logging
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from homeassistant.const import PERCENTAGE, STATE_OFF, TEMP_CELSIUS, TIME_HOURS
from homeassistant.helpers.entity import Entity
from . import DYSON_DEVICES
SENSOR_UNITS = {
"air_quality": None,
"dust": None,
"filter_life": TIME_HOURS,
"humidity": PERCENTAGE,
}
SENSOR_ICONS = {
"air_quality": "mdi:fan",
"dust": "mdi:cloud",
"filter_life": "mdi:filter-outline",
"humidity": "mdi:water-percent",
"temperature": "mdi:thermometer",
}
DYSON_SENSOR_DEVICES = "dyson_sensor_devices"
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dyson Sensors."""
if discovery_info is None:
return
hass.data.setdefault(DYSON_SENSOR_DEVICES, [])
unit = hass.config.units.temperature_unit
devices = hass.data[DYSON_SENSOR_DEVICES]
# Get Dyson Devices from parent component
device_ids = [device.unique_id for device in hass.data[DYSON_SENSOR_DEVICES]]
new_entities = []
for device in hass.data[DYSON_DEVICES]:
if isinstance(device, DysonPureCool):
if f"{device.serial}-temperature" not in device_ids:
new_entities.append(DysonTemperatureSensor(device, unit))
if f"{device.serial}-humidity" not in device_ids:
new_entities.append(DysonHumiditySensor(device))
elif isinstance(device, DysonPureCoolLink):
new_entities.append(DysonFilterLifeSensor(device))
new_entities.append(DysonDustSensor(device))
new_entities.append(DysonHumiditySensor(device))
new_entities.append(DysonTemperatureSensor(device, unit))
new_entities.append(DysonAirQualitySensor(device))
if not new_entities:
return
    devices.extend(new_entities)
    # Only register the newly created entities; previously discovered ones are
    # already registered with Home Assistant.
    add_entities(new_entities)
class DysonSensor(Entity):
"""Representation of a generic Dyson sensor."""
def __init__(self, device, sensor_type):
"""Create a new generic Dyson sensor."""
self._device = device
self._old_value = None
self._name = None
self._sensor_type = sensor_type
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._device.add_message_listener(self.on_message)
def on_message(self, message):
"""Handle new messages which are received from the fan."""
# Prevent refreshing if not needed
if self._old_value is None or self._old_value != self.state:
_LOGGER.debug("Message received for %s device: %s", self.name, message)
self._old_value = self.state
self.schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the Dyson sensor name."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return SENSOR_UNITS[self._sensor_type]
@property
def icon(self):
"""Return the icon for this sensor."""
return SENSOR_ICONS[self._sensor_type]
@property
def unique_id(self):
"""Return the sensor's unique id."""
return f"{self._device.serial}-{self._sensor_type}"
class DysonFilterLifeSensor(DysonSensor):
"""Representation of Dyson Filter Life sensor (in hours)."""
def __init__(self, device):
"""Create a new Dyson Filter Life sensor."""
super().__init__(device, "filter_life")
self._name = f"{self._device.name} Filter Life"
@property
def state(self):
"""Return filter life in hours."""
if self._device.state:
return int(self._device.state.filter_life)
return None
class DysonDustSensor(DysonSensor):
"""Representation of Dyson Dust sensor (lower is better)."""
def __init__(self, device):
"""Create a new Dyson Dust sensor."""
super().__init__(device, "dust")
self._name = f"{self._device.name} Dust"
@property
def state(self):
"""Return Dust value."""
if self._device.environmental_state:
return self._device.environmental_state.dust
return None
class DysonHumiditySensor(DysonSensor):
"""Representation of Dyson Humidity sensor."""
def __init__(self, device):
"""Create a new Dyson Humidity sensor."""
super().__init__(device, "humidity")
self._name = f"{self._device.name} Humidity"
@property
def state(self):
"""Return Humidity value."""
if self._device.environmental_state:
if self._device.environmental_state.humidity == 0:
return STATE_OFF
return self._device.environmental_state.humidity
return None
class DysonTemperatureSensor(DysonSensor):
"""Representation of Dyson Temperature sensor."""
def __init__(self, device, unit):
"""Create a new Dyson Temperature sensor."""
super().__init__(device, "temperature")
self._name = f"{self._device.name} Temperature"
self._unit = unit
@property
def state(self):
"""Return Temperature value."""
if self._device.environmental_state:
temperature_kelvin = self._device.environmental_state.temperature
if temperature_kelvin == 0:
return STATE_OFF
if self._unit == TEMP_CELSIUS:
return float(f"{(temperature_kelvin - 273.15):.1f}")
return float(f"{(temperature_kelvin * 9 / 5 - 459.67):.1f}")
return None
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
class DysonAirQualitySensor(DysonSensor):
"""Representation of Dyson Air Quality sensor (lower is better)."""
def __init__(self, device):
"""Create a new Dyson Air Quality sensor."""
super().__init__(device, "air_quality")
self._name = f"{self._device.name} AQI"
@property
def state(self):
"""Return Air Quality value."""
if self._device.environmental_state:
return int(self._device.environmental_state.volatil_organic_compounds)
return None
|
import numpy as np
from scipy.sparse import csr_matrix
import pandas as pd
from scattertext.TermDocMatrix import TermDocMatrix
from scattertext.indexstore import IndexStore, IndexStoreFromList
class DimensionMismatchException(Exception):
pass
class TermDocMatrixFromFrequencies(object):
'''
A factory class for building a TermDocMatrix from a set of term-category frequencies.
Note: the TermDocMatrix will assume that only K documents exist, where
K is the number of categories.
>>> from scattertext import TermDocMatrixFromFrequencies
>>> from pandas import DataFrame
>>> term_freq_df = DataFrame({
... 'term': ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'],
... 'A': [6, 3, 3, 3, 5, 0, 0],
... 'B': [6, 3, 3, 3, 5, 1, 1],
... }).set_index('term')[['A', 'B']]
>>> term_doc_mat = TermDocMatrixFromFrequencies(term_freq_df).build()
>>> term_doc_mat.get_categories()
['A', 'B']
>>> term_doc_mat.get_terms()
['a', 'a b', 'a c', 'c', 'b', 'e b', 'e']
'''
def __init__(self,
term_freq_df,
unigram_frequency_path=None):
'''
Parameters
----------
term_freq_df: DataFrame
Indexed on term, columns are counts per category
unigram_frequency_path: str (see TermDocMatrix)
'''
self.term_freq_df = term_freq_df
self.unigram_frequency_path = unigram_frequency_path
def build(self):
'''
Returns
-------
TermDocMatrix
'''
constructor_kwargs = self._get_build_kwargs()
return TermDocMatrix(
**constructor_kwargs
)
def _get_build_kwargs(self):
constructor_kwargs = {
'X': csr_matrix(self.term_freq_df.values.T),
'mX': csr_matrix((0, 0)),
'y': np.array(range(len(self.term_freq_df.columns))),
'term_idx_store': IndexStoreFromList.build(self.term_freq_df.index.values),
'metadata_idx_store': IndexStore(),
'category_idx_store': IndexStoreFromList.build(self.term_freq_df.columns),
'unigram_frequency_path': self.unigram_frequency_path
}
return constructor_kwargs
|
import diamond.collector
from diamond import convertor
class NtpCollector(diamond.collector.ProcessCollector):
def get_default_config_help(self):
config_help = super(NtpCollector, self).get_default_config_help()
config_help.update({
'bin': 'Path to ntpdate binary',
'ntp_pool': 'NTP Pool address',
'precision': 'Number of decimal places to report to',
'time_scale': 'Time unit to report offset in',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NtpCollector, self).get_default_config()
config.update({
'bin': self.find_binary('/usr/sbin/ntpdate'),
'ntp_pool': 'pool.ntp.org',
'path': 'ntp',
'precision': 0,
'time_scale': 'milliseconds',
})
return config
def get_ntpdate_stats(self):
output = self.run_command(['-q', self.config['ntp_pool']])
data = {'server.count': {'val': 0, 'precision': 0}}
for line in output[0].splitlines():
# Only care about best choice not all servers
if line.startswith('server'):
data['server.count']['val'] += 1
continue
parts = line.split()
# Make sure we have the correct output
# Sample of line: 31 Apr 12:00:00 ntpdate[12345]: adjust time \
# server 123.456.789.2 offset -0.000123 sec
if len(parts) != 11:
self.log.error('NtpCollector: Output of ntpdate was %s words '
'long but was expected to be 11' % len(parts))
self.log.debug('NtpCollector: ntpdate output was %s' % parts)
continue
            # offset is reported in seconds
offset_in_s = float(parts[9])
# Convert to the requested time unit
offset = convertor.time.convert(offset_in_s,
's',
self.config['time_scale'])
# Determine metric namespace based on given time unit
metric_name = 'offset.%s' % self.config['time_scale']
data[metric_name] = {'val': offset,
'precision': self.config['precision']}
return data.items()
def collect(self):
for stat, v in self.get_ntpdate_stats():
self.publish(stat, v['val'], precision=v['precision'])
|
import asyncio
from collections import deque
import io
from typing import Any, Callable, List
from aiohttp import web
import attr
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import callback
from homeassistant.helpers.event import async_call_later
from homeassistant.util.decorator import Registry
from .const import ATTR_STREAMS, DOMAIN, MAX_SEGMENTS
PROVIDERS = Registry()
@attr.s
class StreamBuffer:
"""Represent a segment."""
segment: io.BytesIO = attr.ib()
output = attr.ib() # type=av.OutputContainer
vstream = attr.ib() # type=av.VideoStream
astream = attr.ib(default=None) # type=Optional[av.AudioStream]
@attr.s
class Segment:
"""Represent a segment."""
sequence: int = attr.ib()
segment: io.BytesIO = attr.ib()
duration: float = attr.ib()
class StreamOutput:
"""Represents a stream output."""
def __init__(self, stream, timeout: int = 300) -> None:
"""Initialize a stream output."""
self.idle = False
self.timeout = timeout
self._stream = stream
self._cursor = None
self._event = asyncio.Event()
self._segments = deque(maxlen=MAX_SEGMENTS)
self._unsub = None
@property
def name(self) -> str:
"""Return provider name."""
return None
@property
def format(self) -> str:
"""Return container format."""
return None
@property
def audio_codecs(self) -> str:
"""Return desired audio codecs."""
return None
@property
def video_codecs(self) -> tuple:
"""Return desired video codecs."""
return None
@property
def container_options(self) -> Callable[[int], dict]:
"""Return Callable which takes a sequence number and returns container options."""
return None
@property
def segments(self) -> List[int]:
"""Return current sequence from segments."""
return [s.sequence for s in self._segments]
@property
def target_duration(self) -> int:
"""Return the max duration of any given segment in seconds."""
segment_length = len(self._segments)
if not segment_length:
return 1
durations = [s.duration for s in self._segments]
return round(max(durations)) or 1
def get_segment(self, sequence: int = None) -> Any:
"""Retrieve a specific segment, or the whole list."""
self.idle = False
# Reset idle timeout
if self._unsub is not None:
self._unsub()
self._unsub = async_call_later(self._stream.hass, self.timeout, self._timeout)
if not sequence:
return self._segments
for segment in self._segments:
if segment.sequence == sequence:
return segment
return None
async def recv(self) -> Segment:
"""Wait for and retrieve the latest segment."""
last_segment = max(self.segments, default=0)
if self._cursor is None or self._cursor <= last_segment:
await self._event.wait()
if not self._segments:
return None
segment = self.get_segment()[-1]
self._cursor = segment.sequence
return segment
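    # put() is the producer side of recv(): each stored segment wakes waiting
    # consumers via the asyncio.Event, and a None segment is the end-of-stream
    # sentinel that triggers provider cleanup.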
@callback
def put(self, segment: Segment) -> None:
"""Store output."""
# Start idle timeout when we start receiving data
if self._unsub is None:
self._unsub = async_call_later(
self._stream.hass, self.timeout, self._timeout
)
if segment is None:
self._event.set()
# Cleanup provider
if self._unsub is not None:
self._unsub()
self.cleanup()
return
self._segments.append(segment)
self._event.set()
self._event.clear()
@callback
def _timeout(self, _now=None):
"""Handle stream timeout."""
self._unsub = None
if self._stream.keepalive:
self.idle = True
self._stream.check_idle()
else:
self.cleanup()
def cleanup(self):
"""Handle cleanup."""
self._segments = deque(maxlen=MAX_SEGMENTS)
self._stream.remove_provider(self)
class StreamView(HomeAssistantView):
"""
Base StreamView.
For implementation of a new stream format, define `url` and `name`
attributes, and implement `handle` method in a child class.
"""
requires_auth = False
platform = None
async def get(self, request, token, sequence=None):
"""Start a GET request."""
hass = request.app["hass"]
stream = next(
(
s
for s in hass.data[DOMAIN][ATTR_STREAMS].values()
if s.access_token == token
),
None,
)
if not stream:
raise web.HTTPNotFound()
# Start worker if not already started
stream.start()
return await self.handle(request, stream, sequence)
async def handle(self, request, stream, sequence):
"""Handle the stream request."""
raise NotImplementedError()
|
from pygal import formatters
from pygal._compat import u
def test_human_readable():
"""Test human_readable formatter"""
f = formatters.human_readable
assert f(1) == '1'
assert f(1.) == '1'
assert f(10) == '10'
assert f(12.5) == '12.5'
assert f(1000) == '1k'
assert f(5000) == '5k'
assert f(100000) == '100k'
assert f(1253) == '1.253k'
assert f(1250) == '1.25k'
assert f(0.1) == '100m'
assert f(0.01) == '10m'
assert f(0.001) == '1m'
assert f(0.002) == '2m'
assert f(0.0025) == '2.5m'
assert f(0.0001) == u('100µ')
assert f(0.000123) == u('123µ')
assert f(0.00001) == u('10µ')
assert f(0.000001) == u('1µ')
assert f(0.0000001) == u('100n')
assert f(0.0000000001) == u('100p')
assert f(0) == '0'
assert f(0.) == '0'
assert f(-1337) == '-1.337k'
assert f(-.000000042) == '-42n'
def test_human_readable_custom():
"""Test human_readable formatter option"""
f = formatters.HumanReadable()
assert f(None) == u('∅')
f = formatters.HumanReadable(none_char='/')
assert f(None) == '/'
def test_significant():
"""Test significant formatter"""
f = formatters.significant
assert f(1) == '1'
assert f(1.) == '1'
assert f(-1.) == '-1'
assert f(10) == '10'
assert f(10000000000) == '1e+10'
assert f(100000000000) == '1e+11'
assert f(120000000000) == '1.2e+11'
assert f(.1) == '0.1'
assert f(.01) == '0.01'
assert f(.0000000001) == '1e-10'
assert f(-.0000000001) == '-1e-10'
assert f(.0000000001002) == '1.002e-10'
assert f(.0000000001002) == '1.002e-10'
assert f(.12345678912345) == '0.1234567891'
assert f(.012345678912345) == '0.01234567891'
assert f(12345678912345) == '1.234567891e+13'
|
import itertools
import csv
import os
import time
import optparse
import logging
import dedupe
import exampleIO
def canonicalImport(filename):
preProcess = exampleIO.preProcess
data_d = {}
with open(filename) as f:
reader = csv.DictReader(f)
for i, row in enumerate(reader):
clean_row = {k: preProcess(v) for (k, v) in
row.items()}
data_d[filename + str(i)] = clean_row
return data_d, reader.fieldnames
def evaluateDuplicates(found_dupes, true_dupes):
true_positives = found_dupes.intersection(true_dupes)
false_positives = found_dupes.difference(true_dupes)
    print('found duplicates')
print(len(found_dupes))
print('precision')
print(1 - len(false_positives) / float(len(found_dupes)))
print('recall')
print(len(true_positives) / float(len(true_dupes)))
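# Worked example (illustrative numbers only): with 8 found pairs of which
# 6 are true duplicates, and 10 known duplicate pairs overall, this reports
#   precision = 1 - 2/8 = 0.75   and   recall = 6/10 = 0.6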
if __name__ == '__main__':
optp = optparse.OptionParser()
optp.add_option('-v', '--verbose', dest='verbose', action='count',
help='Increase verbosity (specify multiple times for more)'
)
(opts, args) = optp.parse_args()
log_level = logging.WARNING
if opts.verbose:
if opts.verbose == 1:
log_level = logging.INFO
elif opts.verbose >= 2:
log_level = logging.DEBUG
logging.getLogger().setLevel(log_level)
settings_file = 'canonical_gazetteer_learned_settings'
data_1, header = canonicalImport('tests/datasets/restaurant-1.csv')
data_2, _ = canonicalImport('tests/datasets/restaurant-2.csv')
training_pairs = dedupe.training_data_link(data_1, data_2, 'unique_id', 5000)
all_data = data_1.copy()
all_data.update(data_2)
duplicates_s = set()
for _, pair in itertools.groupby(sorted(all_data.items(),
key=lambda x: x[1]['unique_id']),
key=lambda x: x[1]['unique_id']):
pair = list(pair)
if len(pair) == 2:
a, b = pair
duplicates_s.add(frozenset((a[0], b[0])))
t0 = time.time()
print('number of known duplicate pairs', len(duplicates_s))
if os.path.exists(settings_file):
with open(settings_file, 'rb') as f:
gazetteer = dedupe.StaticGazetteer(f)
else:
fields = [{'field': 'name', 'type': 'String'},
{'field': 'address', 'type': 'String'},
{'field': 'cuisine', 'type': 'String'},
{'field': 'city', 'type': 'String'}
]
gazetteer = dedupe.Gazetteer(fields)
gazetteer.prepare_training(data_1, data_2, sample_size=10000)
gazetteer.mark_pairs(training_pairs)
gazetteer.train()
with open(settings_file, 'wb') as f:
gazetteer.write_settings(f)
gazetteer.index(data_2)
gazetteer.unindex(data_2)
gazetteer.index(data_2)
# print candidates
print('clustering...')
results = gazetteer.search(
data_1, n_matches=1, generator=True)
print('Evaluate Clustering')
confirm_dupes_a = set(frozenset([a, b])
for a, result in results
for b, score in result)
evaluateDuplicates(confirm_dupes_a, duplicates_s)
|
from datetime import timedelta
import pytest
from homeassistant.components.modbus.const import CALL_TYPE_COIL, CONF_COILS
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import CONF_NAME, CONF_SLAVE, STATE_OFF, STATE_ON
from .conftest import run_base_read_test, setup_base_test
@pytest.mark.parametrize(
"regs,expected",
[
(
[0x00],
STATE_OFF,
),
(
[0x80],
STATE_OFF,
),
(
[0xFE],
STATE_OFF,
),
(
[0xFF],
STATE_ON,
),
(
[0x01],
STATE_ON,
),
],
)
async def test_coil_switch(hass, mock_hub, regs, expected):
"""Run test for given config."""
switch_name = "modbus_test_switch"
scan_interval = 5
entity_id, now, device = await setup_base_test(
switch_name,
hass,
mock_hub,
{
CONF_COILS: [
{CONF_NAME: switch_name, CALL_TYPE_COIL: 1234, CONF_SLAVE: 1},
]
},
SWITCH_DOMAIN,
scan_interval,
)
await run_base_read_test(
entity_id,
hass,
mock_hub,
CALL_TYPE_COIL,
regs,
expected,
now + timedelta(seconds=scan_interval + 1),
)
|
import urllib.parse
from cherrypy._cpcompat import text_or_bytes
import cherrypy
def expose(func=None, alias=None):
"""Expose the function or class.
Optionally provide an alias or set of aliases.
"""
def expose_(func):
func.exposed = True
if alias is not None:
if isinstance(alias, text_or_bytes):
parents[alias.replace('.', '_')] = func
else:
for a in alias:
parents[a.replace('.', '_')] = func
return func
import sys
import types
decoratable_types = types.FunctionType, types.MethodType, type,
if isinstance(func, decoratable_types):
if alias is None:
# @expose
func.exposed = True
return func
else:
# func = expose(func, alias)
parents = sys._getframe(1).f_locals
return expose_(func)
elif func is None:
if alias is None:
# @expose()
parents = sys._getframe(1).f_locals
return expose_
else:
# @expose(alias="alias") or
# @expose(alias=["alias1", "alias2"])
parents = sys._getframe(1).f_locals
return expose_
else:
# @expose("alias") or
# @expose(["alias1", "alias2"])
parents = sys._getframe(1).f_locals
alias = func
return expose_
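# Illustrative usage sketch (not part of the original module; the class and
# handler names are hypothetical):
#
#     class Root:
#         @expose
#         def index(self):
#             return 'hello'
#
#         @expose(['alt', 'alias2'])
#         def page(self):
#             return 'also reachable as /alt and /alias2'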
def popargs(*args, **kwargs):
"""Decorate _cp_dispatch.
(cherrypy.dispatch.Dispatcher.dispatch_method_name)
Optional keyword argument: handler=(Object or Function)
Provides a _cp_dispatch function that pops off path segments into
cherrypy.request.params under the names specified. The dispatch
is then forwarded on to the next vpath element.
Note that any existing (and exposed) member function of the class that
popargs is applied to will override that value of the argument. For
instance, if you have a method named "list" on the class decorated with
popargs, then accessing "/list" will call that function instead of popping
it off as the requested parameter. This restriction applies to all
_cp_dispatch functions. The only way around this restriction is to create
a "blank class" whose only function is to provide _cp_dispatch.
If there are path elements after the arguments, or more arguments
are requested than are available in the vpath, then the 'handler'
keyword argument specifies the next object to handle the parameterized
request. If handler is not specified or is None, then self is used.
If handler is a function rather than an instance, then that function
will be called with the args specified and the return value from that
function used as the next object INSTEAD of adding the parameters to
    cherrypy.request.params.
This decorator may be used in one of two ways:
As a class decorator:
.. code-block:: python
@cherrypy.popargs('year', 'month', 'day')
class Blog:
def index(self, year=None, month=None, day=None):
#Process the parameters here; any url like
#/, /2009, /2009/12, or /2009/12/31
#will fill in the appropriate parameters.
def create(self):
#This link will still be available at /create.
#Defined functions take precedence over arguments.
Or as a member of a class:
.. code-block:: python
class Blog:
_cp_dispatch = cherrypy.popargs('year', 'month', 'day')
#...
The handler argument may be used to mix arguments with built in functions.
For instance, the following setup allows different activities at the
day, month, and year level:
.. code-block:: python
class DayHandler:
def index(self, year, month, day):
#Do something with this day; probably list entries
def delete(self, year, month, day):
#Delete all entries for this day
@cherrypy.popargs('day', handler=DayHandler())
class MonthHandler:
def index(self, year, month):
#Do something with this month; probably list entries
def delete(self, year, month):
#Delete all entries for this month
@cherrypy.popargs('month', handler=MonthHandler())
class YearHandler:
def index(self, year):
#Do something with this year
#...
@cherrypy.popargs('year', handler=YearHandler())
class Root:
def index(self):
#...
"""
# Since keyword arg comes after *args, we have to process it ourselves
# for lower versions of python.
handler = None
handler_call = False
for k, v in kwargs.items():
if k == 'handler':
handler = v
else:
tm = "cherrypy.popargs() got an unexpected keyword argument '{0}'"
raise TypeError(tm.format(k))
import inspect
if handler is not None \
and (hasattr(handler, '__call__') or inspect.isclass(handler)):
handler_call = True
def decorated(cls_or_self=None, vpath=None):
if inspect.isclass(cls_or_self):
# cherrypy.popargs is a class decorator
cls = cls_or_self
name = cherrypy.dispatch.Dispatcher.dispatch_method_name
setattr(cls, name, decorated)
return cls
# We're in the actual function
self = cls_or_self
parms = {}
for arg in args:
if not vpath:
break
parms[arg] = vpath.pop(0)
if handler is not None:
if handler_call:
return handler(**parms)
else:
cherrypy.request.params.update(parms)
return handler
cherrypy.request.params.update(parms)
# If we are the ultimate handler, then to prevent our _cp_dispatch
# from being called again, we will resolve remaining elements through
# getattr() directly.
if vpath:
return getattr(self, vpath.pop(0), None)
else:
return self
return decorated
def url(path='', qs='', script_name=None, base=None, relative=None):
"""Create an absolute URL for the given path.
If 'path' starts with a slash ('/'), this will return
(base + script_name + path + qs).
If it does not start with a slash, this returns
(base + script_name [+ request.path_info] + path + qs).
If script_name is None, cherrypy.request will be used
to find a script_name, if available.
If base is None, cherrypy.request.base will be used (if available).
Note that you can use cherrypy.tools.proxy to change this.
Finally, note that this function can be used to obtain an absolute URL
for the current request path (minus the querystring) by passing no args.
If you call url(qs=cherrypy.request.query_string), you should get the
original browser URL (assuming no internal redirections).
If relative is None or not provided, request.app.relative_urls will
be used (if available, else False). If False, the output will be an
absolute URL (including the scheme, host, vhost, and script_name).
If True, the output will instead be a URL that is relative to the
current request path, perhaps including '..' atoms. If relative is
the string 'server', the output will instead be a URL that is
relative to the server root; i.e., it will start with a slash.
"""
if isinstance(qs, (tuple, list, dict)):
qs = urllib.parse.urlencode(qs)
if qs:
qs = '?' + qs
if cherrypy.request.app:
if not path.startswith('/'):
# Append/remove trailing slash from path_info as needed
# (this is to support mistyped URL's without redirecting;
# if you want to redirect, use tools.trailing_slash).
pi = cherrypy.request.path_info
if cherrypy.request.is_index is True:
if not pi.endswith('/'):
pi = pi + '/'
elif cherrypy.request.is_index is False:
if pi.endswith('/') and pi != '/':
pi = pi[:-1]
if path == '':
path = pi
else:
path = urllib.parse.urljoin(pi, path)
if script_name is None:
script_name = cherrypy.request.script_name
if base is None:
base = cherrypy.request.base
newurl = base + script_name + normalize_path(path) + qs
else:
# No request.app (we're being called outside a request).
# We'll have to guess the base from server.* attributes.
# This will produce very different results from the above
# if you're using vhosts or tools.proxy.
if base is None:
base = cherrypy.server.base()
path = (script_name or '') + path
newurl = base + normalize_path(path) + qs
# At this point, we should have a fully-qualified absolute URL.
if relative is None:
relative = getattr(cherrypy.request.app, 'relative_urls', False)
# See http://www.ietf.org/rfc/rfc2396.txt
if relative == 'server':
# "A relative reference beginning with a single slash character is
# termed an absolute-path reference, as defined by <abs_path>..."
# This is also sometimes called "server-relative".
newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
elif relative:
# "A relative reference that does not begin with a scheme name
# or a slash character is termed a relative-path reference."
old = url(relative=False).split('/')[:-1]
new = newurl.split('/')
while old and new:
a, b = old[0], new[0]
if a != b:
break
old.pop(0)
new.pop(0)
new = (['..'] * len(old)) + new
newurl = '/'.join(new)
return newurl
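# Illustrative behaviour sketch (assumes a request for /app/page with base
# http://host and script_name /app; values are for orientation only):
#   url('/other')                 -> 'http://host/app/other'
#   url('sub', qs='a=1')          -> 'http://host/app/sub?a=1'
#   url('/x', relative='server')  -> '/app/x'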
def normalize_path(path):
"""Resolve given path from relative into absolute form."""
if './' not in path:
return path
# Normalize the URL by removing ./ and ../
atoms = []
for atom in path.split('/'):
if atom == '.':
pass
elif atom == '..':
# Don't pop from empty list
# (i.e. ignore redundant '..')
if atoms:
atoms.pop()
elif atom:
atoms.append(atom)
newpath = '/'.join(atoms)
# Preserve leading '/'
if path.startswith('/'):
newpath = '/' + newpath
return newpath
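# Behaviour examples for normalize_path (illustrative):
#   normalize_path('/a/./b')     -> '/a/b'
#   normalize_path('/a/b/../c')  -> '/a/c'
#   normalize_path('a/../../b')  -> 'b'   (a redundant '..' is dropped)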
####
# Inlined from jaraco.classes 1.4.3
# Ref #1673
class _ClassPropertyDescriptor(object):
"""Descript for read-only class-based property.
Turns a classmethod-decorated func into a read-only property of that class
type (means the value cannot be set).
"""
def __init__(self, fget, fset=None):
"""Initialize a class property descriptor.
Instantiated by ``_helper.classproperty``.
"""
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
"""Return property value."""
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def classproperty(func): # noqa: D401; irrelevant for properties
"""Decorator like classmethod to implement a static class property."""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return _ClassPropertyDescriptor(func)
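# Illustrative usage sketch (names are hypothetical):
#
#     class Color:
#         _name = 'red'
#
#         @classproperty
#         def name(cls):
#             return cls._name
#
#     Color.name  # -> 'red'; read-only, since only fget is wired up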
####
|
from homeassistant.components.switch import SwitchEntity
from .account import StarlineAccount, StarlineDevice
from .const import DOMAIN
from .entity import StarlineEntity
SWITCH_TYPES = {
"ign": ["Engine", "mdi:engine-outline", "mdi:engine-off-outline"],
"webasto": ["Webasto", "mdi:radiator", "mdi:radiator-off"],
"out": [
"Additional Channel",
"mdi:access-point-network",
"mdi:access-point-network-off",
],
"poke": ["Horn", "mdi:bullhorn-outline", "mdi:bullhorn-outline"],
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the StarLine switch."""
account: StarlineAccount = hass.data[DOMAIN][entry.entry_id]
entities = []
for device in account.api.devices.values():
if device.support_state:
for key, value in SWITCH_TYPES.items():
switch = StarlineSwitch(account, device, key, *value)
if switch.is_on is not None:
entities.append(switch)
async_add_entities(entities)
class StarlineSwitch(StarlineEntity, SwitchEntity):
"""Representation of a StarLine switch."""
def __init__(
self,
account: StarlineAccount,
device: StarlineDevice,
key: str,
name: str,
icon_on: str,
icon_off: str,
):
"""Initialize the switch."""
super().__init__(account, device, key, name)
self._icon_on = icon_on
self._icon_off = icon_off
@property
def available(self):
"""Return True if entity is available."""
return super().available and self._device.online
@property
def device_state_attributes(self):
"""Return the state attributes of the switch."""
if self._key == "ign":
return self._account.engine_attrs(self._device)
return None
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon_on if self.is_on else self._icon_off
@property
def assumed_state(self):
"""Return True if unable to access real state of the entity."""
return True
@property
def is_on(self):
"""Return True if entity is on."""
if self._key == "poke":
return False
return self._device.car_state.get(self._key)
def turn_on(self, **kwargs):
"""Turn the entity on."""
self._account.api.set_car_state(self._device.device_id, self._key, True)
def turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
if self._key == "poke":
return
self._account.api.set_car_state(self._device.device_id, self._key, False)
|
import logging
import voluptuous as vol
from homeassistant.components.water_heater import (
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CONST_HVAC_HEAT,
CONST_MODE_AUTO,
CONST_MODE_HEAT,
CONST_MODE_OFF,
CONST_MODE_SMART_SCHEDULE,
CONST_OVERLAY_MANUAL,
CONST_OVERLAY_TADO_MODE,
CONST_OVERLAY_TIMER,
DATA,
DOMAIN,
SIGNAL_TADO_UPDATE_RECEIVED,
TYPE_HOT_WATER,
)
from .entity import TadoZoneEntity
_LOGGER = logging.getLogger(__name__)
MODE_AUTO = "auto"
MODE_HEAT = "heat"
MODE_OFF = "off"
OPERATION_MODES = [MODE_AUTO, MODE_HEAT, MODE_OFF]
WATER_HEATER_MAP_TADO = {
CONST_OVERLAY_MANUAL: MODE_HEAT,
CONST_OVERLAY_TIMER: MODE_HEAT,
CONST_OVERLAY_TADO_MODE: MODE_HEAT,
CONST_HVAC_HEAT: MODE_HEAT,
CONST_MODE_SMART_SCHEDULE: MODE_AUTO,
CONST_MODE_OFF: MODE_OFF,
}
SUPPORT_FLAGS_HEATER = SUPPORT_OPERATION_MODE
SERVICE_WATER_HEATER_TIMER = "set_water_heater_timer"
ATTR_TIME_PERIOD = "time_period"
WATER_HEATER_TIMER_SCHEMA = {
vol.Required(ATTR_TIME_PERIOD, default="01:00:00"): vol.All(
cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds()
),
vol.Optional(ATTR_TEMPERATURE): vol.Coerce(float),
}
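# Illustrative service call for the timer (YAML, for orientation only;
# the entity id below is hypothetical):
#
#     service: tado.set_water_heater_timer
#     data:
#       entity_id: water_heater.hot_water
#       time_period: "01:30:00"
#       temperature: 55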
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
"""Set up the Tado water heater platform."""
tado = hass.data[DOMAIN][entry.entry_id][DATA]
entities = await hass.async_add_executor_job(_generate_entities, tado)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_WATER_HEATER_TIMER,
WATER_HEATER_TIMER_SCHEMA,
"set_timer",
)
if entities:
async_add_entities(entities, True)
def _generate_entities(tado):
"""Create all water heater entities."""
entities = []
for zone in tado.zones:
if zone["type"] == TYPE_HOT_WATER:
entity = create_water_heater_entity(tado, zone["name"], zone["id"], zone)
entities.append(entity)
return entities
def create_water_heater_entity(tado, name: str, zone_id: int, zone: dict):
"""Create a Tado water heater device."""
capabilities = tado.get_capabilities(zone_id)
supports_temperature_control = capabilities["canSetTemperature"]
if supports_temperature_control and "temperatures" in capabilities:
temperatures = capabilities["temperatures"]
min_temp = float(temperatures["celsius"]["min"])
max_temp = float(temperatures["celsius"]["max"])
else:
min_temp = None
max_temp = None
entity = TadoWaterHeater(
tado,
name,
zone_id,
supports_temperature_control,
min_temp,
max_temp,
zone["devices"][0],
)
return entity
class TadoWaterHeater(TadoZoneEntity, WaterHeaterEntity):
"""Representation of a Tado water heater."""
def __init__(
self,
tado,
zone_name,
zone_id,
supports_temperature_control,
min_temp,
max_temp,
device_info,
):
"""Initialize of Tado water heater entity."""
self._tado = tado
super().__init__(zone_name, device_info, tado.device_id, zone_id)
self.zone_id = zone_id
self._unique_id = f"{zone_id} {tado.device_id}"
self._device_is_active = False
self._supports_temperature_control = supports_temperature_control
self._min_temperature = min_temp
self._max_temperature = max_temp
self._target_temp = None
self._supported_features = SUPPORT_FLAGS_HEATER
if self._supports_temperature_control:
self._supported_features |= SUPPORT_TARGET_TEMPERATURE
self._current_tado_hvac_mode = CONST_MODE_SMART_SCHEDULE
self._overlay_mode = CONST_MODE_SMART_SCHEDULE
self._tado_zone_data = None
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "zone", self.zone_id
),
self._async_update_callback,
)
)
self._async_update_data()
@property
def supported_features(self):
"""Return the list of supported features."""
return self._supported_features
@property
def name(self):
"""Return the name of the entity."""
return self.zone_name
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def current_operation(self):
"""Return current readable operation mode."""
return WATER_HEATER_MAP_TADO.get(self._current_tado_hvac_mode)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._tado_zone_data.target_temp
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return self._tado_zone_data.is_away
@property
def operation_list(self):
"""Return the list of available operation modes (readable)."""
return OPERATION_MODES
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def min_temp(self):
"""Return the minimum temperature."""
return self._min_temperature
@property
def max_temp(self):
"""Return the maximum temperature."""
return self._max_temperature
def set_operation_mode(self, operation_mode):
"""Set new operation mode."""
mode = None
if operation_mode == MODE_OFF:
mode = CONST_MODE_OFF
elif operation_mode == MODE_AUTO:
mode = CONST_MODE_SMART_SCHEDULE
elif operation_mode == MODE_HEAT:
mode = CONST_MODE_HEAT
self._control_heater(hvac_mode=mode)
def set_timer(self, time_period, temperature=None):
"""Set the timer on the entity, and temperature if supported."""
if not self._supports_temperature_control and temperature is not None:
temperature = None
self._control_heater(
hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period
)
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if not self._supports_temperature_control or temperature is None:
return
if self._current_tado_hvac_mode not in (
CONST_MODE_OFF,
CONST_MODE_AUTO,
CONST_MODE_SMART_SCHEDULE,
):
self._control_heater(target_temp=temperature)
return
self._control_heater(target_temp=temperature, hvac_mode=CONST_MODE_HEAT)
@callback
def _async_update_callback(self):
"""Load tado data and update state."""
self._async_update_data()
self.async_write_ha_state()
@callback
def _async_update_data(self):
"""Load tado data."""
_LOGGER.debug("Updating water_heater platform for zone %d", self.zone_id)
self._tado_zone_data = self._tado.data["zone"][self.zone_id]
self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode
def _control_heater(self, hvac_mode=None, target_temp=None, duration=None):
"""Send new target temperature."""
if hvac_mode:
self._current_tado_hvac_mode = hvac_mode
if target_temp:
self._target_temp = target_temp
# Set a target temperature if we don't have any
if self._target_temp is None:
self._target_temp = self.min_temp
if self._current_tado_hvac_mode == CONST_MODE_SMART_SCHEDULE:
_LOGGER.debug(
"Switching to SMART_SCHEDULE for zone %s (%d)",
self.zone_name,
self.zone_id,
)
self._tado.reset_zone_overlay(self.zone_id)
return
if self._current_tado_hvac_mode == CONST_MODE_OFF:
_LOGGER.debug(
"Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id
)
self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, TYPE_HOT_WATER)
return
overlay_mode = CONST_OVERLAY_MANUAL
if duration:
overlay_mode = CONST_OVERLAY_TIMER
elif self._tado.fallback:
# Fallback to Smart Schedule at next Schedule switch if we have fallback enabled
overlay_mode = CONST_OVERLAY_TADO_MODE
_LOGGER.debug(
"Switching to %s for zone %s (%d) with temperature %s",
self._current_tado_hvac_mode,
self.zone_name,
self.zone_id,
self._target_temp,
)
self._tado.set_zone_overlay(
zone_id=self.zone_id,
overlay_mode=overlay_mode,
temperature=self._target_temp,
duration=duration,
device_type=TYPE_HOT_WATER,
)
self._overlay_mode = self._current_tado_hvac_mode
|
try:
import simplejson as json
except ImportError:
import json
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
from flask import Flask
from flask import jsonify
from flasgger import Swagger
from flasgger import swag_from
app = Flask(__name__)
swag = Swagger(app)
@app.route("/officer/<int:priority>", methods=['POST'])
@swag_from("officer_specs.yml")
def create_officer(priority):
    return 'Request for officer creation successfully received' \
           ' (priority: {0:d})'.format(priority), HTTPStatus.OK
@app.route('/schema/<string:schema_id>', methods=['GET'])
def get_schema(schema_id):
"""
Test schema retrieval
This endpoint returns a schema known to Flasgger
---
tags:
- schema
produces:
- application/json
parameters:
- in: path
name: schema_id
type: string
description: schema_id to be retrieved
required: true
responses:
200:
description: The requested schema
"""
return jsonify(swag.get_schema(schema_id)), HTTPStatus.OK
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
response = client.get('/schema/officer')
assert response.status_code == HTTPStatus.OK
retrieved_schema = json.loads(response.data.decode('utf-8'))
actual_schema = specs_data['/apispec_1.json']['definitions']['Officer']
try:
assert retrieved_schema.viewitems() >= actual_schema.viewitems()
except AttributeError:
assert retrieved_schema.items() >= actual_schema.items()
if __name__ == "__main__":
app.run(debug=True)
|
import os
import re
import dateutil.tz
_cache_tz = None
def _try_tz_from_env():
tzenv = os.environ.get("TZ")
if tzenv and tzenv[0] == ":":
tzenv = tzenv[1:]
try:
        # gettz() returns None for unknown zone names rather than raising,
        # so only trust the TZ value if dateutil can actually resolve it.
        if tzenv and dateutil.tz.gettz(tzenv) is not None:
            return tzenv
except Exception:
pass
def _get_localzone(_root="/"):
"""Try to find the local timezone configuration.
The parameter _root makes the function look for files like /etc/localtime
beneath the _root directory. This is primarily used by the tests.
In normal usage you call the function without parameters.
"""
tzenv = _try_tz_from_env()
if tzenv:
return tzenv
# Are we under Termux on Android?
if os.path.exists("/system/bin/getprop"):
import subprocess
androidtz = (
subprocess.check_output(["getprop", "persist.sys.timezone"])
.strip()
.decode()
)
return androidtz
# Now look for distribution specific configuration files
# that contain the timezone name.
for configfile in ("etc/timezone", "var/db/zoneinfo"):
tzpath = os.path.join(_root, configfile)
try:
with open(tzpath, "rb") as tzfile:
data = tzfile.read()
# Issue #3 was that /etc/timezone was a zoneinfo file.
# That's a misconfiguration, but we need to handle it gracefully:
if data[:5] == b"TZif2":
continue
etctz = data.strip().decode()
if not etctz:
# Empty file, skip
continue
for etctz in data.decode().splitlines():
# Get rid of host definitions and comments:
if " " in etctz:
etctz, dummy = etctz.split(" ", 1)
if "#" in etctz:
etctz, dummy = etctz.split("#", 1)
if not etctz:
continue
tz = etctz.replace(" ", "_")
return tz
except IOError:
# File doesn't exist or is a directory
continue
# CentOS has a ZONE setting in /etc/sysconfig/clock,
# OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and
# Gentoo has a TIMEZONE setting in /etc/conf.d/clock
# We look through these files for a timezone:
zone_re = re.compile(r"\s*ZONE\s*=\s*\"")
timezone_re = re.compile(r"\s*TIMEZONE\s*=\s*\"")
end_re = re.compile('"')
for filename in ("etc/sysconfig/clock", "etc/conf.d/clock"):
tzpath = os.path.join(_root, filename)
try:
with open(tzpath, "rt") as tzfile:
data = tzfile.readlines()
for line in data:
# Look for the ZONE= setting.
match = zone_re.match(line)
if match is None:
# No ZONE= setting. Look for the TIMEZONE= setting.
match = timezone_re.match(line)
if match is not None:
# Some setting existed
line = line[match.end():]
etctz = line[: end_re.search(line).start()]
# We found a timezone
tz = etctz.replace(" ", "_")
return tz
except IOError:
# File doesn't exist or is a directory
continue
# systemd distributions use symlinks that include the zone name,
# see manpage of localtime(5) and timedatectl(1)
tzpath = os.path.join(_root, "etc/localtime")
if os.path.exists(tzpath) and os.path.islink(tzpath):
tzpath = os.path.realpath(tzpath)
start = tzpath.find("/") + 1
while start != 0:
tzpath = tzpath[start:]
try:
                # Accept the candidate only if dateutil recognises it
                # (gettz() returns None for unknown names).
                if dateutil.tz.gettz(tzpath) is not None:
                    return tzpath
except Exception:
pass
start = tzpath.find("/") + 1
# Nothing found, return UTC
return None
def get_localzone():
"""Get the computers configured local timezone, if any."""
global _cache_tz
if _cache_tz is None:
_cache_tz = _get_localzone()
return _cache_tz
def reload_localzone():
"""Reload the cached localzone. You need to call this if the timezone has changed."""
global _cache_tz
_cache_tz = _get_localzone()
return _cache_tz
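# Illustrative usage, not part of the original module. These helpers return a
# zone *name* (or None, meaning "fall back to UTC"), not a tzinfo object:
#
#     name = get_localzone()                          # e.g. 'Europe/Prague'
#     tzinfo = dateutil.tz.gettz(name) if name else dateutil.tz.UTC
#     reload_localzone()  # re-detect after the system timezone changed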
|
from django.test.utils import modify_settings
from django.urls import reverse
from weblate.trans.models import Change, Component, Project
from weblate.trans.tests.test_views import ViewTestCase
from weblate.trans.tests.utils import create_test_billing
class SettingsTest(ViewTestCase):
def test_project_denied(self):
url = reverse("settings", kwargs=self.kw_project)
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
response = self.client.post(url)
self.assertEqual(response.status_code, 404)
def test_project(self):
self.project.add_user(self.user, "@Administration")
self.project.component_set.update(license="MIT")
url = reverse("settings", kwargs=self.kw_project)
response = self.client.get(url)
self.assertContains(response, "Settings")
data = response.context["form"].initial
data["web"] = "https://example.com/test/"
response = self.client.post(url, data, follow=True)
self.assertContains(response, "Settings saved")
self.assertEqual(
Project.objects.get(pk=self.project.pk).web, "https://example.com/test/"
)
@modify_settings(INSTALLED_APPS={"append": "weblate.billing"})
def test_change_access(self):
self.project.add_user(self.user, "@Administration")
url = reverse("settings", kwargs=self.kw_project)
# Get initial form data
response = self.client.get(url)
data = response.context["form"].initial
data["access_control"] = Project.ACCESS_PROTECTED
# No permissions
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "error_1_id_access_control")
# Allow editing by creating billing plan
billing = create_test_billing(self.user)
billing.projects.add(self.project)
# Editing should now work, but components do not have a license
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "You must specify a license for these components")
# Set component license
self.project.component_set.update(license="MIT")
# Editing should now work
response = self.client.post(url, data, follow=True)
self.assertRedirects(response, url)
# Verify change has been done
project = Project.objects.get(pk=self.project.pk)
self.assertEqual(project.access_control, Project.ACCESS_PROTECTED)
self.assertTrue(
project.change_set.filter(action=Change.ACTION_ACCESS_EDIT).exists()
)
def test_component_denied(self):
url = reverse("settings", kwargs=self.kw_component)
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
response = self.client.post(url)
self.assertEqual(response.status_code, 404)
def test_component(self):
self.project.add_user(self.user, "@Administration")
url = reverse("settings", kwargs=self.kw_component)
response = self.client.get(url)
self.assertContains(response, "Settings")
data = {}
data.update(response.context["form"].initial)
data["license"] = "MIT"
data["enforced_checks"] = ["same", "duplicate"]
response = self.client.post(url, data, follow=True)
self.assertContains(response, "Settings saved")
component = Component.objects.get(pk=self.component.pk)
self.assertEqual(component.license, "MIT")
self.assertEqual(component.enforced_checks, ["same", "duplicate"])
|
import os
import tempfile
from typing import Tuple
from homeassistant.components import shell_command
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
def mock_process_creator(error: bool = False):
"""Mock a coroutine that creates a process when yielded."""
async def communicate() -> Tuple[bytes, bytes]:
"""Mock a coroutine that runs a process when yielded.
Returns a tuple of (stdout, stderr).
"""
return b"I am stdout", b"I am stderr"
mock_process = MagicMock()
mock_process.communicate = communicate
mock_process.returncode = int(error)
return mock_process
async def test_executing_service(hass):
"""Test if able to call a configured service."""
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "called.txt")
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": f"date > {path}"}},
)
await hass.async_block_till_done()
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
assert os.path.isfile(path)
async def test_config_not_dict(hass):
"""Test that setup fails if config is not a dict."""
assert not await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: ["some", "weird", "list"]},
)
async def test_config_not_valid_service_names(hass):
"""Test that setup fails if config contains invalid service names."""
assert not await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"this is invalid because space": "touch bla.txt"}},
)
@patch(
"homeassistant.components.shell_command.asyncio.subprocess"
".create_subprocess_shell"
)
async def test_template_render_no_template(mock_call, hass):
"""Ensure shell_commands without templates get rendered properly."""
mock_call.return_value = mock_process_creator(error=False)
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": "ls /bin"}},
)
await hass.async_block_till_done()
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
cmd = mock_call.mock_calls[0][1][0]
assert mock_call.call_count == 1
assert "ls /bin" == cmd
@patch(
"homeassistant.components.shell_command.asyncio.subprocess"
".create_subprocess_exec"
)
async def test_template_render(mock_call, hass):
"""Ensure shell_commands with templates get rendered properly."""
hass.states.async_set("sensor.test_state", "Works")
mock_call.return_value = mock_process_creator(error=False)
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{
shell_command.DOMAIN: {
"test_service": ("ls /bin {{ states.sensor.test_state.state }}")
}
},
)
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
cmd = mock_call.mock_calls[0][1]
assert mock_call.call_count == 1
assert ("ls", "/bin", "Works") == cmd
@patch(
"homeassistant.components.shell_command.asyncio.subprocess"
".create_subprocess_shell"
)
@patch("homeassistant.components.shell_command._LOGGER.error")
async def test_subprocess_error(mock_error, mock_call, hass):
"""Test subprocess that returns an error."""
mock_call.return_value = mock_process_creator(error=True)
with tempfile.TemporaryDirectory() as tempdirname:
path = os.path.join(tempdirname, "called.txt")
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": f"touch {path}"}},
)
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
assert mock_call.call_count == 1
assert mock_error.call_count == 1
assert not os.path.isfile(path)
@patch("homeassistant.components.shell_command._LOGGER.debug")
async def test_stdout_captured(mock_output, hass):
"""Test subprocess that has stdout."""
test_phrase = "I have output"
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": f"echo {test_phrase}"}},
)
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
assert mock_output.call_count == 1
assert test_phrase.encode() + b"\n" == mock_output.call_args_list[0][0][-1]
@patch("homeassistant.components.shell_command._LOGGER.debug")
async def test_stderr_captured(mock_output, hass):
"""Test subprocess that has stderr."""
test_phrase = "I have error"
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": f">&2 echo {test_phrase}"}},
)
await hass.services.async_call("shell_command", "test_service", blocking=True)
await hass.async_block_till_done()
assert mock_output.call_count == 1
assert test_phrase.encode() + b"\n" == mock_output.call_args_list[0][0][-1]
async def test_do_no_run_forever(hass, caplog):
"""Test subprocesses terminate after the timeout."""
with patch.object(shell_command, "COMMAND_TIMEOUT", 0.001):
assert await async_setup_component(
hass,
shell_command.DOMAIN,
{shell_command.DOMAIN: {"test_service": "sleep 10000"}},
)
await hass.async_block_till_done()
await hass.services.async_call(
shell_command.DOMAIN, "test_service", blocking=True
)
await hass.async_block_till_done()
assert "Timed out" in caplog.text
assert "sleep 10000" in caplog.text
|
import os.path
import subprocess
from base64 import b64decode
from email import message_from_string
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.http.response import HttpResponse, HttpResponseServerError
from django.shortcuts import redirect
from django.urls import reverse
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from weblate.auth.models import User
from weblate.gitexport.models import SUPPORTED_VCS
from weblate.gitexport.utils import find_git_http_backend
from weblate.utils.errors import report_error
from weblate.utils.views import get_component
def response_authenticate():
"""Return 401 response with authenticate header."""
response = HttpResponse(status=401)
response["WWW-Authenticate"] = 'Basic realm="Weblate Git access"'
return response
def authenticate(request, auth):
"""Perform authentication with HTTP Basic auth."""
try:
method, data = auth.split(None, 1)
if method.lower() == "basic":
username, code = b64decode(data).decode("iso-8859-1").split(":", 1)
try:
user = User.objects.get(username=username, auth_token__key=code)
except User.DoesNotExist:
return False
if not user.is_active:
return False
request.user = user
return True
return False
except (ValueError, TypeError):
return False
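# Illustrative note: the header checked above is standard HTTP Basic auth,
#   Authorization: Basic base64("<username>:<api-token>")
# i.e. the password slot carries the user's Weblate API token, which is
# matched against User.auth_token.key.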
@never_cache
@csrf_exempt
def git_export(request, project, component, path):
"""Git HTTP server view.
    Wrapper around git-http-backend to provide Git repository export over HTTP.
Performs permission checks and hands over execution to the wrapper.
"""
# Probably browser access
if not path:
return redirect(
"component", project=project, component=component, permanent=False
)
# Strip possible double path separators
path = path.lstrip("/\\")
# HTTP authentication
auth = request.META.get("HTTP_AUTHORIZATION", b"")
    # Reject non-pull access early
if request.GET.get("service", "") not in ("", "git-upload-pack"):
raise PermissionDenied("Only pull is supported")
if auth and not authenticate(request, auth):
return response_authenticate()
# Permissions
try:
obj = get_component(request, project, component)
except Http404:
if not request.user.is_authenticated:
return response_authenticate()
raise
if not request.user.has_perm("vcs.access", obj):
raise PermissionDenied("No VCS permissions")
if obj.vcs not in SUPPORTED_VCS:
raise Http404("Not a git repository")
if obj.is_repo_link:
kwargs = obj.linked_component.get_reverse_url_kwargs()
kwargs["path"] = path
return redirect(
"{}?{}".format(
reverse("git-export", kwargs=kwargs), request.META["QUERY_STRING"]
),
permanent=True,
)
return run_git_http(request, obj, path)
def run_git_http(request, obj, path):
"""Git HTTP backend execution wrapper."""
# Find Git HTTP backend
git_http_backend = find_git_http_backend()
if git_http_backend is None:
return HttpResponseServerError("git-http-backend not found")
# Invoke Git HTTP backend
query = request.META.get("QUERY_STRING", "")
process_env = {
"REQUEST_METHOD": request.method,
"PATH_TRANSLATED": os.path.join(obj.full_path, path),
"GIT_HTTP_EXPORT_ALL": "1",
"CONTENT_TYPE": request.META.get("CONTENT_TYPE", ""),
"QUERY_STRING": query,
"HTTP_CONTENT_ENCODING": request.META.get("HTTP_CONTENT_ENCODING", ""),
}
process = subprocess.Popen(
[git_http_backend],
env=process_env,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output, output_err = process.communicate(request.body)
retcode = process.poll()
# Log error
if output_err:
output_err = output_err.decode()
try:
raise Exception(
"Git http backend error: {}".format(output_err.splitlines()[0])
)
except Exception:
report_error(cause="Git backend failure")
# Handle failure
if retcode:
return HttpResponseServerError(output_err)
headers, content = output.split(b"\r\n\r\n", 1)
message = message_from_string(headers.decode())
# Handle status in response
if "status" in message:
return HttpResponse(status=int(message["status"].split()[0]))
# Send content
response = HttpResponse(content_type=message["content-type"])
response.write(content)
return response
|
import numpy as np
import os.path as op
from .._digitization import _artemis123_read_pos
from ...utils import logger
from ...transforms import rotation3d_align_z_axis
def _load_mne_locs(fname=None):
"""Load MNE locs structure from file (if exists) or recreate it."""
    if not fname:
# find input file
resource_dir = op.join(op.dirname(op.abspath(__file__)), 'resources')
fname = op.join(resource_dir, 'Artemis123_mneLoc.csv')
if not op.exists(fname):
raise IOError('MNE locs file "%s" does not exist' % (fname))
logger.info('Loading mne loc file {}'.format(fname))
locs = dict()
with open(fname, 'r') as fid:
for line in fid:
vals = line.strip().split(',')
            locs[vals[0]] = np.array(vals[1:], np.float64)
return locs
def _generate_mne_locs_file(output_fname):
"""Generate mne coil locs and save to supplied file."""
logger.info('Converting Tristan coil file to mne loc file...')
resource_dir = op.join(op.dirname(op.abspath(__file__)), 'resources')
chan_fname = op.join(resource_dir, 'Artemis123_ChannelMap.csv')
chans = _load_tristan_coil_locs(chan_fname)
# compute a dict of loc structs
locs = {n: _compute_mne_loc(cinfo) for n, cinfo in chans.items()}
# write it out to output_fname
with open(output_fname, 'w') as fid:
for n in sorted(locs.keys()):
fid.write('%s,' % n)
fid.write(','.join(locs[n].astype(str)))
fid.write('\n')
def _load_tristan_coil_locs(coil_loc_path):
"""Load the Coil locations from Tristan CAD drawings."""
channel_info = dict()
with open(coil_loc_path, 'r') as fid:
# skip 2 Header lines
fid.readline()
fid.readline()
for line in fid:
line = line.strip()
vals = line.split(',')
channel_info[vals[0]] = dict()
if vals[6]:
channel_info[vals[0]]['inner_coil'] = \
np.array(vals[2:5], np.float64)
channel_info[vals[0]]['outer_coil'] = \
np.array(vals[5:8], np.float64)
else: # nothing supplied
channel_info[vals[0]]['inner_coil'] = np.zeros(3)
channel_info[vals[0]]['outer_coil'] = np.zeros(3)
return channel_info
def _compute_mne_loc(coil_loc):
"""Convert a set of coils to an mne Struct.
Note input coil locations are in inches.
"""
    loc = np.zeros(12)
if (np.linalg.norm(coil_loc['inner_coil']) == 0) and \
(np.linalg.norm(coil_loc['outer_coil']) == 0):
return loc
# channel location is inner coil location converted to meters From inches
loc[0:3] = coil_loc['inner_coil'] / 39.370078
# figure out rotation
z_axis = coil_loc['outer_coil'] - coil_loc['inner_coil']
R = rotation3d_align_z_axis(z_axis)
    loc[3:12] = R.T.reshape(9)
return loc
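# Layout note (illustrative): the 12-element loc vector follows the MNE
# convention -- loc[0:3] is the sensor position in metres and the remaining
# nine entries hold the flattened 3x3 rotation (R.T) that aligns +z with the
# inner->outer coil axis.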
def _read_pos(fname):
"""Read the .pos file and return positions as dig points."""
nas, lpa, rpa, hpi, extra = None, None, None, None, None
with open(fname, 'r') as fid:
for line in fid:
line = line.strip()
if len(line) > 0:
parts = line.split()
# The lines can have 4 or 5 parts. First part is for the id,
# which can be an int or a string. The last three are for xyz
# coordinates. The extra part is for additional info
# (e.g. 'Pz', 'Cz') which is ignored.
if len(parts) not in [4, 5]:
continue
if parts[0].lower() == 'nasion':
nas = np.array([float(p) for p in parts[-3:]]) / 100.
elif parts[0].lower() == 'left':
lpa = np.array([float(p) for p in parts[-3:]]) / 100.
elif parts[0].lower() == 'right':
rpa = np.array([float(p) for p in parts[-3:]]) / 100.
elif 'hpi' in parts[0].lower():
if hpi is None:
hpi = list()
hpi.append(np.array([float(p) for p in parts[-3:]]) / 100.)
else:
if extra is None:
extra = list()
extra.append(np.array([float(p)
for p in parts[-3:]]) / 100.)
return _artemis123_read_pos(nas, lpa, rpa, hpi, extra)
|
revision = "6006c79b6011"
down_revision = "984178255c83"
from alembic import op
def upgrade():
op.create_unique_constraint("uq_label", "sources", ["label"])
def downgrade():
op.drop_constraint("uq_label", "sources", type_="unique")
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainercv.experimental.links import PSPNetResNet101
from chainercv.experimental.links import PSPNetResNet50
from chainercv.utils import assert_is_semantic_segmentation_link
from chainercv.utils.testing import attr
@testing.parameterize(
{'model': PSPNetResNet101},
{'model': PSPNetResNet50},
)
class TestPSPNetResNet(unittest.TestCase):
def setUp(self):
self.n_class = 10
self.input_size = (120, 160)
self.link = self.model(
n_class=self.n_class, input_size=self.input_size)
def check_call(self):
xp = self.link.xp
x = chainer.Variable(xp.random.uniform(
low=-1, high=1, size=(2, 3) + self.input_size).astype(np.float32))
y = self.link(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.data, xp.ndarray)
self.assertEqual(y.shape, (2, self.n_class, 120, 160))
@attr.slow
@attr.pfnci_skip
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
@attr.slow
@attr.pfnci_skip
def test_predict_cpu(self):
assert_is_semantic_segmentation_link(self.link, self.n_class)
@attr.gpu
@attr.slow
def test_predict_gpu(self):
self.link.to_gpu()
assert_is_semantic_segmentation_link(self.link, self.n_class)
@testing.parameterize(*testing.product({
'model': [PSPNetResNet50, PSPNetResNet101],
'pretrained_model': ['cityscapes', 'ade20k', 'imagenet'],
'n_class': [None, 19, 150],
}))
class TestPSPNetResNetPretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_class': self.n_class,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'cityscapes':
valid = self.n_class in {None, 19}
elif self.pretrained_model == 'ade20k':
valid = self.n_class in {None, 150}
elif self.pretrained_model == 'imagenet':
valid = self.n_class is not None
if valid:
self.model(**kwargs)
else:
with self.assertRaises(ValueError):
self.model(**kwargs)
testing.run_module(__name__, __file__)
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.lock import DOMAIN
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a lock."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_locked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_unlocked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions)
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set("lock.entity", STATE_LOCKED)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "lock.entity",
"type": "is_locked",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_locked - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "lock.entity",
"type": "is_unlocked",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_unlocked - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_locked - event - test_event1"
hass.states.async_set("lock.entity", STATE_UNLOCKED)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_unlocked - event - test_event2"
|
import logging
import ntpath
import os
import posixpath
import re
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import object_storage_service
from perfkitbenchmarker import os_types
from perfkitbenchmarker import temp_dir
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import gcp
from perfkitbenchmarker.providers.gcp import util
_DEFAULT_GCP_SERVICE_KEY_FILE = 'gcp_credentials.json'
DEFAULT_GCP_REGION = 'us-central1'
GCLOUD_CONFIG_PATH = '.config/gcloud'
GCS_CLIENT_PYTHON = 'python'
GCS_CLIENT_BOTO = 'boto'
flags.DEFINE_string('google_cloud_sdk_version', None,
'Use a particular version of the Google Cloud SDK, e.g.: '
'103.0.0')
flags.DEFINE_enum('gcs_client', GCS_CLIENT_BOTO,
[GCS_CLIENT_PYTHON, GCS_CLIENT_BOTO],
'The GCS client library to use (default boto).')
FLAGS = flags.FLAGS
class GoogleCloudStorageService(object_storage_service.ObjectStorageService):
"""Interface to Google Cloud Storage."""
STORAGE_NAME = gcp.CLOUD
def PrepareService(self, location):
self.location = location or DEFAULT_GCP_REGION
def MakeBucket(self, bucket, raise_on_failure=True):
command = ['gsutil', 'mb']
if self.location:
command.extend(['-l', self.location])
if self.location and '-' in self.location:
# regional buckets
command.extend(['-c', 'regional'])
elif FLAGS.object_storage_storage_class is not None:
command.extend(['-c', FLAGS.object_storage_storage_class])
if FLAGS.project:
command.extend(['-p', FLAGS.project])
command.extend(['gs://%s' % bucket])
_, stderr, ret_code = vm_util.IssueCommand(command, raise_on_failure=False)
if ret_code and raise_on_failure:
raise errors.Benchmarks.BucketCreationError(stderr)
command = ['gsutil', 'label', 'ch']
for key, value in util.GetDefaultTags().items():
command.extend(['-l', f'{key}:{value}'])
command.extend([f'gs://{bucket}'])
_, stderr, ret_code = vm_util.IssueCommand(command, raise_on_failure=False)
if ret_code and raise_on_failure:
raise errors.Benchmarks.BucketCreationError(stderr)
def Copy(self, src_url, dst_url, recursive=False):
"""See base class."""
cmd = ['gsutil', 'cp']
if recursive:
cmd += ['-r']
cmd += [src_url, dst_url]
vm_util.IssueCommand(cmd)
def CopyToBucket(self, src_path, bucket, object_path):
"""See base class."""
dst_url = self.MakeRemoteCliDownloadUrl(bucket, object_path)
vm_util.IssueCommand(['gsutil', 'cp', src_path, dst_url])
def MakeRemoteCliDownloadUrl(self, bucket, object_path):
"""See base class."""
path = posixpath.join(bucket, object_path)
return 'gs://' + path
def GenerateCliDownloadFileCommand(self, src_url, local_path):
"""See base class."""
return 'gsutil cp "%s" "%s"' % (src_url, local_path)
def List(self, bucket):
"""See base class."""
# Full URI is required by gsutil.
if not bucket.startswith('gs://'):
bucket = 'gs://' + bucket
stdout, _, _ = vm_util.IssueCommand(['gsutil', 'ls', bucket])
return stdout
def ListTopLevelSubfolders(self, bucket):
"""Lists the top level folders (not files) in a bucket.
    Each folder is returned as its full URI, e.g. "gs://pkbtpch1/customer/", so
just the folder name is extracted. When there's more than one, splitting
on the newline returns a final blank row, so blank values are skipped.
Args:
bucket: Name of the bucket to list the top level subfolders of.
Returns:
A list of top level subfolder names. Can be empty if there are no folders.
"""
return [
obj.split('/')[-2].strip()
for obj in self.List(bucket).split('\n')
if obj and obj.endswith('/')
]
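# Worked example (illustrative): if List() returned
#   'gs://bkt/a.txt\ngs://bkt/customer/\ngs://bkt/orders/\n'
# the comprehension above would yield ['customer', 'orders'].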
@vm_util.Retry()
def DeleteBucket(self, bucket):
# We want to retry rm and rb together because it's possible that
# we issue rm followed by rb, but then rb fails because the
# metadata store isn't consistent and the server that handles the
# rb thinks there are still objects in the bucket. It's also
# possible for rm to fail because the metadata store is
# inconsistent and rm doesn't find all objects, so can't delete
# them all.
self.EmptyBucket(bucket)
def _bucket_not_found(stdout, stderr, retcode):
del stdout # unused
return retcode and 'BucketNotFoundException' in stderr
vm_util.IssueCommand(['gsutil', 'rb', 'gs://%s' % bucket],
suppress_failure=_bucket_not_found)
def EmptyBucket(self, bucket):
# Ignore failures here and retry in DeleteBucket. See more comments there.
vm_util.IssueCommand(
['gsutil', '-m', 'rm', '-r',
'gs://%s/*' % bucket], raise_on_failure=False)
def ChmodBucket(self, account, access, bucket):
"""Updates access control lists.
Args:
account: string, the user to be granted.
access: string, the permission to be granted.
bucket: string, the name of the bucket to change
"""
vm_util.IssueCommand([
'gsutil', 'acl', 'ch', '-u',
'{account}:{access}'.format(account=account, access=access),
'gs://{}'.format(bucket)])
@classmethod
def AcquireWritePermissionsWindows(cls, vm):
"""Prepare boto file on a remote Windows instance.
If the boto file specifies a service key file, copy that service key file to
the VM and modify the .boto file on the VM to point to the copied file.
Args:
vm: gce virtual machine object.
"""
boto_src = object_storage_service.FindBotoFile()
boto_des = ntpath.join(vm.home_dir, posixpath.basename(boto_src))
stdout, _ = vm.RemoteCommand(f'Test-Path {boto_des}')
if 'True' in stdout:
return
with open(boto_src) as f:
boto_contents = f.read()
match = re.search(r'gs_service_key_file\s*=\s*(.*)', boto_contents)
if match:
service_key_src = match.group(1)
service_key_des = ntpath.join(vm.home_dir,
posixpath.basename(service_key_src))
boto_src = cls._PrepareGcsServiceKey(vm, boto_src, service_key_src,
service_key_des)
vm.PushFile(boto_src, boto_des)
@classmethod
def AcquireWritePermissionsLinux(cls, vm):
"""Prepare boto file on a remote Linux instance.
If the boto file specifies a service key file, copy that service key file to
the VM and modify the .boto file on the VM to point to the copied file.
Args:
vm: gce virtual machine object.
"""
vm_pwd, _ = vm.RemoteCommand('pwd')
home_dir = vm_pwd.strip()
boto_src = object_storage_service.FindBotoFile()
boto_des = posixpath.join(home_dir, posixpath.basename(boto_src))
if vm.TryRemoteCommand(f'test -f {boto_des}'):
return
with open(boto_src) as f:
boto_contents = f.read()
match = re.search(r'gs_service_key_file\s*=\s*(.*)', boto_contents)
if match:
service_key_src = match.group(1)
service_key_des = posixpath.join(home_dir,
posixpath.basename(service_key_src))
boto_src = cls._PrepareGcsServiceKey(vm, boto_src, service_key_src,
service_key_des)
vm.PushFile(boto_src, boto_des)
@classmethod
def _PrepareGcsServiceKey(cls, vm, boto_src, service_key_src,
service_key_des):
"""Copy GS service key file to remote VM and update key path in boto file.
Args:
vm: gce virtual machine object.
boto_src: string, the boto file path in local machine.
service_key_src: string, the gs service key file in local machine.
service_key_des: string, the gs service key file in remote VM.
Returns:
The updated boto file path.
"""
vm.PushFile(service_key_src, service_key_des)
key = 'gs_service_key_file'
with open(boto_src, 'r') as src_file:
boto_path = os.path.join(temp_dir.GetRunDirPath(),
posixpath.basename(boto_src))
with open(boto_path, 'w') as des_file:
for line in src_file:
if line.startswith(f'{key} = '):
des_file.write(f'{key} = {service_key_des}\n')
else:
des_file.write(line)
return boto_path
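  # Sketch of the rewrite performed above (paths are hypothetical): a local
  # boto line
  #   gs_service_key_file = /home/me/key.json
  # becomes, in the copy that is pushed to the VM,
  #   gs_service_key_file = <service_key_des>
  # while every other line is copied through unchanged.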
def PrepareVM(self, vm):
vm.Install('wget')
# Unfortunately there isn't one URL scheme that works for both
# versioned archives and "always get the latest version".
if FLAGS.google_cloud_sdk_version is not None:
sdk_file = ('google-cloud-sdk-%s-linux-x86_64.tar.gz' %
FLAGS.google_cloud_sdk_version)
sdk_url = 'https://storage.googleapis.com/cloud-sdk-release/' + sdk_file
else:
sdk_file = 'google-cloud-sdk.tar.gz'
sdk_url = 'https://dl.google.com/dl/cloudsdk/release/' + sdk_file
vm.RemoteCommand('wget ' + sdk_url)
vm.RemoteCommand('tar xvf ' + sdk_file)
# Versioned and unversioned archives both unzip to a folder called
# 'google-cloud-sdk'.
vm.RemoteCommand('bash ./google-cloud-sdk/install.sh '
'--disable-installation-options '
'--usage-report=false '
'--rc-path=.bash_profile '
'--path-update=true '
'--bash-completion=true')
vm.Install('google_cloud_storage')
vm.RemoteCommand('mkdir -p .config')
if FLAGS.gcs_client == GCS_CLIENT_BOTO:
if vm.BASE_OS_TYPE == os_types.WINDOWS:
self.AcquireWritePermissionsWindows(vm)
else:
self.AcquireWritePermissionsLinux(vm)
vm.Install('gcs_boto_plugin')
vm.gsutil_path, _ = vm.RemoteCommand('which gsutil', login_shell=True)
vm.gsutil_path = vm.gsutil_path.split()[0]
# Detect if we need to install crcmod for gcp.
# See "gsutil help crc" for details.
raw_result, _ = vm.RemoteCommand('%s version -l' % vm.gsutil_path)
logging.info('gsutil version -l raw result is %s', raw_result)
search_string = 'compiled crcmod: True'
result_string = re.findall(search_string, raw_result)
if not result_string:
logging.info('compiled crcmod is not available, installing now...')
try:
# Try uninstall first just in case there is a pure python version of
# crcmod on the system already, this is required by gsutil doc:
# https://cloud.google.com/storage/docs/
# gsutil/addlhelp/CRC32CandInstallingcrcmod
vm.Uninstall('crcmod')
except errors.VirtualMachine.RemoteCommandError:
logging.info('pip uninstall crcmod failed, could be normal if crcmod '
'is not available at all.')
vm.Install('crcmod')
vm.installed_crcmod = True
else:
logging.info('compiled crcmod is available, not installing again.')
vm.installed_crcmod = False
def CleanupVM(self, vm):
vm.RemoveFile('google-cloud-sdk')
vm.RemoveFile(GCLOUD_CONFIG_PATH)
if FLAGS.gcs_client == GCS_CLIENT_BOTO:
vm.RemoveFile(object_storage_service.DEFAULT_BOTO_LOCATION)
vm.Uninstall('gcs_boto_plugin')
def CLIUploadDirectory(self, vm, directory, files, bucket):
return vm.RemoteCommand(
'time %s -m cp %s/* gs://%s/' % (
vm.gsutil_path, directory, bucket))
def CLIDownloadBucket(self, vm, bucket, objects, dest):
return vm.RemoteCommand(
'time %s -m cp gs://%s/* %s' % (vm.gsutil_path, bucket, dest))
def Metadata(self, vm):
metadata = {
'pkb_installed_crcmod': vm.installed_crcmod,
'gcs_client': str(FLAGS.gcs_client)
}
if FLAGS.gcs_client == GCS_CLIENT_BOTO:
metadata.update({
object_storage_service.BOTO_LIB_VERSION:
linux_packages.GetPipPackageVersion(vm, 'boto')
})
return metadata
def APIScriptArgs(self):
return ['--gcs_client=' + str(FLAGS.gcs_client)]
@classmethod
def APIScriptFiles(cls):
return ['gcs.py', 'gcs_boto.py']
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OCCUPANCY,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
class HomeKitMotionSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit motion sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.MOTION_DETECTED]
@property
def device_class(self):
"""Define this binary_sensor as a motion sensor."""
return DEVICE_CLASS_MOTION
@property
def is_on(self):
"""Has motion been detected."""
return self.service.value(CharacteristicsTypes.MOTION_DETECTED)
class HomeKitContactSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit contact sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.CONTACT_STATE]
@property
def device_class(self):
"""Define this binary_sensor as a opening sensor."""
return DEVICE_CLASS_OPENING
@property
def is_on(self):
"""Return true if the binary sensor is on/open."""
return self.service.value(CharacteristicsTypes.CONTACT_STATE) == 1
class HomeKitSmokeSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit smoke sensor."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_SMOKE
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.SMOKE_DETECTED]
@property
def is_on(self):
"""Return true if smoke is currently detected."""
return self.service.value(CharacteristicsTypes.SMOKE_DETECTED) == 1
class HomeKitCarbonMonoxideSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit BO sensor."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_GAS
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.CARBON_MONOXIDE_DETECTED]
@property
def is_on(self):
"""Return true if CO is currently detected."""
return self.service.value(CharacteristicsTypes.CARBON_MONOXIDE_DETECTED) == 1
class HomeKitOccupancySensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit occupancy sensor."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_OCCUPANCY
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.OCCUPANCY_DETECTED]
@property
def is_on(self):
"""Return true if occupancy is currently detected."""
return self.service.value(CharacteristicsTypes.OCCUPANCY_DETECTED) == 1
class HomeKitLeakSensor(HomeKitEntity, BinarySensorEntity):
"""Representation of a Homekit leak sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
return [CharacteristicsTypes.LEAK_DETECTED]
@property
def device_class(self):
"""Define this binary_sensor as a leak sensor."""
return DEVICE_CLASS_MOISTURE
@property
def is_on(self):
"""Return true if a leak is detected from the binary sensor."""
return self.service.value(CharacteristicsTypes.LEAK_DETECTED) == 1
ENTITY_TYPES = {
"motion": HomeKitMotionSensor,
"contact": HomeKitContactSensor,
"smoke": HomeKitSmokeSensor,
"carbon-monoxide": HomeKitCarbonMonoxideSensor,
"occupancy": HomeKitOccupancySensor,
"leak": HomeKitLeakSensor,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit lighting."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
entity_class = ENTITY_TYPES.get(service["stype"])
if not entity_class:
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
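# Dispatch sketch: for a service record such as {"stype": "motion", "iid": 2}
# (hypothetical values), ENTITY_TYPES resolves HomeKitMotionSensor and one
# entity is added; unknown service types return False so other platforms may
# claim them.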
|
import logging
from homeassistant.components.device_tracker import DeviceScanner
from . import DATA_ASUSWRT
_LOGGER = logging.getLogger(__name__)
async def async_get_scanner(hass, config):
"""Validate the configuration and return an ASUS-WRT scanner."""
scanner = AsusWrtDeviceScanner(hass.data[DATA_ASUSWRT])
await scanner.async_connect()
return scanner if scanner.success_init else None
class AsusWrtDeviceScanner(DeviceScanner):
"""This class queries a router running ASUSWRT firmware."""
# Eighth attribute needed for mode (AP mode vs router mode)
def __init__(self, api):
"""Initialize the scanner."""
self.last_results = {}
self.success_init = False
self.connection = api
self._connect_error = False
async def async_connect(self):
"""Initialize connection to the router."""
# Test the router is accessible.
data = await self.connection.async_get_connected_devices()
self.success_init = data is not None
async def async_scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
await self.async_update_info()
return list(self.last_results)
async def async_get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
if device not in self.last_results:
return None
return self.last_results[device].name
async def async_update_info(self):
"""Ensure the information from the ASUSWRT router is up to date.
Return boolean if scanning successful.
"""
_LOGGER.debug("Checking Devices")
try:
self.last_results = await self.connection.async_get_connected_devices()
if self._connect_error:
self._connect_error = False
_LOGGER.info("Reconnected to ASUS router for device update")
except OSError as err:
if not self._connect_error:
self._connect_error = True
_LOGGER.error(
"Error connecting to ASUS router for device update: %s", err
)
|
from homeassistant.bootstrap import async_setup_component
from homeassistant.helpers import intent
from tests.common import async_mock_service
async def test_intent_script(hass):
"""Test intent scripts work."""
calls = async_mock_service(hass, "test", "service")
await async_setup_component(
hass,
"intent_script",
{
"intent_script": {
"HelloWorld": {
"action": {
"service": "test.service",
"data_template": {"hello": "{{ name }}"},
},
"card": {
"title": "Hello {{ name }}",
"content": "Content for {{ name }}",
},
"speech": {"text": "Good morning {{ name }}"},
}
}
},
)
response = await intent.async_handle(
hass, "test", "HelloWorld", {"name": {"value": "Paulus"}}
)
assert len(calls) == 1
assert calls[0].data["hello"] == "Paulus"
assert response.speech["plain"]["speech"] == "Good morning Paulus"
assert response.card["simple"]["title"] == "Hello Paulus"
assert response.card["simple"]["content"] == "Content for Paulus"
|
import asyncio
import logging
import os
import async_timeout
from google.cloud import texttospeech
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_KEY_FILE = "key_file"
CONF_GENDER = "gender"
CONF_VOICE = "voice"
CONF_ENCODING = "encoding"
CONF_SPEED = "speed"
CONF_PITCH = "pitch"
CONF_GAIN = "gain"
CONF_PROFILES = "profiles"
SUPPORTED_LANGUAGES = [
"ar-XA",
"bn-IN",
"cmn-CN",
"cmn-TW",
"cs-CZ",
"da-DK",
"de-DE",
"el-GR",
"en-AU",
"en-GB",
"en-IN",
"en-US",
"es-ES",
"fi-FI",
"fil-PH",
"fr-CA",
"fr-FR",
"gu-IN",
"hi-IN",
"hu-HU",
"id-ID",
"it-IT",
"ja-JP",
"kn-IN",
"ko-KR",
"ml-IN",
"nb-NO",
"nl-NL",
"pl-PL",
"pt-BR",
"pt-PT",
"ru-RU",
"sk-SK",
"sv-SE",
"ta-IN",
"te-IN",
"th-TH",
"tr-TR",
"uk-UA",
"vi-VN",
]
DEFAULT_LANG = "en-US"
DEFAULT_GENDER = "NEUTRAL"
VOICE_REGEX = r"[a-z]{2,3}-[A-Z]{2}-(Standard|Wavenet)-[A-Z]|"
DEFAULT_VOICE = ""
DEFAULT_ENCODING = "MP3"
MIN_SPEED = 0.25
MAX_SPEED = 4.0
DEFAULT_SPEED = 1.0
MIN_PITCH = -20.0
MAX_PITCH = 20.0
DEFAULT_PITCH = 0
MIN_GAIN = -96.0
MAX_GAIN = 16.0
DEFAULT_GAIN = 0
SUPPORTED_PROFILES = [
"wearable-class-device",
"handset-class-device",
"headphone-class-device",
"small-bluetooth-speaker-class-device",
"medium-bluetooth-speaker-class-device",
"large-home-entertainment-class-device",
"large-automotive-class-device",
"telephony-class-application",
]
SUPPORTED_OPTIONS = [
CONF_VOICE,
CONF_GENDER,
CONF_ENCODING,
CONF_SPEED,
CONF_PITCH,
CONF_GAIN,
CONF_PROFILES,
]
GENDER_SCHEMA = vol.All(
vol.Upper, vol.In(texttospeech.enums.SsmlVoiceGender.__members__)
)
VOICE_SCHEMA = cv.matches_regex(VOICE_REGEX)
SCHEMA_ENCODING = vol.All(
vol.Upper, vol.In(texttospeech.enums.AudioEncoding.__members__)
)
SPEED_SCHEMA = vol.All(vol.Coerce(float), vol.Clamp(min=MIN_SPEED, max=MAX_SPEED))
PITCH_SCHEMA = vol.All(vol.Coerce(float), vol.Clamp(min=MIN_PITCH, max=MAX_PITCH))
GAIN_SCHEMA = vol.All(vol.Coerce(float), vol.Clamp(min=MIN_GAIN, max=MAX_GAIN))
PROFILES_SCHEMA = vol.All(cv.ensure_list, [vol.In(SUPPORTED_PROFILES)])
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_KEY_FILE): cv.string,
vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORTED_LANGUAGES),
vol.Optional(CONF_GENDER, default=DEFAULT_GENDER): GENDER_SCHEMA,
vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): VOICE_SCHEMA,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): SCHEMA_ENCODING,
vol.Optional(CONF_SPEED, default=DEFAULT_SPEED): SPEED_SCHEMA,
vol.Optional(CONF_PITCH, default=DEFAULT_PITCH): PITCH_SCHEMA,
vol.Optional(CONF_GAIN, default=DEFAULT_GAIN): GAIN_SCHEMA,
vol.Optional(CONF_PROFILES, default=[]): PROFILES_SCHEMA,
}
)
async def async_get_engine(hass, config, discovery_info=None):
"""Set up Google Cloud TTS component."""
key_file = config.get(CONF_KEY_FILE)
if key_file:
key_file = hass.config.path(key_file)
if not os.path.isfile(key_file):
_LOGGER.error("File %s doesn't exist", key_file)
return None
return GoogleCloudTTSProvider(
hass,
key_file,
config.get(CONF_LANG),
config.get(CONF_GENDER),
config.get(CONF_VOICE),
config.get(CONF_ENCODING),
config.get(CONF_SPEED),
config.get(CONF_PITCH),
config.get(CONF_GAIN),
config.get(CONF_PROFILES),
)
class GoogleCloudTTSProvider(Provider):
"""The Google Cloud TTS API provider."""
def __init__(
self,
hass,
key_file=None,
language=DEFAULT_LANG,
gender=DEFAULT_GENDER,
voice=DEFAULT_VOICE,
encoding=DEFAULT_ENCODING,
speed=1.0,
pitch=0,
gain=0,
profiles=None,
):
"""Init Google Cloud TTS service."""
self.hass = hass
self.name = "Google Cloud TTS"
self._language = language
self._gender = gender
self._voice = voice
self._encoding = encoding
self._speed = speed
self._pitch = pitch
self._gain = gain
self._profiles = profiles
if key_file:
self._client = texttospeech.TextToSpeechClient.from_service_account_json(
key_file
)
else:
self._client = texttospeech.TextToSpeechClient()
@property
def supported_languages(self):
"""Return list of supported languages."""
return SUPPORTED_LANGUAGES
@property
def default_language(self):
"""Return the default language."""
return self._language
@property
def supported_options(self):
"""Return a list of supported options."""
return SUPPORTED_OPTIONS
@property
def default_options(self):
"""Return a dict including default options."""
return {
CONF_GENDER: self._gender,
CONF_VOICE: self._voice,
CONF_ENCODING: self._encoding,
CONF_SPEED: self._speed,
CONF_PITCH: self._pitch,
CONF_GAIN: self._gain,
CONF_PROFILES: self._profiles,
}
async def async_get_tts_audio(self, message, language, options=None):
"""Load TTS from google."""
options_schema = vol.Schema(
{
vol.Optional(CONF_GENDER, default=self._gender): GENDER_SCHEMA,
vol.Optional(CONF_VOICE, default=self._voice): VOICE_SCHEMA,
                vol.Optional(CONF_ENCODING, default=self._encoding): SCHEMA_ENCODING,
                vol.Optional(CONF_SPEED, default=self._speed): SPEED_SCHEMA,
                vol.Optional(CONF_PITCH, default=self._pitch): PITCH_SCHEMA,
                vol.Optional(CONF_GAIN, default=self._gain): GAIN_SCHEMA,
vol.Optional(CONF_PROFILES, default=[]): PROFILES_SCHEMA,
}
)
options = options_schema(options)
_encoding = options[CONF_ENCODING]
_voice = options[CONF_VOICE]
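        # A named voice implies its own language; derive it from the first
        # five characters of the voice name, e.g. "en-GB-Wavenet-A" -> "en-GB".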
if _voice and not _voice.startswith(language):
language = _voice[:5]
try:
# pylint: disable=no-member
synthesis_input = texttospeech.types.SynthesisInput(text=message)
voice = texttospeech.types.VoiceSelectionParams(
language_code=language,
ssml_gender=texttospeech.enums.SsmlVoiceGender[options[CONF_GENDER]],
name=_voice,
)
audio_config = texttospeech.types.AudioConfig(
audio_encoding=texttospeech.enums.AudioEncoding[_encoding],
speaking_rate=options.get(CONF_SPEED),
pitch=options.get(CONF_PITCH),
volume_gain_db=options.get(CONF_GAIN),
effects_profile_id=options.get(CONF_PROFILES),
)
# pylint: enable=no-member
with async_timeout.timeout(10, loop=self.hass.loop):
response = await self.hass.async_add_executor_job(
self._client.synthesize_speech, synthesis_input, voice, audio_config
)
return _encoding, response.audio_content
except asyncio.TimeoutError as ex:
_LOGGER.error("Timeout for Google Cloud TTS call: %s", ex)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.exception("Error occurred during Google Cloud TTS call: %s", ex)
return None, None
|
import os
EXAMPLES_FOLDER = os.path.join(os.path.dirname(__file__), "static")
def get_file_path(file_name):
return os.path.join(EXAMPLES_FOLDER, file_name)
def get_file_content(file_name):
try:
with open(get_file_path(file_name), encoding="utf-8") as fd:
return fd.read()
except IOError:
print("Couldn't open the file %s" % file_name)
def configuration_to_dict(configuration):
"""Convert configuration to a dict with raw values."""
return {section: {option: configuration.get_raw(section, option)
for option in configuration.options(section)
if not option.startswith("_")}
for section in configuration.sections()
if not section.startswith("_")}
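# Sketch of the resulting shape (section and option names are hypothetical):
#
#   >>> configuration_to_dict(cfg)
#   {'server': {'host': 'localhost', 'port': '8080'}}
#
# Sections and options starting with "_" are treated as private and skipped.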
|
import datetime
import json
import logging
from collections import namedtuple
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, MutableMapping, Optional, Union
import discord
import lavalink
from redbot.core.bot import Red
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import humanize_list
from ..errors import InvalidPlaylistScope, MissingAuthor, MissingGuild
from ..utils import PlaylistScope
log = logging.getLogger("red.cogs.Audio.api.utils")
_ = Translator("Audio", Path(__file__))
@dataclass
class YouTubeCacheFetchResult:
query: Optional[str]
last_updated: int
def __post_init__(self):
if isinstance(self.last_updated, int):
self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(self.last_updated)
@dataclass
class SpotifyCacheFetchResult:
query: Optional[str]
last_updated: int
def __post_init__(self):
if isinstance(self.last_updated, int):
self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(self.last_updated)
@dataclass
class LavalinkCacheFetchResult:
query: Optional[MutableMapping]
last_updated: int
def __post_init__(self):
if isinstance(self.last_updated, int):
self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(self.last_updated)
if isinstance(self.query, str):
self.query = json.loads(self.query)
@dataclass
class LavalinkCacheFetchForGlobalResult:
query: str
data: MutableMapping
def __post_init__(self):
if isinstance(self.data, str):
self.data_string = str(self.data)
self.data = json.loads(self.data)
@dataclass
class PlaylistFetchResult:
playlist_id: int
playlist_name: str
scope_id: int
author_id: int
playlist_url: Optional[str] = None
tracks: List[MutableMapping] = field(default_factory=lambda: [])
def __post_init__(self):
if isinstance(self.tracks, str):
self.tracks = json.loads(self.tracks)
@dataclass
class QueueFetchResult:
guild_id: int
room_id: int
track: dict = field(default_factory=lambda: {})
track_object: lavalink.Track = None
def __post_init__(self):
if isinstance(self.track, str):
self.track = json.loads(self.track)
if self.track:
self.track_object = lavalink.Track(self.track)
def standardize_scope(scope: str) -> str:
"""Convert any of the used scopes into one we are expecting."""
scope = scope.upper()
valid_scopes = ["GLOBAL", "GUILD", "AUTHOR", "USER", "SERVER", "MEMBER", "BOT"]
if scope in PlaylistScope.list():
return scope
elif scope not in valid_scopes:
raise InvalidPlaylistScope(
f'"{scope}" is not a valid playlist scope.'
f" Scope needs to be one of the following: {humanize_list(valid_scopes)}"
)
if scope in ["GLOBAL", "BOT"]:
scope = PlaylistScope.GLOBAL.value
elif scope in ["GUILD", "SERVER"]:
scope = PlaylistScope.GUILD.value
elif scope in ["USER", "MEMBER", "AUTHOR"]:
scope = PlaylistScope.USER.value
return scope
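# Normalisation sketch (hypothetical calls): "server" and "guild" both map to
# PlaylistScope.GUILD.value; "member", "user" and "author" map to
# PlaylistScope.USER.value; "bot" and "global" map to
# PlaylistScope.GLOBAL.value. Anything else raises InvalidPlaylistScope.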
def prepare_config_scope(
bot: Red,
scope,
author: Union[discord.abc.User, int] = None,
guild: Union[discord.Guild, int] = None,
):
"""Return the scope used by Playlists."""
scope = standardize_scope(scope)
if scope == PlaylistScope.GLOBAL.value:
config_scope = [PlaylistScope.GLOBAL.value, bot.user.id]
elif scope == PlaylistScope.USER.value:
if author is None:
raise MissingAuthor("Invalid author for user scope.")
config_scope = [PlaylistScope.USER.value, int(getattr(author, "id", author))]
else:
if guild is None:
raise MissingGuild("Invalid guild for guild scope.")
config_scope = [PlaylistScope.GUILD.value, int(getattr(guild, "id", guild))]
return config_scope
def prepare_config_scope_for_migration23( # TODO: remove me in a future version ?
scope, author: Union[discord.abc.User, int] = None, guild: discord.Guild = None
):
"""Return the scope used by Playlists."""
scope = standardize_scope(scope)
if scope == PlaylistScope.GLOBAL.value:
config_scope = [PlaylistScope.GLOBAL.value]
elif scope == PlaylistScope.USER.value:
if author is None:
raise MissingAuthor("Invalid author for user scope.")
config_scope = [PlaylistScope.USER.value, str(getattr(author, "id", author))]
else:
if guild is None:
raise MissingGuild("Invalid guild for guild scope.")
config_scope = [PlaylistScope.GUILD.value, str(getattr(guild, "id", guild))]
return config_scope
FakePlaylist = namedtuple("Playlist", "author scope")
|
from homeassistant.components.homekit.const import DOMAIN
from homeassistant.const import CONF_NAME, CONF_PORT
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.common import MockConfigEntry
PATH_HOMEKIT = "homeassistant.components.homekit"
async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry:
"""Set up the homekit integration in Home Assistant."""
with patch(f"{PATH_HOMEKIT}.HomeKit.async_start"):
entry = MockConfigEntry(
domain=DOMAIN, data={CONF_NAME: "mock_name", CONF_PORT: 12345}
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
async def async_init_entry(hass: HomeAssistant, entry: MockConfigEntry):
"""Set up the homekit integration in Home Assistant."""
with patch(f"{PATH_HOMEKIT}.HomeKit.async_start"):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
from configparser import ConfigParser, RawConfigParser
import glob as glob
import re as re
import os.path as op
import datetime as dt
import numpy as np
from ..base import BaseRaw
from ..utils import _mult_cal_one
from ..constants import FIFF
from ..meas_info import create_info, _format_dig_points
from ...annotations import Annotations
from ...transforms import apply_trans, _get_trans
from ...utils import logger, verbose, fill_doc, warn
@fill_doc
def read_raw_nirx(fname, preload=False, verbose=None):
"""Reader for a NIRX fNIRS recording.
This function has only been tested with NIRScout devices.
Parameters
----------
fname : str
Path to the NIRX data folder or header file.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawNIRX
A Raw object containing NIRX data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawNIRX(fname, preload, verbose)
def _open(fname):
return open(fname, 'r', encoding='latin-1')
@fill_doc
class RawNIRX(BaseRaw):
"""Raw object from a NIRX fNIRS file.
Parameters
----------
fname : str
Path to the NIRX data folder or header file.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
from ...externals.pymatreader import read_mat
from ...coreg import get_mni_fiducials # avoid circular import prob
logger.info('Loading %s' % fname)
if fname.endswith('.hdr'):
fname = op.dirname(op.abspath(fname))
if not op.isdir(fname):
raise FileNotFoundError('The path you specified does not exist.')
# Check if required files exist and store names for later use
files = dict()
keys = ('hdr', 'inf', 'set', 'tpl', 'wl1', 'wl2',
'config.txt', 'probeInfo.mat')
for key in keys:
files[key] = glob.glob('%s/*%s' % (fname, key))
if len(files[key]) != 1:
raise RuntimeError('Expect one %s file, got %d' %
(key, len(files[key]),))
files[key] = files[key][0]
if len(glob.glob('%s/*%s' % (fname, 'dat'))) != 1:
warn("A single dat file was expected in the specified path, but "
"got %d. This may indicate that the file structure has been "
"modified since the measurement was saved." %
(len(glob.glob('%s/*%s' % (fname, 'dat')))))
# Read number of rows/samples of wavelength data
last_sample = -1
with _open(files['wl1']) as fid:
for line in fid:
last_sample += 1
        # Read header file
        # The header file isn't compliant with configparser, so all text
        # between paired '#' comment markers must be removed before parsing.
with _open(files['hdr']) as f:
hdr_str = f.read()
hdr_str = re.sub('#.*?#', '', hdr_str, flags=re.DOTALL)
hdr = RawConfigParser()
hdr.read_string(hdr_str)
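        # Cleanup sketch: in a fragment such as (hypothetical content)
        #   # NIRStar header begins #
        #   [GeneralInfo]
        #   NIRStar="15.2"
        # everything between the paired '#' markers is stripped before
        # RawConfigParser sees it.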
# Check that the file format version is supported
if hdr['GeneralInfo']['NIRStar'] not in ['"15.0"', '"15.2"', '"15.3"']:
raise RuntimeError('MNE does not support this NIRStar version'
' (%s)' % (hdr['GeneralInfo']['NIRStar'],))
if "NIRScout" not in hdr['GeneralInfo']['Device']:
warn("Only import of data from NIRScout devices have been "
"thoroughly tested. You are using a %s device. " %
hdr['GeneralInfo']['Device'])
# Parse required header fields
# Extract measurement date and time
datetime_str = hdr['GeneralInfo']['Date'] + hdr['GeneralInfo']['Time']
meas_date = None
# Several formats have been observed so we try each in turn
for dt_code in ['"%a, %b %d, %Y""%H:%M:%S.%f"',
'"%a, %d %b %Y""%H:%M:%S.%f"']:
try:
meas_date = dt.datetime.strptime(datetime_str, dt_code)
meas_date = meas_date.replace(tzinfo=dt.timezone.utc)
break
except ValueError:
pass
if meas_date is None:
warn("Extraction of measurement date from NIRX file failed. "
"This can be caused by files saved in certain locales. "
"Please report this as a github issue. "
"The date is being set to January 1st, 2000, "
"instead of {}".format(datetime_str))
meas_date = dt.datetime(2000, 1, 1, 0, 0, 0,
tzinfo=dt.timezone.utc)
# Extract frequencies of light used by machine
fnirs_wavelengths = [int(s) for s in
re.findall(r'(\d+)',
hdr['ImagingParameters'][
'Wavelengths'])]
# Extract source-detectors
sources = np.asarray([int(s) for s in re.findall(r'(\d+)-\d+:\d+',
hdr['DataStructure'][
'S-D-Key'])], int)
detectors = np.asarray([int(s) for s in re.findall(r'\d+-(\d+):\d+',
hdr['DataStructure']
['S-D-Key'])],
int)
# Extract sampling rate
samplingrate = float(hdr['ImagingParameters']['SamplingRate'])
# Read participant information file
inf = ConfigParser(allow_no_value=True)
inf.read(files['inf'])
inf = inf._sections['Subject Demographics']
# Store subject information from inf file in mne format
# Note: NIRX also records "Study Type", "Experiment History",
# "Additional Notes", "Contact Information" and this information
# is currently discarded
# NIRStar does not record an id, or handedness by default
subject_info = {}
names = inf['name'].split()
if len(names) > 0:
subject_info['first_name'] = \
inf['name'].split()[0].replace("\"", "")
if len(names) > 1:
subject_info['last_name'] = \
inf['name'].split()[-1].replace("\"", "")
if len(names) > 2:
subject_info['middle_name'] = \
inf['name'].split()[-2].replace("\"", "")
subject_info['sex'] = inf['gender'].replace("\"", "")
# Recode values
if subject_info['sex'] in {'M', 'Male', '1'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_MALE
elif subject_info['sex'] in {'F', 'Female', '2'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_FEMALE
else:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_UNKNOWN
subject_info['birthday'] = (meas_date.year - int(inf['age']),
meas_date.month,
meas_date.day)
# Read information about probe/montage/optodes
# A word on terminology used here:
# Sources produce light
# Detectors measure light
# Sources and detectors are both called optodes
# Each source - detector pair produces a channel
# Channels are defined as the midpoint between source and detector
mat_data = read_mat(files['probeInfo.mat'], uint16_codec=None)
requested_channels = mat_data['probeInfo']['probes']['index_c']
src_locs = mat_data['probeInfo']['probes']['coords_s3'] / 100.
det_locs = mat_data['probeInfo']['probes']['coords_d3'] / 100.
ch_locs = mat_data['probeInfo']['probes']['coords_c3'] / 100.
# These are all in MNI coordinates, so let's transform them to
# the Neuromag head coordinate frame
mri_head_t, _ = _get_trans('fsaverage', 'mri', 'head')
src_locs = apply_trans(mri_head_t, src_locs)
det_locs = apply_trans(mri_head_t, det_locs)
ch_locs = apply_trans(mri_head_t, ch_locs)
# Set up digitization
dig = get_mni_fiducials('fsaverage', verbose=False)
for fid in dig:
fid['r'] = apply_trans(mri_head_t, fid['r'])
fid['coord_frame'] = FIFF.FIFFV_COORD_HEAD
for ii, ch_loc in enumerate(ch_locs, 1):
dig.append(dict(
kind=FIFF.FIFFV_POINT_EEG, # misnomer but probably okay
r=ch_loc,
ident=ii,
coord_frame=FIFF.FIFFV_COORD_HEAD,
))
dig = _format_dig_points(dig)
del mri_head_t
# Determine requested channel indices
# The wl1 and wl2 files include all possible source - detector pairs.
# But most of these are not relevant. We want to extract only the
# subset requested in the probe file
req_ind = np.array([], int)
for req_idx in range(requested_channels.shape[0]):
sd_idx = np.where((sources == requested_channels[req_idx][0]) &
(detectors == requested_channels[req_idx][1]))
req_ind = np.concatenate((req_ind, sd_idx[0]))
req_ind = req_ind.astype(int)
# Generate meaningful channel names
        def prepend(li, prefix):
            # Build e.g. ['S1', 'S2'] from [1, 2] with prefix 'S'.
            return [prefix + str(i) for i in li]
snames = prepend(sources[req_ind], 'S')
dnames = prepend(detectors[req_ind], '_D')
sdnames = [m + str(n) for m, n in zip(snames, dnames)]
sd1 = [s + ' ' + str(fnirs_wavelengths[0]) for s in sdnames]
sd2 = [s + ' ' + str(fnirs_wavelengths[1]) for s in sdnames]
chnames = [val for pair in zip(sd1, sd2) for val in pair]
# Create mne structure
info = create_info(chnames,
samplingrate,
ch_types='fnirs_cw_amplitude')
info.update(subject_info=subject_info, dig=dig)
info['meas_date'] = meas_date
# Store channel, source, and detector locations
# The channel location is stored in the first 3 entries of loc.
# The source location is stored in the second 3 entries of loc.
# The detector location is stored in the third 3 entries of loc.
# NIRx NIRSite uses MNI coordinates.
# Also encode the light frequency in the structure.
for ch_idx2 in range(requested_channels.shape[0]):
# Find source and store location
src = int(requested_channels[ch_idx2, 0]) - 1
info['chs'][ch_idx2 * 2]['loc'][3:6] = src_locs[src, :]
info['chs'][ch_idx2 * 2 + 1]['loc'][3:6] = src_locs[src, :]
# Find detector and store location
det = int(requested_channels[ch_idx2, 1]) - 1
info['chs'][ch_idx2 * 2]['loc'][6:9] = det_locs[det, :]
info['chs'][ch_idx2 * 2 + 1]['loc'][6:9] = det_locs[det, :]
# Store channel location as midpoint between source and detector.
midpoint = (src_locs[src, :] + det_locs[det, :]) / 2
info['chs'][ch_idx2 * 2]['loc'][:3] = midpoint
info['chs'][ch_idx2 * 2 + 1]['loc'][:3] = midpoint
info['chs'][ch_idx2 * 2]['loc'][9] = fnirs_wavelengths[0]
info['chs'][ch_idx2 * 2 + 1]['loc'][9] = fnirs_wavelengths[1]
# Extract the start/stop numbers for samples in the CSV. In theory the
# sample bounds should just be 10 * the number of channels, but some
# files have mixed \n and \n\r endings (!) so we can't rely on it, and
# instead make a single pass over the entire file at the beginning so
# that we know how to seek and read later.
bounds = dict()
for key in ('wl1', 'wl2'):
offset = 0
bounds[key] = [offset]
with open(files[key], 'rb') as fid:
for line in fid:
offset += len(line)
bounds[key].append(offset)
assert offset == fid.tell()
# Extras required for reading data
raw_extras = {
'sd_index': req_ind,
'files': files,
'bounds': bounds,
}
super(RawNIRX, self).__init__(
info, preload, filenames=[fname], last_samps=[last_sample],
raw_extras=[raw_extras], verbose=verbose)
# Read triggers from event file
if op.isfile(files['hdr'][:-3] + 'evt'):
with _open(files['hdr'][:-3] + 'evt') as fid:
t = [re.findall(r'(\d+)', line) for line in fid]
onset = np.zeros(len(t), float)
duration = np.zeros(len(t), float)
description = [''] * len(t)
for t_idx in range(len(t)):
binary_value = ''.join(t[t_idx][1:])[::-1]
trigger_frame = float(t[t_idx][0])
onset[t_idx] = (trigger_frame) * (1.0 / samplingrate)
duration[t_idx] = 1.0 # No duration info stored in files
description[t_idx] = int(binary_value, 2) * 1.
annot = Annotations(onset, duration, description)
self.set_annotations(annot)
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a segment of data from a file.
The NIRX machine records raw data as two different wavelengths.
The returned data interleaves the wavelengths.
"""
sdindex = self._raw_extras[fi]['sd_index']
wls = [
_read_csv_rows_cols(
self._raw_extras[fi]['files'][key],
start, stop, sdindex,
self._raw_extras[fi]['bounds'][key]).T
for key in ('wl1', 'wl2')
]
# TODO: Make this more efficient by only indexing above what we need.
# For now let's just construct the full data matrix and index.
# Interleave wavelength 1 and 2 to match channel names:
this_data = np.zeros((len(wls[0]) * 2, stop - start))
this_data[0::2, :] = wls[0]
this_data[1::2, :] = wls[1]
_mult_cal_one(data, this_data, idx, cals, mult)
return data
def _read_csv_rows_cols(fname, start, stop, cols, bounds):
with open(fname, 'rb') as fid:
fid.seek(bounds[start])
data = fid.read(bounds[stop] - bounds[start]).decode('latin-1')
x = np.fromstring(data, float, sep=' ')
x.shape = (stop - start, -1)
x = x[:, cols]
return x
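# Byte-offset sketch (illustrative values): for a file whose lines are
# b'1 2 3\n' (6 bytes) and b'4 5 6\r\n' (7 bytes), ``bounds`` is [0, 6, 13];
# reading rows [start, stop) is then a single seek to bounds[start] plus a
# read of bounds[stop] - bounds[start] bytes, which stays correct even with
# mixed line endings.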
|
from __future__ import print_function
import argparse
import random
from collections import defaultdict
from datetime import datetime as dt
from datetime import timedelta as td
import numpy as np
import pandas as pd
from dateutil.rrule import rrule, MINUTELY
import arctic
from arctic import Arctic
from arctic._config import FwPointersCfg
# import matplotlib.pyplot as plt
price_template = (800.0, 1200.0)
ONE_MIN_ATTRIBUTES = {
'BID': price_template,
'BID_TWAP': price_template,
'ASK': price_template,
'ASK_TWAP': price_template,
'HIGH': price_template,
'LOW': price_template,
'CLOSE': price_template,
'TWAP': price_template,
'ASKSIZE': (0.0, 400.0),
'BIDSIZE': (0.0, 400.0),
'TICK_COUNT': (1.0, 50.0),
'VOLUME': (0.0, 1000.0),
}
APPEND_NROWS = 10
class FwPointersCtx:
def __init__(self, value_to_test, do_reconcile=False):
self.value_to_test = value_to_test
self.do_reconcile = do_reconcile
def __enter__(self):
self.orig_value = arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_CFG
arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_CFG = self.value_to_test
self.reconcile_orig_value = arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_RECONCILE
arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_RECONCILE = self.do_reconcile
def __exit__(self, *args):
arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_CFG = self.orig_value
arctic.store._ndarray_store.ARCTIC_FORWARD_POINTERS_RECONCILE = self.reconcile_orig_value
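# Usage sketch, mirroring main() below: flip the forward-pointers config for
# a block of benchmark work and restore the previous value afterwards.
#
#   with FwPointersCtx(FwPointersCfg.ENABLED):
#       insert_random_data(FwPointersCfg.ENABLED, args, n_rows)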
def gen_sparse_rows_for_range(n_rows, low, high, dense):
if dense:
return [random.uniform(low, high) for _ in range(n_rows)]
current = 0
rows = []
while current < n_rows:
        value = float(random.randrange(int(low), int(high)))
repetitions = min(random.randint(0, 20), n_rows - current)
rows.extend([value] * repetitions)
current += repetitions
return rows
def gen_one_minute_rows(n_rows, dense):
data = {}
    for header, header_range in ONE_MIN_ATTRIBUTES.items():
data[header] = gen_sparse_rows_for_range(n_rows, header_range[0], header_range[1], dense)
return data
def gen_oneminute_dataset(n_row, n_col, dense):
timestamps = []
active_minutes_daily = 120
    for day in range(0, n_row // active_minutes_daily):
timestamps.extend(list(rrule(MINUTELY, count=active_minutes_daily, dtstart=dt(2005, 1, 1) + td(days=day))))
timestamps.extend(list(rrule(
MINUTELY,
count=n_row % active_minutes_daily,
dtstart=dt(random.randrange(2006, 2016), 1, 1)),
))
return pd.DataFrame(
index=timestamps,
data=gen_one_minute_rows(n_row, dense)
)
def lib_name_from_args(config):
return 'bench2_{cfg}'.format(
cfg=config.name,
)
def insert_random_data(config, args, n_rows):
store = Arctic(args.mongodb, app_name="benchmark")
lib_name = lib_name_from_args(config)
store.delete_library(lib_name)
store.initialize_library(lib_name, segment='month')
lib = store[lib_name]
for sym in range(args.symbols):
df = gen_oneminute_dataset(n_row=n_rows, n_col=n_rows, dense=args.dense)
lib.write('sym' + str(sym), df)
def append_random_rows(config, args, n_rows):
store = Arctic(args.mongodb, app_name="benchmark")
lib_name = lib_name_from_args(config)
lib = store[lib_name]
for _ in range(args.appends):
for sym in range(args.symbols):
df = gen_oneminute_dataset(n_row=APPEND_NROWS, n_col=n_rows, dense=False)
lib.append('sym' + str(sym), df)
def read_all_symbols(config, args):
store = Arctic(args.mongodb, app_name="benchmark")
lib_name = lib_name_from_args(config)
lib = store[lib_name]
symbol_df = []
for sym in range(args.symbols):
symbol_df.append(lib.read('sym' + str(sym)))
# Basic sanity checks while reading back
sample_df = symbol_df[0].data
assert sorted(sample_df.dtypes) == ['float64'] * len(ONE_MIN_ATTRIBUTES)
assert 800.0 <= sample_df['BID'][0] <= 1200.0
def mean_timedelta(timedelta_list):
return np.sum(timedelta_list) / len(timedelta_list)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--rounds', type=int, help="number of rounds to run benchmarks", default=2)
parser.add_argument('-a', '--appends', type=int, help="number of appends for each symbol", default=75)
parser.add_argument('-n', '--ndim', type=int, help="dimension of dataframe = size * size", default=500)
    parser.add_argument('-e', '--dense', help="use dense instead of sparse (~70% NaN) data", action="store_true")
parser.add_argument('-d', '--mongodb', help="Mongo db endpoint.", default="127.0.0.1")
parser.add_argument('-y', '--symbols', type=int, help="Total number of symbols to use", default=5)
return parser.parse_args()
def main(args):
measure = defaultdict(list)
data_size = [
100,
500,
1000,
# 5000,
# 10000,
# 200000
]
print('Arguments=', args)
for fwd_ptr in [FwPointersCfg.ENABLED, FwPointersCfg.DISABLED, FwPointersCfg.HYBRID]:
for n_rows in data_size:
for rounds in range(1, args.rounds + 1):
with FwPointersCtx(fwd_ptr):
w_start = dt.now()
# Writes data to lib with above config.
insert_random_data(fwd_ptr, args, n_rows)
w_end = dt.now()
# Appends multiple rows to each symbol
append_random_rows(fwd_ptr, args, n_rows)
a_end = dt.now()
# Read everything.
read_all_symbols(fwd_ptr, args)
r_end = dt.now()
print('read time=', r_end - a_end)
measure[n_rows].append(
{
'dfsize': (n_rows, len(ONE_MIN_ATTRIBUTES)),
'wtime': w_end - w_start,
'atime': a_end - w_end,
'rtime': r_end - a_end,
'fwd': fwd_ptr
}
)
enabled_reads = {}
disabled_reads = {}
for dsize in data_size:
enabled_reads[dsize] = mean_timedelta(
[data['rtime'] for data in measure[dsize] if data['fwd'] == FwPointersCfg.ENABLED])
disabled_reads[dsize] = mean_timedelta(
[data['rtime'] for data in measure[dsize] if data['fwd'] == FwPointersCfg.DISABLED])
print('enabled read times=', enabled_reads)
print('disabled read times=', disabled_reads)
if __name__ == '__main__':
main(parse_args())
|
import pywink
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SUPPORT_DIRECTION,
SUPPORT_SET_SPEED,
FanEntity,
)
from . import DOMAIN, WinkDevice
SPEED_AUTO = "auto"
SPEED_LOWEST = "lowest"
SUPPORTED_FEATURES = SUPPORT_DIRECTION | SUPPORT_SET_SPEED
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink platform."""
for fan in pywink.get_fans():
if fan.object_id() + fan.name() not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkFanDevice(fan, hass)])
class WinkFanDevice(WinkDevice, FanEntity):
"""Representation of a Wink fan."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["fan"].append(self)
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
self.wink.set_fan_direction(direction)
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
self.wink.set_state(True, speed)
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the fan."""
self.wink.set_state(True, speed)
def turn_off(self, **kwargs) -> None:
"""Turn off the fan."""
self.wink.set_state(False)
@property
def is_on(self):
"""Return true if the entity is on."""
return self.wink.state()
@property
def speed(self) -> str:
"""Return the current speed."""
current_wink_speed = self.wink.current_fan_speed()
if SPEED_AUTO == current_wink_speed:
return SPEED_AUTO
if SPEED_LOWEST == current_wink_speed:
return SPEED_LOWEST
if SPEED_LOW == current_wink_speed:
return SPEED_LOW
if SPEED_MEDIUM == current_wink_speed:
return SPEED_MEDIUM
if SPEED_HIGH == current_wink_speed:
return SPEED_HIGH
return None
@property
def current_direction(self):
"""Return direction of the fan [forward, reverse]."""
return self.wink.current_fan_direction()
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
wink_supported_speeds = self.wink.fan_speeds()
supported_speeds = []
if SPEED_AUTO in wink_supported_speeds:
supported_speeds.append(SPEED_AUTO)
if SPEED_LOWEST in wink_supported_speeds:
supported_speeds.append(SPEED_LOWEST)
if SPEED_LOW in wink_supported_speeds:
supported_speeds.append(SPEED_LOW)
if SPEED_MEDIUM in wink_supported_speeds:
supported_speeds.append(SPEED_MEDIUM)
if SPEED_HIGH in wink_supported_speeds:
supported_speeds.append(SPEED_HIGH)
return supported_speeds
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORTED_FEATURES
|
from datetime import datetime, timedelta
import enum
import functools
from random import randint
from typing import List, Optional, Tuple
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
DOMAIN,
FAN_AUTO,
FAN_ON,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, TEMP_CELSIUS
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_time_interval
import homeassistant.util.dt as dt_util
from .core import discovery
from .core.const import (
CHANNEL_FAN,
CHANNEL_THERMOSTAT,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
)
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity
DEPENDENCIES = ["zha"]
ATTR_SYS_MODE = "system_mode"
ATTR_RUNNING_MODE = "running_mode"
ATTR_SETPT_CHANGE_SRC = "setpoint_change_source"
ATTR_SETPT_CHANGE_AMT = "setpoint_change_amount"
ATTR_OCCUPANCY = "occupancy"
ATTR_PI_COOLING_DEMAND = "pi_cooling_demand"
ATTR_PI_HEATING_DEMAND = "pi_heating_demand"
ATTR_OCCP_COOL_SETPT = "occupied_cooling_setpoint"
ATTR_OCCP_HEAT_SETPT = "occupied_heating_setpoint"
ATTR_UNOCCP_HEAT_SETPT = "unoccupied_heating_setpoint"
ATTR_UNOCCP_COOL_SETPT = "unoccupied_cooling_setpoint"
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
RUNNING_MODE = {0x00: HVAC_MODE_OFF, 0x03: HVAC_MODE_COOL, 0x04: HVAC_MODE_HEAT}
class ThermostatFanMode(enum.IntEnum):
"""Fan channel enum for thermostat Fans."""
OFF = 0x00
ON = 0x04
AUTO = 0x05
class RunningState(enum.IntFlag):
"""ZCL Running state enum."""
HEAT = 0x0001
COOL = 0x0002
FAN = 0x0004
HEAT_STAGE_2 = 0x0008
COOL_STAGE_2 = 0x0010
FAN_STAGE_2 = 0x0020
FAN_STAGE_3 = 0x0040
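# RunningState is a bitmask, so several states can be active at once; the
# checks below therefore use `state & (RunningState.FAN | ...)` rather than
# equality. For example, a value of 0x0005 means HEAT and FAN simultaneously.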
SEQ_OF_OPERATION = {
0x00: (HVAC_MODE_OFF, HVAC_MODE_COOL), # cooling only
0x01: (HVAC_MODE_OFF, HVAC_MODE_COOL), # cooling with reheat
0x02: (HVAC_MODE_OFF, HVAC_MODE_HEAT), # heating only
0x03: (HVAC_MODE_OFF, HVAC_MODE_HEAT), # heating with reheat
# cooling and heating 4-pipes
0x04: (HVAC_MODE_OFF, HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT),
    # cooling and heating 4-pipes with reheat
0x05: (HVAC_MODE_OFF, HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT),
0x06: (HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF), # centralite specific
0x07: (HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF), # centralite specific
}
class SystemMode(enum.IntEnum):
"""ZCL System Mode attribute enum."""
OFF = 0x00
HEAT_COOL = 0x01
COOL = 0x03
HEAT = 0x04
AUX_HEAT = 0x05
PRE_COOL = 0x06
FAN_ONLY = 0x07
DRY = 0x08
SLEEP = 0x09
HVAC_MODE_2_SYSTEM = {
HVAC_MODE_OFF: SystemMode.OFF,
HVAC_MODE_HEAT_COOL: SystemMode.HEAT_COOL,
HVAC_MODE_COOL: SystemMode.COOL,
HVAC_MODE_HEAT: SystemMode.HEAT,
HVAC_MODE_FAN_ONLY: SystemMode.FAN_ONLY,
HVAC_MODE_DRY: SystemMode.DRY,
}
SYSTEM_MODE_2_HVAC = {
SystemMode.OFF: HVAC_MODE_OFF,
SystemMode.HEAT_COOL: HVAC_MODE_HEAT_COOL,
SystemMode.COOL: HVAC_MODE_COOL,
SystemMode.HEAT: HVAC_MODE_HEAT,
SystemMode.AUX_HEAT: HVAC_MODE_HEAT,
    SystemMode.PRE_COOL: HVAC_MODE_COOL,  # ZCL 'precooling'; COOL is the closest HA mode
SystemMode.FAN_ONLY: HVAC_MODE_FAN_ONLY,
SystemMode.DRY: HVAC_MODE_DRY,
SystemMode.SLEEP: HVAC_MODE_OFF,
}
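# Mapping sketch: ZCL modes with no exact HA counterpart collapse onto the
# nearest HA mode (AUX_HEAT -> heat, PRE_COOL -> cool, SLEEP -> off), which
# is why SYSTEM_MODE_2_HVAC is not a strict inverse of HVAC_MODE_2_SYSTEM.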
ZCL_TEMP = 100
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
@STRICT_MATCH(channel_names=CHANNEL_THERMOSTAT, aux_channels=CHANNEL_FAN)
class Thermostat(ZhaEntity, ClimateEntity):
"""Representation of a ZHA Thermostat device."""
DEFAULT_MAX_TEMP = 35
DEFAULT_MIN_TEMP = 7
_domain = DOMAIN
value_attribute = 0x0000
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Initialize ZHA Thermostat instance."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._thrm = self.cluster_channels.get(CHANNEL_THERMOSTAT)
self._preset = PRESET_NONE
self._presets = []
self._supported_flags = SUPPORT_TARGET_TEMPERATURE
self._fan = self.cluster_channels.get(CHANNEL_FAN)
@property
def current_temperature(self):
"""Return the current temperature."""
if self._thrm.local_temp is None:
return None
return self._thrm.local_temp / ZCL_TEMP
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
data = {}
if self.hvac_mode:
mode = SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode, "unknown")
data[ATTR_SYS_MODE] = f"[{self._thrm.system_mode}]/{mode}"
if self._thrm.occupancy is not None:
data[ATTR_OCCUPANCY] = self._thrm.occupancy
if self._thrm.occupied_cooling_setpoint is not None:
data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint
if self._thrm.occupied_heating_setpoint is not None:
data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint
if self._thrm.pi_heating_demand is not None:
data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand
if self._thrm.pi_cooling_demand is not None:
data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand
        unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint
        if unoccupied_cooling_setpoint is not None:
            data[ATTR_UNOCCP_COOL_SETPT] = unoccupied_cooling_setpoint
        unoccupied_heating_setpoint = self._thrm.unoccupied_heating_setpoint
        if unoccupied_heating_setpoint is not None:
            data[ATTR_UNOCCP_HEAT_SETPT] = unoccupied_heating_setpoint
return data
@property
def fan_mode(self) -> Optional[str]:
"""Return current FAN mode."""
if self._thrm.running_state is None:
return FAN_AUTO
if self._thrm.running_state & (
RunningState.FAN | RunningState.FAN_STAGE_2 | RunningState.FAN_STAGE_3
):
return FAN_ON
return FAN_AUTO
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return supported FAN modes."""
if not self._fan:
return None
return [FAN_AUTO, FAN_ON]
@property
def hvac_action(self) -> Optional[str]:
"""Return the current HVAC action."""
if (
self._thrm.pi_heating_demand is None
and self._thrm.pi_cooling_demand is None
):
return self._rm_rs_action
return self._pi_demand_action
@property
def _rm_rs_action(self) -> Optional[str]:
"""Return the current HVAC action based on running mode and running state."""
running_mode = self._thrm.running_mode
if running_mode == SystemMode.HEAT:
return CURRENT_HVAC_HEAT
if running_mode == SystemMode.COOL:
return CURRENT_HVAC_COOL
running_state = self._thrm.running_state
if running_state and running_state & (
RunningState.FAN | RunningState.FAN_STAGE_2 | RunningState.FAN_STAGE_3
):
return CURRENT_HVAC_FAN
if self.hvac_mode != HVAC_MODE_OFF and running_mode == SystemMode.OFF:
return CURRENT_HVAC_IDLE
return CURRENT_HVAC_OFF
@property
def _pi_demand_action(self) -> Optional[str]:
"""Return the current HVAC action based on pi_demands."""
heating_demand = self._thrm.pi_heating_demand
if heating_demand is not None and heating_demand > 0:
return CURRENT_HVAC_HEAT
cooling_demand = self._thrm.pi_cooling_demand
if cooling_demand is not None and cooling_demand > 0:
return CURRENT_HVAC_COOL
if self.hvac_mode != HVAC_MODE_OFF:
return CURRENT_HVAC_IDLE
return CURRENT_HVAC_OFF
@property
def hvac_mode(self) -> Optional[str]:
"""Return HVAC operation mode."""
return SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode)
@property
def hvac_modes(self) -> Tuple[str, ...]:
"""Return the list of available HVAC operation modes."""
return SEQ_OF_OPERATION.get(self._thrm.ctrl_seqe_of_oper, (HVAC_MODE_OFF,))
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_HALVES
@property
def preset_mode(self) -> Optional[str]:
"""Return current preset mode."""
return self._preset
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return supported preset modes."""
return self._presets
@property
def supported_features(self):
"""Return the list of supported features."""
features = self._supported_flags
if HVAC_MODE_HEAT_COOL in self.hvac_modes:
features |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self._fan is not None:
            features |= SUPPORT_FAN_MODE
return features
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
temp = None
if self.hvac_mode == HVAC_MODE_COOL:
if self.preset_mode == PRESET_AWAY:
temp = self._thrm.unoccupied_cooling_setpoint
else:
temp = self._thrm.occupied_cooling_setpoint
elif self.hvac_mode == HVAC_MODE_HEAT:
if self.preset_mode == PRESET_AWAY:
temp = self._thrm.unoccupied_heating_setpoint
else:
temp = self._thrm.occupied_heating_setpoint
if temp is None:
return temp
return round(temp / ZCL_TEMP, 1)
@property
def target_temperature_high(self):
"""Return the upper bound temperature we try to reach."""
if self.hvac_mode != HVAC_MODE_HEAT_COOL:
return None
if self.preset_mode == PRESET_AWAY:
temp = self._thrm.unoccupied_cooling_setpoint
else:
temp = self._thrm.occupied_cooling_setpoint
if temp is None:
return temp
return round(temp / ZCL_TEMP, 1)
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
if self.hvac_mode != HVAC_MODE_HEAT_COOL:
return None
if self.preset_mode == PRESET_AWAY:
temp = self._thrm.unoccupied_heating_setpoint
else:
temp = self._thrm.occupied_heating_setpoint
if temp is None:
return temp
return round(temp / ZCL_TEMP, 1)
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
temps = []
if HVAC_MODE_HEAT in self.hvac_modes:
temps.append(self._thrm.max_heat_setpoint_limit)
if HVAC_MODE_COOL in self.hvac_modes:
temps.append(self._thrm.max_cool_setpoint_limit)
if not temps:
return self.DEFAULT_MAX_TEMP
return round(max(temps) / ZCL_TEMP, 1)
@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
temps = []
if HVAC_MODE_HEAT in self.hvac_modes:
temps.append(self._thrm.min_heat_setpoint_limit)
if HVAC_MODE_COOL in self.hvac_modes:
temps.append(self._thrm.min_cool_setpoint_limit)
if not temps:
return self.DEFAULT_MIN_TEMP
return round(min(temps) / ZCL_TEMP, 1)
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._thrm, SIGNAL_ATTR_UPDATED, self.async_attribute_updated
)
async def async_attribute_updated(self, record):
"""Handle attribute update from device."""
if (
record.attr_name in (ATTR_OCCP_COOL_SETPT, ATTR_OCCP_HEAT_SETPT)
and self.preset_mode == PRESET_AWAY
):
# occupancy attribute is an unreportable attribute, but if we get
# an attribute update for an "occupied" setpoint, there's a chance
# occupancy has changed
occupancy = await self._thrm.get_occupancy()
if occupancy is True:
self._preset = PRESET_NONE
self.debug("Attribute '%s' = %s update", record.attr_name, record.value)
self.async_write_ha_state()
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set fan mode."""
if fan_mode not in self.fan_modes:
self.warning("Unsupported '%s' fan mode", fan_mode)
return
if fan_mode == FAN_ON:
mode = ThermostatFanMode.ON
else:
mode = ThermostatFanMode.AUTO
await self._fan.async_set_speed(mode)
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target operation mode."""
if hvac_mode not in self.hvac_modes:
self.warning(
"can't set '%s' mode. Supported modes are: %s",
hvac_mode,
self.hvac_modes,
)
return
if await self._thrm.async_set_operation_mode(HVAC_MODE_2_SYSTEM[hvac_mode]):
self.async_write_ha_state()
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if preset_mode not in self.preset_modes:
self.debug("preset mode '%s' is not supported", preset_mode)
return
if self.preset_mode not in (preset_mode, PRESET_NONE):
if not await self.async_preset_handler(self.preset_mode, enable=False):
self.debug("Couldn't turn off '%s' preset", self.preset_mode)
return
if preset_mode != PRESET_NONE:
if not await self.async_preset_handler(preset_mode, enable=True):
self.debug("Couldn't turn on '%s' preset", preset_mode)
return
self._preset = preset_mode
self.async_write_ha_state()
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
temp = kwargs.get(ATTR_TEMPERATURE)
hvac_mode = kwargs.get(ATTR_HVAC_MODE)
if hvac_mode is not None:
await self.async_set_hvac_mode(hvac_mode)
thrm = self._thrm
if self.hvac_mode == HVAC_MODE_HEAT_COOL:
success = True
if low_temp is not None:
low_temp = int(low_temp * ZCL_TEMP)
success = success and await thrm.async_set_heating_setpoint(
low_temp, self.preset_mode == PRESET_AWAY
)
self.debug("Setting heating %s setpoint: %s", low_temp, success)
if high_temp is not None:
high_temp = int(high_temp * ZCL_TEMP)
success = success and await thrm.async_set_cooling_setpoint(
high_temp, self.preset_mode == PRESET_AWAY
)
self.debug("Setting cooling %s setpoint: %s", low_temp, success)
elif temp is not None:
temp = int(temp * ZCL_TEMP)
if self.hvac_mode == HVAC_MODE_COOL:
success = await thrm.async_set_cooling_setpoint(
temp, self.preset_mode == PRESET_AWAY
)
elif self.hvac_mode == HVAC_MODE_HEAT:
success = await thrm.async_set_heating_setpoint(
temp, self.preset_mode == PRESET_AWAY
)
else:
self.debug("Not setting temperature for '%s' mode", self.hvac_mode)
return
else:
self.debug("incorrect %s setting for '%s' mode", kwargs, self.hvac_mode)
return
if success:
self.async_write_ha_state()
async def async_preset_handler(self, preset: str, enable: bool = False) -> bool:
"""Set the preset mode via handler."""
handler = getattr(self, f"async_preset_handler_{preset}")
return await handler(enable)
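# Preset dispatch sketch: async_preset_handler("away", enable=True) resolves
# to async_preset_handler_away, which subclasses such as
# SinopeTechnologiesThermostat below provide; the base class defines none.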
@STRICT_MATCH(
channel_names={CHANNEL_THERMOSTAT, "sinope_manufacturer_specific"},
manufacturers="Sinope Technologies",
)
class SinopeTechnologiesThermostat(Thermostat):
"""Sinope Technologies Thermostat."""
manufacturer = 0x119C
update_time_interval = timedelta(minutes=randint(45, 75))
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Initialize ZHA Thermostat instance."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._presets = [PRESET_AWAY, PRESET_NONE]
self._supported_flags |= SUPPORT_PRESET_MODE
self._manufacturer_ch = self.cluster_channels["sinope_manufacturer_specific"]
@callback
def _async_update_time(self, timestamp=None) -> None:
"""Update thermostat's time display."""
secs_2k = (
dt_util.now().replace(tzinfo=None) - datetime(2000, 1, 1, 0, 0, 0, 0)
).total_seconds()
self.debug("Updating time: %s", secs_2k)
self._manufacturer_ch.cluster.create_catching_task(
self._manufacturer_ch.cluster.write_attributes(
{"secs_since_2k": secs_2k}, manufacturer=self.manufacturer
)
)
async def async_added_to_hass(self):
"""Run when about to be added to Hass."""
await super().async_added_to_hass()
async_track_time_interval(
self.hass, self._async_update_time, self.update_time_interval
)
self._async_update_time()
async def async_preset_handler_away(self, is_away: bool = False) -> bool:
"""Set occupancy."""
mfg_code = self._zha_device.manufacturer_code
res = await self._thrm.write_attributes(
{"set_occupancy": 0 if is_away else 1}, manufacturer=mfg_code
)
self.debug("set occupancy to %s. Status: %s", 0 if is_away else 1, res)
return res
@STRICT_MATCH(
channel_names=CHANNEL_THERMOSTAT,
aux_channels=CHANNEL_FAN,
manufacturers="Zen Within",
)
class ZenWithinThermostat(Thermostat):
"""Zen Within Thermostat implementation."""
@property
def _rm_rs_action(self) -> Optional[str]:
"""Return the current HVAC action based on running mode and running state."""
running_state = self._thrm.running_state
if running_state is None:
return None
if running_state & (RunningState.HEAT | RunningState.HEAT_STAGE_2):
return CURRENT_HVAC_HEAT
if running_state & (RunningState.COOL | RunningState.COOL_STAGE_2):
return CURRENT_HVAC_COOL
if running_state & (
RunningState.FAN | RunningState.FAN_STAGE_2 | RunningState.FAN_STAGE_3
):
return CURRENT_HVAC_FAN
if self.hvac_mode != HVAC_MODE_OFF:
return CURRENT_HVAC_IDLE
return CURRENT_HVAC_OFF
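        # Note: running_state is a bitmask, so a state with both HEAT and FAN
        # bits set reports CURRENT_HVAC_HEAT because heating is checked first.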
@STRICT_MATCH(
channel_names=CHANNEL_THERMOSTAT,
aux_channels=CHANNEL_FAN,
manufacturers="Centralite",
models="3157100",
)
class CentralitePearl(ZenWithinThermostat):
"""Centralite Pearl Thermostat implementation."""
|
import unittest
from trashcli.put import shrinkuser
class TestTrashDirectoryName(unittest.TestCase):
def setUp(self):
self.environ = {}
    def test_should_substitute_tilde_in_place_of_home_dir(self):
        self.environ['HOME'] = '/home/user'
        self.trash_dir = "/home/user/.local/share/Trash"
        self.assert_name_is('~/.local/share/Trash')
    def test_when_not_in_home_dir(self):
        self.environ['HOME'] = '/home/user'
        self.trash_dir = "/not-in-home/Trash"
        self.assert_name_is('/not-in-home/Trash')
    def test_tilde_works_also_with_trailing_slash(self):
        self.environ['HOME'] = '/home/user/'
        self.trash_dir = "/home/user/.local/share/Trash"
        self.assert_name_is('~/.local/share/Trash')
    def test_str_uses_tilde_with_many_slashes(self):
        self.environ['HOME'] = '/home/user////'
        self.trash_dir = "/home/user/.local/share/Trash"
        self.assert_name_is('~/.local/share/Trash')
    def test_dont_get_confused_by_empty_home_dir(self):
        self.environ['HOME'] = ''
        self.trash_dir = "/foo/Trash"
        self.assert_name_is('/foo/Trash')
    def assert_name_is(self, expected_name):
        shrunk = shrinkuser(self.trash_dir, self.environ)
        assert expected_name == shrunk
|
from logilab.common.testlib import TestCase, unittest_main, tag
from logilab.common.date import (date_range, endOfMonth, add_days_worked,
nb_open_days, get_national_holidays, ustrftime, ticks2datetime,
utcdatetime, datetime2ticks)
from datetime import date, datetime, timedelta
from calendar import timegm
import pytz
try:
from mx.DateTime import Date as mxDate, DateTime as mxDateTime, \
now as mxNow, RelativeDateTime, RelativeDate
except ImportError:
mxDate = mxDateTime = RelativeDateTime = mxNow = None
class DateTC(TestCase):
datecls = date
datetimecls = datetime
timedeltacls = timedelta
now = datetime.now
def test_day(self):
"""enumerate days"""
r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 4)))
expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 2), self.datecls(2000, 1, 3)]
self.assertListEqual(r, expected)
r = list(date_range(self.datecls(2000, 1, 31), self.datecls(2000, 2, 3)))
expected = [self.datecls(2000, 1, 31), self.datecls(2000, 2, 1), self.datecls(2000, 2, 2)]
self.assertListEqual(r, expected)
r = list(date_range(self.datecls(2000, 1, 1), self.datecls(2000, 1, 6), 2))
expected = [self.datecls(2000, 1, 1), self.datecls(2000, 1, 3), self.datecls(2000, 1, 5)]
self.assertListEqual(r, expected)
def test_add_days_worked(self):
add = add_days_worked
# normal
self.assertEqual(add(self.datecls(2008, 1, 3), 1), self.datecls(2008, 1, 4))
# skip week-end
self.assertEqual(add(self.datecls(2008, 1, 3), 2), self.datecls(2008, 1, 7))
# skip 2 week-ends
self.assertEqual(add(self.datecls(2008, 1, 3), 8), self.datecls(2008, 1, 15))
# skip holiday + week-end
self.assertEqual(add(self.datecls(2008, 4, 30), 2), self.datecls(2008, 5, 5))
def test_get_national_holidays(self):
holidays = get_national_holidays
yield self.assertEqual, holidays(self.datecls(2008, 4, 29), self.datecls(2008, 5, 2)), \
[self.datecls(2008, 5, 1)]
yield self.assertEqual, holidays(self.datecls(2008, 5, 7), self.datecls(2008, 5, 8)), []
x = self.datetimecls(2008, 5, 7, 12, 12, 12)
yield self.assertEqual, holidays(x, x + self.timedeltacls(days=1)), []
def test_open_days_now_and_before(self):
nb = nb_open_days
x = self.now()
y = x - self.timedeltacls(seconds=1)
self.assertRaises(AssertionError, nb, x, y)
def assertOpenDays(self, start, stop, expected):
got = nb_open_days(start, stop)
self.assertEqual(got, expected)
def test_open_days_tuesday_friday(self):
self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 7), 3)
def test_open_days_day_nextday(self):
self.assertOpenDays(self.datecls(2008, 3, 4), self.datecls(2008, 3, 5), 1)
def test_open_days_friday_monday(self):
self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 10), 1)
def test_open_days_friday_monday_with_two_weekends(self):
self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 17), 6)
def test_open_days_tuesday_wednesday(self):
"""week-end + easter monday"""
self.assertOpenDays(self.datecls(2008, 3, 18), self.datecls(2008, 3, 26), 5)
def test_open_days_friday_saturday(self):
self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 8), 1)
def test_open_days_friday_sunday(self):
self.assertOpenDays(self.datecls(2008, 3, 7), self.datecls(2008, 3, 9), 1)
def test_open_days_saturday_sunday(self):
self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 9), 0)
def test_open_days_saturday_monday(self):
self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 10), 0)
def test_open_days_saturday_tuesday(self):
self.assertOpenDays(self.datecls(2008, 3, 8), self.datecls(2008, 3, 11), 1)
def test_open_days_now_now(self):
x = self.now()
self.assertOpenDays(x, x, 0)
def test_open_days_now_now2(self):
x = self.datetimecls(2010, 5, 24)
self.assertOpenDays(x, x, 0)
def test_open_days_afternoon_before_holiday(self):
self.assertOpenDays(self.datetimecls(2008, 5, 7, 14), self.datetimecls(2008, 5, 8, 0), 1)
def test_open_days_afternoon_before_saturday(self):
self.assertOpenDays(self.datetimecls(2008, 5, 9, 14), self.datetimecls(2008, 5, 10, 14), 1)
def test_open_days_afternoon(self):
self.assertOpenDays(self.datetimecls(2008, 5, 6, 14), self.datetimecls(2008, 5, 7, 14), 1)
@tag('posix', '1900')
def test_ustrftime_before_1900(self):
date = self.datetimecls(1328, 3, 12, 6, 30)
self.assertEqual(ustrftime(date, '%Y-%m-%d %H:%M:%S'), u'1328-03-12 06:30:00')
@tag('posix', '1900')
def test_ticks2datetime_before_1900(self):
ticks = -2209075200000
date = ticks2datetime(ticks)
self.assertEqual(ustrftime(date, '%Y-%m-%d'), u'1899-12-31')
def test_month(self):
"""enumerate months"""
r = list(date_range(self.datecls(2006, 5, 6), self.datecls(2006, 8, 27),
incmonth=True))
expected = [self.datecls(2006, 5, 6), self.datecls(2006, 6, 1), self.datecls(2006, 7, 1), self.datecls(2006, 8, 1)]
self.assertListEqual(expected, r)
def test_utcdatetime(self):
if self.datetimecls is mxDateTime:
return
d = self.datetimecls(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc)
d = utcdatetime(d)
self.assertEqual(d, self.datetimecls(2014, 11, 26, 12, 0, 0, 57))
self.assertIsNone(d.tzinfo)
d = pytz.timezone('Europe/Paris').localize(
self.datetimecls(2014, 11, 26, 12, 0, 0, 57))
d = utcdatetime(d)
self.assertEqual(d, self.datetimecls(2014, 11, 26, 11, 0, 0, 57))
self.assertIsNone(d.tzinfo)
d = pytz.timezone('Europe/Paris').localize(
self.datetimecls(2014, 7, 26, 12, 0, 0, 57))
d = utcdatetime(d)
self.assertEqual(d, self.datetimecls(2014, 7, 26, 10, 0, 0, 57))
self.assertIsNone(d.tzinfo)
def test_datetime2ticks(self):
d = datetime(2014, 11, 26, 12, 0, 0, 57, tzinfo=pytz.utc)
timestamp = timegm(d.timetuple())
self.assertEqual(datetime2ticks(d), timestamp * 1000)
d = d.replace(microsecond=123456)
self.assertEqual(datetime2ticks(d), timestamp * 1000 + 123)
def test_datetime2ticks_date_argument(self):
d = date(2014, 11, 26)
timestamp = timegm(d.timetuple())
self.assertEqual(datetime2ticks(d), timestamp * 1000)
class MxDateTC(DateTC):
datecls = mxDate
datetimecls = mxDateTime
timedeltacls = RelativeDateTime
now = mxNow
def check_mx(self):
if mxDate is None:
self.skipTest('mx.DateTime is not installed')
def setUp(self):
self.check_mx()
def test_month(self):
"""enumerate months"""
r = list(date_range(self.datecls(2000, 1, 2), self.datecls(2000, 4, 4), endOfMonth))
expected = [self.datecls(2000, 1, 2), self.datecls(2000, 2, 29), self.datecls(2000, 3, 31)]
self.assertListEqual(r, expected)
r = list(date_range(self.datecls(2000, 11, 30), self.datecls(2001, 2, 3), endOfMonth))
expected = [self.datecls(2000, 11, 30), self.datecls(2000, 12, 31), self.datecls(2001, 1, 31)]
self.assertListEqual(r, expected)
if __name__ == '__main__':
unittest_main()
|
import ambiclimate
from homeassistant import data_entry_flow
from homeassistant.components.ambiclimate import config_flow
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.setup import async_setup_component
from homeassistant.util import aiohttp
from tests.async_mock import AsyncMock, patch
async def init_config_flow(hass):
"""Init a configuration flow."""
await async_setup_component(
hass, "http", {"http": {"base_url": "https://hass.com"}}
)
config_flow.register_flow_implementation(hass, "id", "secret")
flow = config_flow.AmbiclimateFlowHandler()
flow.hass = hass
return flow
async def test_abort_if_no_implementation_registered(hass):
"""Test we abort if no implementation is registered."""
flow = config_flow.AmbiclimateFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "missing_configuration"
async def test_abort_if_already_setup(hass):
"""Test we abort if Ambiclimate is already setup."""
flow = await init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_code()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_flow_implementation(hass):
"""Test registering an implementation and finishing flow works."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert (
result["description_placeholders"]["cb_url"]
== "https://hass.com/api/ambiclimate"
)
url = result["description_placeholders"]["authorization_url"]
assert "https://api.ambiclimate.com/oauth2/authorize" in url
assert "client_id=id" in url
assert "response_type=code" in url
assert "redirect_uri=https%3A%2F%2Fhass.com%2Fapi%2Fambiclimate" in url
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value="test"):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Ambiclimate"
assert result["data"]["callback_url"] == "https://hass.com/api/ambiclimate"
assert result["data"][CONF_CLIENT_SECRET] == "secret"
assert result["data"][CONF_CLIENT_ID] == "id"
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value=None):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
with patch(
"ambiclimate.AmbiclimateOAuth.get_access_token",
side_effect=ambiclimate.AmbiclimateOauthError(),
):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_abort_invalid_code(hass):
"""Test if no code is given to step_code."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value=None):
result = await flow.async_step_code("invalid")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "access_token"
async def test_already_setup(hass):
"""Test when already setup."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=True):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_view(hass):
"""Test view."""
hass.config_entries.flow.async_init = AsyncMock()
request = aiohttp.MockRequest(
b"", query_string="code=test_code", mock_source="test"
)
request.app = {"hass": hass}
view = config_flow.AmbiclimateAuthCallbackView()
assert await view.get(request) == "OK!"
request = aiohttp.MockRequest(b"", query_string="", mock_source="test")
request.app = {"hass": hass}
view = config_flow.AmbiclimateAuthCallbackView()
assert await view.get(request) == "No code"
|
import logging
import re
log = logging.getLogger(__name__)
def try_compile(regex, flags=0):
try:
compiled = re.compile(regex, flags)
except re.error:
log.warning(
'Error compiling regex {!r} with flags {!r}'.format(regex, flags))
compiled = None
return compiled
class FilterEntry:
__slots__ = ("label", "active", "filter", "byte_filter", "filter_string")
REGEX, SHELL = 0, 1
def __init__(self, label, active, filter, byte_filter, filter_string):
self.label = label
self.active = active
self.filter = filter
self.byte_filter = byte_filter
self.filter_string = filter_string
@classmethod
def compile_regex(cls, regex, byte_regex=False):
if byte_regex and not isinstance(regex, bytes):
# TODO: Register a custom error handling function to replace
# encoding errors with '.'?
regex = regex.encode('utf8', 'replace')
return try_compile(regex, re.M)
@classmethod
def compile_shell_pattern(cls, pattern):
bits = pattern.split()
if not bits:
# An empty pattern would match everything, so skip it
return None
elif len(bits) > 1:
regexes = [shell_to_regex(b)[:-1] for b in bits]
regex = "(%s)$" % "|".join(regexes)
else:
regex = shell_to_regex(bits[0])
return try_compile(regex)
@classmethod
def new_from_gsetting(cls, elements, filter_type):
name, active, filter_string = elements
if filter_type == cls.REGEX:
str_re = cls.compile_regex(filter_string)
bytes_re = cls.compile_regex(filter_string, byte_regex=True)
elif filter_type == cls.SHELL:
str_re = cls.compile_shell_pattern(filter_string)
bytes_re = None
else:
raise ValueError("Unknown filter type")
active = active and bool(str_re)
return cls(name, active, str_re, bytes_re, filter_string)
@classmethod
def check_filter(cls, filter_string, filter_type):
if filter_type == cls.REGEX:
compiled = cls.compile_regex(filter_string)
elif filter_type == cls.SHELL:
compiled = cls.compile_shell_pattern(filter_string)
return compiled is not None
def __copy__(self):
new = type(self)(
self.label, self.active, None, None, self.filter_string)
if self.filter is not None:
new.filter = re.compile(self.filter.pattern, self.filter.flags)
if self.byte_filter is not None:
new.byte_filter = re.compile(
self.byte_filter.pattern, self.byte_filter.flags)
return new
def shell_to_regex(pat):
"""Translate a shell PATTERN to a regular expression.
Based on fnmatch.translate().
We also handle {a,b,c} where fnmatch does not.
"""
i, n = 0, len(pat)
res = ''
while i < n:
c = pat[i]
i += 1
if c == '\\':
try:
c = pat[i]
except IndexError:
pass
else:
i += 1
res += re.escape(c)
elif c == '*':
res += '.*'
elif c == '?':
res += '.'
elif c == '[':
try:
j = pat.index(']', i)
except ValueError:
res += r'\['
else:
stuff = pat[i:j]
i = j + 1
if stuff[0] == '!':
stuff = '^%s' % stuff[1:]
elif stuff[0] == '^':
stuff = r'\^%s' % stuff[1:]
res += '[%s]' % stuff
elif c == '{':
try:
j = pat.index('}', i)
except ValueError:
res += '\\{'
else:
stuff = pat[i:j]
i = j + 1
res += '(%s)' % "|".join(
[shell_to_regex(p)[:-1] for p in stuff.split(",")]
)
else:
res += re.escape(c)
return res + "$"
|
import time
import math
import multiprocessing
import os
import random
import sys
import signal
try:
from setproctitle import getproctitle, setproctitle
except ImportError:
setproctitle = None
from diamond.utils.signals import signal_to_exception
from diamond.utils.signals import SIGALRMException
from diamond.utils.signals import SIGHUPException
def collector_process(collector, metric_queue, log):
"""
"""
proc = multiprocessing.current_process()
if setproctitle:
setproctitle('%s - %s' % (getproctitle(), proc.name))
signal.signal(signal.SIGALRM, signal_to_exception)
signal.signal(signal.SIGHUP, signal_to_exception)
signal.signal(signal.SIGUSR2, signal_to_exception)
# Reset signal handlers of forks/threads
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
interval = float(collector.config['interval'])
log.debug('Starting')
log.debug('Interval: %s seconds', interval)
# Validate the interval
if interval <= 0:
log.critical('interval of %s is not valid!', interval)
sys.exit(1)
# Start the next execution at the next window plus some stagger delay to
# avoid having all collectors running at the same time
next_window = math.floor(time.time() / interval) * interval
stagger_offset = random.uniform(0, interval - 1)
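    # Example (illustrative): interval=10s and time.time()=1003.7 give
    # next_window=1000.0; a stagger_offset of 6.0 makes the first run fire
    # at t=1006.0, then at 1016.0, 1026.0, ... (one run per window).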
    # Allocate time until the end of the window for the collector to run,
    # with a minimum of 1 second
max_time = int(max(interval - stagger_offset, 1))
log.debug('Max collection time: %s seconds', max_time)
    # Set up stderr/stdout as /dev/null so random print statements in third-
    # party libs do not fail and prevent collectors from running.
# https://github.com/BrightcoveOS/Diamond/issues/722
sys.stdout = open(os.devnull, 'w')
sys.stderr = open(os.devnull, 'w')
    while True:
try:
time_to_sleep = (next_window + stagger_offset) - time.time()
if time_to_sleep > 0:
time.sleep(time_to_sleep)
elif time_to_sleep < 0:
                # clock has jumped, let's skip missed intervals
next_window = time.time()
next_window += interval
# Ensure collector run times fit into the collection window
signal.alarm(max_time)
# Collect!
collector._run()
# Success! Disable the alarm
signal.alarm(0)
except SIGALRMException:
log.error('Took too long to run! Killed!')
# Adjust the stagger_offset to allow for more time to run the
# collector
stagger_offset = stagger_offset * 0.9
max_time = int(max(interval - stagger_offset, 1))
log.debug('Max collection time: %s seconds', max_time)
except SIGHUPException:
# Reload the config if requested
# We must first disable the alarm as we don't want it to interrupt
# us and end up with half a loaded config
signal.alarm(0)
            log.info('Reloading config due to HUP')
collector.load_config()
log.info('Config reloaded')
except Exception:
log.exception('Collector failed!')
break
def handler_process(handlers, metric_queue, log):
    """Dispatch metrics from the queue to every configured handler."""
proc = multiprocessing.current_process()
if setproctitle:
setproctitle('%s - %s' % (getproctitle(), proc.name))
log.debug('Starting process %s', proc.name)
    while True:
metric = metric_queue.get(block=True, timeout=None)
for handler in handlers:
if metric is not None:
handler._process(metric)
else:
handler._flush()
|
import base64
import logging
import sys
from io import BytesIO
import defusedxml.ElementTree as DefusedET
import radicale
from radicale import xmlutils
# Enable debug output
radicale.log.logger.setLevel(logging.DEBUG)
class BaseTest:
"""Base class for tests."""
def request(self, method, path, data=None, login=None, **args):
"""Send a request."""
for key in args:
args[key.upper()] = args[key]
if login:
args["HTTP_AUTHORIZATION"] = "Basic " + base64.b64encode(
login.encode()).decode()
args["REQUEST_METHOD"] = method.upper()
args["PATH_INFO"] = path
if data:
data = data.encode()
args["wsgi.input"] = BytesIO(data)
args["CONTENT_LENGTH"] = str(len(data))
args["wsgi.errors"] = sys.stderr
status = headers = None
def start_response(status_, headers_):
nonlocal status, headers
status = status_
headers = headers_
answer = self.application(args, start_response)
return (int(status.split()[0]), dict(headers),
answer[0].decode() if answer else None)
@staticmethod
def parse_responses(text):
xml = DefusedET.fromstring(text)
assert xml.tag == xmlutils.make_clark("D:multistatus")
path_responses = {}
for response in xml.findall(xmlutils.make_clark("D:response")):
href = response.find(xmlutils.make_clark("D:href"))
assert href.text not in path_responses
            prop_responses = {}
            for propstat in response.findall(
                    xmlutils.make_clark("D:propstat")):
                status = propstat.find(xmlutils.make_clark("D:status"))
                assert status.text.startswith("HTTP/1.1 ")
                status_code = int(status.text.split(" ")[1])
                for element in propstat.findall(
                        "./%s/*" % xmlutils.make_clark("D:prop")):
                    human_tag = xmlutils.make_human_tag(element.tag)
                    assert human_tag not in prop_responses
                    prop_responses[human_tag] = (status_code, element)
            status = response.find(xmlutils.make_clark("D:status"))
            if status is not None:
                assert not prop_responses
                assert status.text.startswith("HTTP/1.1 ")
                status_code = int(status.text.split(" ")[1])
                path_responses[href.text] = status_code
            else:
                path_responses[href.text] = prop_responses
return path_responses
@staticmethod
def _check_status(status, good_status, check=True):
if check is True:
assert status == good_status
elif check is not False:
assert status == check
return status == good_status
def get(self, path, check=True, **args):
status, _, answer = self.request("GET", path, **args)
self._check_status(status, 200, check)
return status, answer
def post(self, path, data=None, check=True, **args):
status, _, answer = self.request("POST", path, data, **args)
self._check_status(status, 200, check)
return status, answer
def put(self, path, data, check=True, **args):
status, _, answer = self.request("PUT", path, data, **args)
self._check_status(status, 201, check)
return status, answer
def propfind(self, path, data=None, check=True, **args):
status, _, answer = self.request("PROPFIND", path, data, **args)
if not self._check_status(status, 207, check):
return status, None
responses = self.parse_responses(answer)
if args.get("HTTP_DEPTH", 0) == 0:
assert len(responses) == 1 and path in responses
return status, responses
def proppatch(self, path, data=None, check=True, **args):
status, _, answer = self.request("PROPPATCH", path, data, **args)
if not self._check_status(status, 207, check):
return status, None
responses = self.parse_responses(answer)
assert len(responses) == 1 and path in responses
return status, responses
def report(self, path, data, check=True, **args):
status, _, answer = self.request("REPORT", path, data, **args)
if not self._check_status(status, 207, check):
return status, None
return status, self.parse_responses(answer)
def delete(self, path, check=True, **args):
status, _, answer = self.request("DELETE", path, **args)
if not self._check_status(status, 200, check):
return status, None
responses = self.parse_responses(answer)
assert len(responses) == 1 and path in responses
return status, responses
def mkcalendar(self, path, data=None, check=True, **args):
status, _, answer = self.request("MKCALENDAR", path, data, **args)
self._check_status(status, 201, check)
return status, answer
def mkcol(self, path, data=None, check=True, **args):
status, _, _ = self.request("MKCOL", path, data, **args)
self._check_status(status, 201, check)
return status
def create_addressbook(self, path, check=True, **args):
return self.mkcol(path, """\
<?xml version="1.0" encoding="UTF-8" ?>
<create xmlns="DAV:" xmlns:CR="urn:ietf:params:xml:ns:carddav">
<set>
<prop>
<resourcetype>
<collection />
<CR:addressbook />
</resourcetype>
</prop>
</set>
</create>""", check=check, **args)
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.sensor import DOMAIN
from homeassistant.components.sensor.device_condition import ENTITY_CONDITIONS
from homeassistant.const import CONF_PLATFORM, PERCENTAGE, STATE_UNKNOWN
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.testing_config.custom_components.test.sensor import DEVICE_CLASSES
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a sensor."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for device_class in DEVICE_CLASSES:
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES[device_class].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition["type"],
"device_id": device_entry.id,
"entity_id": platform.ENTITIES[device_class].entity_id,
}
for device_class in DEVICE_CLASSES
for condition in ENTITY_CONDITIONS[device_class]
if device_class != "none"
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor condition."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["battery"].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"extra_fields": [
{
"description": {"suffix": PERCENTAGE},
"name": "above",
"optional": True,
"type": "float",
},
{
"description": {"suffix": PERCENTAGE},
"name": "below",
"optional": True,
"type": "float",
},
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert len(conditions) == 1
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_get_condition_capabilities_none(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor condition."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
conditions = [
{
"condition": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": "sensor.beer",
"type": "is_battery_level",
},
{
"condition": "device",
"device_id": "8770c43885354d5fa27604db6817f63f",
"domain": "sensor",
"entity_id": platform.ENTITIES["none"].entity_id,
"type": "is_battery_level",
},
]
expected_capabilities = {}
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state_not_above_below(hass, calls, caplog):
"""Test for bad value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
}
],
"action": {"service": "test.automation"},
}
]
},
)
assert "must contain at least one of below, above" in caplog.text
async def test_if_state_above(hass, calls):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"above": 10,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
async def test_if_state_below(hass, calls):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"below": 10,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
async def test_if_state_between(hass, calls):
"""Test for value conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_battery_level",
"above": 10,
"below": 20,
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_UNKNOWN
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 9)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, 11)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "event - test_event1"
hass.states.async_set(sensor1.entity_id, 21)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
hass.states.async_set(sensor1.entity_id, 19)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "event - test_event1"
|
from unittest import TestCase
from django.core.exceptions import ImproperlyConfigured
from django.test.utils import override_settings
from weblate.utils.classloader import ClassLoader, load_class
class LoadClassTest(TestCase):
def test_correct(self):
cls = load_class("unittest.TestCase", "TEST")
self.assertEqual(cls, TestCase)
def test_invalid_name(self):
with self.assertRaisesRegex(
ImproperlyConfigured,
'Error importing class unittest in TEST: .*"' "(not enough|need more than)",
):
load_class("unittest", "TEST")
def test_invalid_module(self):
with self.assertRaisesRegex(
ImproperlyConfigured,
'weblate.trans.tests.missing in TEST: "' "No module named .*missing[\"']",
):
load_class("weblate.trans.tests.missing.Foo", "TEST")
def test_invalid_class(self):
with self.assertRaisesRegex(
ImproperlyConfigured,
'"weblate.utils.tests.test_classloader"' ' does not define a "Foo" class',
):
load_class("weblate.utils.tests.test_classloader.Foo", "TEST")
class ClassLoaderTestCase(TestCase):
@override_settings(TEST_SERVICES=("weblate.addons.cleanup.CleanupAddon",))
def test_load(self):
loader = ClassLoader("TEST_SERVICES", construct=False)
loader.load_data()
self.assertEqual(len(list(loader.keys())), 1)
@override_settings(TEST_SERVICES=("weblate.addons.cleanup.CleanupAddon"))
def test_invalid(self):
loader = ClassLoader("TEST_SERVICES", construct=False)
with self.assertRaisesRegex(
ImproperlyConfigured, "Setting TEST_SERVICES must be list or tuple!"
):
loader.load_data()
|
import logging
from pwmled import Color
from pwmled.driver.gpio import GpioDriver
from pwmled.driver.pca9685 import Pca9685Driver
from pwmled.led import SimpleLed
from pwmled.led.rgb import RgbLed
from pwmled.led.rgbw import RgbwLed
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
ATTR_TRANSITION,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.const import CONF_ADDRESS, CONF_HOST, CONF_NAME, CONF_TYPE, STATE_ON
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
CONF_LEDS = "leds"
CONF_DRIVER = "driver"
CONF_PINS = "pins"
CONF_FREQUENCY = "frequency"
CONF_DRIVER_GPIO = "gpio"
CONF_DRIVER_PCA9685 = "pca9685"
CONF_DRIVER_TYPES = [CONF_DRIVER_GPIO, CONF_DRIVER_PCA9685]
CONF_LED_TYPE_SIMPLE = "simple"
CONF_LED_TYPE_RGB = "rgb"
CONF_LED_TYPE_RGBW = "rgbw"
CONF_LED_TYPES = [CONF_LED_TYPE_SIMPLE, CONF_LED_TYPE_RGB, CONF_LED_TYPE_RGBW]
DEFAULT_BRIGHTNESS = 255
DEFAULT_COLOR = [0, 0]
SUPPORT_SIMPLE_LED = SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
SUPPORT_RGB_LED = SUPPORT_BRIGHTNESS | SUPPORT_COLOR | SUPPORT_TRANSITION
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_LEDS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_DRIVER): vol.In(CONF_DRIVER_TYPES),
vol.Required(CONF_PINS): vol.All(cv.ensure_list, [cv.positive_int]),
vol.Required(CONF_TYPE): vol.In(CONF_LED_TYPES),
vol.Optional(CONF_FREQUENCY): cv.positive_int,
vol.Optional(CONF_ADDRESS): cv.byte,
vol.Optional(CONF_HOST): cv.string,
}
],
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PWM LED lights."""
leds = []
for led_conf in config[CONF_LEDS]:
driver_type = led_conf[CONF_DRIVER]
pins = led_conf[CONF_PINS]
opt_args = {}
if CONF_FREQUENCY in led_conf:
opt_args["freq"] = led_conf[CONF_FREQUENCY]
if driver_type == CONF_DRIVER_GPIO:
if CONF_HOST in led_conf:
opt_args["host"] = led_conf[CONF_HOST]
driver = GpioDriver(pins, **opt_args)
elif driver_type == CONF_DRIVER_PCA9685:
if CONF_ADDRESS in led_conf:
opt_args["address"] = led_conf[CONF_ADDRESS]
driver = Pca9685Driver(pins, **opt_args)
else:
_LOGGER.error("Invalid driver type")
return
name = led_conf[CONF_NAME]
led_type = led_conf[CONF_TYPE]
if led_type == CONF_LED_TYPE_SIMPLE:
led = PwmSimpleLed(SimpleLed(driver), name)
elif led_type == CONF_LED_TYPE_RGB:
led = PwmRgbLed(RgbLed(driver), name)
elif led_type == CONF_LED_TYPE_RGBW:
led = PwmRgbLed(RgbwLed(driver), name)
else:
_LOGGER.error("Invalid led type")
return
leds.append(led)
add_entities(leds)
class PwmSimpleLed(LightEntity, RestoreEntity):
"""Representation of a simple one-color PWM LED."""
def __init__(self, led, name):
"""Initialize one-color PWM LED."""
self._led = led
self._name = name
self._is_on = False
self._brightness = DEFAULT_BRIGHTNESS
async def async_added_to_hass(self):
"""Handle entity about to be added to hass event."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if last_state:
self._is_on = last_state.state == STATE_ON
self._brightness = last_state.attributes.get(
"brightness", DEFAULT_BRIGHTNESS
)
self._led.set(
is_on=self._is_on, brightness=_from_hass_brightness(self._brightness)
)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the group."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._is_on
@property
def brightness(self):
"""Return the brightness property."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_SIMPLE_LED
def turn_on(self, **kwargs):
"""Turn on a led."""
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_TRANSITION in kwargs:
transition_time = kwargs[ATTR_TRANSITION]
self._led.transition(
transition_time,
is_on=True,
brightness=_from_hass_brightness(self._brightness),
)
else:
self._led.set(
is_on=True, brightness=_from_hass_brightness(self._brightness)
)
self._is_on = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn off a LED."""
if self.is_on:
if ATTR_TRANSITION in kwargs:
transition_time = kwargs[ATTR_TRANSITION]
self._led.transition(transition_time, is_on=False)
else:
self._led.off()
self._is_on = False
self.schedule_update_ha_state()
class PwmRgbLed(PwmSimpleLed):
"""Representation of a RGB(W) PWM LED."""
def __init__(self, led, name):
"""Initialize a RGB(W) PWM LED."""
super().__init__(led, name)
self._color = DEFAULT_COLOR
async def async_added_to_hass(self):
"""Handle entity about to be added to hass event."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if last_state:
self._color = last_state.attributes.get("hs_color", DEFAULT_COLOR)
self._led.set(color=_from_hass_color(self._color))
@property
def hs_color(self):
"""Return the color property."""
return self._color
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_RGB_LED
def turn_on(self, **kwargs):
"""Turn on a LED."""
if ATTR_HS_COLOR in kwargs:
self._color = kwargs[ATTR_HS_COLOR]
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_TRANSITION in kwargs:
transition_time = kwargs[ATTR_TRANSITION]
self._led.transition(
transition_time,
is_on=True,
brightness=_from_hass_brightness(self._brightness),
color=_from_hass_color(self._color),
)
else:
self._led.set(
is_on=True,
brightness=_from_hass_brightness(self._brightness),
color=_from_hass_color(self._color),
)
self._is_on = True
self.schedule_update_ha_state()
def _from_hass_brightness(brightness):
"""Convert Home Assistant brightness units to percentage."""
return brightness / 255
def _from_hass_color(color):
"""Convert Home Assistant RGB list to Color tuple."""
rgb = color_util.color_hs_to_RGB(*color)
return Color(*tuple(rgb))
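# Illustrative examples (added for clarity):
#   _from_hass_brightness(255) -> 1.0 (full brightness as a 0-1 fraction)
#   _from_hass_color([0, 100]) -> Color(255, 0, 0) (hue 0, full saturation: red)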
|
revision = "556ceb3e3c3e"
down_revision = "449c3d5c7299"
from alembic import op
import sqlalchemy as sa
from lemur.utils import Vault
from sqlalchemy.dialects import postgresql
from sqlalchemy_utils import ArrowType
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"pending_certs",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("external_id", sa.String(length=128), nullable=True),
sa.Column("owner", sa.String(length=128), nullable=False),
sa.Column("name", sa.String(length=256), nullable=True),
sa.Column("description", sa.String(length=1024), nullable=True),
sa.Column("notify", sa.Boolean(), nullable=True),
sa.Column("number_attempts", sa.Integer(), nullable=True),
sa.Column("rename", sa.Boolean(), nullable=True),
sa.Column("cn", sa.String(length=128), nullable=True),
sa.Column("csr", sa.Text(), nullable=False),
sa.Column("chain", sa.Text(), nullable=True),
sa.Column("private_key", Vault(), nullable=True),
sa.Column(
"date_created", ArrowType(), server_default=sa.text("now()"), nullable=False
),
sa.Column("status", sa.String(length=128), nullable=True),
sa.Column("rotation", sa.Boolean(), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=True),
sa.Column("authority_id", sa.Integer(), nullable=True),
sa.Column("root_authority_id", sa.Integer(), nullable=True),
sa.Column("rotation_policy_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["authority_id"], ["authorities.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(
["root_authority_id"], ["authorities.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(["rotation_policy_id"], ["rotation_policies.id"]),
sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
op.create_table(
"pending_cert_destination_associations",
sa.Column("destination_id", sa.Integer(), nullable=True),
sa.Column("pending_cert_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["destination_id"], ["destinations.id"], ondelete="cascade"
),
sa.ForeignKeyConstraint(
["pending_cert_id"], ["pending_certs.id"], ondelete="cascade"
),
)
op.create_index(
"pending_cert_destination_associations_ix",
"pending_cert_destination_associations",
["destination_id", "pending_cert_id"],
unique=False,
)
op.create_table(
"pending_cert_notification_associations",
sa.Column("notification_id", sa.Integer(), nullable=True),
sa.Column("pending_cert_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["notification_id"], ["notifications.id"], ondelete="cascade"
),
sa.ForeignKeyConstraint(
["pending_cert_id"], ["pending_certs.id"], ondelete="cascade"
),
)
op.create_index(
"pending_cert_notification_associations_ix",
"pending_cert_notification_associations",
["notification_id", "pending_cert_id"],
unique=False,
)
op.create_table(
"pending_cert_replacement_associations",
sa.Column("replaced_certificate_id", sa.Integer(), nullable=True),
sa.Column("pending_cert_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["pending_cert_id"], ["pending_certs.id"], ondelete="cascade"
),
sa.ForeignKeyConstraint(
["replaced_certificate_id"], ["certificates.id"], ondelete="cascade"
),
)
op.create_index(
"pending_cert_replacement_associations_ix",
"pending_cert_replacement_associations",
["replaced_certificate_id", "pending_cert_id"],
unique=False,
)
op.create_table(
"pending_cert_role_associations",
sa.Column("pending_cert_id", sa.Integer(), nullable=True),
sa.Column("role_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["pending_cert_id"], ["pending_certs.id"]),
sa.ForeignKeyConstraint(["role_id"], ["roles.id"]),
)
op.create_index(
"pending_cert_role_associations_ix",
"pending_cert_role_associations",
["pending_cert_id", "role_id"],
unique=False,
)
op.create_table(
"pending_cert_source_associations",
sa.Column("source_id", sa.Integer(), nullable=True),
sa.Column("pending_cert_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["pending_cert_id"], ["pending_certs.id"], ondelete="cascade"
),
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="cascade"),
)
op.create_index(
"pending_cert_source_associations_ix",
"pending_cert_source_associations",
["source_id", "pending_cert_id"],
unique=False,
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(
"pending_cert_source_associations_ix",
table_name="pending_cert_source_associations",
)
op.drop_table("pending_cert_source_associations")
op.drop_index(
"pending_cert_role_associations_ix", table_name="pending_cert_role_associations"
)
op.drop_table("pending_cert_role_associations")
op.drop_index(
"pending_cert_replacement_associations_ix",
table_name="pending_cert_replacement_associations",
)
op.drop_table("pending_cert_replacement_associations")
op.drop_index(
"pending_cert_notification_associations_ix",
table_name="pending_cert_notification_associations",
)
op.drop_table("pending_cert_notification_associations")
op.drop_index(
"pending_cert_destination_associations_ix",
table_name="pending_cert_destination_associations",
)
op.drop_table("pending_cert_destination_associations")
op.drop_table("pending_certs")
# ### end Alembic commands ###
|
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import State
from homeassistant.helpers import location
def test_has_location_with_invalid_states():
"""Set up the tests."""
for state in (None, 1, "hello", object):
assert not location.has_location(state)
def test_has_location_with_states_with_invalid_locations():
"""Set up the tests."""
state = State(
"hello.world", "invalid", {ATTR_LATITUDE: "no number", ATTR_LONGITUDE: 123.12}
)
assert not location.has_location(state)
def test_has_location_with_states_with_valid_location():
"""Set up the tests."""
state = State(
"hello.world", "invalid", {ATTR_LATITUDE: 123.12, ATTR_LONGITUDE: 123.12}
)
assert location.has_location(state)
def test_closest_with_no_states_with_location():
"""Set up the tests."""
state = State("light.test", "on")
state2 = State(
"light.test", "on", {ATTR_LATITUDE: "invalid", ATTR_LONGITUDE: 123.45}
)
state3 = State("light.test", "on", {ATTR_LONGITUDE: 123.45})
assert location.closest(123.45, 123.45, [state, state2, state3]) is None
def test_closest_returns_closest():
"""Test ."""
state = State("light.test", "on", {ATTR_LATITUDE: 124.45, ATTR_LONGITUDE: 124.45})
state2 = State("light.test", "on", {ATTR_LATITUDE: 125.45, ATTR_LONGITUDE: 125.45})
assert state == location.closest(123.45, 123.45, [state, state2])
async def test_coordinates_function_as_attributes(hass):
"""Test coordinates function."""
hass.states.async_set(
"test.object", "happy", {"latitude": 32.87336, "longitude": -117.22943}
)
assert location.find_coordinates(hass, "test.object") == "32.87336,-117.22943"
async def test_coordinates_function_as_state(hass):
"""Test coordinates function."""
hass.states.async_set("test.object", "32.87336,-117.22943")
assert location.find_coordinates(hass, "test.object") == "32.87336,-117.22943"
async def test_coordinates_function_device_tracker_in_zone(hass):
"""Test coordinates function."""
hass.states.async_set(
"zone.home",
"zoning",
{"latitude": 32.87336, "longitude": -117.22943},
)
hass.states.async_set("device_tracker.device", "home")
assert (
location.find_coordinates(hass, "device_tracker.device")
== "32.87336,-117.22943"
)
async def test_coordinates_function_device_tracker_from_input_select(hass):
"""Test coordinates function."""
hass.states.async_set(
"input_select.select",
"device_tracker.device",
{"options": "device_tracker.device"},
)
hass.states.async_set("device_tracker.device", "32.87336,-117.22943")
assert (
location.find_coordinates(hass, "input_select.select") == "32.87336,-117.22943"
)
def test_coordinates_function_returns_none_on_recursion(hass):
"""Test coordinates function."""
hass.states.async_set(
"test.first",
"test.second",
)
hass.states.async_set("test.second", "test.first")
assert location.find_coordinates(hass, "test.first") is None
async def test_coordinates_function_returns_none_if_invalid_coord(hass):
"""Test test_coordinates function."""
hass.states.async_set(
"test.object",
"abc",
)
assert location.find_coordinates(hass, "test.object") is None
def test_coordinates_function_returns_none_if_invalid_input(hass):
"""Test test_coordinates function."""
assert location.find_coordinates(hass, "test.abc") is None
|
from django.test.utils import override_settings
from django.urls import reverse
from weblate.accounts.models import Profile
from weblate.lang.models import Language
from weblate.trans.models import Announcement, ComponentList, Project
from weblate.trans.tests.test_views import ViewTestCase
class DashboardTest(ViewTestCase):
"""Test for home/index view."""
def setUp(self):
super().setUp()
self.user.profile.languages.add(Language.objects.get(code="cs"))
def test_view_home_anonymous(self):
self.client.logout()
response = self.client.get(reverse("home"))
self.assertContains(response, "Browse 1 project")
def test_view_home(self):
response = self.client.get(reverse("home"))
self.assertContains(response, "test/test")
def test_view_projects(self):
response = self.client.get(reverse("projects"))
self.assertContains(response, "Test")
def test_view_projects_slash(self):
response = self.client.get("/projects")
self.assertRedirects(response, reverse("projects"), status_code=301)
def test_home_with_announcement(self):
msg = Announcement(message="test_message")
msg.save()
response = self.client.get(reverse("home"))
self.assertContains(response, "announcement")
self.assertContains(response, "test_message")
def test_home_without_announcement(self):
response = self.client.get(reverse("home"))
self.assertNotContains(response, "announcement")
def test_component_list(self):
clist = ComponentList.objects.create(name="TestCL", slug="testcl")
clist.components.add(self.component)
response = self.client.get(reverse("home"))
self.assertContains(response, "TestCL")
self.assertContains(
response, reverse("component-list", kwargs={"name": "testcl"})
)
self.assertEqual(len(response.context["componentlists"]), 1)
def test_component_list_ghost(self):
clist = ComponentList.objects.create(name="TestCL", slug="testcl")
clist.components.add(self.component)
self.user.profile.languages.add(Language.objects.get(code="es"))
response = self.client.get(reverse("home"))
self.assertContains(response, "Spanish")
def test_user_component_list(self):
clist = ComponentList.objects.create(name="TestCL", slug="testcl")
clist.components.add(self.component)
self.user.profile.dashboard_view = Profile.DASHBOARD_COMPONENT_LIST
self.user.profile.dashboard_component_list = clist
self.user.profile.save()
response = self.client.get(reverse("home"))
self.assertContains(response, "TestCL")
self.assertEqual(response.context["active_tab_slug"], "list-testcl")
def test_subscriptions(self):
# no subscribed projects at first
response = self.client.get(reverse("home"))
self.assertFalse(len(response.context["watched_projects"]))
# subscribe a project
self.user.profile.watched.add(self.project)
response = self.client.get(reverse("home"))
self.assertEqual(len(response.context["watched_projects"]), 1)
def test_language_filters(self):
# check language filters
response = self.client.get(reverse("home"))
self.assertFalse(response.context["usersubscriptions"])
# add a language
response = self.client.get(reverse("home"))
self.assertFalse(response.context["usersubscriptions"])
# add a subscription
self.user.profile.watched.add(self.project)
response = self.client.get(reverse("home"))
self.assertEqual(len(response.context["usersubscriptions"]), 1)
def test_user_nolang(self):
self.user.profile.languages.clear()
        # This picks up a random language
self.client.get(reverse("home"), HTTP_ACCEPT_LANGUAGE="en")
self.client.get(reverse("home"))
# Pick language from request
response = self.client.get(reverse("home"), HTTP_ACCEPT_LANGUAGE="cs")
self.assertTrue(response.context["suggestions"])
def test_user_hide_completed(self):
self.user.profile.hide_completed = True
self.user.profile.save()
response = self.client.get(reverse("home"))
self.assertContains(response, "test/test")
@override_settings(SINGLE_PROJECT=True)
def test_single_project(self):
response = self.client.get(reverse("home"))
self.assertRedirects(response, reverse("component", kwargs=self.kw_component))
@override_settings(SINGLE_PROJECT="test")
def test_single_project_slug(self):
response = self.client.get(reverse("home"))
self.assertRedirects(response, reverse("project", kwargs=self.kw_project))
@override_settings(SINGLE_PROJECT=True)
def test_single_project_restricted(self):
# Additional component to redirect to a project
self.create_link_existing()
# Make the project private
self.project.access_control = Project.ACCESS_PRIVATE
self.project.save()
self.client.logout()
response = self.client.get(reverse("home"))
self.assertRedirects(response, "/accounts/login/?next=/projects/test/")
|
from collections import deque
import json
import logging
import os
import tempfile
from typing import Any, Callable, Dict, List, Optional, Type, Union
from homeassistant.core import Event, State
from homeassistant.exceptions import HomeAssistantError
_LOGGER = logging.getLogger(__name__)
class SerializationError(HomeAssistantError):
"""Error serializing the data to JSON."""
class WriteError(HomeAssistantError):
"""Error writing the data."""
def load_json(
filename: str, default: Union[List, Dict, None] = None
) -> Union[List, Dict]:
"""Load JSON data from a file and return as dict or list.
    Defaults to returning an empty dict if the file is not found.
"""
try:
with open(filename, encoding="utf-8") as fdesc:
return json.loads(fdesc.read()) # type: ignore
except FileNotFoundError:
# This is not a fatal error
_LOGGER.debug("JSON file not found: %s", filename)
except ValueError as error:
_LOGGER.exception("Could not parse JSON content: %s", filename)
raise HomeAssistantError(error) from error
except OSError as error:
_LOGGER.exception("JSON file reading failed: %s", filename)
raise HomeAssistantError(error) from error
return {} if default is None else default
def save_json(
filename: str,
data: Union[List, Dict],
private: bool = False,
*,
encoder: Optional[Type[json.JSONEncoder]] = None,
) -> None:
"""Save JSON data to a file.
    Raises SerializationError on bad data and WriteError on OS errors.
"""
try:
json_data = json.dumps(data, indent=4, cls=encoder)
except TypeError as error:
msg = f"Failed to serialize to JSON: {filename}. Bad data at {format_unserializable_data(find_paths_unserializable_data(data))}"
_LOGGER.error(msg)
raise SerializationError(msg) from error
tmp_filename = ""
tmp_path = os.path.split(filename)[0]
try:
# Modern versions of Python tempfile create this file with mode 0o600
with tempfile.NamedTemporaryFile(
mode="w", encoding="utf-8", dir=tmp_path, delete=False
) as fdesc:
fdesc.write(json_data)
tmp_filename = fdesc.name
if not private:
os.chmod(tmp_filename, 0o644)
os.replace(tmp_filename, filename)
except OSError as error:
_LOGGER.exception("Saving JSON file failed: %s", filename)
raise WriteError(error) from error
finally:
if os.path.exists(tmp_filename):
try:
os.remove(tmp_filename)
except OSError as err:
# If we are cleaning up then something else went wrong, so
# we should suppress likely follow-on errors in the cleanup
_LOGGER.error("JSON replacement cleanup failed: %s", err)
def format_unserializable_data(data: Dict[str, Any]) -> str:
"""Format output of find_paths in a friendly way.
Format is comma separated: <path>=<value>(<type>)
"""
return ", ".join(f"{path}={value}({type(value)}" for path, value in data.items())
def find_paths_unserializable_data(
bad_data: Any, *, dump: Callable[[Any], str] = json.dumps
) -> Dict[str, Any]:
"""Find the paths to unserializable data.
This method is slow! Only use for error handling.
"""
to_process = deque([(bad_data, "$")])
invalid = {}
while to_process:
obj, obj_path = to_process.popleft()
try:
dump(obj)
continue
except (ValueError, TypeError):
pass
# We convert states and events to dict so we can find bad data inside it
if isinstance(obj, State):
obj_path += f"(state: {obj.entity_id})"
obj = obj.as_dict()
elif isinstance(obj, Event):
obj_path += f"(event: {obj.event_type})"
obj = obj.as_dict()
if isinstance(obj, dict):
for key, value in obj.items():
try:
# Is key valid?
dump({key: None})
except TypeError:
invalid[f"{obj_path}<key: {key}>"] = key
else:
# Process value
to_process.append((value, f"{obj_path}.{key}"))
elif isinstance(obj, list):
for idx, value in enumerate(obj):
to_process.append((value, f"{obj_path}[{idx}]"))
else:
invalid[obj_path] = obj
return invalid
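# A minimal usage sketch of the two helpers above (assumes only the standard
# library): locate the values json.dumps rejects, then format them for a log
# message. `object()` stands in for arbitrary unserializable data.
#
#     bad = {"ok": 1, "nested": {"handle": object()}}
#     format_unserializable_data(find_paths_unserializable_data(bad))
#     # -> "$.nested.handle=<object object at 0x...>(<class 'object'>)"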
|
from functools import partial
import numpy as np
from scipy import linalg
from .._ola import _COLA, _Storer
from ..io.pick import _picks_to_idx
from ..surface import _normalize_vectors
from ..utils import logger, verbose
def _svd_cov(cov, data):
"""Use a covariance matrix to compute the SVD faster."""
# This makes use of mathematical equivalences between PCA and SVD
# on zero-mean data
s, u = linalg.eigh(cov)
norm = np.ones((s.size,))
mask = s > np.finfo(float).eps * s[-1] # largest is last
s = np.sqrt(s, out=s)
norm[mask] = 1. / s[mask]
u *= norm
v = np.dot(u.T[mask], data)
return u, s, v
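# Why this works (a sketch of the math): for zero-mean data X with
# covariance C = X @ X.T, eigh gives C = U @ diag(s**2) @ U.T, and the SVD
# X = U @ diag(s) @ V shares the same U. Hence V = diag(1/s) @ U.T @ X,
# which is exactly what the `norm` scaling above computes for the
# above-threshold components. eigh on the (n_channels, n_channels)
# covariance is cheaper than an SVD of the full (n_channels, n_times) array.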
@verbose
def oversampled_temporal_projection(raw, duration=10., picks=None,
verbose=None):
"""Denoise MEG channels using leave-one-out temporal projection.
Parameters
----------
raw : instance of Raw
Raw data to denoise.
duration : float | str
The window duration (in seconds; default 10.) to use. Can also
be "min" to use as short a window as possible.
%(picks_all_data)s
%(verbose)s
Returns
-------
raw_clean : instance of Raw
The cleaned data.
Notes
-----
This algorithm is computationally expensive, and can be several times
slower than realtime for conventional M/EEG datasets. It uses a
leave-one-out procedure with parallel temporal projection to remove
individual sensor noise under the assumption that sampled fields
(e.g., MEG and EEG) are oversampled by the sensor array [1]_.
OTP can improve sensor noise levels (especially under visual
inspection) and repair some bad channels. This noise reduction is known
to interact with :func:`tSSS <mne.preprocessing.maxwell_filter>` such
that increasing the ``st_correlation`` value will likely be necessary.
Channels marked as bad will not be used to reconstruct good channels,
but good channels will be used to process the bad channels. Depending
on the type of noise present in the bad channels, this might make
them usable again.
Use of this algorithm is covered by a provisional patent.
.. versionadded:: 0.16
References
----------
.. [1] Larson E, Taulu S (2017). Reducing Sensor Noise in MEG and EEG
Recordings Using Oversampled Temporal Projection.
IEEE Transactions on Biomedical Engineering.
"""
logger.info('Processing MEG data using oversampled temporal projection')
picks = _picks_to_idx(raw.info, picks, exclude=())
picks_good, picks_bad = list(), list() # these are indices into picks
for ii, pi in enumerate(picks):
if raw.ch_names[pi] in raw.info['bads']:
picks_bad.append(ii)
else:
picks_good.append(ii)
picks_good = np.array(picks_good, int)
picks_bad = np.array(picks_bad, int)
n_samples = int(round(float(duration) * raw.info['sfreq']))
if n_samples < len(picks_good) - 1:
raise ValueError('duration (%s) yielded %s samples, which is fewer '
'than the number of channels -1 (%s)'
% (n_samples / raw.info['sfreq'], n_samples,
len(picks_good) - 1))
n_overlap = n_samples // 2
raw_otp = raw.copy().load_data(verbose=False)
otp = _COLA(
partial(_otp, picks_good=picks_good, picks_bad=picks_bad),
_Storer(raw_otp._data, picks=picks),
len(raw.times), n_samples, n_overlap, raw.info['sfreq'])
read_lims = list(range(0, len(raw.times), n_samples)) + [len(raw.times)]
for start, stop in zip(read_lims[:-1], read_lims[1:]):
logger.info(' Denoising % 8.2f - % 8.2f sec'
% tuple(raw.times[[start, stop - 1]]))
otp.feed(raw[picks, start:stop][0])
return raw_otp
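# Usage sketch (the file name is hypothetical; assumes MNE-Python with a
# preloadable Raw instance):
#
#     raw = mne.io.read_raw_fif('sample_raw.fif', preload=True)
#     raw_clean = oversampled_temporal_projection(raw, duration=10.)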
def _otp(data, picks_good, picks_bad):
"""Perform OTP on one segment of data."""
if not np.isfinite(data).all():
raise RuntimeError('non-finite data (inf or nan) found in raw '
'instance')
# demean our data
data_means = np.mean(data, axis=-1, keepdims=True)
data -= data_means
# make a copy
data_good = data[picks_good]
# scale the copy that will be used to form the temporal basis vectors
# so that _orth_svdvals thresholding should work properly with
# different channel types (e.g., M-EEG)
norms = _normalize_vectors(data_good)
cov = np.dot(data_good, data_good.T)
if len(picks_bad) > 0:
full_basis = _svd_cov(cov, data_good)[2]
for mi, pick in enumerate(picks_good):
# operate on original data
idx = list(range(mi)) + list(range(mi + 1, len(data_good)))
# Equivalent: linalg.svd(data[idx], full_matrices=False)[2]
t_basis = _svd_cov(cov[np.ix_(idx, idx)], data_good[idx])[2]
x = np.dot(np.dot(data_good[mi], t_basis.T), t_basis)
x *= norms[mi]
x += data_means[pick]
data[pick] = x
for pick in picks_bad:
data[pick] = np.dot(np.dot(data[pick], full_basis.T), full_basis)
data[pick] += data_means[pick]
return [data]
|
try:
import boto3
from botocore import exceptions
from botocore.awsrequest import AWSRequest
from botocore.response import get_response
except ImportError:
boto3 = None
class _void:
pass
class BotoCoreError(Exception):
pass
exceptions = _void()
exceptions.BotoCoreError = BotoCoreError
AWSRequest = _void()
get_response = _void()
__all__ = (
'exceptions', 'AWSRequest', 'get_response'
)
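# With the fallback above, importers can reference these names whether or not
# boto3 is installed; a sketch (the caller names are hypothetical):
#
#     try:
#         result = make_aws_request()
#     except exceptions.BotoCoreError:
#         result = None  # matches the real class or the stand-in defined above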
|
import pytest
from lemur.endpoints.views import * # noqa
from lemur.tests.factories import EndpointFactory, CertificateFactory
from .vectors import (
VALID_ADMIN_API_TOKEN,
VALID_ADMIN_HEADER_TOKEN,
VALID_USER_HEADER_TOKEN,
)
def test_rotate_certificate(client, source_plugin):
from lemur.deployment.service import rotate_certificate
new_certificate = CertificateFactory()
endpoint = EndpointFactory()
rotate_certificate(endpoint, new_certificate)
assert endpoint.certificate == new_certificate
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 404),
(VALID_ADMIN_HEADER_TOKEN, 404),
(VALID_ADMIN_API_TOKEN, 404),
("", 401),
],
)
def test_endpoint_get(client, token, status):
assert (
client.get(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_post_(client, token, status):
assert (
client.post(
api.url_for(Endpoints, endpoint_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_put(client, token, status):
assert (
client.put(
api.url_for(Endpoints, endpoint_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_delete(client, token, status):
assert (
client.delete(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_patch(client, token, status):
assert (
client.patch(
api.url_for(Endpoints, endpoint_id=1), data={}, headers=token
).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_list_post_(client, token, status):
assert (
client.post(api.url_for(EndpointsList), data={}, headers=token).status_code
== status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
(VALID_ADMIN_API_TOKEN, 200),
("", 401),
],
)
def test_endpoint_list_get(client, token, status):
assert client.get(api.url_for(EndpointsList), headers=token).status_code == status
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_list_delete(client, token, status):
assert (
client.delete(api.url_for(EndpointsList), headers=token).status_code == status
)
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
(VALID_ADMIN_API_TOKEN, 405),
("", 405),
],
)
def test_endpoint_list_patch(client, token, status):
assert (
client.patch(api.url_for(EndpointsList), data={}, headers=token).status_code
== status
)
|
import abc
import logging
import re
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
from perfkitbenchmarker import resource
flags.DEFINE_string('nfs_tier', None, 'NFS Mode')
flags.DEFINE_string('nfs_version', None, 'NFS Version')
FLAGS = flags.FLAGS
_MOUNT_NFS_RE = re.compile(r'.*type nfs \((.*?)\)', re.MULTILINE)
UNMANAGED = 'Unmanaged'
def GetNfsServiceClass(cloud):
"""Get the NFS service corresponding to the cloud.
Args:
cloud: The name of the cloud to supply the NFS service.
Returns:
The NFS service class for this cloud.
Raises:
NotImplementedError: No service found for this cloud.
"""
return resource.GetResourceClass(BaseNfsService, CLOUD=cloud)
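# Example (a sketch; 'GCP' is a hypothetical registered cloud name):
#
#     nfs_service_class = GetNfsServiceClass('GCP')
#     nfs_service = nfs_service_class(disk_spec, zone)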
class BaseNfsService(resource.BaseResource):
"""Object representing an NFS Service."""
# subclasses must override this with a list or tuple for acceptable
# "nfs_tier" values if applicable.
NFS_TIERS = None
RESOURCE_TYPE = 'BaseNfsService'
DEFAULT_NFS_VERSION = None
DEFAULT_TIER = None
def __init__(self, disk_spec, zone):
super(BaseNfsService, self).__init__()
self.disk_spec = disk_spec
self.zone = zone
self.server_directory = '/'
self.nfs_tier = FLAGS.nfs_tier or self.DEFAULT_TIER
if self.nfs_tier and self.NFS_TIERS and self.nfs_tier not in self.NFS_TIERS:
# NFS service does not have to have a list of nfs_tiers nor does it have
# to be implemented by a provider
raise errors.Config.InvalidValue(
('nfs_tier "%s" not in acceptable list "%s" '
'for cloud %s') % (self.nfs_tier, self.NFS_TIERS, self.CLOUD))
logging.debug('%s NFS service with nfs_tier %s zone %s default version %s',
self.CLOUD, self.nfs_tier, self.zone,
self.DEFAULT_NFS_VERSION)
def CreateNfsDisk(self):
mount_point = '%s:%s' % (self.GetRemoteAddress(), self.server_directory)
return disk.NfsDisk(self.disk_spec, mount_point, self.DEFAULT_NFS_VERSION,
self.nfs_tier)
@abc.abstractmethod
def _IsReady(self):
"""Boolean function to determine if disk is NFS mountable."""
pass
@abc.abstractmethod
def GetRemoteAddress(self):
"""The NFS server's address."""
pass
class StaticNfsService(BaseNfsService):
"""Object allowing VMs to connect to a preprovisioned NFS endpoint."""
CLOUD = 'Static'
def __init__(self, disk_spec):
super(StaticNfsService, self).__init__(disk_spec, None)
self.ip_address = disk_spec.nfs_ip_address
        self.server_directory = disk_spec.nfs_directory or ''
def _Create(self):
pass
def _Delete(self):
pass
def CreateNfsDisk(self):
mount_point = '%s:/%s' % (self.GetRemoteAddress(), self.server_directory)
return disk.NfsDisk(self.disk_spec, mount_point, None, None)
def _IsReady(self):
"""Boolean function to determine if disk is NFS mountable."""
return True
def GetRemoteAddress(self):
"""The NFS server's address."""
return self.ip_address
class UnmanagedNfsService(BaseNfsService):
"""Object allowing VMs to connect to a local NFS disk."""
CLOUD = UNMANAGED
# Allows anybody to write to the NFS mount.
_EXPORT_FS_COMMAND = ' && '.join([
'sudo mkdir -p {export_dir}',
'sudo chown $USER:$USER {export_dir}',
'sudo chmod 777 {export_dir}',
'echo "{export_dir} *(rw,sync,no_subtree_check,no_root_squash)" | '
'sudo tee -a /etc/exports',
'sudo exportfs -a'
])
_NFS_NAME = {
os_types.RHEL: 'nfs-server',
os_types.DEBIAN: 'nfs-kernel-server',
}
_NFS_RESTART_CMD = 'sudo systemctl restart {nfs_name}'
def __init__(self, disk_spec, server_vm, check_export_not_same_mount=True):
super(UnmanagedNfsService, self).__init__(disk_spec, None)
self.server_vm = server_vm
# Path on the server to export. Must be different from mount_point.
self.server_directory = disk_spec.device_path or '/pkb-nfs-server-directory'
logging.info('Exporting server directory %s', self.server_directory)
if check_export_not_same_mount:
assert self.server_directory != disk_spec.mount_point, (
'export server directory must be different from mount point')
def GetRemoteAddress(self):
"""The NFS server's address."""
return self.server_vm.internal_ip
def _ExportNfsDir(self, export_dir_path):
"""Export a directory on the NFS server to be shared with NFS clients.
Args:
export_dir_path: Path to the directory to export.
"""
self.server_vm.RemoteCommand(
self._EXPORT_FS_COMMAND.format(export_dir=export_dir_path))
nfs_name = self._NFS_NAME[self.server_vm.BASE_OS_TYPE]
self.server_vm.RemoteCommand(
self._NFS_RESTART_CMD.format(nfs_name=nfs_name))
def _Create(self):
assert self.server_vm, 'NFS server VM not created.'
self.server_vm.Install('nfs_server')
self._ExportNfsDir(self.server_directory)
def _Delete(self):
pass
def _IsReady(self):
"""Boolean function to determine if disk is NFS mountable."""
return True
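# Usage sketch (assumes a provisioned server_vm with an 'nfs_server' package;
# the resource lifecycle normally drives _Create):
#
#     service = UnmanagedNfsService(disk_spec, server_vm)
#     service._Create()  # installs the server and exports server_directory
#     nfs_disk = service.CreateNfsDisk()  # '<internal_ip>:/pkb-nfs-server-directory'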
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from . import (
ATTR_DISCOVER_CONFIG,
ATTR_DISCOVER_DEVICES,
DATA_TELLSTICK,
DEFAULT_SIGNAL_REPETITIONS,
TellstickDevice,
)
SUPPORT_TELLSTICK = SUPPORT_BRIGHTNESS
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tellstick lights."""
if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
return
signal_repetitions = discovery_info.get(
ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS
)
add_entities(
[
TellstickLight(hass.data[DATA_TELLSTICK][tellcore_id], signal_repetitions)
for tellcore_id in discovery_info[ATTR_DISCOVER_DEVICES]
],
True,
)
class TellstickLight(TellstickDevice, LightEntity):
"""Representation of a Tellstick light."""
def __init__(self, tellcore_device, signal_repetitions):
"""Initialize the Tellstick light."""
super().__init__(tellcore_device, signal_repetitions)
self._brightness = 255
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_TELLSTICK
def _parse_ha_data(self, kwargs):
"""Turn the value from HA into something useful."""
return kwargs.get(ATTR_BRIGHTNESS)
def _parse_tellcore_data(self, tellcore_data):
"""Turn the value received from tellcore into something useful."""
if tellcore_data:
return int(tellcore_data) # brightness
return None
def _update_model(self, new_state, data):
"""Update the device entity state to match the arguments."""
if new_state:
brightness = data
if brightness is not None:
self._brightness = brightness
            # self._brightness may not exist yet when this is called from
            # the base class constructor
try:
self._state = self._brightness > 0
except AttributeError:
self._state = True
else:
self._state = False
def _send_device_command(self, requested_state, requested_data):
"""Let tellcore update the actual device to the requested state."""
if requested_state:
if requested_data is not None:
self._brightness = int(requested_data)
self._tellcore_device.dim(self._brightness)
else:
self._tellcore_device.turn_off()
|
from Handler import Handler
import Queue
class QueueHandler(Handler):
def __init__(self, config=None, queue=None, log=None):
# Initialize Handler
Handler.__init__(self, config=config, log=log)
self.queue = queue
def __del__(self):
"""
        Ensure as many of the metrics as possible are sent to the handlers
        on shutdown.
"""
self._flush()
def process(self, metric):
return self._process(metric)
def _process(self, metric):
"""
        We skip any locking code because there is now a single process per
        collector.
"""
try:
self.queue.put(metric, block=False)
except Queue.Full:
self._throttle_error('Queue full, check handlers for delays')
def flush(self):
return self._flush()
def _flush(self):
"""
        We skip any locking code because there is now a single process per
        collector.
"""
# Send a None down the queue to indicate a flush
try:
self.queue.put(None, block=False)
except Queue.Full:
self._throttle_error('Queue full, check handlers for delays')
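# Consumer-side sketch (assumption: a separate collector process drains the
# queue and owns the real handlers). A None item is the flush sentinel that
# _flush() above pushes onto the queue:
#
#     metric = queue.get()
#     if metric is None:
#         for handler in handlers:
#             handler.flush()
#     else:
#         for handler in handlers:
#             handler.process(metric)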
|
from __future__ import division
import numpy as np
import six
from chainercv.visualizations.vis_image import vis_image
def vis_point(img, point, visible=None, ax=None):
"""Visualize points in an image.
Example:
>>> import chainercv
>>> import matplotlib.pyplot as plt
>>> dataset = chainercv.datasets.CUBKeypointDataset()
>>> img, point, visible = dataset[0]
>>> chainercv.visualizations.vis_point(img, point, visible)
>>> plt.show()
Args:
img (~numpy.ndarray): See the table below.
If this is :obj:`None`, no image is displayed.
point (~numpy.ndarray or list of arrays): See the table below.
visible (~numpy.ndarray or list of arrays): See the table below.
ax (matplotlib.axes.Axes): If provided, plot on this axis.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`point`, ":math:`(R, K, 2)` or :math:`[(K, 2)]`", \
:obj:`float32`, ":math:`(y, x)`"
:obj:`visible`, ":math:`(R, K)` or :math:`[(K,)]`", :obj:`bool`, --
Returns:
        ~matplotlib.axes.Axes:
Returns the Axes object with the plot for further tweaking.
"""
import matplotlib.pyplot as plt
# Returns newly instantiated matplotlib.axes.Axes object if ax is None
ax = vis_image(img, ax=ax)
_, H, W = img.shape
n_inst = len(point)
cm = plt.get_cmap('gist_rainbow')
for i in range(n_inst):
pnt = point[i]
n_point = len(pnt)
if visible is not None:
vsbl = visible[i]
else:
            vsbl = np.ones((n_point,), dtype=bool)
colors = [cm(k / n_point) for k in six.moves.range(n_point)]
for k in range(n_point):
if vsbl[k]:
ax.scatter(pnt[k][1], pnt[k][0], c=colors[k], s=100)
ax.set_xlim(left=0, right=W)
ax.set_ylim(bottom=H - 1, top=0)
return ax
|
from functools import reduce, wraps
import logging
from operator import ior
from typing import Sequence
from pyheos import HeosError, const as heos_const
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
ATTR_MEDIA_ENQUEUE,
DOMAIN,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_URL,
SUPPORT_CLEAR_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_SHUFFLE_SET,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_IDLE, STATE_PAUSED, STATE_PLAYING
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.dt import utcnow
from .const import DATA_SOURCE_MANAGER, DOMAIN as HEOS_DOMAIN, SIGNAL_HEOS_UPDATED
BASE_SUPPORTED_FEATURES = (
SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
| SUPPORT_CLEAR_PLAYLIST
| SUPPORT_SHUFFLE_SET
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY_MEDIA
)
PLAY_STATE_TO_STATE = {
heos_const.PLAY_STATE_PLAY: STATE_PLAYING,
heos_const.PLAY_STATE_STOP: STATE_IDLE,
heos_const.PLAY_STATE_PAUSE: STATE_PAUSED,
}
CONTROL_TO_SUPPORT = {
heos_const.CONTROL_PLAY: SUPPORT_PLAY,
heos_const.CONTROL_PAUSE: SUPPORT_PAUSE,
heos_const.CONTROL_STOP: SUPPORT_STOP,
heos_const.CONTROL_PLAY_PREVIOUS: SUPPORT_PREVIOUS_TRACK,
heos_const.CONTROL_PLAY_NEXT: SUPPORT_NEXT_TRACK,
}
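# The SUPPORT_* constants are bitflags, so async_update() below can fold the
# controls reported for the current media into a single bitmask, e.g.:
#     reduce(ior, [SUPPORT_PLAY, SUPPORT_PAUSE], BASE_SUPPORTED_FEATURES)
# is equivalent to BASE_SUPPORTED_FEATURES | SUPPORT_PLAY | SUPPORT_PAUSE.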
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
):
"""Add media players for a config entry."""
players = hass.data[HEOS_DOMAIN][DOMAIN]
devices = [HeosMediaPlayer(player) for player in players.values()]
async_add_entities(devices, True)
def log_command_error(command: str):
"""Return decorator that logs command failure."""
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
try:
await func(*args, **kwargs)
except (HeosError, ValueError) as ex:
_LOGGER.error("Unable to %s: %s", command, ex)
return wrapper
return decorator
class HeosMediaPlayer(MediaPlayerEntity):
"""The HEOS player."""
def __init__(self, player):
"""Initialize."""
self._media_position_updated_at = None
self._player = player
self._signals = []
self._supported_features = BASE_SUPPORTED_FEATURES
self._source_manager = None
async def _player_update(self, player_id, event):
"""Handle player attribute updated."""
if self._player.player_id != player_id:
return
if event == heos_const.EVENT_PLAYER_NOW_PLAYING_PROGRESS:
self._media_position_updated_at = utcnow()
await self.async_update_ha_state(True)
async def _heos_updated(self):
"""Handle sources changed."""
await self.async_update_ha_state(True)
async def async_added_to_hass(self):
"""Device added to hass."""
# Update state when attributes of the player change
self._signals.append(
self._player.heos.dispatcher.connect(
heos_const.SIGNAL_PLAYER_EVENT, self._player_update
)
)
# Update state when heos changes
self._signals.append(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_HEOS_UPDATED, self._heos_updated
)
)
@log_command_error("clear playlist")
async def async_clear_playlist(self):
"""Clear players playlist."""
await self._player.clear_queue()
@log_command_error("pause")
async def async_media_pause(self):
"""Send pause command."""
await self._player.pause()
@log_command_error("play")
async def async_media_play(self):
"""Send play command."""
await self._player.play()
@log_command_error("move to previous track")
async def async_media_previous_track(self):
"""Send previous track command."""
await self._player.play_previous()
@log_command_error("move to next track")
async def async_media_next_track(self):
"""Send next track command."""
await self._player.play_next()
@log_command_error("stop")
async def async_media_stop(self):
"""Send stop command."""
await self._player.stop()
@log_command_error("set mute")
async def async_mute_volume(self, mute):
"""Mute the volume."""
await self._player.set_mute(mute)
@log_command_error("play media")
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
if media_type in (MEDIA_TYPE_URL, MEDIA_TYPE_MUSIC):
await self._player.play_url(media_id)
return
if media_type == "quick_select":
# media_id may be an int or a str
selects = await self._player.get_quick_selects()
try:
index = int(media_id)
except ValueError:
# Try finding index by name
index = next(
(index for index, select in selects.items() if select == media_id),
None,
)
if index is None:
raise ValueError(f"Invalid quick select '{media_id}'")
await self._player.play_quick_select(index)
return
if media_type == MEDIA_TYPE_PLAYLIST:
playlists = await self._player.heos.get_playlists()
playlist = next((p for p in playlists if p.name == media_id), None)
if not playlist:
raise ValueError(f"Invalid playlist '{media_id}'")
add_queue_option = (
heos_const.ADD_QUEUE_ADD_TO_END
if kwargs.get(ATTR_MEDIA_ENQUEUE)
else heos_const.ADD_QUEUE_REPLACE_AND_PLAY
)
await self._player.add_to_queue(playlist, add_queue_option)
return
if media_type == "favorite":
# media_id may be an int or str
try:
index = int(media_id)
except ValueError:
# Try finding index by name
index = next(
(
index
for index, favorite in self._source_manager.favorites.items()
if favorite.name == media_id
),
None,
)
if index is None:
raise ValueError(f"Invalid favorite '{media_id}'")
await self._player.play_favorite(index)
return
raise ValueError(f"Unsupported media type '{media_type}'")
@log_command_error("select source")
async def async_select_source(self, source):
"""Select input source."""
await self._source_manager.play_source(source, self._player)
@log_command_error("set shuffle")
async def async_set_shuffle(self, shuffle):
"""Enable/disable shuffle mode."""
await self._player.set_play_mode(self._player.repeat, shuffle)
@log_command_error("set volume level")
async def async_set_volume_level(self, volume):
"""Set volume level, range 0..1."""
await self._player.set_volume(int(volume * 100))
async def async_update(self):
"""Update supported features of the player."""
controls = self._player.now_playing_media.supported_controls
current_support = [CONTROL_TO_SUPPORT[control] for control in controls]
self._supported_features = reduce(ior, current_support, BASE_SUPPORTED_FEATURES)
if self._source_manager is None:
self._source_manager = self.hass.data[HEOS_DOMAIN][DATA_SOURCE_MANAGER]
async def async_will_remove_from_hass(self):
"""Disconnect the device when removed."""
for signal_remove in self._signals:
signal_remove()
self._signals.clear()
@property
def available(self) -> bool:
"""Return True if the device is available."""
return self._player.available
@property
def device_info(self) -> dict:
"""Get attributes about the device."""
return {
"identifiers": {(HEOS_DOMAIN, self._player.player_id)},
"name": self._player.name,
"model": self._player.model,
"manufacturer": "HEOS",
"sw_version": self._player.version,
}
@property
def device_state_attributes(self) -> dict:
"""Get additional attribute about the state."""
return {
"media_album_id": self._player.now_playing_media.album_id,
"media_queue_id": self._player.now_playing_media.queue_id,
"media_source_id": self._player.now_playing_media.source_id,
"media_station": self._player.now_playing_media.station,
"media_type": self._player.now_playing_media.type,
}
@property
def is_volume_muted(self) -> bool:
"""Boolean if volume is currently muted."""
return self._player.is_muted
@property
def media_album_name(self) -> str:
"""Album name of current playing media, music track only."""
return self._player.now_playing_media.album
@property
def media_artist(self) -> str:
"""Artist of current playing media, music track only."""
return self._player.now_playing_media.artist
@property
def media_content_id(self) -> str:
"""Content ID of current playing media."""
return self._player.now_playing_media.media_id
@property
def media_content_type(self) -> str:
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
duration = self._player.now_playing_media.duration
if isinstance(duration, int):
return duration / 1000
return None
@property
def media_position(self):
"""Position of current playing media in seconds."""
        # Some media reports a position but no duration; return None in that case.
if not self._player.now_playing_media.duration:
return None
return self._player.now_playing_media.current_position / 1000
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
        # Some media reports a position but no duration; return None in that case.
if not self._player.now_playing_media.duration:
return None
return self._media_position_updated_at
@property
def media_image_remotely_accessible(self) -> bool:
"""If the image url is remotely accessible."""
return True
@property
def media_image_url(self) -> str:
"""Image url of current playing media."""
# May be an empty string, if so, return None
image_url = self._player.now_playing_media.image_url
return image_url if image_url else None
@property
def media_title(self) -> str:
"""Title of current playing media."""
return self._player.now_playing_media.song
@property
def name(self) -> str:
"""Return the name of the device."""
return self._player.name
@property
def should_poll(self) -> bool:
"""No polling needed for this device."""
return False
@property
def shuffle(self) -> bool:
"""Boolean if shuffle is enabled."""
return self._player.shuffle
@property
def source(self) -> str:
"""Name of the current input source."""
return self._source_manager.get_current_source(self._player.now_playing_media)
@property
def source_list(self) -> Sequence[str]:
"""List of available input sources."""
return self._source_manager.source_list
@property
def state(self) -> str:
"""State of the player."""
return PLAY_STATE_TO_STATE[self._player.state]
@property
def supported_features(self) -> int:
"""Flag media player features that are supported."""
return self._supported_features
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return str(self._player.player_id)
@property
def volume_level(self) -> float:
"""Volume level of the media player (0..1)."""
return self._player.volume / 100
|
import asyncio
import threading
##
def append_current_loop(container, make_new_loop=False):
if make_new_loop:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
container.append(asyncio.get_event_loop())
except Exception as err:
container.append(str(err))
def test_asyncio_thread1():
    # Tests that a freshly created thread has no current event loop until one
    # is created and set explicitly, and that such a loop is distinct from
    # the main thread's loop.
r = []
r.append(asyncio.get_event_loop())
t = threading.Thread(target=append_current_loop, args=(r, False))
t.start()
t.join()
t = threading.Thread(target=append_current_loop, args=(r, True))
t.start()
t.join()
r.append(asyncio.get_event_loop())
assert len(r) == 4
assert isinstance(r[1], str) and 'no current event loop in thread' in r[1]
assert r[0] is not r[2]
assert r[0] is r[3]
return r
##
def make_new_loop_and_run():
loop = asyncio.new_event_loop()
loop.call_later(0.2, lambda: print('greeting 1 from thread', threading.current_thread().getName()))
loop.call_later(0.7, lambda: print('greeting 2 from thread', threading.current_thread().getName()))
loop.call_later(0.9, loop.stop)
loop.run_forever()
def test_asyncio_thread2():
# Run multiple loops in multiple threads at the same time.
loop = asyncio.get_event_loop()
assert not loop.is_running()
tt = []
for i in range(5):
t = threading.Thread(target=make_new_loop_and_run)
tt.append(t)
for t in tt:
t.start()
make_new_loop_and_run()
for t in tt:
t.join()
if __name__ == '__main__':
r = test_asyncio_thread1()
|
import os
import unittest
from absl import flags
import mock
from perfkitbenchmarker import flag_util
from perfkitbenchmarker.linux_benchmarks import stencil2d_benchmark
flags.FLAGS.mark_as_parsed()
class Stencil2DBenchmarkTestCase(unittest.TestCase):
def setUp(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'stencil2d_output.txt')
with open(path) as fp:
self.test_output = fp.read()
def testMakeSampleFromOutput(self):
testMetadata = {'foo': 'bar'}
actual = stencil2d_benchmark._MakeSamplesFromStencilOutput(
self.test_output, testMetadata)
results_dict = {x.metric: x for x in actual}
stencil_dp_median_results = results_dict['Stencil2D DP median']
self.assertEqual('Stencil2D DP median', stencil_dp_median_results.metric)
self.assertEqual(474.761, stencil_dp_median_results.value)
self.assertEqual('GFLOPS', stencil_dp_median_results.unit)
self.assertEqual(testMetadata, stencil_dp_median_results.metadata)
stencil_sp_median_results = results_dict['Stencil2D SP median']
self.assertEqual('Stencil2D SP median', stencil_sp_median_results.metric)
self.assertEqual(753.795, stencil_sp_median_results.value)
self.assertEqual('GFLOPS', stencil_sp_median_results.unit)
self.assertEqual(testMetadata, stencil_sp_median_results.metadata)
stencil_dp_stddev_results = results_dict['Stencil2D DP stddev']
self.assertEqual('Stencil2D DP stddev', stencil_dp_stddev_results.metric)
self.assertEqual(2.51807, stencil_dp_stddev_results.value)
self.assertEqual('GFLOPS', stencil_dp_stddev_results.unit)
self.assertEqual(testMetadata, stencil_dp_stddev_results.metadata)
stencil_sp_stddev_results = results_dict['Stencil2D SP stddev']
self.assertEqual('Stencil2D SP stddev', stencil_sp_stddev_results.metric)
self.assertEqual(9.13922, stencil_sp_stddev_results.value)
self.assertEqual('GFLOPS', stencil_sp_stddev_results.unit)
self.assertEqual(testMetadata, stencil_sp_stddev_results.metadata)
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.GetPeerToPeerTopology'))
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.GetGpuType'))
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.GetDriverVersion'))
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.QueryNumberOfGpus'))
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.QueryGpuClockSpeed'))
@mock.patch(('perfkitbenchmarker.linux_packages.'
'nvidia_driver.QueryAutoboostPolicy'))
@mock.patch(('perfkitbenchmarker.linux_benchmarks.'
'stencil2d_benchmark._RunSingleIteration'))
def testRun(self,
run_single_iteration_mock,
query_autoboost_policy_mock,
query_gpu_clock_speed_mock,
query_number_of_gpus_mock,
get_driver_version_mock,
get_gpu_type,
get_peer_to_peer_topology):
get_gpu_type.return_value = 'k80'
get_driver_version_mock.return_value = '123.45'
query_number_of_gpus_mock.return_value = 8
query_gpu_clock_speed_mock.return_value = [100, 200]
query_autoboost_policy_mock.return_value = {
'autoboost': True,
'autoboost_default': True,
}
benchmark_spec = mock.MagicMock()
problem_sizes = [2, 3, 4]
stencil2d_benchmark.FLAGS.stencil2d_problem_sizes = (
flag_util.IntegerList(problem_sizes))
expected_calls = [
mock.call(mock.ANY, size, mock.ANY, mock.ANY, mock.ANY)
for size in problem_sizes
]
stencil2d_benchmark.Run(benchmark_spec)
run_single_iteration_mock.assert_has_calls(expected_calls, any_order=True)
if __name__ == '__main__':
unittest.main()
|
try:
import json
except ImportError:
import simplejson as json
import subprocess
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import patch, call
from diamond.collector import Collector
import ceph
def run_only_if_assertSequenceEqual_is_available(func):
pred = lambda: 'assertSequenceEqual' in dir(unittest.TestCase)
return run_only(func, pred)
def run_only_if_subprocess_check_output_is_available(func):
pred = lambda: 'check_output' in dir(subprocess)
return run_only(func, pred)
class TestCounterIterator(unittest.TestCase):
@run_only_if_assertSequenceEqual_is_available
def test_empty(self):
data = {}
expected = []
actual = list(ceph.flatten_dictionary(data))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_simple(self):
data = {'a': 1, 'b': 2}
expected = [('a', 1), ('b', 2)]
actual = list(ceph.flatten_dictionary(data))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_prefix(self):
data = {'a': 1, 'b': 2}
expected = [('Z.a', 1), ('Z.b', 2)]
actual = list(ceph.flatten_dictionary(data, prefix='Z'))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_sep(self):
data = {'a': 1, 'b': 2}
expected = [('Z:a', 1), ('Z:b', 2)]
actual = list(ceph.flatten_dictionary(data, prefix='Z', sep=':'))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_nested(self):
data = {'a': 1, 'b': 2, 'c': {'d': 3}}
expected = [('a', 1), ('b', 2), ('c.d', 3)]
actual = list(ceph.flatten_dictionary(data))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_doubly_nested(self):
data = {'a': 1, 'b': 2, 'c': {'d': 3}, 'e': {'f': {'g': 1}}}
expected = [('a', 1), ('b', 2), ('c.d', 3), ('e.f.g', 1)]
actual = list(ceph.flatten_dictionary(data))
self.assertSequenceEqual(actual, expected)
@run_only_if_assertSequenceEqual_is_available
def test_complex(self):
data = {"val": 0,
"max": 524288000,
"get": 60910,
"wait": {"avgcount": 0,
"sum": 0},
}
expected = [
('get', 60910),
('max', 524288000),
('val', 0),
('wait.avgcount', 0),
('wait.sum', 0),
]
actual = list(ceph.flatten_dictionary(data))
self.assertSequenceEqual(actual, expected)
class TestCephCollectorSocketNameHandling(CollectorTestCase):
def setUp(self):
config = get_collector_config('CephCollector', {
'interval': 10,
})
self.collector = ceph.CephCollector(config, None)
def test_counter_default_prefix(self):
expected = 'ceph.osd.325'
sock = '/var/run/ceph/ceph-osd.325.asok'
actual = self.collector._get_counter_prefix_from_socket_name(sock)
        self.assertEqual(actual, expected)
def test_counter_alternate_prefix(self):
expected = 'ceph.keep-osd.325'
sock = '/var/run/ceph/keep-osd.325.asok'
actual = self.collector._get_counter_prefix_from_socket_name(sock)
        self.assertEqual(actual, expected)
@patch('glob.glob')
def test_get_socket_paths(self, glob_mock):
config = get_collector_config('CephCollector', {
'socket_path': '/path/',
'socket_prefix': 'prefix-',
'socket_ext': 'ext',
})
collector = ceph.CephCollector(config, None)
collector._get_socket_paths()
glob_mock.assert_called_with('/path/prefix-*.ext')
class TestCephCollectorGettingStats(CollectorTestCase):
def setUp(self):
config = get_collector_config('CephCollector', {
'interval': 10,
})
self.collector = ceph.CephCollector(config, None)
def test_import(self):
self.assertTrue(ceph.CephCollector)
@run_only_if_subprocess_check_output_is_available
@patch('subprocess.check_output')
def test_load_works(self, check_output):
expected = {'a': 1,
'b': 2,
}
check_output.return_value = json.dumps(expected)
actual = self.collector._get_stats_from_socket('a_socket_name')
check_output.assert_called_with(['/usr/bin/ceph',
'--admin-daemon',
'a_socket_name',
'perf',
'dump',
])
self.assertEqual(actual, expected)
@run_only_if_subprocess_check_output_is_available
@patch('subprocess.check_output')
def test_ceph_command_fails(self, check_output):
check_output.side_effect = subprocess.CalledProcessError(
255, ['/usr/bin/ceph'], 'error!',
)
actual = self.collector._get_stats_from_socket('a_socket_name')
check_output.assert_called_with(['/usr/bin/ceph',
'--admin-daemon',
'a_socket_name',
'perf',
'dump',
])
self.assertEqual(actual, {})
@run_only_if_subprocess_check_output_is_available
@patch('json.loads')
@patch('subprocess.check_output')
def test_json_decode_fails(self, check_output, loads):
input = {'a': 1,
'b': 2,
}
check_output.return_value = json.dumps(input)
loads.side_effect = ValueError('bad data')
actual = self.collector._get_stats_from_socket('a_socket_name')
check_output.assert_called_with(['/usr/bin/ceph',
'--admin-daemon',
'a_socket_name',
'perf',
'dump',
])
loads.assert_called_with(json.dumps(input))
self.assertEqual(actual, {})
class TestCephCollectorPublish(CollectorTestCase):
def setUp(self):
config = get_collector_config('CephCollector', {
'interval': 10,
})
self.collector = ceph.CephCollector(config, None)
@patch.object(Collector, 'publish')
def test_simple(self, publish_mock):
self.collector._publish_stats('prefix', {'a': 1})
publish_mock.assert_called_with('prefix.a', 1,
metric_type='GAUGE', instance=None,
precision=0)
@patch.object(Collector, 'publish')
def test_multiple(self, publish_mock):
self.collector._publish_stats('prefix', {'a': 1, 'b': 2})
publish_mock.assert_has_calls([call('prefix.a', 1,
metric_type='GAUGE', instance=None,
precision=0),
call('prefix.b', 2,
metric_type='GAUGE', instance=None,
precision=0),
])
if __name__ == "__main__":
unittest.main()
|
from datetime import timedelta
from pysochain import ChainSo
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTRIBUTION = "Data provided by chain.so"
CONF_NETWORK = "network"
DEFAULT_NAME = "Crypto Balance"
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Required(CONF_NETWORK): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the sochain sensors."""
address = config.get(CONF_ADDRESS)
network = config.get(CONF_NETWORK)
name = config.get(CONF_NAME)
session = async_get_clientsession(hass)
chainso = ChainSo(network, address, hass.loop, session)
async_add_entities([SochainSensor(name, network.upper(), chainso)], True)
class SochainSensor(Entity):
"""Representation of a Sochain sensor."""
def __init__(self, name, unit_of_measurement, chainso):
"""Initialize the sensor."""
self._name = name
self._unit_of_measurement = unit_of_measurement
self.chainso = chainso
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return (
self.chainso.data.get("confirmed_balance")
if self.chainso is not None
else None
)
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
async def async_update(self):
"""Get the latest state of the sensor."""
await self.chainso.async_get_data()
|
from urllib.parse import urljoin
import aiohttp
from . import exceptions
from . import mesos_file
from . import util
from paasta_tools.async_utils import async_ttl_cache
from paasta_tools.utils import get_user_agent
class MesosSlave:
def __init__(self, config, items):
self.config = config
self.__items = items
def __getitem__(self, name):
return self.__items[name]
def __str__(self):
return self.key()
def key(self):
return self["pid"].split("@")[-1]
@property
def host(self):
return "{}://{}:{}".format(
self.config["scheme"], self["hostname"], self["pid"].split(":")[-1]
)
async def fetch(self, url, **kwargs) -> aiohttp.ClientResponse:
headers = {"User-Agent": get_user_agent()}
async with aiohttp.ClientSession(
conn_timeout=self.config["response_timeout"],
read_timeout=self.config["response_timeout"],
) as session:
try:
async with session.get(
urljoin(self.host, url), headers=headers, **kwargs
) as response:
await response.text()
return response
except aiohttp.ClientConnectionError:
raise exceptions.SlaveDoesNotExist(
f"Unable to connect to the slave at {self.host}"
)
@async_ttl_cache(ttl=5, cleanup_self=True)
async def state(self):
return await (await self.fetch("/slave(1)/state.json")).json()
async def frameworks(self):
return util.merge(await self.state(), "frameworks", "completed_frameworks")
async def task_executor(self, task_id):
for fw in await self.frameworks():
for exc in util.merge(fw, "executors", "completed_executors"):
if task_id in list(
map(
lambda x: x["id"],
util.merge(exc, "completed_tasks", "tasks", "queued_tasks"),
)
):
return exc
raise exceptions.MissingExecutor("No executor has a task by that id")
async def file_list(self, path):
# The sandbox does not exist on the slave.
if path == "":
return []
        resp = await self.fetch("/files/browse.json", params={"path": path})
        if resp.status == 404:
return []
return await resp.json()
def file(self, task, path):
return mesos_file.File(self, task, path)
@async_ttl_cache(ttl=30, cleanup_self=True)
async def stats(self):
return await (await self.fetch("/monitor/statistics.json")).json()
    async def executor_stats(self, _id):
        return list(filter(lambda x: x["executor_id"] == _id, await self.stats()))
async def task_stats(self, _id):
stats = list(filter(lambda x: x["executor_id"] == _id, await self.stats()))
# Tasks that are not yet in a RUNNING state have no stats.
if len(stats) == 0:
return {}
else:
return stats[0]["statistics"]
@property # type: ignore
@util.memoize
def log(self):
return mesos_file.File(self, path="/slave/log")
|
import os
from unittest import mock
import pytest
from nikola.metadata_extractors import (
MetaCondition,
check_conditions,
default_metadata_extractors_by,
load_defaults,
)
from nikola.plugins.compile.rest import CompileRest
from nikola.plugins.compile.markdown import CompileMarkdown
from nikola.plugins.compile.ipynb import CompileIPynb
from nikola.plugins.compile.html import CompileHtml
from nikola.post import get_meta
from .helper import FakeSite
@pytest.mark.parametrize(
"filecount, expected, unexpected",
[(1, "onefile", "twofile"), (2, "twofile", "onefile")],
)
@pytest.mark.parametrize(
"format_lc, format_friendly",
[("nikola", "Nikola"), ("toml", "TOML"), ("yaml", "YAML")],
)
def test_builtin_extractors_rest(
metadata_extractors_by,
testfiledir,
filecount,
expected,
unexpected,
format_lc,
format_friendly,
):
is_two_files = filecount == 2
source_filename = "f-rest-{0}-{1}.rst".format(filecount, format_lc)
metadata_filename = "f-rest-{0}-{1}.meta".format(filecount, format_lc)
source_path = os.path.join(testfiledir, source_filename)
metadata_path = os.path.join(testfiledir, metadata_filename)
post = FakePost(source_path, metadata_path, {}, None, metadata_extractors_by)
assert os.path.exists(source_path)
if is_two_files:
assert os.path.exists(metadata_path)
meta, extractor = get_meta(post, None)
assert meta
assert extractor is metadata_extractors_by["name"][format_lc]
assert meta["title"] == "T: reST, {0}, {1}".format(filecount, format_friendly)
assert meta["slug"] == "s-rest-{0}-{1}".format(filecount, format_lc)
assert expected in meta["tags"]
assert unexpected not in meta["tags"]
assert "meta" in meta["tags"]
assert format_friendly in meta["tags"]
assert "reST" in meta["tags"]
assert meta["date"] == "2017-07-01 00:00:00 UTC"
@pytest.mark.parametrize(
"filecount, expected, unexpected",
[(1, "onefile", "twofile"), (2, "twofile", "onefile")],
)
def test_nikola_meta_markdown(
metadata_extractors_by, testfiledir, filecount, expected, unexpected
):
is_two_files = filecount == 2
source_filename = "f-markdown-{0}-nikola.md".format(filecount)
metadata_filename = "f-markdown-{0}-nikola.meta".format(filecount)
source_path = os.path.join(testfiledir, source_filename)
metadata_path = os.path.join(testfiledir, metadata_filename)
post = FakePost(source_path, metadata_path, {}, None, metadata_extractors_by)
assert os.path.exists(source_path)
if is_two_files:
assert os.path.exists(metadata_path)
meta, extractor = get_meta(post, None)
assert extractor is metadata_extractors_by["name"]["nikola"]
assert meta["title"] == "T: Markdown, {0}, Nikola".format(filecount)
assert meta["slug"] == "s-markdown-{0}-nikola".format(filecount)
assert expected in meta["tags"]
assert unexpected not in meta["tags"]
assert "meta" in meta["tags"]
assert "Nikola" in meta["tags"]
assert "Markdown" in meta["tags"]
assert meta["date"] == "2017-07-01 00:00:00 UTC"
@pytest.mark.parametrize(
"compiler, fileextension, compiler_lc, name",
[
(CompileRest, "rst", "rest", "reST"),
(CompileMarkdown, "md", "markdown", "Markdown"),
(CompileIPynb, "ipynb", "ipynb", "Jupyter Notebook"),
(CompileHtml, "html", "html", "HTML"),
],
)
def test_compiler_metadata(
metadata_extractors_by, testfiledir, compiler, fileextension, compiler_lc, name
):
source_filename = "f-{0}-1-compiler.{1}".format(compiler_lc, fileextension)
metadata_filename = "f-{0}-1-compiler.meta".format(compiler_lc)
title = "T: {0}, 1, compiler".format(name)
slug = "s-{0}-1-compiler".format(compiler_lc)
source_path = os.path.join(testfiledir, source_filename)
metadata_path = os.path.join(testfiledir, metadata_filename)
config = {
"USE_REST_DOCINFO_METADATA": True,
"MARKDOWN_EXTENSIONS": ["markdown.extensions.meta"],
}
site = FakeSite()
site.config.update(config)
compiler_obj = compiler()
compiler_obj.set_site(site)
post = FakePost(
source_path, metadata_path, config, compiler_obj, metadata_extractors_by
)
class FakeBorg:
current_lang = "en"
def __call__(self):
return self
with mock.patch("nikola.plugins.compile." + compiler_lc + ".LocaleBorg", FakeBorg):
meta, extractor = get_meta(post, None)
assert meta["title"] == title
assert meta["slug"] == slug
assert "meta" in meta["tags"]
assert "onefile" in meta["tags"]
assert "compiler" in meta["tags"]
assert name in meta["tags"]
assert meta["date"] == "2017-07-01 00:00:00 UTC"
def test_yaml_none_handling(metadata_extractors_by):
yaml_extractor = metadata_extractors_by["name"]["yaml"]
meta = yaml_extractor.extract_text("---\ntitle: foo\nslug: null")
assert meta["title"] == "foo"
assert meta["slug"] == ""
@pytest.mark.parametrize(
"conditions",
[
[(MetaCondition.config_bool, "baz"), (MetaCondition.config_present, "quux")],
pytest.param(
[(MetaCondition.config_bool, "quux")], marks=pytest.mark.xfail(strict=True)
),
pytest.param(
[(MetaCondition.config_present, "foobar")],
marks=pytest.mark.xfail(strict=True),
),
[(MetaCondition.extension, "bar")],
pytest.param(
[(MetaCondition.extension, "baz")], marks=pytest.mark.xfail(strict=True)
),
[(MetaCondition.compiler, "foo")],
pytest.param(
[(MetaCondition.compiler, "foobar")], marks=pytest.mark.xfail(strict=True)
),
pytest.param(
[(MetaCondition.never, None), (MetaCondition.config_present, "bar")],
marks=pytest.mark.xfail(strict=True),
),
],
)
def test_check_conditions(conditions, dummy_post):
filename = "foo.bar"
config = {"baz": True, "quux": False}
assert check_conditions(dummy_post, filename, conditions, config, "")
class FakePost:
def __init__(
self, source_path, metadata_path, config, compiler, metadata_extractors_by
):
self.source_path = source_path
self.metadata_path = metadata_path
self.is_two_file = True
self.config = {"TRANSLATIONS": {"en": "./"}, "DEFAULT_LANG": "en"}
self.config.update(config)
self.default_lang = self.config["DEFAULT_LANG"]
self.metadata_extractors_by = metadata_extractors_by
if compiler:
self.compiler = compiler
def translated_source_path(self, _):
return self.source_path
@pytest.fixture
def metadata_extractors_by():
metadata_extractors = default_metadata_extractors_by()
load_defaults(None, metadata_extractors)
return metadata_extractors
@pytest.fixture(scope="module")
def testfiledir(test_dir):
return os.path.join(test_dir, "data", "metadata_extractors")
@pytest.fixture(scope="module")
def dummy_post():
class DummyCompiler:
name = "foo"
class DummyPost:
compiler = DummyCompiler()
return DummyPost()
|
from homeassistant.components.air_quality import AirQualityEntity
from homeassistant.const import CONF_DEVICE_ID, CONF_NAME
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_AQI_LEVEL,
ATTR_AQI_POLLUTANT,
ATTR_VOC,
DISPATCHER_KAITERRA,
DOMAIN,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the air_quality kaiterra sensor."""
if discovery_info is None:
return
api = hass.data[DOMAIN]
name = discovery_info[CONF_NAME]
device_id = discovery_info[CONF_DEVICE_ID]
async_add_entities([KaiterraAirQuality(api, name, device_id)])
class KaiterraAirQuality(AirQualityEntity):
"""Implementation of a Kaittera air quality sensor."""
def __init__(self, api, name, device_id):
"""Initialize the sensor."""
self._api = api
self._name = f"{name} Air Quality"
self._device_id = device_id
def _data(self, key):
return self._device.get(key, {}).get("value")
@property
def _device(self):
return self._api.data.get(self._device_id, {})
@property
def should_poll(self):
"""Return that the sensor should not be polled."""
return False
@property
def available(self):
"""Return the availability of the sensor."""
return self._api.data.get(self._device_id) is not None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def air_quality_index(self):
"""Return the Air Quality Index (AQI)."""
return self._data("aqi")
@property
def air_quality_index_level(self):
"""Return the Air Quality Index level."""
return self._data("aqi_level")
@property
def air_quality_index_pollutant(self):
"""Return the Air Quality Index level."""
return self._data("aqi_pollutant")
@property
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self._data("rpm25c")
@property
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self._data("rpm10c")
@property
def carbon_dioxide(self):
"""Return the CO2 (carbon dioxide) level."""
return self._data("rco2")
@property
def volatile_organic_compounds(self):
"""Return the VOC (Volatile Organic Compounds) level."""
return self._data("rtvoc")
@property
def unique_id(self):
"""Return the sensor's unique id."""
return f"{self._device_id}_air_quality"
@property
def device_state_attributes(self):
"""Return the device state attributes."""
data = {}
attributes = [
(ATTR_VOC, self.volatile_organic_compounds),
(ATTR_AQI_LEVEL, self.air_quality_index_level),
(ATTR_AQI_POLLUTANT, self.air_quality_index_pollutant),
]
for attr, value in attributes:
if value is not None:
data[attr] = value
return data
async def async_added_to_hass(self):
"""Register callback."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, DISPATCHER_KAITERRA, self.async_write_ha_state
)
)
|
import logging
from ecoaliface.simple import ECoalController
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_PASSWORD,
CONF_SENSORS,
CONF_SWITCHES,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)
DOMAIN = "ecoal_boiler"
DATA_ECOAL_BOILER = f"data_{DOMAIN}"
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "admin"
# Available pump ids with assigned HA names
# Available as switches
AVAILABLE_PUMPS = {
"central_heating_pump": "Central heating pump",
"central_heating_pump2": "Central heating pump2",
"domestic_hot_water_pump": "Domestic hot water pump",
}
# Available temp sensor ids with assigned HA names
# Available as sensors
AVAILABLE_SENSORS = {
"outdoor_temp": "Outdoor temperature",
"indoor_temp": "Indoor temperature",
"indoor2_temp": "Indoor temperature 2",
"domestic_hot_water_temp": "Domestic hot water temperature",
"target_domestic_hot_water_temp": "Target hot water temperature",
"feedwater_in_temp": "Feedwater input temperature",
"feedwater_out_temp": "Feedwater output temperature",
"target_feedwater_temp": "Target feedwater temperature",
"fuel_feeder_temp": "Fuel feeder temperature",
"exhaust_temp": "Exhaust temperature",
}
SWITCH_SCHEMA = vol.Schema(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(AVAILABLE_PUMPS)): vol.All(
cv.ensure_list, [vol.In(AVAILABLE_PUMPS)]
)
}
)
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(AVAILABLE_SENSORS)
): vol.All(cv.ensure_list, [vol.In(AVAILABLE_SENSORS)])
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA,
vol.Optional(CONF_SWITCHES, default={}): SWITCH_SCHEMA,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
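# Example configuration.yaml entry (a sketch; the host and the selected
# conditions are hypothetical):
#
# ecoal_boiler:
#   host: 192.168.1.123
#   username: admin
#   password: admin
#   sensors:
#     monitored_conditions:
#       - outdoor_temp
#       - indoor_temp
#   switches:
#     monitored_conditions:
#       - central_heating_pump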
def setup(hass, hass_config):
"""Set up global ECoalController instance same for sensors and switches."""
conf = hass_config[DOMAIN]
host = conf[CONF_HOST]
username = conf[CONF_USERNAME]
passwd = conf[CONF_PASSWORD]
# Creating ECoalController instance makes HTTP request to controller.
ecoal_contr = ECoalController(host, username, passwd)
if ecoal_contr.version is None:
        # Wrong credentials or network configuration
_LOGGER.error(
"Unable to read controller status from %s@%s (wrong host/credentials)",
username,
host,
)
return False
_LOGGER.debug("Detected controller version: %r @%s", ecoal_contr.version, host)
hass.data[DATA_ECOAL_BOILER] = ecoal_contr
# Setup switches
switches = conf[CONF_SWITCHES][CONF_MONITORED_CONDITIONS]
load_platform(hass, "switch", DOMAIN, switches, hass_config)
# Setup temp sensors
sensors = conf[CONF_SENSORS][CONF_MONITORED_CONDITIONS]
load_platform(hass, "sensor", DOMAIN, sensors, hass_config)
return True
|
import asyncio
import logging
from typing import List, Optional, Set, Tuple
# pylint: disable=import-error
import bluetooth
from bt_proximity import BluetoothRSSI
import voluptuous as vol
from homeassistant.components.device_tracker import PLATFORM_SCHEMA
from homeassistant.components.device_tracker.const import (
CONF_SCAN_INTERVAL,
CONF_TRACK_NEW,
DEFAULT_TRACK_NEW,
SCAN_INTERVAL,
SOURCE_TYPE_BLUETOOTH,
)
from homeassistant.components.device_tracker.legacy import (
YAML_DEVICES,
async_load_config,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN, SERVICE_UPDATE
_LOGGER = logging.getLogger(__name__)
BT_PREFIX = "BT_"
CONF_REQUEST_RSSI = "request_rssi"
CONF_DEVICE_ID = "device_id"
DEFAULT_DEVICE_ID = -1
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TRACK_NEW): cv.boolean,
vol.Optional(CONF_REQUEST_RSSI): cv.boolean,
vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All(
vol.Coerce(int), vol.Range(min=-1)
),
}
)
def is_bluetooth_device(device) -> bool:
"""Check whether a device is a bluetooth device by its mac."""
return device.mac and device.mac[:3].upper() == BT_PREFIX
def discover_devices(device_id: int) -> List[Tuple[str, str]]:
"""Discover Bluetooth devices."""
result = bluetooth.discover_devices(
duration=8,
lookup_names=True,
flush_cache=True,
lookup_class=False,
device_id=device_id,
)
_LOGGER.debug("Bluetooth devices discovered = %d", len(result))
return result
async def see_device(
hass: HomeAssistantType, async_see, mac: str, device_name: str, rssi=None
) -> None:
"""Mark a device as seen."""
attributes = {}
if rssi is not None:
attributes["rssi"] = rssi
await async_see(
mac=f"{BT_PREFIX}{mac}",
host_name=device_name,
attributes=attributes,
source_type=SOURCE_TYPE_BLUETOOTH,
)
async def get_tracking_devices(hass: HomeAssistantType) -> Tuple[Set[str], Set[str]]:
"""
Load all known devices.
    We just need the devices, so consider_home and home range are set to 0.
"""
yaml_path: str = hass.config.path(YAML_DEVICES)
devices = await async_load_config(yaml_path, hass, 0)
bluetooth_devices = [device for device in devices if is_bluetooth_device(device)]
devices_to_track: Set[str] = {
device.mac[3:] for device in bluetooth_devices if device.track
}
devices_to_not_track: Set[str] = {
device.mac[3:] for device in bluetooth_devices if not device.track
}
return devices_to_track, devices_to_not_track
def lookup_name(mac: str) -> Optional[str]:
"""Lookup a Bluetooth device name."""
_LOGGER.debug("Scanning %s", mac)
return bluetooth.lookup_name(mac, timeout=5)
async def async_setup_scanner(
hass: HomeAssistantType, config: dict, async_see, discovery_info=None
):
"""Set up the Bluetooth Scanner."""
device_id: int = config[CONF_DEVICE_ID]
interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
request_rssi = config.get(CONF_REQUEST_RSSI, False)
update_bluetooth_lock = asyncio.Lock()
    # If tracking new devices is enabled, discover new devices on startup.
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
_LOGGER.debug("Tracking new devices is set to %s", track_new)
devices_to_track, devices_to_not_track = await get_tracking_devices(hass)
if not devices_to_track and not track_new:
_LOGGER.debug("No Bluetooth devices to track and not tracking new devices")
if request_rssi:
_LOGGER.debug("Detecting RSSI for devices")
async def perform_bluetooth_update():
"""Discover Bluetooth devices and update status."""
_LOGGER.debug("Performing Bluetooth devices discovery and update")
tasks = []
try:
if track_new:
devices = await hass.async_add_executor_job(discover_devices, device_id)
for mac, device_name in devices:
if mac not in devices_to_track and mac not in devices_to_not_track:
devices_to_track.add(mac)
for mac in devices_to_track:
device_name = await hass.async_add_executor_job(lookup_name, mac)
if device_name is None:
# Could not lookup device name
continue
rssi = None
if request_rssi:
client = BluetoothRSSI(mac)
rssi = await hass.async_add_executor_job(client.request_rssi)
client.close()
tasks.append(see_device(hass, async_see, mac, device_name, rssi))
if tasks:
await asyncio.wait(tasks)
except bluetooth.BluetoothError:
_LOGGER.exception("Error looking up Bluetooth device")
async def update_bluetooth(now=None):
"""Lookup Bluetooth devices and update status."""
# If an update is in progress, we don't do anything
if update_bluetooth_lock.locked():
_LOGGER.debug(
"Previous execution of update_bluetooth is taking longer than the scheduled update of interval %s",
interval,
)
return
async with update_bluetooth_lock:
await perform_bluetooth_update()
async def handle_manual_update_bluetooth(call):
"""Update bluetooth devices on demand."""
await update_bluetooth()
hass.async_create_task(update_bluetooth())
async_track_time_interval(hass, update_bluetooth, interval)
hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth)
return True
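# Example configuration.yaml entry (a sketch; keys follow PLATFORM_SCHEMA
# above, and the platform name assumes this module is the bluetooth_tracker
# integration):
#
# device_tracker:
#   - platform: bluetooth_tracker
#     request_rssi: true
#     device_id: -1  # -1 selects the first available Bluetooth adapter
#     track_new_devices: true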
|
from typing import Dict, List, Optional
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.catalog.const import HOME_APP_IDS, SYSTEM_PFN_ID_MAP
from xbox.webapi.api.provider.catalog.models import (
AlternateIdType,
CatalogResponse,
FieldsTemplate,
Image,
)
from xbox.webapi.api.provider.smartglass.models import (
InstalledPackage,
InstalledPackagesList,
)
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_APP,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_GAME,
MEDIA_TYPE_APP,
MEDIA_TYPE_GAME,
)
TYPE_MAP = {
"App": {
"type": MEDIA_TYPE_APP,
"class": MEDIA_CLASS_APP,
},
"Game": {
"type": MEDIA_TYPE_GAME,
"class": MEDIA_CLASS_GAME,
},
}
async def build_item_response(
client: XboxLiveClient,
device_id: str,
tv_configured: bool,
media_content_type: str,
media_content_id: str,
) -> Optional[BrowseMedia]:
"""Create response payload for the provided media query."""
apps: InstalledPackagesList = await client.smartglass.get_installed_apps(device_id)
if media_content_type in [None, "library"]:
library_info = BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="library",
media_content_type="library",
title="Installed Applications",
can_play=False,
can_expand=True,
children=[],
)
# Add Home
id_type = AlternateIdType.LEGACY_XBOX_PRODUCT_ID
home_catalog: CatalogResponse = (
await client.catalog.get_product_from_alternate_id(
HOME_APP_IDS[id_type], id_type
)
)
home_thumb = _find_media_image(
home_catalog.products[0].localized_properties[0].images
)
library_info.children.append(
BrowseMedia(
media_class=MEDIA_CLASS_APP,
media_content_id="Home",
media_content_type=MEDIA_TYPE_APP,
title="Home",
can_play=True,
can_expand=False,
thumbnail=home_thumb.uri,
)
)
# Add TV if configured
if tv_configured:
tv_catalog: CatalogResponse = (
await client.catalog.get_product_from_alternate_id(
SYSTEM_PFN_ID_MAP["Microsoft.Xbox.LiveTV_8wekyb3d8bbwe"][id_type],
id_type,
)
)
tv_thumb = _find_media_image(
tv_catalog.products[0].localized_properties[0].images
)
library_info.children.append(
BrowseMedia(
media_class=MEDIA_CLASS_APP,
media_content_id="TV",
media_content_type=MEDIA_TYPE_APP,
title="Live TV",
can_play=True,
can_expand=False,
thumbnail=tv_thumb.uri,
)
)
content_types = sorted(
{app.content_type for app in apps.result if app.content_type in TYPE_MAP}
)
for c_type in content_types:
library_info.children.append(
BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id=c_type,
media_content_type=TYPE_MAP[c_type]["type"],
title=f"{c_type}s",
can_play=False,
can_expand=True,
children_media_class=TYPE_MAP[c_type]["class"],
)
)
return library_info
app_details = await client.catalog.get_products(
[
app.one_store_product_id
for app in apps.result
if app.content_type == media_content_id and app.one_store_product_id
],
FieldsTemplate.BROWSE,
)
images = {
prod.product_id: prod.localized_properties[0].images
for prod in app_details.products
}
return BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id=media_content_id,
media_content_type=media_content_type,
title=f"{media_content_id}s",
can_play=False,
can_expand=True,
children=[
item_payload(app, images)
for app in apps.result
if app.content_type == media_content_id and app.one_store_product_id
],
children_media_class=TYPE_MAP[media_content_id]["class"],
)
def item_payload(item: InstalledPackage, images: Dict[str, List[Image]]):
"""Create response payload for a single media item."""
thumbnail = None
image = _find_media_image(images.get(item.one_store_product_id, []))
if image is not None:
thumbnail = image.uri
if thumbnail[0] == "/":
thumbnail = f"https:{thumbnail}"
return BrowseMedia(
media_class=TYPE_MAP[item.content_type]["class"],
media_content_id=item.one_store_product_id,
media_content_type=TYPE_MAP[item.content_type]["type"],
title=item.name,
can_play=True,
can_expand=False,
thumbnail=thumbnail,
)
def _find_media_image(images: List[Image]) -> Optional[Image]:
    """Return the first image matching the purpose order that is at least 300px wide."""
purpose_order = ["Poster", "Tile", "Logo", "BoxArt"]
for purpose in purpose_order:
for image in images:
if image.image_purpose == purpose and image.width >= 300:
return image
return None
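# Quick illustration of _find_media_image's selection rule using stand-in
# objects (the real Image model comes from xbox-webapi; this is only a sketch):
#
#     from types import SimpleNamespace
#     imgs = [
#         SimpleNamespace(image_purpose="Logo", width=400, uri="logo"),
#         SimpleNamespace(image_purpose="Poster", width=600, uri="poster"),
#         SimpleNamespace(image_purpose="Poster", width=100, uri="too-small"),
#     ]
#     assert _find_media_image(imgs).uri == "poster"  # Poster outranks Logo; width >= 300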
|
import getpass
import os
import platform
import subprocess
import sys
import argparse
import asyncio
import aiohttp
import pkg_resources
from redbot import MIN_PYTHON_VERSION
from redbot.setup import (
basic_setup,
load_existing_config,
remove_instance,
remove_instance_interaction,
create_backup,
)
from redbot.core import __version__, version_info as red_version_info, VersionInfo
from redbot.core.cli import confirm
if sys.platform == "linux":
import distro # pylint: disable=import-error
INTERACTIVE_MODE = not len(sys.argv) > 1 # CLI flags = non-interactive
INTRO = "==========================\nRed Discord Bot - Launcher\n==========================\n"
IS_WINDOWS = os.name == "nt"
IS_MAC = sys.platform == "darwin"
PYTHON_OK = sys.version_info >= MIN_PYTHON_VERSION or os.getenv("READTHEDOCS", False)
def is_venv():
"""Return True if the process is in a venv or in a virtualenv."""
# credit to @calebj
return hasattr(sys, "real_prefix") or (
hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
)
def parse_cli_args():
parser = argparse.ArgumentParser(
description="Red - Discord Bot's launcher (V3)", allow_abbrev=False
)
instances = load_existing_config()
parser.add_argument(
"instancename",
metavar="instancename",
type=str,
nargs="?",
help="The instance to run",
choices=list(instances.keys()),
)
parser.add_argument("--start", "-s", help="Starts Red", action="store_true")
parser.add_argument(
"--auto-restart", help="Autorestarts Red in case of issues", action="store_true"
)
return parser.parse_known_args()
def run_red(selected_instance, autorestart: bool = False, cliflags=None):
interpreter = sys.executable
while True:
print("Starting {}...".format(selected_instance))
cmd_list = [interpreter, "-m", "redbot", selected_instance]
if cliflags:
cmd_list += cliflags
status = subprocess.call(cmd_list)
        # Exit code 26 signals a restart request; loop again only in that case.
        if not autorestart or status != 26:
            break
def instance_menu():
instances = load_existing_config()
if not instances:
print("No instances found!")
return None
counter = 0
print("Red instance menu\n")
name_num_map = {}
for name in list(instances.keys()):
print("{}. {}\n".format(counter + 1, name))
name_num_map[str(counter + 1)] = name
counter += 1
while True:
selection = user_choice()
try:
selection = int(selection)
except ValueError:
print("Invalid input! Please enter a number corresponding to an instance.")
else:
if selection not in list(range(1, counter + 1)):
print("Invalid selection! Please try again")
else:
return name_num_map[str(selection)]
def clear_screen():
if IS_WINDOWS:
os.system("cls")
else:
os.system("clear")
def wait():
if INTERACTIVE_MODE:
input("Press enter to continue.")
def user_choice():
return input("> ").lower().strip()
def main_menu(flags_to_pass):
if IS_WINDOWS:
os.system("TITLE Red - Discord Bot V3 Launcher")
clear_screen()
while True:
print(INTRO)
print("\033[4mCurrent version:\033[0m {}".format(__version__))
print("WARNING: The launcher is scheduled for removal at a later date.")
print("")
print("1. Run Red w/ autorestart in case of issues")
print("2. Run Red")
print("0. Exit")
choice = user_choice()
if choice == "1":
instance = instance_menu()
if instance:
run_red(instance, autorestart=True, cliflags=flags_to_pass)
wait()
elif choice == "2":
instance = instance_menu()
if instance:
run_red(instance, autorestart=False, cliflags=flags_to_pass)
wait()
elif choice == "0":
break
clear_screen()
def main():
args, flags_to_pass = parse_cli_args()
if not PYTHON_OK:
print(
"Python {req_ver} is required to run Red, but you have {sys_ver}!".format(
req_ver=".".join(map(str, MIN_PYTHON_VERSION)), sys_ver=sys.version
)
) # Don't make an f-string, these may not exist on the python version being rejected!
sys.exit(1)
if INTERACTIVE_MODE:
main_menu(flags_to_pass)
elif args.start:
print("WARNING: The launcher is scheduled for removal at a later date.")
print("Starting Red...")
run_red(args.instancename, autorestart=args.auto_restart, cliflags=flags_to_pass)
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print("Exiting...")
|
from django.utils.translation import get_language_from_request
from django_elasticsearch_dsl.registries import registry
from shop.models.product import ProductModel
class SearchViewMixin:
def get_document(self, language):
documents = registry.get_documents([ProductModel])
try:
return next(doc for doc in documents if doc._language == language)
except StopIteration:
return next(doc for doc in documents if doc._language is None)
class ProductSearchViewMixin(SearchViewMixin):
"""
Mixin class to be added to the ProductListView to restrict that list to entities matching
the query string.
"""
search_fields = ['product_name', 'product_code']
def get_renderer_context(self):
renderer_context = super().get_renderer_context()
if renderer_context['request'].accepted_renderer.format == 'html':
renderer_context['search_autocomplete'] = True
return renderer_context
def get_queryset(self):
query = self.request.GET.get('q')
if query:
language = get_language_from_request(self.request)
document = self.get_document(language)
search = document.search().source(excludes=['body'])
search = search.query('multi_match', query=query, fields=self.search_fields, type='bool_prefix')
queryset = search.to_queryset()
else:
queryset = super().get_queryset()
return queryset
class CatalogSearchViewMixin(SearchViewMixin):
"""
Mixin class to be added to the ProductListView in order to create a full-text search.
"""
search_fields = ['product_name', 'product_code', 'body']
def get_serializer(self, *args, **kwargs):
kwargs.setdefault('label', 'search')
return super().get_serializer(*args, **kwargs)
def get_queryset(self):
language = get_language_from_request(self.request)
document = self.get_document(language)
query = self.request.GET.get('q')
search = document.search().source(excludes=['body'])
if query:
search = search.query('multi_match', query=query, fields=self.search_fields)
return search.to_queryset()
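# Usage sketch (the view name below is illustrative, not part of this module):
# mix one of these classes into django-SHOP's product list view, and a request
# such as /products/?q=shirt then narrows the results via an Elasticsearch
# multi_match query over the declared search_fields.
#
#     class MyProductListView(ProductSearchViewMixin, ProductListView):
#         ...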
|
import functools
from typing import Any, List
from zigpy.zcl.clusters.general import OnOff
from zigpy.zcl.foundation import Status
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.const import STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import State, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core import discovery
from .core.const import (
CHANNEL_ON_OFF,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
)
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity, ZhaGroupEntity
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, DOMAIN)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation switch from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
class BaseSwitch(SwitchEntity):
"""Common base class for zha switches."""
def __init__(self, *args, **kwargs):
"""Initialize the ZHA switch."""
self._on_off_channel = None
self._state = None
super().__init__(*args, **kwargs)
@property
def is_on(self) -> bool:
"""Return if the switch is on based on the statemachine."""
if self._state is None:
return False
return self._state
async def async_turn_on(self, **kwargs) -> None:
"""Turn the entity on."""
result = await self._on_off_channel.on()
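        # The ZCL channel replies with [command_id, status]; only commit the
        # optimistic state change once the device acknowledges with SUCCESS.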
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
return
self._state = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
result = await self._on_off_channel.off()
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
return
self._state = False
self.async_write_ha_state()
@STRICT_MATCH(channel_names=CHANNEL_ON_OFF)
class Switch(BaseSwitch, ZhaEntity):
"""ZHA switch."""
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Initialize the ZHA switch."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._on_off_channel = self.cluster_channels.get(CHANNEL_ON_OFF)
@callback
def async_set_state(self, attr_id: int, attr_name: str, value: Any):
"""Handle state update from channel."""
self._state = bool(value)
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state
)
@callback
def async_restore_last_state(self, last_state) -> None:
"""Restore previous state."""
self._state = last_state.state == STATE_ON
async def async_update(self) -> None:
"""Attempt to retrieve on off state from the switch."""
await super().async_update()
if self._on_off_channel:
state = await self._on_off_channel.get_attribute_value("on_off")
if state is not None:
self._state = state
@GROUP_MATCH()
class SwitchGroup(BaseSwitch, ZhaGroupEntity):
"""Representation of a switch group."""
def __init__(
self, entity_ids: List[str], unique_id: str, group_id: int, zha_device, **kwargs
) -> None:
"""Initialize a switch group."""
super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs)
self._available: bool = False
group = self.zha_device.gateway.get_group(self._group_id)
self._on_off_channel = group.endpoint[OnOff.cluster_id]
async def async_update(self) -> None:
"""Query all members and determine the light group state."""
all_states = [self.hass.states.get(x) for x in self._entity_ids]
states: List[State] = list(filter(None, all_states))
on_states = [state for state in states if state.state == STATE_ON]
self._state = len(on_states) > 0
self._available = any(state.state != STATE_UNAVAILABLE for state in states)
|
import pytest
from molecule.model import schema_v2
@pytest.fixture
def _model_platforms_docker_section_data():
return {
'driver': {
'name': 'docker',
},
'platforms': [{
'name':
'instance',
'hostname':
'instance',
'image':
'image_name:tag',
'pull':
True,
'pre_build_image':
False,
'registry': {
'url': 'registry.example.com',
'credentials': {
'username': 'username',
'password': '$PASSWORD',
'email': '[email protected]',
},
},
'override_command':
False,
'command':
'sleep infinity',
'pid_mode':
'host',
'privileged':
True,
'security_opts': [
'seccomp=unconfined',
],
'volumes': [
'/sys/fs/cgroup:/sys/fs/cgroup:ro',
],
'tmpfs': [
'/tmp',
'/run ',
],
'capabilities': [
'SYS_ADMIN',
],
'exposed_ports': [
'53/udp',
'53/tcp',
],
'published_ports': [
'0.0.0.0:8053:53/udp',
'0.0.0.0:8053:53/tcp',
],
'ulimits': [
'nofile:262144:262144',
],
'dns_servers': [
'8.8.8.8',
],
'env': {
'FOO': 'bar',
'foo': 'bar',
},
'restart_policy':
'on-failure',
'restart_retries':
1,
'networks': [
{
'name': 'foo',
},
{
'name': 'bar',
},
],
'network_mode':
'mode',
'purge_networks':
True,
'foo':
'bar'
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_docker_section_data'], indirect=True)
def test_platforms_docker(_config):
assert {} == schema_v2.validate(_config)
@pytest.mark.parametrize(
'_config', ['_model_platforms_docker_section_data'], indirect=True)
def test_platforms_unique_names(_config):
instance_name = _config['platforms'][0]['name']
_config['platforms'] += [{
'name': instance_name # duplicate platform name
}]
expected_validation_errors = {
'platforms': [{
0: [{
'name': ["'{}' is not unique".format(instance_name)]
}],
1: [{
'name': ["'{}' is not unique".format(instance_name)]
}]
}]
}
assert expected_validation_errors == schema_v2.validate(_config)
@pytest.mark.parametrize(
'_config', ['_model_platforms_docker_section_data'], indirect=True)
def test_platforms_docker_exposed_ports_coerced(_config):
_config['platforms'][0]['exposed_ports'] = [9904]
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_docker_errors_section_data():
return {
'platforms': [{
'name': int(),
'hostname': int(),
'image': int(),
'pull': int(),
'dockerfile': bool(),
'pre_build_image': int(),
'registry': {
'url': int(),
'credentials': {
'username': int(),
'password': int(),
'email': int(),
},
},
'override_command': int(),
'command': int(),
'pid_mode': int(),
'privileged': str(),
'security_opts': [
int(),
],
'volumes': [
int(),
],
'tmpfs': [
int(),
],
'capabilities': [
int(),
],
'exposed_ports': [
bool(),
],
'published_ports': [
int(),
],
'ulimits': [
int(),
],
'dns_servers': [
int(),
],
'env': str(),
'restart_policy': int(),
'restart_retries': str(),
'networks': [
{
'name': int(),
},
],
'network_mode': int(),
'purge_networks': int(),
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_docker_errors_section_data'], indirect=True)
def test_platforms_docker_has_errors(_config):
x = {
'platforms': [{
0: [{
'exposed_ports': [{
0: ['must be of string type']
}],
'dns_servers': [{
0: ['must be of string type']
}],
'name': ['must be of string type'],
'capabilities': [{
0: ['must be of string type']
}],
'image': ['must be of string type'],
'pull': ['must be of boolean type'],
'dockerfile': ['must be of string type'],
'pre_build_image': ['must be of boolean type'],
'hostname': ['must be of string type'],
'security_opts': [{
0: ['must be of string type']
}],
'pid_mode': ['must be of string type'],
'privileged': ['must be of boolean type'],
'override_command': ['must be of boolean type'],
'command': ['must be of string type'],
'registry': [{
'url': ['must be of string type'],
'credentials': [{
'username': ['must be of string type'],
'password': ['must be of string type'],
'email': ['must be of string type']
}]
}],
'volumes': [{
0: ['must be of string type']
}],
'published_ports': [{
0: ['must be of string type']
}],
'tmpfs': [{
0: ['must be of string type']
}],
'networks': [{
0: [{
'name': ['must be of string type']
}]
}],
'network_mode': ['must be of string type'],
'purge_networks': ['must be of boolean type'],
'ulimits': [{
0: ['must be of string type']
}],
'env': ['must be of dict type'],
'restart_policy': ['must be of string type'],
'restart_retries': ['must be of integer type'],
}]
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_vagrant_section_data():
return {
'driver': {
'name': 'vagrant',
},
'platforms': [{
'name':
'instance',
'interfaces': [{
'auto_config': True,
'network_name': 'private_network',
'type': 'dhcp'
}],
'instance_raw_config_args':
["vm.network 'forwarded_port', guest: 80, host: 8080"],
'config_options': {
'ssh.insert_key': False,
},
'box':
'sleep infinity',
'box_version':
'sleep infinity',
'box_url':
'sleep infinity',
'memory':
1024,
'cpus':
2,
'provider_options': {
'gui': True,
},
'provider_raw_config_args': [
"customize ['modifyvm', :id, '--cpuexecutioncap', '50']",
],
'provision':
True,
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_vagrant_section_data'], indirect=True)
def test_platforms_vagrant(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_vagrant_errors_section_data():
return {
'driver': {
'name': 'vagrant',
},
'platforms': [{
'name': int(),
'interfaces': [],
'instance_raw_config_args': [
int(),
],
'config_options': [],
'box': int(),
'box_version': int(),
'box_url': int(),
'memory': str(),
'cpus': str(),
'provider_options': [],
'provider_raw_config_args': [
int(),
],
'provision': str(),
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_vagrant_errors_section_data'], indirect=True)
def test_platforms_vagrant_has_errors(_config):
x = {
'platforms': [{
0: [{
'box': ['must be of string type'],
'box_version': ['must be of string type'],
'config_options': ['must be of dict type'],
'name': ['must be of string type'],
'provider_raw_config_args': [{
0: ['must be of string type']
}],
'cpus': ['must be of integer type'],
'box_url': ['must be of string type'],
'instance_raw_config_args': [{
0: ['must be of string type']
}],
'memory': ['must be of integer type'],
'provider_options': ['must be of dict type'],
'provision': ['must be of boolean type']
}]
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_lxd_section_data():
return {
'driver': {
'name': 'lxd',
},
'platforms': [{
'name': 'instance',
'url': 'https://127.0.0.1:8443',
'cert_file': '/root/.config/lxc/client.crt',
'key_file': '/root/.config/lxc/client.key',
'trust_password': 'password',
'source': {
'type': 'image',
'mode': 'pull',
'protocol': 'lxd',
'server': 'https://images.linuxcontainers.org',
'alias': 'ubuntu/xenial/amd64',
},
'architecture': 'x86_64',
'config': {
'limits.cpu': '2',
},
'devices': {
'kvm': {
'path': '/dev/kvm',
'type': 'unix-char',
}
},
'profiles': [
'default',
],
'force_stop': True,
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_lxd_section_data'], indirect=True)
def test_platforms_lxd(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_lxd_errors_section_data():
return {
'driver': {
'name': 'lxd',
},
'platforms': [{
'name': int(),
'url': int(),
'cert_file': int(),
'key_file': int(),
'trust_password': int(),
'source': {
'type': int(),
'mode': 'set for mode',
'server': int(),
'protocol': 'set for protocol',
'alias': int(),
},
'architecture': 'set for architecture',
'config': int(),
'devices': int(),
'profiles': [
int(),
],
'force_stop': int()
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_lxd_errors_section_data'], indirect=True)
def test_platforms_lxd_has_errors(_config):
x = {
'platforms': [{
0: [{
'name': ['must be of string type'],
'url': ['must be of string type'],
'cert_file': ['must be of string type'],
'key_file': ['must be of string type'],
'trust_password': ['must be of string type'],
'source': [{
'alias': ['must be of string type'],
'mode': ['unallowed value set for mode'],
'protocol': ['unallowed value set for protocol'],
'server': ['must be of string type'],
'type': ['must be of string type']
}],
'architecture': ['unallowed value set for architecture'],
'config': ['must be of dict type'],
'devices': ['must be of dict type'],
'profiles': [{
0: ['must be of string type']
}],
'force_stop': ['must be of boolean type']
}]
}]
}
assert x == schema_v2.validate(_config)
def test_platforms_driver_name_required(_config):
del _config['platforms'][0]['name']
x = {'platforms': [{0: [{'name': ['required field']}]}]}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_platform_linode_section_data():
return {
'driver': {
'name': 'linode',
},
'platforms': [{
'name': '',
'plan': 0,
'datacenter': 0,
'distribution': 0,
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platform_linode_section_data'], indirect=True)
def test_platforms_linode(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_platforms_linode_errors_section_data():
return {
'driver': {
'name': 'linode',
},
'platforms': [{
'name': 0,
'plan': '',
'datacenter': '',
'distribution': '',
}]
}
@pytest.mark.parametrize(
'_config', ['_model_platforms_linode_errors_section_data'], indirect=True)
def test_platforms_linode_has_errors(_config):
expected_config = {
'platforms': [{
0: [{
'name': ['must be of string type'],
'plan': ['must be of integer type'],
'datacenter': ['must be of integer type'],
'distribution': ['must be of integer type'],
}],
}],
}
assert expected_config == schema_v2.validate(_config)
@pytest.mark.parametrize(
'_config', ['_model_platform_linode_section_data'], indirect=True)
@pytest.mark.parametrize('_required_field', (
    'distribution',
    'plan',
    'datacenter',
))
def test_platforms_linode_fields_required(_config, _required_field):
del _config['platforms'][0][_required_field]
expected_config = {
'platforms': [{
0: [{
_required_field: ['required field']
}]
}]
}
assert expected_config == schema_v2.validate(_config)
|
from django.db import migrations
def migrate_screenshot(apps, schema_editor):
Screenshot = apps.get_model("screenshots", "Screenshot")
db_alias = schema_editor.connection.alias
for screenshot in Screenshot.objects.using(db_alias).prefetch_related("component"):
component = screenshot.component
screenshot.translation = component.translation_set.get(
language_id=component.source_language_id
)
screenshot.save(update_fields=["translation"])
class Migration(migrations.Migration):
dependencies = [
("screenshots", "0002_screenshot_translation"),
("trans", "0099_remove_project_source_language"),
]
operations = [migrations.RunPython(migrate_screenshot, elidable=True)]
|
from collections import defaultdict
from datetime import timedelta
import crimereports
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_EXCLUDE,
CONF_INCLUDE,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_RADIUS,
LENGTH_KILOMETERS,
LENGTH_METERS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from homeassistant.util.distance import convert
from homeassistant.util.dt import now
DOMAIN = "crimereports"
EVENT_INCIDENT = f"{DOMAIN}_incident"
SCAN_INTERVAL = timedelta(minutes=30)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_RADIUS): vol.Coerce(float),
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_INCLUDE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EXCLUDE): vol.All(cv.ensure_list, [cv.string]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Crime Reports platform."""
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
name = config[CONF_NAME]
radius = config[CONF_RADIUS]
include = config.get(CONF_INCLUDE)
exclude = config.get(CONF_EXCLUDE)
add_entities(
[CrimeReportsSensor(hass, name, latitude, longitude, radius, include, exclude)],
True,
)
class CrimeReportsSensor(Entity):
"""Representation of a Crime Reports Sensor."""
def __init__(self, hass, name, latitude, longitude, radius, include, exclude):
"""Initialize the Crime Reports sensor."""
self._hass = hass
self._name = name
self._include = include
self._exclude = exclude
radius_kilometers = convert(radius, LENGTH_METERS, LENGTH_KILOMETERS)
self._crimereports = crimereports.CrimeReports(
(latitude, longitude), radius_kilometers
)
self._attributes = None
self._state = None
self._previous_incidents = set()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def _incident_event(self, incident):
"""Fire if an event occurs."""
data = {
"type": incident.get("type"),
"description": incident.get("friendly_description"),
"timestamp": incident.get("timestamp"),
"location": incident.get("location"),
}
if incident.get("coordinates"):
data.update(
{
ATTR_LATITUDE: incident.get("coordinates")[0],
ATTR_LONGITUDE: incident.get("coordinates")[1],
}
)
self._hass.bus.fire(EVENT_INCIDENT, data)
def update(self):
"""Update device state."""
incident_counts = defaultdict(int)
incidents = self._crimereports.get_incidents(
now().date(), include=self._include, exclude=self._exclude
)
fire_events = len(self._previous_incidents) > 0
if len(incidents) < len(self._previous_incidents):
self._previous_incidents = set()
for incident in incidents:
incident_type = slugify(incident.get("type"))
incident_counts[incident_type] += 1
if fire_events and incident.get("id") not in self._previous_incidents:
self._incident_event(incident)
self._previous_incidents.add(incident.get("id"))
self._attributes = {ATTR_ATTRIBUTION: crimereports.ATTRIBUTION}
self._attributes.update(incident_counts)
self._state = len(incidents)
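# Example configuration.yaml entry (a sketch; coordinates default to the Home
# Assistant location when omitted, and radius is given in meters per the
# schema above):
#
# sensor:
#   - platform: crimereports
#     name: Crime
#     radius: 250
#     include:
#       - Theft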
|
import re
import logging
import math
from itertools import islice
from .module_exceptions import StepperConfigurationError
logging.getLogger("requests").setLevel(logging.WARNING)
def take(number, iterable):
    """Return the first `number` items of `iterable` as a list."""
    return list(islice(iterable, 0, number))
def parse_duration(duration):
'''
Parse duration string, such as '3h2m3s' into milliseconds
>>> parse_duration('3h2m3s')
10923000
>>> parse_duration('0.3s')
300
>>> parse_duration('5')
5000
'''
_re_token = re.compile("([0-9.]+)([dhms]?)")
def parse_token(time, multiplier):
multipliers = {
'd': 86400,
'h': 3600,
'm': 60,
's': 1,
}
if multiplier:
if multiplier in multipliers:
return int(float(time) * multipliers[multiplier] * 1000)
else:
raise StepperConfigurationError(
'Failed to parse duration: %s' % duration)
else:
return int(float(time) * 1000)
return sum(parse_token(*token) for token in _re_token.findall(duration))
def solve_quadratic(a, b, c):
'''
>>> solve_quadratic(1.0, 2.0, 1.0)
(-1.0, -1.0)
'''
discRoot = math.sqrt((b * b) - 4 * a * c)
root1 = (-b - discRoot) / (2 * a)
root2 = (-b + discRoot) / (2 * a)
return (root1, root2)
def s_to_ms(f_sec):
return int(f_sec * 1000.0)
def proper_round(n):
    """
    Round a float to the closest int, with ties rounding away from zero.
    :rtype: int
    :param n: float
    """
    if n == 0:
        return 0
    return int(int(n) + (n / abs(n)) * int(abs(n - int(n)) >= 0.5))
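# Sanity check for proper_round (ties round away from zero, unlike Python 3's
# built-in round, which rounds half to even):
#
#     >>> proper_round(2.5)
#     3
#     >>> proper_round(-2.5)
#     -3
#     >>> round(2.5)
#     2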
|
import re as re
import numpy as np
from ..base import BaseRaw
from ..meas_info import create_info
from ..utils import _mult_cal_one
from ...utils import logger, verbose, fill_doc
from ...annotations import Annotations
@fill_doc
def read_raw_boxy(fname, preload=False, verbose=None):
"""Reader for an optical imaging recording.
This function has been tested using the ISS Imagent I and II systems
and versions 0.40/0.84 of the BOXY recording software.
Parameters
----------
fname : str
Path to the BOXY data file.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawBOXY
A Raw object containing BOXY data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawBOXY(fname, preload, verbose)
@fill_doc
class RawBOXY(BaseRaw):
"""Raw object from a BOXY optical imaging file.
Parameters
----------
fname : str
Path to the BOXY data file.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
logger.info('Loading %s' % fname)
# Read header file and grab some info.
start_line = np.inf
col_names = mrk_col = filetype = mrk_data = end_line = None
raw_extras = dict()
raw_extras['offsets'] = list() # keep track of our offsets
sfreq = None
with open(fname, 'r') as fid:
line_num = 0
i_line = fid.readline()
while i_line:
# most of our lines will be data lines, so check that first
if line_num >= start_line:
assert col_names is not None
assert filetype is not None
if '#DATA ENDS' in i_line:
# Data ends just before this.
end_line = line_num
break
if mrk_col is not None:
if filetype == 'non-parsed':
                        # Non-parsed files have different line lengths.
crnt_line = i_line.rsplit(' ')[0]
temp_data = re.findall(
r'[-+]?\d*\.?\d+', crnt_line)
if len(temp_data) == len(col_names):
mrk_data.append(float(
re.findall(r'[-+]?\d*\.?\d+', crnt_line)
[mrk_col]))
else:
crnt_line = i_line.rsplit(' ')[0]
mrk_data.append(float(re.findall(
r'[-+]?\d*\.?\d+', crnt_line)[mrk_col]))
raw_extras['offsets'].append(fid.tell())
# now proceed with more standard header parsing
elif 'BOXY.EXE:' in i_line:
boxy_ver = re.findall(r'\d*\.\d+',
i_line.rsplit(' ')[-1])[0]
# Check that the BOXY version is supported
if boxy_ver not in ['0.40', '0.84']:
raise RuntimeError('MNE has not been tested with BOXY '
'version (%s)' % boxy_ver)
elif 'Detector Channels' in i_line:
raw_extras['detect_num'] = int(i_line.rsplit(' ')[0])
elif 'External MUX Channels' in i_line:
raw_extras['source_num'] = int(i_line.rsplit(' ')[0])
elif 'Update Rate (Hz)' in i_line or \
'Updata Rate (Hz)' in i_line:
                    # Version 0.40 of the BOXY recording software
                    # (and possibly other versions lower than 0.84) contains a
                    # typo in the raw data file where 'Update Rate' is spelled
                    # 'Updata Rate'. This accounts for that typo.
sfreq = float(i_line.rsplit(' ')[0])
elif '#DATA BEGINS' in i_line:
# Data should start a couple lines later.
start_line = line_num + 3
elif line_num == start_line - 2:
# Grab names for each column of data.
raw_extras['col_names'] = col_names = re.findall(
r'\w+\-\w+|\w+\-\d+|\w+', i_line.rsplit(' ')[0])
if 'exmux' in col_names:
# Change filetype based on data organisation.
filetype = 'non-parsed'
else:
filetype = 'parsed'
if 'digaux' in col_names:
mrk_col = col_names.index('digaux')
mrk_data = list()
# raw_extras['offsets'].append(fid.tell())
elif line_num == start_line - 1:
raw_extras['offsets'].append(fid.tell())
line_num += 1
i_line = fid.readline()
assert sfreq is not None
raw_extras.update(
filetype=filetype, start_line=start_line, end_line=end_line)
# Label each channel in our data, for each data type (DC, AC, Ph).
# Data is organised by channels x timepoint, where the first
# 'source_num' rows correspond to the first detector, the next
# 'source_num' rows correspond to the second detector, and so on.
ch_names = list()
ch_types = list()
cals = list()
for det_num in range(raw_extras['detect_num']):
for src_num in range(raw_extras['source_num']):
for i_type, ch_type in [
('DC', 'fnirs_cw_amplitude'),
('AC', 'fnirs_fd_ac_amplitude'),
('Ph', 'fnirs_fd_phase')]:
ch_names.append(
f'S{src_num + 1}_D{det_num + 1} {i_type}')
ch_types.append(ch_type)
cals.append(np.pi / 180. if i_type == 'Ph' else 1.)
# Create info structure.
info = create_info(ch_names, sfreq, ch_types)
for ch, cal in zip(info['chs'], cals):
ch['cal'] = cal
# Determine how long our data is.
delta = end_line - start_line
assert len(raw_extras['offsets']) == delta + 1
if filetype == 'non-parsed':
delta //= (raw_extras['source_num'])
super(RawBOXY, self).__init__(
info, preload, filenames=[fname], first_samps=[0],
last_samps=[delta - 1], raw_extras=[raw_extras], verbose=verbose)
# Now let's grab our markers, if they are present.
if mrk_data is not None:
mrk_data = np.array(mrk_data, float)
# We only want the first instance of each trigger.
prev_mrk = 0
mrk_idx = list()
duration = list()
tmp_dur = 0
for i_num, i_mrk in enumerate(mrk_data):
if i_mrk != 0 and i_mrk != prev_mrk:
mrk_idx.append(i_num)
if i_mrk != 0 and i_mrk == prev_mrk:
tmp_dur += 1
if i_mrk == 0 and i_mrk != prev_mrk:
duration.append((tmp_dur + 1) / sfreq)
tmp_dur = 0
prev_mrk = i_mrk
onset = np.array(mrk_idx) / sfreq
description = mrk_data[mrk_idx]
annot = Annotations(onset, duration, description)
self.set_annotations(annot)
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a segment of data from a file.
Boxy file organises data in two ways, parsed or un-parsed.
Regardless of type, output has (n_montages x n_sources x n_detectors
+ n_marker_channels) rows, and (n_timepoints x n_blocks) columns.
"""
source_num = self._raw_extras[fi]['source_num']
detect_num = self._raw_extras[fi]['detect_num']
start_line = self._raw_extras[fi]['start_line']
end_line = self._raw_extras[fi]['end_line']
filetype = self._raw_extras[fi]['filetype']
col_names = self._raw_extras[fi]['col_names']
offsets = self._raw_extras[fi]['offsets']
boxy_file = self._filenames[fi]
# Non-parsed multiplexes sources, so we need source_num times as many
# lines in that case
if filetype == 'parsed':
start_read = start_line + start
stop_read = start_read + (stop - start)
else:
assert filetype == 'non-parsed'
start_read = start_line + start * source_num
stop_read = start_read + (stop - start) * source_num
assert start_read >= start_line
assert stop_read <= end_line
# Possible detector names.
detectors = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[:detect_num]
# Loop through our data.
one = np.zeros((len(col_names), stop_read - start_read))
with open(boxy_file, 'r') as fid:
# Just a more efficient version of this:
# ii = 0
# for line_num, i_line in enumerate(fid):
# if line_num >= start_read:
# if line_num >= stop_read:
# break
# # Grab actual data.
# i_data = i_line.strip().split()
# one[:len(i_data), ii] = i_data
# ii += 1
fid.seek(offsets[start_read - start_line], 0)
for oo in one.T:
i_data = fid.readline().strip().split()
oo[:len(i_data)] = i_data
# in theory we could index in the loop above, but it's painfully slow,
# so let's just take a hopefully minor memory hit
if filetype == 'non-parsed':
ch_idxs = [col_names.index(f'{det}-{i_type}')
for det in detectors
for i_type in ['DC', 'AC', 'Ph']]
one = one[ch_idxs].reshape( # each "time point" multiplexes srcs
len(detectors), 3, -1, source_num
).transpose( # reorganize into (det, source, DC/AC/Ph, t) order
0, 3, 1, 2
).reshape( # reshape the way we store it (det x source x DAP, t)
len(detectors) * source_num * 3, -1)
else:
assert filetype == 'parsed'
ch_idxs = [col_names.index(f'{det}-{i_type}{si + 1}')
for det in detectors
for si in range(source_num)
for i_type in ['DC', 'AC', 'Ph']]
one = one[ch_idxs]
# Place our data into the data object in place.
_mult_cal_one(data, one, idx, cals, mult)
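# Usage sketch (the file path is a placeholder):
#
#     raw = read_raw_boxy('recording.txt', preload=True)
#     print(raw.info['sfreq'], len(raw.ch_names))
#     raw.plot()  # standard mne.io.Raw API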
|
import re
import csv as csvapi
import json as jsonapi
import sqlite3 as sqlite3api
import builtins
from functional.execution import ExecutionEngine, ParallelExecutionEngine
from functional.pipeline import Sequence
from functional.util import is_primitive
from functional.io import get_read_function
class Stream(object):
"""
Represents and implements a stream which separates the responsibilities of Sequence and
ExecutionEngine.
An instance of Stream is normally accessed as `seq`
"""
def __init__(self, disable_compression=False, max_repr_items=100):
"""
Default stream constructor.
        :param disable_compression: Disable file compression detection
        :param max_repr_items: Maximum number of items to display in a repr
        """
self.disable_compression = disable_compression
self.max_repr_items = max_repr_items
def __call__(self, *args, **kwargs):
"""
Create a Sequence using a sequential ExecutionEngine.
If args has more than one argument then the argument list becomes the sequence.
If args[0] is primitive, a sequence wrapping it is created.
If args[0] is a list, tuple, iterable, or Sequence it is wrapped as a Sequence.
:param args: Sequence to wrap
:return: Wrapped sequence
"""
# pylint: disable=no-self-use
engine = ExecutionEngine()
return self._parse_args(
args, engine, "seq() takes at least 1 argument ({0} given)"
)
def _parse_args(self, args, engine, error_message):
if len(args) == 0:
raise TypeError(error_message.format(len(args)))
if len(args) == 1:
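            # Duck-type on the class name first so pandas is only imported
            # when the argument already looks like a DataFrame (keeps the
            # pandas dependency optional).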
try:
if type(args[0]).__name__ == "DataFrame":
import pandas
if isinstance(args[0], pandas.DataFrame):
return Sequence(
args[0].values,
engine=engine,
max_repr_items=self.max_repr_items,
)
except ImportError: # pragma: no cover
pass
if len(args) > 1:
return Sequence(
list(args), engine=engine, max_repr_items=self.max_repr_items
)
elif is_primitive(args[0]):
return Sequence(
[args[0]], engine=engine, max_repr_items=self.max_repr_items
)
else:
return Sequence(args[0], engine=engine, max_repr_items=self.max_repr_items)
def open(
self,
path,
delimiter=None,
mode="r",
buffering=-1,
encoding=None,
errors=None,
newline=None,
):
"""
Reads and parses input files as defined.
        If delimiter is not None, then the file is read in bulk then split on it. If it is None
        (the default), then the file is parsed as a sequence of lines. The rest of the options are
        passed directly to builtins.open, with the exception that write/append file modes are not
        allowed.
>>> seq.open('examples/gear_list.txt').take(1)
[u'tent\\n']
:param path: path to file
:param delimiter: delimiter to split joined text on. if None, defaults to per line split
:param mode: file open mode
:param buffering: passed to builtins.open
:param encoding: passed to builtins.open
:param errors: passed to builtins.open
:param newline: passed to builtins.open
:return: output of file depending on options wrapped in a Sequence via seq
"""
if not re.match("^[rbt]{1,3}$", mode):
raise ValueError("mode argument must be only have r, b, and t")
file_open = get_read_function(path, self.disable_compression)
file = file_open(
path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
)
if delimiter is None:
return self(file)
else:
return self("".join(list(file)).split(delimiter))
def range(self, *args):
"""
Alias to range function where seq.range(args) is equivalent to seq(range(args)).
>>> seq.range(1, 8, 2)
[1, 3, 5, 7]
:param args: args to range function
:return: range(args) wrapped by a sequence
"""
return self(builtins.range(*args)) # pylint: disable=no-member
def csv(self, csv_file, dialect="excel", **fmt_params):
"""
Reads and parses the input of a csv stream or file.
csv_file can be a filepath or an object that implements the iterator interface
(defines next() or __next__() depending on python version).
>>> seq.csv('examples/camping_purchases.csv').take(2)
[['1', 'tent', '300'], ['2', 'food', '100']]
:param csv_file: path to file or iterator object
:param dialect: dialect of csv, passed to csv.reader
:param fmt_params: options passed to csv.reader
:return: Sequence wrapping csv file
"""
if isinstance(csv_file, str):
file_open = get_read_function(csv_file, self.disable_compression)
input_file = file_open(csv_file)
elif hasattr(csv_file, "next") or hasattr(csv_file, "__next__"):
input_file = csv_file
else:
raise ValueError(
"csv_file must be a file path or implement the iterator interface"
)
csv_input = csvapi.reader(input_file, dialect=dialect, **fmt_params)
return self(csv_input).cache(delete_lineage=True)
def csv_dict_reader(
self,
csv_file,
fieldnames=None,
restkey=None,
restval=None,
dialect="excel",
**kwds
):
if isinstance(csv_file, str):
file_open = get_read_function(csv_file, self.disable_compression)
input_file = file_open(csv_file)
elif hasattr(csv_file, "next") or hasattr(csv_file, "__next__"):
input_file = csv_file
else:
raise ValueError(
"csv_file must be a file path or implement the iterator interface"
)
csv_input = csvapi.DictReader(
input_file,
fieldnames=fieldnames,
restkey=restkey,
restval=restval,
dialect=dialect,
**kwds
)
return self(csv_input).cache(delete_lineage=True)
def jsonl(self, jsonl_file):
"""
Reads and parses the input of a jsonl file stream or file.
Jsonl formatted files must have a single valid json value on each line which is parsed by
the python json module.
>>> seq.jsonl('examples/chat_logs.jsonl').first()
{u'date': u'10/09', u'message': u'hello anyone there?', u'user': u'bob'}
:param jsonl_file: path or file containing jsonl content
:return: Sequence wrapping jsonl file
"""
if isinstance(jsonl_file, str):
file_open = get_read_function(jsonl_file, self.disable_compression)
input_file = file_open(jsonl_file)
else:
input_file = jsonl_file
return self(input_file).map(jsonapi.loads).cache(delete_lineage=True)
def json(self, json_file):
"""
Reads and parses the input of a json file handler or file.
Json files are parsed differently depending on if the root is a dictionary or an array.
1) If the json's root is a dictionary, these are parsed into a sequence of (Key, Value)
pairs
2) If the json's root is an array, these are parsed into a sequence
of entries
>>> seq.json('examples/users.json').first()
[u'sarah', {u'date_created': u'08/08', u'news_email': True, u'email': u'[email protected]'}]
:param json_file: path or file containing json content
:return: Sequence wrapping jsonl file
"""
if isinstance(json_file, str):
file_open = get_read_function(json_file, self.disable_compression)
input_file = file_open(json_file)
json_input = jsonapi.load(input_file)
elif hasattr(json_file, "read"):
json_input = jsonapi.load(json_file)
else:
raise ValueError(
"json_file must be a file path or implement the iterator interface"
)
if isinstance(json_input, list):
return self(json_input)
else:
return self(json_input.items())
# pylint: disable=keyword-arg-before-vararg
def sqlite3(self, conn, sql, parameters=None, *args, **kwargs):
"""
Reads input by querying from a sqlite database.
>>> seq.sqlite3('examples/users.db', 'select id, name from users where id = 1;').first()
        (1, 'Tom')
:param conn: path or sqlite connection, cursor
:param sql: SQL query string
:param parameters: Parameters for sql query
:return: Sequence wrapping SQL cursor
"""
if parameters is None:
parameters = ()
if isinstance(conn, (sqlite3api.Connection, sqlite3api.Cursor)):
return self(conn.execute(sql, parameters))
elif isinstance(conn, str):
with sqlite3api.connect(conn, *args, **kwargs) as input_conn:
return self(input_conn.execute(sql, parameters))
else:
raise ValueError(
"conn must be a must be a file path or sqlite3 Connection/Cursor"
)
class ParallelStream(Stream):
"""
Parallelized version of functional.streams.Stream normally accessible as `pseq`
"""
def __init__(self, processes=None, partition_size=None, disable_compression=False):
"""
Configure Stream for parallel processing and file compression detection
:param processes: Number of parallel processes
:param disable_compression: Disable file compression detection
"""
super(ParallelStream, self).__init__(disable_compression=disable_compression)
self.processes = processes
self.partition_size = partition_size
def __call__(self, *args, **kwargs):
"""
Create a Sequence using a parallel ExecutionEngine.
If args has more than one argument then the argument list becomes the sequence.
If args[0] is primitive, a sequence wrapping it is created.
If args[0] is a list, tuple, iterable, or Sequence it is wrapped as a Sequence.
:param args: Sequence to wrap
:return: Wrapped sequence
"""
processes = kwargs.get("processes") or self.processes
partition_size = kwargs.get("partition_size") or self.partition_size
engine = ParallelExecutionEngine(
processes=processes, partition_size=partition_size
)
return self._parse_args(
args, engine, "pseq() takes at least 1 argument ({0} given)"
)
# pylint: disable=invalid-name
seq = Stream()
pseq = ParallelStream()
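# Quick example of the exported entry points (a sketch; map, filter, list,
# and sum are standard Sequence methods):
#
#     >>> seq(1, 2, 3, 4).filter(lambda x: x % 2 == 0).map(lambda x: x * 10).list()
#     [20, 40]
#     >>> pseq(range(8), processes=2).map(lambda x: x * x).sum()
#     140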
|
from datetime import date, datetime, time, timedelta
from decimal import Decimal
import pytz
from core.models import Author, Category
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from import_export import widgets
class BooleanWidgetTest(TestCase):
def setUp(self):
self.widget = widgets.BooleanWidget()
def test_clean(self):
self.assertTrue(self.widget.clean("1"))
self.assertTrue(self.widget.clean(1))
self.assertTrue(self.widget.clean("TRUE"))
self.assertTrue(self.widget.clean("True"))
self.assertTrue(self.widget.clean("true"))
self.assertFalse(self.widget.clean("0"))
self.assertFalse(self.widget.clean(0))
self.assertFalse(self.widget.clean("FALSE"))
self.assertFalse(self.widget.clean("False"))
self.assertFalse(self.widget.clean("false"))
self.assertEqual(self.widget.clean(""), None)
self.assertEqual(self.widget.clean("NONE"), None)
self.assertEqual(self.widget.clean("None"), None)
self.assertEqual(self.widget.clean("none"), None)
self.assertEqual(self.widget.clean("NULL"), None)
self.assertEqual(self.widget.clean("null"), None)
def test_render(self):
self.assertEqual(self.widget.render(True), "1")
self.assertEqual(self.widget.render(False), "0")
self.assertEqual(self.widget.render(None), "")
class DateWidgetTest(TestCase):
def setUp(self):
self.date = date(2012, 8, 13)
self.widget = widgets.DateWidget('%d.%m.%Y')
def test_render(self):
self.assertEqual(self.widget.render(self.date), "13.08.2012")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.2012"), self.date)
@override_settings(USE_TZ=True)
def test_use_tz(self):
self.assertEqual(self.widget.render(self.date), "13.08.2012")
self.assertEqual(self.widget.clean("13.08.2012"), self.date)
class DateTimeWidgetTest(TestCase):
def setUp(self):
self.datetime = datetime(2012, 8, 13, 18, 0, 0)
self.widget = widgets.DateTimeWidget('%d.%m.%Y %H:%M:%S')
def test_render(self):
self.assertEqual(self.widget.render(self.datetime),
"13.08.2012 18:00:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.2012 18:00:00"),
self.datetime)
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Ljubljana')
def test_use_tz(self):
utc_dt = timezone.make_aware(self.datetime, pytz.UTC)
self.assertEqual(self.widget.render(utc_dt), "13.08.2012 20:00:00")
self.assertEqual(self.widget.clean("13.08.2012 20:00:00"), utc_dt)
class DateWidgetBefore1900Test(TestCase):
def setUp(self):
self.date = date(1868, 8, 13)
self.widget = widgets.DateWidget('%d.%m.%Y')
def test_render(self):
self.assertEqual(self.widget.render(self.date), "13.08.1868")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.1868"), self.date)
class TimeWidgetTest(TestCase):
def setUp(self):
self.time = time(20, 15, 0)
self.widget = widgets.TimeWidget('%H:%M:%S')
def test_render(self):
self.assertEqual(self.widget.render(self.time), "20:15:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("20:15:00"), self.time)
class DurationWidgetTest(TestCase):
def setUp(self):
self.duration = timedelta(hours=1, minutes=57, seconds=0)
self.widget = widgets.DurationWidget()
def test_render(self):
self.assertEqual(self.widget.render(self.duration), "1:57:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_render_zero(self):
self.assertEqual(self.widget.render(timedelta(0)), "0:00:00")
def test_clean(self):
self.assertEqual(self.widget.clean("1:57:00"), self.duration)
def test_clean_none(self):
self.assertEqual(self.widget.clean(""), None)
def test_clean_zero(self):
self.assertEqual(self.widget.clean("0:00:00"), timedelta(0))
class FloatWidgetTest(TestCase):
def setUp(self):
self.value = 11.111
self.widget = widgets.FloatWidget()
def test_clean(self):
self.assertEqual(self.widget.clean(11.111), self.value)
def test_render(self):
self.assertEqual(self.widget.render(self.value), self.value)
def test_clean_string_zero(self):
self.assertEqual(self.widget.clean("0"), 0.0)
self.assertEqual(self.widget.clean("0.0"), 0.0)
def test_clean_empty_string(self):
self.assertEqual(self.widget.clean(""), None)
self.assertEqual(self.widget.clean(" "), None)
self.assertEqual(self.widget.clean("\r\n\t"), None)
class DecimalWidgetTest(TestCase):
def setUp(self):
self.value = Decimal("11.111")
self.widget = widgets.DecimalWidget()
def test_clean(self):
self.assertEqual(self.widget.clean("11.111"), self.value)
self.assertEqual(self.widget.clean(11.111), self.value)
def test_render(self):
self.assertEqual(self.widget.render(self.value), self.value)
def test_clean_string_zero(self):
self.assertEqual(self.widget.clean("0"), Decimal("0"))
self.assertEqual(self.widget.clean("0.0"), Decimal("0"))
def test_clean_empty_string(self):
self.assertEqual(self.widget.clean(""), None)
self.assertEqual(self.widget.clean(" "), None)
self.assertEqual(self.widget.clean("\r\n\t"), None)
class IntegerWidgetTest(TestCase):
def setUp(self):
self.value = 0
self.widget = widgets.IntegerWidget()
def test_clean_integer_zero(self):
self.assertEqual(self.widget.clean(0), self.value)
def test_clean_string_zero(self):
self.assertEqual(self.widget.clean("0"), self.value)
self.assertEqual(self.widget.clean("0.0"), self.value)
def test_clean_empty_string(self):
self.assertEqual(self.widget.clean(""), None)
self.assertEqual(self.widget.clean(" "), None)
self.assertEqual(self.widget.clean("\n\t\r"), None)
class ForeignKeyWidgetTest(TestCase):
def setUp(self):
self.widget = widgets.ForeignKeyWidget(Author)
self.author = Author.objects.create(name='Foo')
def test_clean(self):
self.assertEqual(self.widget.clean(self.author.id), self.author)
def test_clean_empty(self):
self.assertEqual(self.widget.clean(""), None)
def test_render(self):
self.assertEqual(self.widget.render(self.author), self.author.pk)
def test_render_empty(self):
self.assertEqual(self.widget.render(None), "")
def test_clean_multi_column(self):
class BirthdayWidget(widgets.ForeignKeyWidget):
def get_queryset(self, value, row):
return self.model.objects.filter(
birthday=row['birthday']
)
author2 = Author.objects.create(name='Foo')
author2.birthday = "2016-01-01"
author2.save()
birthday_widget = BirthdayWidget(Author, 'name')
row = {'name': "Foo", 'birthday': author2.birthday}
self.assertEqual(birthday_widget.clean("Foo", row), author2)
class ManyToManyWidget(TestCase):
def setUp(self):
self.widget = widgets.ManyToManyWidget(Category)
self.widget_name = widgets.ManyToManyWidget(Category, field="name")
self.cat1 = Category.objects.create(name='Cat úňíčóďě')
self.cat2 = Category.objects.create(name='Cat 2')
def test_clean(self):
value = "%s,%s" % (self.cat1.pk, self.cat2.pk)
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_field(self):
value = "%s,%s" % (self.cat1.name, self.cat2.name)
cleaned_data = self.widget_name.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_field_spaces(self):
value = "%s, %s" % (self.cat1.name, self.cat2.name)
cleaned_data = self.widget_name.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_typo(self):
value = "%s," % self.cat1.pk
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_int(self):
value = self.cat1.pk
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_float(self):
value = float(self.cat1.pk)
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_render(self):
self.assertEqual(self.widget.render(Category.objects.order_by('id')),
"%s,%s" % (self.cat1.pk, self.cat2.pk))
self.assertEqual(self.widget_name.render(Category.objects.order_by('id')),
"%s,%s" % (self.cat1.name, self.cat2.name))
class JSONWidgetTest(TestCase):
def setUp(self):
self.value = {"value": 23}
self.widget = widgets.JSONWidget()
def test_clean(self):
self.assertEqual(self.widget.clean('{"value": 23}'), self.value)
def test_render(self):
self.assertEqual(self.widget.render(self.value), '{"value": 23}')
def test_clean_single_quoted_string(self):
self.assertEqual(self.widget.clean("{'value': 23}"), self.value)
self.assertEqual(self.widget.clean("{'value': null}"), {'value': None})
def test_clean_none(self):
self.assertEqual(self.widget.clean(None), None)
self.assertEqual(self.widget.clean('{"value": null}'), {'value': None})
def test_render_none(self):
self.assertEqual(self.widget.render(None), None)
self.assertEqual(self.widget.render(dict()), None)
self.assertEqual(self.widget.render({"value": None}), '{"value": null}')
|
from Handler import Handler
class NullHandler(Handler):
"""
    Implements the abstract Handler class, doing nothing except logging the metric
"""
def process(self, metric):
"""
Process a metric by doing nothing
"""
self.log.debug("Metric: %s", str(metric).rstrip().replace(' ', '\t'))
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(NullHandler, self).get_default_config_help()
config.update({
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(NullHandler, self).get_default_config()
config.update({
})
return config
|
import os
import re
from setuptools import setup, find_packages
from codecs import open
from os import path
basedir = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(basedir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# locate our version number
def read_version_py(file_name):
try:
version_string_line = open(file_name, "rt").read()
except EnvironmentError:
return None
else:
version_regex = r"^version_str = ['\"]([^'\"]*)['\"]"
mo = re.search(version_regex, version_string_line, re.M)
if mo:
return mo.group(1)
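# read_version_py() above expects the version file to contain a line such as
# (hypothetical value, the real one lives in kalliope/_version.py):
#
#     version_str = "0.5.4"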
def package_files(directory):
paths = []
for (path, directories, filenames) in os.walk(directory):
for filename in filenames:
paths.append(os.path.join('..', path, filename))
return paths
VERSION_PY_FILENAME = 'kalliope/_version.py'
version = read_version_py(VERSION_PY_FILENAME)
extra_files = package_files('kalliope/trigger/snowboy')
extra_files.append('brain.yml')
extra_files.append('settings.yml')
setup(
name='kalliope',
version=version,
description='Kalliope is a modular always-on voice controlled personal assistant designed for home automation.',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/kalliope-project/kalliope',
author='The dream team of Kalliope-project',
author_email='[email protected]',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Home Automation',
'Topic :: Multimedia :: Sound/Audio :: Speech',
'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
'Topic :: Scientific/Engineering :: Artificial Intelligence'
],
keywords='assistant bot TTS STT jarvis',
# included packages
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
python_requires=">=3.6",
# required libs
install_requires=[
'Werkzeug==0.16.1',
'pyyaml>=5.1',
'six>=1.12.0',
'SpeechRecognition>=3.8.1',
'markupsafe>=1.1.1',
'pyaudio>=0.2.11',
'pyasn1>=0.4.5',
'ansible>=2.9.5',
'jinja2>=2.10.1',
'cffi>=1.12.3',
'ipaddress>=1.0.17',
'flask>=1.0.3',
'Flask-Restful>=0.3.7',
'flask_cors>=3.0.8',
'requests>=2.22.0',
'httpretty>=0.8.14',
'mock>=3.0.5',
'Flask-Testing>=0.7.1',
'apscheduler>=3.6.0',
'GitPython>=3.0.4',
'packaging>=19.0',
'transitions>=0.6.9',
'sounddevice>=0.3.13',
'SoundFile>=0.10.2',
'pyalsaaudio>=0.8.4',
'paho-mqtt>=1.4.0',
'voicerss_tts>=1.0.6',
'gTTS>=2.0.3',
'urllib3>=1.25.3',
'gevent>=20.9.0'
],
# additional files
package_data={
'kalliope': extra_files,
},
# entry point script
entry_points={
'console_scripts': [
'kalliope=kalliope:main',
],
},
)
|
import os
import sys
import threading
import time
import unittest.mock
import pytest
from cherrypy.process import wspbus
CI_ON_MACOS = bool(os.getenv('CI')) and sys.platform == 'darwin'
msg = 'Listener %d on channel %s: %s.' # pylint: disable=invalid-name
@pytest.fixture
def bus():
"""Return a wspbus instance."""
return wspbus.Bus()
@pytest.fixture
def log_tracker(bus):
"""Return an instance of bus log tracker."""
class LogTracker: # pylint: disable=too-few-public-methods
"""Bus log tracker."""
log_entries = []
def __init__(self, bus):
def logit(msg, level): # pylint: disable=unused-argument
self.log_entries.append(msg)
bus.subscribe('log', logit)
return LogTracker(bus)
@pytest.fixture
def listener():
"""Return an instance of bus response tracker."""
    class Listener:  # pylint: disable=too-few-public-methods
"""Bus handler return value tracker."""
responses = []
def get_listener(self, channel, index):
"""Return an argument tracking listener."""
def listener(arg=None):
self.responses.append(msg % (index, channel, arg))
return listener
    return Listener()
def test_builtin_channels(bus, listener):
"""Test that built-in channels trigger corresponding listeners."""
expected = []
for channel in bus.listeners:
for index, priority in enumerate([100, 50, 0, 51]):
bus.subscribe(
channel,
listener.get_listener(channel, index),
priority,
)
for channel in bus.listeners:
bus.publish(channel)
expected.extend([msg % (i, channel, None) for i in (2, 1, 3, 0)])
bus.publish(channel, arg=79347)
expected.extend([msg % (i, channel, 79347) for i in (2, 1, 3, 0)])
assert listener.responses == expected
def test_custom_channels(bus, listener):
"""Test that custom pub-sub channels work as built-in ones."""
expected = []
custom_listeners = ('hugh', 'louis', 'dewey')
for channel in custom_listeners:
for index, priority in enumerate([None, 10, 60, 40]):
bus.subscribe(
channel,
listener.get_listener(channel, index),
priority,
)
for channel in custom_listeners:
bus.publish(channel, 'ah so')
expected.extend(msg % (i, channel, 'ah so') for i in (1, 3, 0, 2))
bus.publish(channel)
expected.extend(msg % (i, channel, None) for i in (1, 3, 0, 2))
assert listener.responses == expected
def test_listener_errors(bus, listener):
"""Test that unhandled exceptions raise channel failures."""
expected = []
channels = [c for c in bus.listeners if c != 'log']
for channel in channels:
bus.subscribe(channel, listener.get_listener(channel, 1))
# This will break since the lambda takes no args.
bus.subscribe(channel, lambda: None, priority=20)
for channel in channels:
with pytest.raises(wspbus.ChannelFailures):
bus.publish(channel, 123)
expected.append(msg % (1, channel, 123))
assert listener.responses == expected
def test_start(bus, listener, log_tracker):
"""Test that bus start sequence calls all listeners."""
num = 3
for index in range(num):
bus.subscribe('start', listener.get_listener('start', index))
bus.start()
try:
# The start method MUST call all 'start' listeners.
assert (
set(listener.responses) ==
set(msg % (i, 'start', None) for i in range(num)))
# The start method MUST move the state to STARTED
# (or EXITING, if errors occur)
assert bus.state == bus.states.STARTED
# The start method MUST log its states.
assert log_tracker.log_entries == ['Bus STARTING', 'Bus STARTED']
finally:
# Exit so the atexit handler doesn't complain.
bus.exit()
def test_stop(bus, listener, log_tracker):
"""Test that bus stop sequence calls all listeners."""
num = 3
for index in range(num):
bus.subscribe('stop', listener.get_listener('stop', index))
bus.stop()
# The stop method MUST call all 'stop' listeners.
assert (set(listener.responses) ==
set(msg % (i, 'stop', None) for i in range(num)))
# The stop method MUST move the state to STOPPED
assert bus.state == bus.states.STOPPED
# The stop method MUST log its states.
assert log_tracker.log_entries == ['Bus STOPPING', 'Bus STOPPED']
def test_graceful(bus, listener, log_tracker):
"""Test that bus graceful state triggers all listeners."""
num = 3
for index in range(num):
bus.subscribe('graceful', listener.get_listener('graceful', index))
bus.graceful()
# The graceful method MUST call all 'graceful' listeners.
assert (
set(listener.responses) ==
set(msg % (i, 'graceful', None) for i in range(num)))
# The graceful method MUST log its states.
assert log_tracker.log_entries == ['Bus graceful']
def test_exit(bus, listener, log_tracker):
"""Test that bus exit sequence is correct."""
num = 3
for index in range(num):
bus.subscribe('stop', listener.get_listener('stop', index))
bus.subscribe('exit', listener.get_listener('exit', index))
bus.exit()
# The exit method MUST call all 'stop' listeners,
# and then all 'exit' listeners.
assert (set(listener.responses) ==
set([msg % (i, 'stop', None) for i in range(num)] +
[msg % (i, 'exit', None) for i in range(num)]))
# The exit method MUST move the state to EXITING
assert bus.state == bus.states.EXITING
# The exit method MUST log its states.
assert (log_tracker.log_entries ==
['Bus STOPPING', 'Bus STOPPED', 'Bus EXITING', 'Bus EXITED'])
def test_wait(bus):
"""Test that bus wait awaits for states."""
def f(method): # pylint: disable=invalid-name
time.sleep(0.2)
getattr(bus, method)()
flow = [
('start', [bus.states.STARTED]),
('stop', [bus.states.STOPPED]),
('start', [bus.states.STARTING, bus.states.STARTED]),
('exit', [bus.states.EXITING]),
]
for method, states in flow:
threading.Thread(target=f, args=(method,)).start()
bus.wait(states)
# The wait method MUST wait for the given state(s).
assert bus.state in states, 'State %r not in %r' % (bus.state, states)
@pytest.mark.xfail(CI_ON_MACOS, reason='continuous integration on macOS fails')
def test_wait_publishes_periodically(bus):
"""Test that wait publishes each tick."""
callback = unittest.mock.MagicMock()
bus.subscribe('main', callback)
def set_start():
time.sleep(0.05)
bus.start()
threading.Thread(target=set_start).start()
bus.wait(bus.states.STARTED, interval=0.01, channel='main')
assert callback.call_count > 3
def test_block(bus, log_tracker):
"""Test that bus block waits for exiting."""
def f(): # pylint: disable=invalid-name
time.sleep(0.2)
bus.exit()
def g(): # pylint: disable=invalid-name
time.sleep(0.4)
threading.Thread(target=f).start()
threading.Thread(target=g).start()
threads = [t for t in threading.enumerate() if not t.daemon]
assert len(threads) == 3
bus.block()
# The block method MUST wait for the EXITING state.
assert bus.state == bus.states.EXITING
# The block method MUST wait for ALL non-main, non-daemon threads to
# finish.
threads = [t for t in threading.enumerate() if not t.daemon]
assert len(threads) == 1
expected_bus_messages = [
'Bus STOPPING',
'Bus STOPPED',
'Bus EXITING',
'Bus EXITED',
'Waiting for child threads to terminate...',
]
bus_msg_num = len(expected_bus_messages)
# If the last message mentions an indeterminable thread name then ignore it
assert log_tracker.log_entries[:bus_msg_num] == expected_bus_messages
assert len(log_tracker.log_entries[bus_msg_num:]) <= 1, (
'No more than one extra log line with the thread name expected'
)
def test_start_with_callback(bus):
"""Test that callback fires on bus start."""
try:
events = []
def f(*args, **kwargs): # pylint: disable=invalid-name
events.append(('f', args, kwargs))
def g(): # pylint: disable=invalid-name
events.append('g')
bus.subscribe('start', g)
bus.start_with_callback(f, (1, 3, 5), {'foo': 'bar'})
# Give wait() time to run f()
time.sleep(0.2)
# The callback method MUST wait for the STARTED state.
assert bus.state == bus.states.STARTED
# The callback method MUST run after all start methods.
assert events == ['g', ('f', (1, 3, 5), {'foo': 'bar'})]
finally:
bus.exit()
def test_log(bus, log_tracker):
"""Test that bus messages and errors are logged."""
assert log_tracker.log_entries == []
# Try a normal message.
expected = []
for msg_ in ["O mah darlin'"] * 3 + ['Clementiiiiiiiine']:
bus.log(msg_)
expected.append(msg_)
assert log_tracker.log_entries == expected
# Try an error message
try:
foo
except NameError:
bus.log('You are lost and gone forever', traceback=True)
lastmsg = log_tracker.log_entries[-1]
assert 'Traceback' in lastmsg and 'NameError' in lastmsg, (
'Last log message %r did not contain '
'the expected traceback.' % lastmsg
)
else:
pytest.fail('NameError was not raised as expected.')
|
import itertools
from collections import defaultdict
import array
import logging
import tempfile
import numpy
import fastcluster
import hcluster
from typing import (Iterable,
Dict,
cast,
List,
Set,
Generator,
Sequence,
Tuple)
from dedupe._typing import Clusters, RecordID, Links
logger = logging.getLogger(__name__)
def connected_components(edgelist: numpy.ndarray,
max_components: int) -> Generator[numpy.ndarray, None, None]:
if len(edgelist) == 0:
        return  # PEP 479: raising StopIteration inside a generator is a RuntimeError
unlabeled_edgelist = edgelist
# we are going to keep track of the connected components
# with another field in the record array of the edgelist.
# unfortunately, it's not straightforward to add another
# field to a memmapped record array so, we are going to
# have to create a new memmapped array with all the fields
# we want and copy things over.
with tempfile.TemporaryDirectory() as path:
filename = path + '/unlabeled_edgelist'
edgelist = numpy.memmap(filename,
dtype=(unlabeled_edgelist.dtype.descr
+ [('label', 'int32')]),
mode='w+',
shape=unlabeled_edgelist.shape)
if hasattr(unlabeled_edgelist, 'filename'):
copy_mmap_record_arrays(unlabeled_edgelist,
edgelist,
['pairs', 'score'])
else:
copy_to_mmap_record_array(unlabeled_edgelist,
edgelist,
['pairs', 'score'])
yield from _connected_components(edgelist, max_components)
edgelist._mmap.close()
def _connected_components(edgelist: numpy.ndarray,
max_components: int) -> Generator[numpy.ndarray, None, None]:
component_stops = union_find(edgelist)
start = 0
for stop in component_stops:
sub_graph = edgelist[start:stop]
start = stop
n_components = len(numpy.unique(sub_graph['pairs']))
if n_components > max_components:
min_score = numpy.min(sub_graph['score'])
min_score_logit = numpy.log(min_score) - numpy.log(1 - min_score)
threshold = 1 / (1 + numpy.exp(-min_score_logit - 1))
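            # equivalent to nudging the weakest edge's score up by one
            # unit in logit space and re-filtering with that cut-off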
logger.warning('A component contained %s elements. '
'Components larger than %s are '
're-filtered. The threshold for this '
'filtering is %s' % (n_components,
max_components,
threshold))
            # slices of memmapped arrays are also memmapped arrays,
            # which is what we want. So, we sort and slice as opposed
            # to selecting like `sub_graph[sub_graph['score'] >
            # threshold]`, which would lead to an in-memory copy being
            # made
sub_graph.sort(order='score')
cut_point = numpy.searchsorted(sub_graph['score'], threshold)
filtered_sub_graph = sub_graph[max(cut_point, 2):]
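            # dropping at least the two lowest-scoring edges guarantees
            # that the recursion below always shrinks the sub-graph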
for sub_graph in _connected_components(filtered_sub_graph,
max_components):
yield sub_graph[['pairs', 'score']]
else:
yield sub_graph[['pairs', 'score']]
def union_find(scored_pairs: numpy.ndarray) -> Sequence[int]:
root: Dict[RecordID, int] = {}
components = {}
edgelist = scored_pairs['pairs']
labels = scored_pairs['label']
it = numpy.nditer(edgelist, ['external_loop'])
for i, (a, b) in enumerate(it):
root_a = root.get(a)
root_b = root.get(b)
if root_a is None and root_b is None:
root[a] = root[b] = i
components[i] = array.array('I', [i])
elif root_a is None or root_b is None:
if root_a is None:
b = a
root_a = root_b
root_a = cast(int, root_a)
components[root_a].append(i)
root[b] = root_a
elif root_a != root_b:
if len(components[root_a]) < len(components[root_b]):
root_a, root_b = root_b, root_a
components[root_a].extend(components[root_b])
components[root_a].append(i)
component_b = numpy.unique(edgelist[components[root_b]])
for node in component_b:
root[node] = root_a
del components[root_b]
else:
components[root_a].append(i)
for label, component in components.items():
labels[component] = label
    # we want our selections to remain memmapped arrays,
    # so we sort and get the indices where the components
    # change. This will allow us to slice pieces of the
    # memmapped array. Those slices will also be memmapped
    # arrays.
scored_pairs.sort(order='label')
return numpy.cumsum(numpy.unique(scored_pairs['label'],
return_counts=True)[1])
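# A small, hypothetical illustration of the boundary computation above:
# for sorted labels [0, 0, 0, 2, 2, 5], numpy.unique(..., return_counts=True)[1]
# is [3, 2, 1], so numpy.cumsum(...) returns [3, 5, 6] -- the slice stop
# for each connected component.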
def condensedDistance(dupes: numpy.ndarray) -> Tuple[Dict[int, RecordID],
numpy.ndarray,
int]:
'''
Convert the pairwise list of distances in dupes to "condensed
distance matrix" required by the hierarchical clustering
algorithms. Also return a dictionary that maps the distance matrix
to the record_ids.
    The formula for an index of the condensed matrix is
    index = {N choose 2} - {N-row choose 2} + (col-row-1)
          = N*(N-1)/2 - (N-row)*(N-row-1)/2 + col - row - 1
            ^^^^^^^^^   ^^^^^^^^^^^^^^^^^^^
          matrix_length        row_step
    where (row, col) is the index of an uncondensed square N x N distance matrix.
See http://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.squareform.html
'''
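    # A hypothetical sanity check of the formula above: with N = 4 and
    # (row, col) = (1, 3), index = 6 - 3 + 3 - 1 - 1 = 4, matching the
    # condensed ordering (0,1)=0, (0,2)=1, (0,3)=2, (1,2)=3, (1,3)=4, (2,3)=5.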
candidate_set = numpy.unique(dupes['pairs'])
i_to_id = dict(enumerate(candidate_set))
ids = candidate_set.searchsorted(dupes['pairs'])
row = ids[:, 0]
col = ids[:, 1]
N = len(candidate_set)
matrix_length = N * (N - 1) / 2
row_step = (N - row) * (N - row - 1) / 2
index = matrix_length - row_step + col - row - 1
condensed_distances = numpy.ones(int(matrix_length), 'f4')
condensed_distances[index.astype(int)] = 1 - dupes['score']
return i_to_id, condensed_distances, N
def cluster(dupes: numpy.ndarray,
threshold: float = .5,
max_components: int = 30000) -> Clusters:
'''
    Takes in a list of duplicate pairs and clusters them into a
    list of records that all refer to the same entity, based on a given
    threshold
    Keyword arguments:
    threshold -- number between 0 and 1 (default is .5). Lowering the
                 number will increase precision; raising it will
                 increase recall
'''
distance_threshold = 1 - threshold
dupe_sub_graphs = connected_components(dupes, max_components)
for sub_graph in dupe_sub_graphs:
if len(sub_graph) > 1:
i_to_id, condensed_distances, N = condensedDistance(sub_graph)
linkage = fastcluster.linkage(condensed_distances,
method='centroid',
preserve_input=True)
partition = hcluster.fcluster(linkage,
distance_threshold,
criterion='distance')
clusters: Dict[int, List[int]] = defaultdict(list)
for i, cluster_id in enumerate(partition):
clusters[cluster_id].append(i)
for cluster in clusters.values():
if len(cluster) > 1:
scores = confidences(cluster, condensed_distances, N)
yield tuple(i_to_id[i] for i in cluster), scores
else:
(ids, score), = sub_graph
if score > threshold:
yield tuple(ids), (score,) * 2
def confidences(cluster: Sequence[int],
condensed_distances: numpy.ndarray,
d: int) -> numpy.ndarray:
'''
We calculate a per record score that is similar to a standard
deviation. The main reason is that these record scores can be
used to calculate the standard deviation of an entire cluster,
which is a reasonable metric for clusters.
'''
scores_d = dict.fromkeys(cluster, 0.0)
squared_distances = condensed_distances ** 2
for i, j in itertools.combinations(cluster, 2):
index = d * (d - 1) / 2 - (d - i) * (d - i - 1) / 2 + j - i - 1
squared_dist = squared_distances[int(index)]
scores_d[i] += squared_dist
scores_d[j] += squared_dist
scores = numpy.array([score for _, score in sorted(scores_d.items())])
scores /= len(cluster) - 1
scores = numpy.sqrt(scores)
scores = 1 - scores
return scores
def greedyMatching(dupes: numpy.ndarray) -> Links:
A: Set[RecordID] = set()
B: Set[RecordID] = set()
dupes.sort(order='score')
dupes = dupes[::-1]
for (a, b), score in dupes:
if a not in A and b not in B:
A.add(a)
B.add(b)
yield (a, b), score
def gazetteMatching(scored_blocks: Iterable[numpy.ndarray],
threshold: float = 0,
n_matches: int = 1) -> Links:
for block in scored_blocks:
block = block[block['score'] > threshold]
block.sort(order='score')
block = block[::-1]
if len(block):
if n_matches:
yield block[:n_matches].copy()
else:
yield block.copy()
def pair_gazette_matching(scored_pairs: numpy.ndarray,
threshold: float = 0.0,
n_matches: int = 1) -> Links:
scored_pairs.sort(order='pairs')
group_key = scored_pairs['pairs'][:, 0]
change_points = numpy.where(numpy.roll(group_key, 1) != group_key)[0]
scored_blocks = numpy.split(scored_pairs, change_points)
for match in gazetteMatching(scored_blocks, threshold, n_matches):
if match:
yield from match
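# A hypothetical illustration of the grouping above: for group_key
# [7, 7, 8, 8, 8, 9], numpy.roll(group_key, 1) is [9, 7, 7, 8, 8, 8], so
# change_points is [0, 2, 5]; numpy.split() then yields one (possibly
# empty) block per distinct first record, and gazetteMatching() skips
# empty blocks.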
def copy_to_mmap_record_array(source, target, fields, chunksize=100000):
'''
Writing into a memmapped array allocates memory equivalent to the
    amount that you are writing. With big arrays this is undesirable,
    so we write in chunks.
'''
start = 0
stops = itertools.chain(range(chunksize, source.size, chunksize),
[source.size])
for stop in stops:
shape = (stop - start,)
source_slice = source[start:stop]
target_slice = numpy.memmap(target.filename,
dtype=target.dtype,
offset=(start * target.dtype.itemsize),
shape=shape)
target_slice[fields] = source_slice[fields]
start = stop
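# For example (hypothetical sizes): with source.size == 250000 and the
# default chunksize of 100000, the loop above writes three slices,
# [0:100000], [100000:200000] and [200000:250000], so only one chunk
# is materialized in memory at a time.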
def copy_mmap_record_arrays(source, target, fields, chunksize=100000):
'''
Writing into a memmapped array allocates memory equivalent to the
    amount that you are writing. With big arrays this is undesirable,
    so we write in chunks.
'''
start = 0
stops = itertools.chain(range(chunksize, source.size, chunksize),
[source.size])
for stop in stops:
shape = (stop - start,)
source_slice = numpy.memmap(source.filename,
dtype=source.dtype,
offset=(start * source.dtype.itemsize),
shape=shape)
target_slice = numpy.memmap(target.filename,
dtype=target.dtype,
offset=(start * target.dtype.itemsize),
shape=shape)
target_slice[fields] = source_slice[fields]
start = stop
|
from homeassistant.components import weather
from homeassistant.setup import async_setup_component
# Example config snippet from documentation.
BASE_CONFIG = {
"weather": [
{
"platform": "buienradar",
"name": "volkel",
"latitude": 51.65,
"longitude": 5.7,
"forecast": True,
}
]
}
async def test_smoke_test_setup_component(hass):
"""Smoke test for successfully set-up with default config."""
assert await async_setup_component(hass, weather.DOMAIN, BASE_CONFIG)
await hass.async_block_till_done()
state = hass.states.get("weather.volkel")
assert state.state == "unknown"
|
from typing import Optional
from aioesphomeapi import SwitchInfo, SwitchState
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up ESPHome switches based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="switch",
info_type=SwitchInfo,
entity_type=EsphomeSwitch,
state_type=SwitchState,
)
class EsphomeSwitch(EsphomeEntity, SwitchEntity):
"""A switch implementation for ESPHome."""
@property
def _static_info(self) -> SwitchInfo:
return super()._static_info
@property
def _state(self) -> Optional[SwitchState]:
return super()._state
@property
def icon(self) -> str:
"""Return the icon."""
return self._static_info.icon
@property
def assumed_state(self) -> bool:
"""Return true if we do optimistic updates."""
return self._static_info.assumed_state
# https://github.com/PyCQA/pylint/issues/3150 for @esphome_state_property
# pylint: disable=invalid-overridden-method
@esphome_state_property
def is_on(self) -> Optional[bool]:
"""Return true if the switch is on."""
return self._state.state
async def async_turn_on(self, **kwargs) -> None:
"""Turn the entity on."""
await self._client.switch_command(self._static_info.key, True)
async def async_turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
await self._client.switch_command(self._static_info.key, False)
|
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import StringList
from mne.defaults import DEFAULTS
from mne.io.pick import (_PICK_TYPES_DATA_DICT, _DATA_CH_TYPES_SPLIT,
_DATA_CH_TYPES_ORDER_DEFAULT)
class MNESubstitution(Directive): # noqa: D101
has_content = False
required_arguments = 1
final_argument_whitespace = True
def run(self, **kwargs): # noqa: D102
env = self.state.document.settings.env
if self.arguments[0] == 'data channels list':
keys = list()
for key in _DATA_CH_TYPES_ORDER_DEFAULT:
if key in _DATA_CH_TYPES_SPLIT:
keys.append(key)
elif key not in ('meg', 'fnirs') and \
_PICK_TYPES_DATA_DICT.get(key, False):
keys.append(key)
rst = '- ' + '\n- '.join(
'``%r``: **%s** (scaled by %g to plot in *%s*)'
% (key, DEFAULTS['titles'][key], DEFAULTS['scalings'][key],
DEFAULTS['units'][key])
for key in keys)
else:
raise self.error(
'MNE directive unknown in %s: %r'
% (env.doc2path(env.docname, base=None),
self.arguments[0],))
node = nodes.compound(rst) # General(Body), Element
content = StringList(
rst.split('\n'), parent=self.content.parent,
parent_offset=self.content.parent_offset)
self.state.nested_parse(content, self.content_offset, node)
return [node]
def setup(app): # noqa: D103
app.add_directive('mne', MNESubstitution)
return {'version': '0.1',
'parallel_read_safe': True,
'parallel_write_safe': True}
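# Hypothetical reST usage of the directive registered above:
#
#     .. mne:: data channels list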
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import roomba_reported_state
from .const import BLID, DOMAIN, ROOMBA_SESSION
from .irobot_base import IRobotEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the iRobot Roomba vacuum cleaner."""
domain_data = hass.data[DOMAIN][config_entry.entry_id]
roomba = domain_data[ROOMBA_SESSION]
blid = domain_data[BLID]
status = roomba_reported_state(roomba).get("bin", {})
if "full" in status:
roomba_vac = RoombaBinStatus(roomba, blid)
async_add_entities([roomba_vac], True)
class RoombaBinStatus(IRobotEntity, BinarySensorEntity):
"""Class to hold Roomba Sensor basic info."""
ICON = "mdi:delete-variant"
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} Bin Full"
@property
def unique_id(self):
"""Return the ID of this sensor."""
return f"bin_{self._blid}"
@property
def icon(self):
"""Return the icon of this sensor."""
return self.ICON
@property
def is_on(self):
"""Return the state of the sensor."""
return roomba_reported_state(self.vacuum).get("bin", {}).get("full", False)
def new_state_filter(self, new_state):
"""Filter the new state."""
return "bin" in new_state
|
from contextlib import contextmanager
from homeassistant.components.verisure import DOMAIN as VERISURE_DOMAIN
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
CONFIG = {
"verisure": {
"username": "test",
"password": "test",
"alarm": False,
"door_window": False,
"hygrometers": False,
"mouse": False,
"smartplugs": False,
"thermometers": False,
"smartcam": False,
}
}
@contextmanager
def mock_hub(config, response):
"""Extensively mock out a verisure hub."""
hub_prefix = "homeassistant.components.verisure.binary_sensor.hub"
verisure_prefix = "verisure.Session"
with patch(verisure_prefix) as session, patch(hub_prefix) as hub:
session.login.return_value = True
hub.config = config["verisure"]
hub.get.return_value = response
hub.get_first.return_value = response.get("ethernetConnectedNow", None)
yield hub
async def setup_verisure(hass, config, response):
"""Set up mock verisure."""
with mock_hub(config, response):
await async_setup_component(hass, VERISURE_DOMAIN, config)
await hass.async_block_till_done()
async def test_verisure_no_ethernet_status(hass):
"""Test no data from API."""
await setup_verisure(hass, CONFIG, {})
assert len(hass.states.async_all()) == 1
entity_id = hass.states.async_entity_ids()[0]
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
async def test_verisure_ethernet_status_disconnected(hass):
"""Test disconnected."""
await setup_verisure(hass, CONFIG, {"ethernetConnectedNow": False})
assert len(hass.states.async_all()) == 1
entity_id = hass.states.async_entity_ids()[0]
assert hass.states.get(entity_id).state == "off"
async def test_verisure_ethernet_status_connected(hass):
"""Test connected."""
await setup_verisure(hass, CONFIG, {"ethernetConnectedNow": True})
assert len(hass.states.async_all()) == 1
entity_id = hass.states.async_entity_ids()[0]
assert hass.states.get(entity_id).state == "on"
|
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import NEW_LIGHT, POWER_PLUGS, SIRENS
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches for deCONZ component.
Switches are based on the same device class as lights in deCONZ.
"""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_switch(lights):
"""Add switch from deCONZ."""
entities = []
for light in lights:
if (
light.type in POWER_PLUGS
and light.uniqueid not in gateway.entities[DOMAIN]
):
entities.append(DeconzPowerPlug(light, gateway))
elif (
light.type in SIRENS and light.uniqueid not in gateway.entities[DOMAIN]
):
entities.append(DeconzSiren(light, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_switch
)
)
async_add_switch(gateway.api.lights.values())
class DeconzPowerPlug(DeconzDevice, SwitchEntity):
"""Representation of a deCONZ power plug."""
TYPE = DOMAIN
@property
def is_on(self):
"""Return true if switch is on."""
return self._device.state
async def async_turn_on(self, **kwargs):
"""Turn on switch."""
data = {"on": True}
await self._device.async_set_state(data)
async def async_turn_off(self, **kwargs):
"""Turn off switch."""
data = {"on": False}
await self._device.async_set_state(data)
class DeconzSiren(DeconzDevice, SwitchEntity):
"""Representation of a deCONZ siren."""
TYPE = DOMAIN
@property
def is_on(self):
"""Return true if switch is on."""
return self._device.alert == "lselect"
async def async_turn_on(self, **kwargs):
"""Turn on switch."""
data = {"alert": "lselect"}
await self._device.async_set_state(data)
async def async_turn_off(self, **kwargs):
"""Turn off switch."""
data = {"alert": "none"}
await self._device.async_set_state(data)
|
from xs1_api_client.api_constants import ActuatorType
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE
from . import ACTUATORS, DOMAIN as COMPONENT_DOMAIN, SENSORS, XS1DeviceEntity
MIN_TEMP = 8
MAX_TEMP = 25
SUPPORT_HVAC = [HVAC_MODE_HEAT]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the XS1 thermostat platform."""
actuators = hass.data[COMPONENT_DOMAIN][ACTUATORS]
sensors = hass.data[COMPONENT_DOMAIN][SENSORS]
thermostat_entities = []
for actuator in actuators:
if actuator.type() == ActuatorType.TEMPERATURE:
# Search for a matching sensor (by name)
actuator_name = actuator.name()
matching_sensor = None
for sensor in sensors:
if actuator_name in sensor.name():
matching_sensor = sensor
break
thermostat_entities.append(XS1ThermostatEntity(actuator, matching_sensor))
add_entities(thermostat_entities)
class XS1ThermostatEntity(XS1DeviceEntity, ClimateEntity):
"""Representation of a XS1 thermostat."""
def __init__(self, device, sensor):
"""Initialize the actuator."""
super().__init__(device)
self.sensor = sensor
@property
def name(self):
"""Return the name of the device if any."""
return self.device.name()
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_TARGET_TEMPERATURE
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
return HVAC_MODE_HEAT
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return SUPPORT_HVAC
@property
def current_temperature(self):
"""Return the current temperature."""
if self.sensor is None:
return None
return self.sensor.value()
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return self.device.unit()
@property
def target_temperature(self):
"""Return the current target temperature."""
return self.device.new_value()
@property
def min_temp(self):
"""Return the minimum temperature."""
return MIN_TEMP
@property
def max_temp(self):
"""Return the maximum temperature."""
return MAX_TEMP
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
self.device.set_value(temp)
if self.sensor is not None:
self.schedule_update_ha_state()
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
async def async_update(self):
"""Also update the sensor when available."""
await super().async_update()
if self.sensor is not None:
await self.hass.async_add_executor_job(self.sensor.update)
|
from __future__ import print_function
import inspect
import traceback
import Foundation
from . import compat
from . import exceptions
def require_string(*objs):
for obj in objs:
if not isinstance(obj, compat.string_types):
raise TypeError(
'a string is required but given {0}, a {1}'.format(obj, type(obj).__name__)
)
def require_string_or_none(*objs):
for obj in objs:
if not(obj is None or isinstance(obj, compat.string_types)):
raise TypeError(
'a string or None is required but given {0}, a {1}'.format(obj, type(obj).__name__)
)
def call_as_function_or_method(func, event):
# The idea here is that when using decorators in a class, the functions passed are not bound so we have to
# determine later if the functions we have (those saved as callbacks) for particular events need to be passed
# 'self'.
#
# This works for an App subclass method or a standalone decorated function. Will attempt to find function as
# a bound method of the App instance. If it is found, use it, otherwise simply call function.
from . import rumps
try:
app = getattr(rumps.App, '*app_instance')
except AttributeError:
pass
else:
for name, method in inspect.getmembers(app, predicate=inspect.ismethod):
if method.__func__ is func:
return method(event)
return func(event)
def guard_unexpected_errors(func):
"""Decorator to be used in PyObjC callbacks where an error bubbling up
would cause a crash. Instead of crashing, print the error to stderr and
    prevent it from reaching the PyObjC layer.
For Python 3, print the exception using chaining. Accomplished by setting
the cause of :exc:`rumps.exceptions.InternalRumpsError` to the exception.
For Python 2, emulate exception chaining by printing the original exception
followed by :exc:`rumps.exceptions.InternalRumpsError`.
"""
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
internal_error = exceptions.InternalRumpsError(
'an unexpected error occurred within an internal callback'
)
if compat.PY2:
import sys
traceback.print_exc()
print('\nThe above exception was the direct cause of the following exception:\n', file=sys.stderr)
traceback.print_exception(exceptions.InternalRumpsError, internal_error, None)
else:
internal_error.__cause__ = e
traceback.print_exception(exceptions.InternalRumpsError, internal_error, None)
return wrapper
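# A minimal usage sketch (hypothetical callback, not part of this module):
#
#     @guard_unexpected_errors
#     def callback_(self, sender):
#         raise ValueError('boom')  # printed to stderr, never crashes PyObjC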
def string_to_objc(x):
if isinstance(x, compat.binary_type):
return Foundation.NSData.alloc().initWithData_(x)
elif isinstance(x, compat.string_types):
return Foundation.NSString.alloc().initWithString_(x)
else:
raise TypeError(
"expected a string or a bytes-like object but provided %s, "
"having type '%s'" % (
x,
type(x).__name__
)
)
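# Hypothetical examples of the conversion above:
#
#     string_to_objc(b'\x00\x01')  # -> NSData
#     string_to_objc('hello')      # -> NSString
#     string_to_objc(42)           # -> TypeError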
|